diff --git a/go.mod b/go.mod index 3931547f3..4ddd4d733 100644 --- a/go.mod +++ b/go.mod @@ -3,21 +3,22 @@ module github.com/openshift-pipelines/pipelines-as-code go 1.19 require ( - code.gitea.io/sdk/gitea v0.15.1 + code.gitea.io/sdk/gitea v0.16.0 github.com/AlecAivazis/survey/v2 v2.3.7 - github.com/bradleyfalzon/ghinstallation/v2 v2.6.0 + github.com/bradleyfalzon/ghinstallation/v2 v2.7.0 github.com/cloudevents/sdk-go/v2 v2.14.0 github.com/fvbommel/sortorder v1.1.0 - github.com/gfleury/go-bitbucket-v1 v0.0.0-20230626192437-8d7be5866751 + github.com/gfleury/go-bitbucket-v1 v0.0.0-20230830121038-6e30c5760c87 github.com/gobwas/glob v0.2.3 - github.com/google/cel-go v0.17.1 - github.com/google/go-cmp v0.5.9 - github.com/google/go-github/scrape v0.0.0-20230803055657-a8da03b06966 - github.com/google/go-github/v53 v53.2.0 + github.com/google/cel-go v0.18.1 + github.com/google/go-cmp v0.6.0 + github.com/google/go-github/scrape v0.0.0-20231014221314-ee3f27345645 + github.com/google/go-github/v55 v55.0.0 + github.com/google/go-github/v56 v56.0.0 github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b github.com/jonboulle/clockwork v0.4.0 github.com/juju/ansiterm v1.0.0 - github.com/ktrysmt/go-bitbucket v0.9.65 + github.com/ktrysmt/go-bitbucket v0.9.70 github.com/mattn/go-colorable v0.1.13 github.com/mattn/go-isatty v0.0.19 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d @@ -25,40 +26,39 @@ require ( github.com/pkg/errors v0.9.1 github.com/spf13/cobra v1.7.0 github.com/stretchr/testify v1.8.4 - github.com/tektoncd/pipeline v0.50.0 - github.com/xanzy/go-gitlab v0.90.0 + github.com/tektoncd/pipeline v0.52.1 + github.com/xanzy/go-gitlab v0.93.1 go.opencensus.io v0.24.0 - go.uber.org/zap v1.25.0 - golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b - golang.org/x/oauth2 v0.11.0 - golang.org/x/sync v0.3.0 + go.uber.org/zap v1.26.0 + golang.org/x/exp v0.0.0-20231006140011-7918f672742d + golang.org/x/oauth2 v0.13.0 + golang.org/x/sync v0.4.0 golang.org/x/text v0.13.0 gopkg.in/yaml.v2 v2.4.0 - gotest.tools/v3 v3.5.0 - k8s.io/api v0.27.4 - k8s.io/apimachinery v0.27.4 + gotest.tools/v3 v3.5.1 + k8s.io/api v0.28.2 + k8s.io/apimachinery v0.28.2 k8s.io/client-go v1.5.2 k8s.io/utils v0.0.0-20230726121419-3b25d923346b - knative.dev/eventing v0.38.0 - knative.dev/pkg v0.0.0-20230718152110-aef227e72ead + knative.dev/eventing v0.38.4 + knative.dev/pkg v0.0.0-20231016142534-0d0cd4e7dbef sigs.k8s.io/yaml v1.3.0 ) require ( - github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230321174746-8dcc6526cfb1 // indirect github.com/cloudflare/circl v1.3.3 // indirect github.com/davidmz/go-pageant v1.0.2 // indirect github.com/go-fed/httpsig v1.1.0 // indirect - github.com/google/gnostic v0.6.9 // indirect + github.com/google/gnostic v0.7.0 // indirect + github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 // indirect github.com/google/pprof v0.0.0-20230111200839-76d1ae5aea2b // indirect - github.com/onsi/ginkgo/v2 v2.9.7 // indirect - github.com/onsi/gomega v1.27.8 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - go.uber.org/atomic v1.10.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 // indirect - k8s.io/kube-openapi v0.0.0-20230718181711-3c0fae5ee9fd // indirect + go.uber.org/atomic v1.11.0 
// indirect + google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a // indirect + k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 // indirect ) replace ( @@ -77,7 +77,6 @@ require ( contrib.go.opencensus.io/exporter/zipkin v0.1.2 // indirect github.com/PuerkitoBio/goquery v1.8.1 // indirect github.com/andybalholm/cascadia v1.3.2 // indirect - github.com/benbjohnson/clock v1.3.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/blang/semver/v4 v4.0.0 // indirect github.com/blendle/zapdriver v1.3.1 // indirect @@ -85,9 +84,9 @@ require ( github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/cloudevents/sdk-go/observability/opencensus/v2 v2.14.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/emicklei/go-restful/v3 v3.10.2 // indirect - github.com/evanphx/json-patch v5.6.0+incompatible // indirect - github.com/evanphx/json-patch/v5 v5.6.0 // indirect + github.com/emicklei/go-restful/v3 v3.11.0 // indirect + github.com/evanphx/json-patch v5.7.0+incompatible // indirect + github.com/evanphx/json-patch/v5 v5.7.0 // indirect github.com/go-kit/log v0.2.1 // indirect github.com/go-logfmt/logfmt v0.6.0 // indirect github.com/go-logr/logr v1.2.4 // indirect @@ -101,14 +100,14 @@ require ( github.com/google/go-containerregistry v0.16.1 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/gofuzz v1.2.0 // indirect - github.com/google/uuid v1.3.0 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.2 // indirect + github.com/google/uuid v1.3.1 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-retryablehttp v0.7.4 // indirect github.com/hashicorp/go-version v1.6.0 // indirect - github.com/hashicorp/golang-lru v0.6.0 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/imdario/mergo v0.3.16 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/josharian/intern v1.0.0 // indirect @@ -122,11 +121,11 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/openzipkin/zipkin-go v0.4.1 // indirect - github.com/prometheus/client_golang v1.16.0 // indirect - github.com/prometheus/client_model v0.4.0 // indirect + github.com/openzipkin/zipkin-go v0.4.2 // indirect + github.com/prometheus/client_golang v1.17.0 // indirect + github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.44.0 // indirect - github.com/prometheus/procfs v0.11.1 // indirect + github.com/prometheus/procfs v0.12.0 // indirect github.com/prometheus/statsd_exporter v0.24.0 // indirect github.com/rickb777/date v1.20.2 // indirect github.com/spf13/pflag v1.0.5 // indirect @@ -139,15 +138,14 @@ require ( golang.org/x/sys v0.13.0 // indirect golang.org/x/term v0.13.0 // indirect golang.org/x/time v0.3.0 // indirect - gomodules.xyz/jsonpatch/v2 v2.3.0 // indirect - google.golang.org/api v0.134.0 // indirect - google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 // indirect - google.golang.org/grpc v1.57.0 // indirect + gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect + 
google.golang.org/api v0.147.0 // indirect + google.golang.org/appengine v1.6.8 // indirect + google.golang.org/grpc v1.58.3 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v3 v3.0.1 - k8s.io/apiextensions-apiserver v0.27.4 // indirect + k8s.io/apiextensions-apiserver v0.28.2 // indirect k8s.io/klog/v2 v2.100.1 sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect sigs.k8s.io/structured-merge-diff/v4 v4.3.0 // indirect diff --git a/go.sum b/go.sum index c83077f5d..3ca5f0162 100644 --- a/go.sum +++ b/go.sum @@ -614,8 +614,8 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63n github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8/go.mod h1:I0gYDMZ6Z5GRU7l58bNFSkPTFN6Yl12dsUlAZ8xy98g= -github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 h1:KLq8BE0KwCL+mmXnjLWEAOYO+2l2AE4YMmqG1ZpZHBs= -github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM= github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= @@ -641,8 +641,6 @@ github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230321174746-8dcc6526cfb1/g github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= -github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= -github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= @@ -653,8 +651,8 @@ github.com/blendle/zapdriver v1.3.1 h1:C3dydBOWYRiOk+B8X9IVZ5IOe+7cl+tGOexN4QqHf github.com/blendle/zapdriver v1.3.1/go.mod h1:mdXfREi6u5MArG4j9fewC+FGnXaBR+T4Ox4J2u4eHCc= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= -github.com/bradleyfalzon/ghinstallation/v2 v2.6.0 h1:IRY7Xy588KylkoycsUhFpW7cdGpy5Y5BPsz4IfuJtGk= -github.com/bradleyfalzon/ghinstallation/v2 v2.6.0/go.mod h1:oQ3etOwN3TRH4EwgW5/7MxSVMGlMlzG/O8TU7eYdoSk= +github.com/bradleyfalzon/ghinstallation/v2 v2.7.0 h1:ranXaC3Zz/F6G/f0Joj3LrFp2OzOKfJZev5Q7OaMc88= +github.com/bradleyfalzon/ghinstallation/v2 v2.7.0/go.mod h1:ymxfmloxXBFXvvF1KpeUhOQM6Dfz9NYtfvTiJyk82UE= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/bwesterb/go-ristretto v1.2.0/go.mod 
h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= @@ -706,8 +704,8 @@ github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25Kn github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= -github.com/emicklei/go-restful/v3 v3.10.2 h1:hIovbnmBTLjHXkqEBUz3HGpXZdM7ZrE9fJIZIqlJLqE= -github.com/emicklei/go-restful/v3 v3.10.2/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= +github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -724,18 +722,20 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/envoyproxy/protoc-gen-validate v0.6.7/go.mod h1:dyJXwwfPK2VSqiB9Klm1J6romD608Ba7Hij42vrOBCo= github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= github.com/envoyproxy/protoc-gen-validate v0.10.0/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= -github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U= -github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= -github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/evanphx/json-patch v5.7.0+incompatible h1:vgGkfT/9f8zE6tvSCe74nfpAVDQ2tG6yudJd8LBksgI= +github.com/evanphx/json-patch v5.7.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/evanphx/json-patch/v5 v5.7.0 h1:nJqP7uwL84RJInrohHfW0Fx3awjbm8qZeFv0nW9SYGc= +github.com/evanphx/json-patch/v5 v5.7.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ= github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fvbommel/sortorder v1.1.0 h1:fUmoe+HLsBTctBDoaBwpQo5N+nrCp8g/BjKb/6ZQmYw= github.com/fvbommel/sortorder v1.1.0/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= -github.com/gfleury/go-bitbucket-v1 v0.0.0-20230626192437-8d7be5866751 h1:Ea58sAu8LX/NS7z3aeL+YX/7+FSd9jsxq6Ecpz7QEDM= -github.com/gfleury/go-bitbucket-v1 v0.0.0-20230626192437-8d7be5866751/go.mod h1:IqOZzks2wlWCIai0esXnZPdPwxF2yOz0HcCYw5I4pCg= +github.com/gfleury/go-bitbucket-v1 v0.0.0-20230830121038-6e30c5760c87 h1:aix/N0cwFcyaYksDTa96rcila54zTQ6Nk6yENtgO7yM= +github.com/gfleury/go-bitbucket-v1 v0.0.0-20230830121038-6e30c5760c87/go.mod h1:6saoZ1uJyRD/w4t5Djj8mik5W9cLjSKvba6ZaryV+98= 
+github.com/gfleury/go-bitbucket-v1/test/bb-mock-server v0.0.0-20230825095122-9bc1711434ab h1:BeG9dDWckFi/p5Gvqq3wTEDXsUV4G6bdvjEHMOT2B8E= +github.com/gfleury/go-bitbucket-v1/test/bb-mock-server v0.0.0-20230825095122-9bc1711434ab/go.mod h1:VssB0kb1cETNaFFC/0mHVCj+7i5TS2xraYq+tl9JLwE= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI= github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM= @@ -829,11 +829,13 @@ github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/cel-go v0.17.1 h1:s2151PDGy/eqpCI80/8dl4VL3xTkqI/YubXLXCFw0mw= -github.com/google/cel-go v0.17.1/go.mod h1:HXZKzB0LXqer5lHHgfWAnlYwJaQBDKMjxjulNQzhwhY= +github.com/google/cel-go v0.18.1 h1:V/lAXKq4C3BYLDy/ARzMtpkEEYfHQpZzVyzy69nEUjs= +github.com/google/cel-go v0.18.1/go.mod h1:PVAybmSnWkNMUZR/tEWFUiJ1Np4Hz0MHsZJcgC4zln4= github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= -github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= +github.com/google/gnostic v0.7.0 h1:d7EpuFp8vVdML+y0JJJYiKeOLjKTdH/GvVkLOBWqJpw= +github.com/google/gnostic v0.7.0/go.mod h1:IAcUyMl6vtC95f60EZ8oXyqTsOersP6HbwjeG7EyDPM= +github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 h1:0VpGH+cDhbDtdcweoyCVsF3fhN8kejK6rFe/2FFX2nU= +github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49/go.mod h1:BkkQ4L1KS1xMt2aWSPStnn55ChGC0DPOn2FQYj+f25M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -848,14 +850,17 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.16.1 h1:rUEt426sR6nyrL3gt+18ibRcvYpKYdpsa5ZW7MA08dQ= github.com/google/go-containerregistry v0.16.1/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= -github.com/google/go-github/scrape v0.0.0-20230803055657-a8da03b06966 h1:xz6qLEYNjpRvC00fhZJQ97lCy1cJCQhALZ1U9VwgRaw= -github.com/google/go-github/scrape v0.0.0-20230803055657-a8da03b06966/go.mod h1:fMD+SYGWAMrsN+WEABV2m0aEuJU3tbBhpttVdFNaXiM= -github.com/google/go-github/v53 v53.2.0 h1:wvz3FyF53v4BK+AsnvCmeNhf8AkTaeh2SoYu/XUvTtI= -github.com/google/go-github/v53 v53.2.0/go.mod h1:XhFRObz+m/l+UCm9b7KSIC3lT3NWSXGt7mOsAWEloao= 
+github.com/google/go-github/scrape v0.0.0-20231014221314-ee3f27345645 h1:djSDSIo7F0M2PIghBz5wKxk4aSQJoEaBHBBOp0fpnAc= +github.com/google/go-github/scrape v0.0.0-20231014221314-ee3f27345645/go.mod h1:ZRsIk9WAtYGxsDSL/qsu28437SxJyJlWlCYqVBPnsXw= +github.com/google/go-github/v55 v55.0.0 h1:4pp/1tNMB9X/LuAhs5i0KQAE40NmiR/y6prLNb9x9cg= +github.com/google/go-github/v55 v55.0.0/go.mod h1:JLahOTA1DnXzhxEymmFF5PP2tSS9JVNj68mSZNDwskA= +github.com/google/go-github/v56 v56.0.0 h1:TysL7dMa/r7wsQi44BjqlwaHvwlFlqkK8CtBWCX3gb4= +github.com/google/go-github/v56 v56.0.0/go.mod h1:D8cdcX98YWJvi7TLo7zM4/h8ZTx6u6fwGEkCdisopo0= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -888,8 +893,9 @@ github.com/google/s2a-go v0.1.0/go.mod h1:OJpEgntRZo8ugHpF9hkoLJbS5dSI20XZeXJ9JV github.com/google/s2a-go v0.1.3/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= +github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= @@ -913,12 +919,14 @@ github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+ github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/grpc-ecosystem/grpc-gateway v1.14.6/go.mod h1:zdiPV4Yse/1gnckTHtghG4GkDEdKCRJduHpTxT3/jcw= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks= github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.2 h1:dygLcbEBA+t/P7ck6a8AkXv6juQ4cK0RHBoh32jxhHM= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.2/go.mod h1:Ap9RLCIJVtgQg1/BBgVEfypOAySvvlcpcVQkSzJCH4Y= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 h1:RtRsiaGvWxcwd8y3BiRZxsylPT8hLWZ5SPcfI+3IDNk= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0/go.mod h1:TzP6duP4Py2pHLVPPQp42aoYI92+PCrVotyR5e8Vqlk= github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b h1:wDUNC2eKiL35DbLvsDhiblTUXHxcOPwQSCzi7xpQUN4= github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b/go.mod 
h1:VzxiSdG6j1pi7rwGm/xYI5RbtpBgM8sARDXlvEvxlu0= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -937,8 +945,8 @@ github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mO github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.6.0 h1:uL2shRDx7RTrOrTCUZEGP/wJUFiUI8QT6E7z5o8jga4= -github.com/hashicorp/golang-lru v0.6.0/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog= github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= @@ -949,7 +957,6 @@ github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -984,7 +991,6 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxv github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -992,8 +998,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/ktrysmt/go-bitbucket v0.9.65 h1:DO+yjAzJSjCsLSJGpdMBD6GPkyv0G/akOJUNZ9a3ug8= -github.com/ktrysmt/go-bitbucket v0.9.65/go.mod h1:b1ij1QwyttnZNT2Fzp8MvPNbSeg652OBVwtGqs2J1zo= +github.com/ktrysmt/go-bitbucket v0.9.70 h1:SWDKD9kKTtWZrVaOMWuaOuYmXGA/lhPRNI8koiKyBFQ= +github.com/ktrysmt/go-bitbucket v0.9.70/go.mod h1:lFsqcfZbrJ+P+h7jggwcrwB2O9kbgLTWTwJoyMI2qAc= github.com/lightstep/tracecontext.go v0.0.0-20181129014701-1757c391b1ac h1:+2b6iGRJe3hvV/yVXrd41yVEjxuFHxasJqDhkIjS4gk= github.com/lunixbochs/vtclean v1.0.0 h1:xu2sLAri4lGiovBDQKxl5mrXyESr3gUr5m5SM5+LVb8= 
github.com/lunixbochs/vtclean v1.0.0/go.mod h1:pHhQNgMf3btfWnGBVipUOjRYhoOsdGqdm/+2c2E2WMI= @@ -1039,16 +1045,15 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo/v2 v2.9.7 h1:06xGQy5www2oN160RtEZoTvnP2sPhEfePYmCDc2szss= -github.com/onsi/ginkgo/v2 v2.9.7/go.mod h1:cxrmXWykAwTwhQsJOPfdIDiJ+l2RYq7U8hFU+M/1uw0= +github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M= +github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.27.8 h1:gegWiwZjBsf2DgiSbf5hpokZ98JVDMcWkUiigk6/KXc= -github.com/onsi/gomega v1.27.8/go.mod h1:2J8vzI/s+2shY9XHRApDkdgPo1TKT7P2u6fXeJKFnNQ= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= -github.com/openzipkin/zipkin-go v0.4.1 h1:kNd/ST2yLLWhaWrkgchya40TJabe8Hioj9udfPcEO5A= -github.com/openzipkin/zipkin-go v0.4.1/go.mod h1:qY0VqDSN1pOBN94dBc6w2GJlWLiovAyg7Qt6/I9HecM= +github.com/openzipkin/zipkin-go v0.4.2 h1:zjqfqHjUpPmB3c1GlCvvgsM1G4LkvqQbBDueDOCg/jA= +github.com/openzipkin/zipkin-go v0.4.2/go.mod h1:ZeVkFjuuBiSy13y8vpSDCjMi9GoI3hPpCJSBx/EYFhY= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -1072,15 +1077,15 @@ github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqr github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_golang v1.12.2/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_golang v1.13.0/go.mod h1:vTeo+zgvILHsnnj/39Ou/1fPN5nJFOEMgftOUOmlvYQ= -github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8= -github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= 
-github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= @@ -1095,8 +1100,8 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= -github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI= -github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/prometheus/statsd_exporter v0.22.7/go.mod h1:N/TevpjkIh9ccs6nuzY3jQn9dFqnUakOjnEuMPJJJnI= github.com/prometheus/statsd_exporter v0.24.0 h1:aZmN6CzS2H1Non1JKZdjkQlAkDtGoQBYIESk2SlU1OI= github.com/prometheus/statsd_exporter v0.24.0/go.mod h1:+dQiRTqn9DnPmN5mI5Xond+k8nuRKzdgh1omxh9OgFY= @@ -1145,11 +1150,11 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stvp/go-udp-testing v0.0.0-20201019212854-469649b16807/go.mod h1:7jxmlfBCDBXRzr0eAQJ48XC1hBu1np4CS5+cHEYfwpc= -github.com/tektoncd/pipeline v0.50.0 h1:bH04XChFOYCwr7Gm6f2NiDkieHUiGohKCGsLe0fxYf0= -github.com/tektoncd/pipeline v0.50.0/go.mod h1:OjhCfhPQbVvK6GUmIseL2ipjaQ8ILcUerMk4P4sCcHA= +github.com/tektoncd/pipeline v0.52.1 h1:YN9S/4hRutKQeG5na03V7et8B3hoEUCoWoABU8t8P3c= +github.com/tektoncd/pipeline v0.52.1/go.mod h1:27wryvb8VIPIZ2u5evUphOPgM8Z7hGgzp00E8/3DCls= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= -github.com/xanzy/go-gitlab v0.90.0 h1:j8ZUHfLfXdnC+B8njeNaW/kM44c1zw8fiuNj7D+qQN8= -github.com/xanzy/go-gitlab v0.90.0/go.mod h1:5ryv+MnpZStBH8I/77HuQBsMbBGANtVpLWC15qOjWAw= +github.com/xanzy/go-gitlab v0.93.1 h1:f7J33cw/P9b/8paIOoH0F3H+TFrswvWHs6yUgoTp9LY= +github.com/xanzy/go-gitlab v0.93.1/go.mod h1:5ryv+MnpZStBH8I/77HuQBsMbBGANtVpLWC15qOjWAw= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= @@ -1177,8 +1182,8 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.10.0 
h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ= -go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= +go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= +go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= @@ -1186,8 +1191,8 @@ go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/ go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.25.0 h1:4Hvk6GtkucQ790dqmj7l1eEnRdKm3k3ZUrUMS2d5+5c= -go.uber.org/zap v1.25.0/go.mod h1:JIAUzQIH94IC4fOJQm7gMmBJP5k7wQfdcnYdPoEXJYk= +go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= +go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1205,7 +1210,6 @@ golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= -golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= @@ -1224,8 +1228,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220827204233-334a2380cb91/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= -golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b h1:r+vk0EmXNmekl0S0BascoeeoHk/L7wmaW2QF90K+kYI= -golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= @@ -1268,7 +1272,7 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.7.0/go.mod 
h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= +golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1336,8 +1340,7 @@ golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= -golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1370,9 +1373,8 @@ golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE= -golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI= -golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= -golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= +golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY= +golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1390,8 +1392,8 @@ golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1483,7 +1485,6 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1497,7 +1498,6 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= @@ -1517,7 +1517,6 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= @@ -1591,7 +1590,7 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= -golang.org/x/tools v0.9.1 h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo= +golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1600,8 +1599,8 @@ golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -gomodules.xyz/jsonpatch/v2 v2.3.0 h1:8NFhfS6gzxNqjLIYnZxg319wZ5Qjnx4m/CcX+Klzazc= -gomodules.xyz/jsonpatch/v2 v2.3.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= +gomodules.xyz/jsonpatch/v2 v2.4.0 h1:Ci3iUJyx9UeRx7CeFN8ARgGbkESwJK+KB9lLcWxY/Zw= 
+gomodules.xyz/jsonpatch/v2 v2.4.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo= gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0= gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0= @@ -1672,16 +1671,17 @@ google.golang.org/api v0.118.0/go.mod h1:76TtD3vkgmZ66zZzp72bUUklpmQmKlhh6sYtIjY google.golang.org/api v0.122.0/go.mod h1:gcitW0lvnyWjSp9nKxAbdHKIZ6vF4aajGueeslZOyms= google.golang.org/api v0.124.0/go.mod h1:xu2HQurE5gi/3t1aFCvhPD781p0a3p11sdunTJ2BlP4= google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= -google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw= -google.golang.org/api v0.134.0/go.mod h1:sjRL3UnjTx5UqNQS9EWr9N8p7xbHpy1k0XGRLCf3Spk= +google.golang.org/api v0.147.0 h1:Can3FaQo9LlVqxJCodNmeZW/ib3/qKAY3rFeXiHo5gc= +google.golang.org/api v0.147.0/go.mod h1:pQ/9j83DcmPd/5C9e2nFOdjjNkDZ1G+zkbK2uvdkJMs= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1746,7 +1746,6 @@ google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= @@ -1818,22 +1817,22 @@ google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOl google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod 
h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/genproto v0.0.0-20230525234025-438c736192d0/go.mod h1:9ExIQyXL5hZrHzQceCwuSYwZZ5QZBazOcprJ5rgs3lY= +google.golang.org/genproto v0.0.0-20230526161137-0005af68ea54/go.mod h1:zqTuNwFlFRsw5zIts5VnzLQxSRqh+CGOTVMlYbY0Eyk= google.golang.org/genproto v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:xZnkP7mREFX5MORlOPEzLMr+90PPZQ2QWzrVTWfAq64= -google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 h1:L6iMMGrtzgHsWofoFcihmDEMYeDR9KN/ThbPWGrh++g= -google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5/go.mod h1:oH/ZOT02u4kWEp7oYBGYFFkCdKS/uYR9Z7+0/xuuFp8= +google.golang.org/genproto v0.0.0-20231002182017-d307bd883b97 h1:SeZZZx0cP0fqUyA+oRzP9k7cSwJlvDFiROO72uwD6i0= google.golang.org/genproto/googleapis/api v0.0.0-20230525234020-1aefcd67740a/go.mod h1:ts19tUU+Z0ZShN1y3aPyq2+O3d5FUNNgT6FtOzmrNn8= google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= -google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 h1:nIgk/EEq3/YlnmVVXVnm14rC2oxgs1o0ong4sD/rd44= -google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5/go.mod h1:5DZzOUPCLYL3mNkQ0ms0F3EuUNZ7py1Bqeq6sxzI7/Q= +google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a h1:myvhA4is3vrit1a6NZCWBIwN0kNEnX21DJOJX/NvIfI= +google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:SUBoKXbI1Efip18FClrQVGjWcyd0QZd8KkvdP34t7ww= google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:ylj+BE99M198VPbBh6A8d9n3w8fChvyLK3wwBOjXBFA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234015-3fc162c6f38a/go.mod h1:xURIpW9ES5+/GZhnV6beoEtxQrnkRGIfP5VQG2tCBLc= google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 h1:eSaPbMR4T7WfH9FvABk36NBMacoTUKdWCvV0dx+KfOg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5/go.mod h1:zBEcrKX2ZOcEkHWxBPAIvYUWOKKMIhYcmNiUIu2ji3I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a h1:a2MQQVoTo96JC9PMGtGBymLp7+/RzpFc2yX/9WfFg1c= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a/go.mod h1:4cYg8o5yUbm77w8ZX00LhMVNl/YVBFJRYWDc0uYWMs0= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1876,8 +1875,8 @@ google.golang.org/grpc v1.52.0/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc v1.55.0/go.mod 
h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8= -google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= -google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= +google.golang.org/grpc v1.58.3 h1:BjnpXut1btbtgN/6sp+brB2Kbm2LjNXnidYujAVbSoQ= +google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1919,11 +1918,11 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.5.0 h1:Ljk6PdHdOhAb5aDMWXjDLMMhph+BpztA4v1QdqEW2eY= -gotest.tools/v3 v3.5.0/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= +gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= +gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -1934,8 +1933,8 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las= k8s.io/api v0.25.9 h1:XuJ2bz2F52jZmp3YjUcp/pozH8kY1BlBHdXnoOXBP3U= k8s.io/api v0.25.9/go.mod h1:9YRWzD0cRHzfsnf9e5OQsQ4Un6cbZ//Xv3jo44YKm2Y= -k8s.io/apiextensions-apiserver v0.27.4 h1:ie1yZG4nY/wvFMIR2hXBeSVq+HfNzib60FjnBYtPGSs= -k8s.io/apiextensions-apiserver v0.27.4/go.mod h1:KHZaDr5H9IbGEnSskEUp/DsdXe1hMQ7uzpQcYUFt2bM= +k8s.io/apiextensions-apiserver v0.28.2 h1:J6/QRWIKV2/HwBhHRVITMLYoypCoPY1ftigDM0Kn+QU= +k8s.io/apiextensions-apiserver v0.28.2/go.mod h1:5tnkxLGa9nefefYzWuAlWZ7RZYuN/765Au8cWLA6SRg= k8s.io/apimachinery v0.26.5 h1:hTQVhJao2piX7vSgCn4Lwd6E0o/+TJIH4NqRf+q4EmE= k8s.io/apimachinery v0.26.5/go.mod h1:HUvk6wrOP4v22AIYqeCGSQ6xWCHo41J9d6psb3temAg= k8s.io/client-go v0.25.9 h1:U0S3nc71NRfHXiA0utyCkPt3Mv1SWpQw0g5VfBCv5xg= @@ -1946,8 +1945,8 @@ k8s.io/kube-openapi v0.0.0-20230515203736-54b630e78af5 h1:azYPdzztXxPSa8wb+hksEK k8s.io/kube-openapi v0.0.0-20230515203736-54b630e78af5/go.mod h1:kzo02I3kQ4BTtEfVLaPbjvCkX97YqGve33wzlb3fofQ= k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -knative.dev/eventing v0.38.0 h1:n6/k9IJ1kOvpZx4CMLqa1FG7g2iBiyKXwBu1Fy/81q4= -knative.dev/eventing v0.38.0/go.mod 
h1:JUqEC0zoyfYqhRHFz8VUxjkxH9G1cQ/Y+UvhXTxUXgI= +knative.dev/eventing v0.38.4 h1:eH059bfeLilj2xAN6V7XXOh3wqzz5ssoMS/CIJpJfmk= +knative.dev/eventing v0.38.4/go.mod h1:ct8t+v6nmp1kFCy6ngkDWIEvnjJDNDoKptrfnQVh+z8= knative.dev/pkg v0.0.0-20230418073056-dfad48eaa5d0 h1:EFQcoUo8I4bc+U3y6tR1B3ONYZSHWUdAfI7Vh7dae8g= knative.dev/pkg v0.0.0-20230418073056-dfad48eaa5d0/go.mod h1:2qWPP9Gjh9Q7ETti+WRHnBnGCSCq+6q7m3p/nmUQviE= lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= diff --git a/pkg/adapter/adapter_test.go b/pkg/adapter/adapter_test.go index 43e261d19..790425d18 100644 --- a/pkg/adapter/adapter_test.go +++ b/pkg/adapter/adapter_test.go @@ -8,7 +8,7 @@ import ( "net/http/httptest" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/clients" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" diff --git a/pkg/cli/status/status.go b/pkg/cli/status/status.go index 55684ab02..cfdd93421 100644 --- a/pkg/cli/status/status.go +++ b/pkg/cli/status/status.go @@ -4,7 +4,7 @@ import ( "context" "regexp" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" pacv1alpha1 "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/kubeinteraction" diff --git a/pkg/cli/webhook/github.go b/pkg/cli/webhook/github.go index d88fbbd4e..e26b3c9df 100644 --- a/pkg/cli/webhook/github.go +++ b/pkg/cli/webhook/github.go @@ -8,7 +8,7 @@ import ( "strings" "github.com/AlecAivazis/survey/v2" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/cli" "github.com/openshift-pipelines/pipelines-as-code/pkg/cli/prompt" @@ -180,7 +180,7 @@ func (gh *gitHubConfig) newGHClientByToken(ctx context.Context) (*github.Client, return github.NewClient(oauth2.NewClient(ctx, ts)), nil } - gprovider, err := github.NewEnterpriseClient(gh.APIURL, "", oauth2.NewClient(ctx, ts)) + gprovider, err := github.NewClient(oauth2.NewClient(ctx, ts)).WithEnterpriseURLs(gh.APIURL, gh.APIURL) if err != nil { return nil, err } diff --git a/pkg/cmd/tknpac/bootstrap/github.go b/pkg/cmd/tknpac/bootstrap/github.go index cef2555a9..16eaff2e1 100644 --- a/pkg/cmd/tknpac/bootstrap/github.go +++ b/pkg/cmd/tknpac/bootstrap/github.go @@ -5,7 +5,7 @@ import ( "fmt" "github.com/google/go-github/scrape" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v56/github" ) // generateManifest generate manifest from the given options @@ -43,7 +43,7 @@ func getGHClient(opts *bootstrapOpts) (*github.Client, error) { return github.NewClient(nil), nil } - gprovider, err := github.NewEnterpriseClient(opts.GithubAPIURL, "", nil) + gprovider, err := github.NewClient(nil).WithEnterpriseURLs(opts.GithubAPIURL, "") if err != nil { return nil, err } diff --git a/pkg/cmd/tknpac/bootstrap/install.go b/pkg/cmd/tknpac/bootstrap/install.go index aff09c537..ba42c9895 100644 --- a/pkg/cmd/tknpac/bootstrap/install.go +++ b/pkg/cmd/tknpac/bootstrap/install.go @@ -10,7 +10,7 @@ import ( "strings" "github.com/AlecAivazis/survey/v2" - "github.com/google/go-github/v53/github" + 
"github.com/google/go-github/v55/github" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "github.com/openshift-pipelines/pipelines-as-code/pkg/cli/prompt" diff --git a/pkg/cmd/tknpac/bootstrap/kubestuff.go b/pkg/cmd/tknpac/bootstrap/kubestuff.go index b5ce51edf..2c831cd89 100644 --- a/pkg/cmd/tknpac/bootstrap/kubestuff.go +++ b/pkg/cmd/tknpac/bootstrap/kubestuff.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v56/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" diff --git a/pkg/cmd/tknpac/describe/describe_test.go b/pkg/cmd/tknpac/describe/describe_test.go index baae12758..7b251539e 100644 --- a/pkg/cmd/tknpac/describe/describe_test.go +++ b/pkg/cmd/tknpac/describe/describe_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" diff --git a/pkg/cmd/tknpac/info/install.go b/pkg/cmd/tknpac/info/install.go index 8a63062db..0f69f37b1 100644 --- a/pkg/cmd/tknpac/info/install.go +++ b/pkg/cmd/tknpac/info/install.go @@ -10,7 +10,7 @@ import ( _ "embed" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/juju/ansiterm" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/cli" diff --git a/pkg/cmd/tknpac/list/list_test.go b/pkg/cmd/tknpac/list/list_test.go index a4572e99f..a046d7e26 100644 --- a/pkg/cmd/tknpac/list/list_test.go +++ b/pkg/cmd/tknpac/list/list_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" pacv1alpha1 "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" diff --git a/pkg/customparams/customparams_test.go b/pkg/customparams/customparams_test.go index 9f8d45dce..376bdc9c7 100644 --- a/pkg/customparams/customparams_test.go +++ b/pkg/customparams/customparams_test.go @@ -3,7 +3,7 @@ package customparams import ( "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/events" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/pkg/formatting/repository_test.go b/pkg/formatting/repository_test.go index 154d50257..f4b9919f5 100644 --- a/pkg/formatting/repository_test.go +++ b/pkg/formatting/repository_test.go @@ -4,7 +4,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/cli" diff --git a/pkg/kubeinteraction/events.go b/pkg/kubeinteraction/events.go index 9a79d0af1..8c1b86106 100644 --- a/pkg/kubeinteraction/events.go +++ b/pkg/kubeinteraction/events.go @@ -3,7 +3,7 @@ package kubeinteraction import ( "context" - "github.com/google/go-github/v53/github" + 
"github.com/google/go-github/v55/github" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) diff --git a/pkg/kubeinteraction/pod_logs.go b/pkg/kubeinteraction/pod_logs.go index 88ea837af..846f1cac9 100644 --- a/pkg/kubeinteraction/pod_logs.go +++ b/pkg/kubeinteraction/pod_logs.go @@ -4,7 +4,7 @@ import ( "context" "io" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" corev1 "k8s.io/api/core/v1" ) diff --git a/pkg/matcher/annotation_matcher_test.go b/pkg/matcher/annotation_matcher_test.go index e578fe946..913cd5580 100644 --- a/pkg/matcher/annotation_matcher_test.go +++ b/pkg/matcher/annotation_matcher_test.go @@ -10,7 +10,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" @@ -734,17 +734,7 @@ func TestMatchPipelinerunAnnotationAndRepositories(t *testing.T) { }) } else { for _, v := range tt.args.fileChanged { - commitFiles.Changes = append(commitFiles.Changes, - struct { - OldPath string `json:"old_path"` - NewPath string `json:"new_path"` - AMode string `json:"a_mode"` - BMode string `json:"b_mode"` - Diff string `json:"diff"` - NewFile bool `json:"new_file"` - RenamedFile bool `json:"renamed_file"` - DeletedFile bool `json:"deleted_file"` - }{NewPath: v}) + commitFiles.Changes = append(commitFiles.Changes, &gitlab.MergeRequestDiff{NewPath: v}) } url := fmt.Sprintf("/projects/0/merge_requests/%d/changes", tt.args.runevent.PullRequestNumber) glMux.HandleFunc(url, func(w http.ResponseWriter, r *http.Request) { diff --git a/pkg/pipelineascode/match_test.go b/pkg/pipelineascode/match_test.go index f4201d48a..c1d21f247 100644 --- a/pkg/pipelineascode/match_test.go +++ b/pkg/pipelineascode/match_test.go @@ -4,7 +4,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" apipac "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/consoleui" diff --git a/pkg/pipelineascode/pipelineascode_test.go b/pkg/pipelineascode/pipelineascode_test.go index 74941df6c..01710ec1a 100644 --- a/pkg/pipelineascode/pipelineascode_test.go +++ b/pkg/pipelineascode/pipelineascode_test.go @@ -13,7 +13,7 @@ import ( "sync" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" apipac "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" diff --git a/pkg/pipelineascode/template_test.go b/pkg/pipelineascode/template_test.go index d3a0193b2..5213ceb19 100644 --- a/pkg/pipelineascode/template_test.go +++ b/pkg/pipelineascode/template_test.go @@ -4,7 +4,7 @@ import ( "fmt" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/events" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/pkg/provider/bitbucketserver/acl.go 
b/pkg/provider/bitbucketserver/acl.go index 128d37788..41452f7b0 100644 --- a/pkg/provider/bitbucketserver/acl.go +++ b/pkg/provider/bitbucketserver/acl.go @@ -44,7 +44,7 @@ func (v *Provider) checkOkToTestCommentFromApprovedMember(event *info.Event) (bo if nextPage > 0 { localVarOptionals["start"] = int(nextPage) } - return v.Client.DefaultApi.GetActivities(v.projectKey, event.Repository, int64(v.pullRequestNumber), localVarOptionals) + return v.Client.DefaultApi.GetActivities(v.projectKey, event.Repository, v.pullRequestNumber, localVarOptionals) }) if err != nil { return false, err diff --git a/pkg/provider/bitbucketserver/bitbucketserver.go b/pkg/provider/bitbucketserver/bitbucketserver.go index b977a6b17..2bb9123a6 100644 --- a/pkg/provider/bitbucketserver/bitbucketserver.go +++ b/pkg/provider/bitbucketserver/bitbucketserver.go @@ -7,7 +7,7 @@ import ( "strings" bbv1 "github.com/gfleury/go-bitbucket-v1" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/mitchellh/mapstructure" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/events" @@ -155,7 +155,7 @@ func (v *Provider) getRaw(runevent *info.Event, revision, path string) (string, localVarOptionals := map[string]interface{}{ "at": revision, } - resp, err := v.Client.DefaultApi.GetContent_11(v.projectKey, runevent.Repository, path, localVarOptionals) + resp, err := v.Client.DefaultApi.GetRawContent(v.projectKey, runevent.Repository, path, localVarOptionals) if err != nil { return "", err } diff --git a/pkg/provider/github/acl.go b/pkg/provider/github/acl.go index eb3add369..911448cc3 100644 --- a/pkg/provider/github/acl.go +++ b/pkg/provider/github/acl.go @@ -6,7 +6,7 @@ import ( "net/http" "strings" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/acl" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" "github.com/openshift-pipelines/pipelines-as-code/pkg/policy" diff --git a/pkg/provider/github/acl_test.go b/pkg/provider/github/acl_test.go index 60275bff0..dc0e87e29 100644 --- a/pkg/provider/github/acl_test.go +++ b/pkg/provider/github/acl_test.go @@ -7,7 +7,7 @@ import ( "net/http" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/settings" diff --git a/pkg/provider/github/app/token.go b/pkg/provider/github/app/token.go index cf3416eb4..740678d9e 100644 --- a/pkg/provider/github/app/token.go +++ b/pkg/provider/github/app/token.go @@ -10,7 +10,7 @@ import ( "time" "github.com/golang-jwt/jwt/v4" - gt "github.com/google/go-github/v53/github" + gt "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/pkg/provider/github/detect.go b/pkg/provider/github/detect.go index ffd431241..cd37ab201 100644 --- a/pkg/provider/github/detect.go +++ b/pkg/provider/github/detect.go @@ -5,7 +5,7 @@ import ( "fmt" "net/http" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" 
"github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" "github.com/openshift-pipelines/pipelines-as-code/pkg/provider" "go.uber.org/zap" diff --git a/pkg/provider/github/detect_test.go b/pkg/provider/github/detect_test.go index de78d266c..43721cdfd 100644 --- a/pkg/provider/github/detect_test.go +++ b/pkg/provider/github/detect_test.go @@ -6,7 +6,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/test/logger" "gotest.tools/v3/assert" ) diff --git a/pkg/provider/github/github.go b/pkg/provider/github/github.go index f7a7bc705..1e5100f98 100644 --- a/pkg/provider/github/github.go +++ b/pkg/provider/github/github.go @@ -11,7 +11,7 @@ import ( "sync" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" @@ -199,7 +199,8 @@ func makeClient(ctx context.Context, apiURL, token string) (*github.Client, stri providerName := "github" if apiURL != "" && apiURL != apiPublicURL { providerName = "github-enterprise" - client, _ = github.NewEnterpriseClient(apiURL, apiURL, tc) + uploadURL := apiURL + "/api/uploads" + client, _ = github.NewClient(tc).WithEnterpriseURLs(apiURL, uploadURL) } else { client = github.NewClient(tc) apiURL = client.BaseURL.String() diff --git a/pkg/provider/github/github_test.go b/pkg/provider/github/github_test.go index bb32bbb78..240e7e3a4 100644 --- a/pkg/provider/github/github_test.go +++ b/pkg/provider/github/github_test.go @@ -16,7 +16,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" diff --git a/pkg/provider/github/parse_payload.go b/pkg/provider/github/parse_payload.go index 8bd2b8ab1..8c03c6a81 100644 --- a/pkg/provider/github/parse_payload.go +++ b/pkg/provider/github/parse_payload.go @@ -12,7 +12,7 @@ import ( "strings" "github.com/bradleyfalzon/ghinstallation/v2" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" @@ -70,7 +70,8 @@ func (v *Provider) GetAppToken(ctx context.Context, kube kubernetes.Interface, g if !strings.HasPrefix(gheURL, "https://") && !strings.HasPrefix(gheURL, "http://") { gheURL = "https://" + gheURL } - v.Client, _ = github.NewEnterpriseClient(gheURL, "", &http.Client{Transport: itr}) + uploadURL := gheURL + "/api/uploads" + v.Client, _ = github.NewClient(&http.Client{Transport: itr}).WithEnterpriseURLs(gheURL, uploadURL) itr.BaseURL = strings.TrimSuffix(v.Client.BaseURL.String(), "/") } else { v.Client = github.NewClient(&http.Client{Transport: itr}) diff --git a/pkg/provider/github/parse_payload_test.go b/pkg/provider/github/parse_payload_test.go index f69cd8131..eafe4c5fc 100644 --- a/pkg/provider/github/parse_payload_test.go +++ b/pkg/provider/github/parse_payload_test.go @@ -9,7 +9,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + 
"github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/clients" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" diff --git a/pkg/provider/github/repository.go b/pkg/provider/github/repository.go index 5b0919ed9..b2c843cc7 100644 --- a/pkg/provider/github/repository.go +++ b/pkg/provider/github/repository.go @@ -6,7 +6,7 @@ import ( "fmt" "net/http" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/formatting" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/pkg/provider/github/repository_test.go b/pkg/provider/github/repository_test.go index a66ca51ff..73d69cf74 100644 --- a/pkg/provider/github/repository_test.go +++ b/pkg/provider/github/repository_test.go @@ -8,7 +8,7 @@ import ( "net/http" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/clients" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" diff --git a/pkg/provider/github/status.go b/pkg/provider/github/status.go index 690720094..20756bd77 100644 --- a/pkg/provider/github/status.go +++ b/pkg/provider/github/status.go @@ -8,7 +8,7 @@ import ( "strings" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/action" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/kubeinteraction" diff --git a/pkg/provider/github/status_test.go b/pkg/provider/github/status_test.go index 003d9f4ce..2491d6342 100644 --- a/pkg/provider/github/status_test.go +++ b/pkg/provider/github/status_test.go @@ -10,7 +10,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" diff --git a/pkg/provider/gitlab/gitlab.go b/pkg/provider/gitlab/gitlab.go index 049ec49df..05f47dd9b 100644 --- a/pkg/provider/gitlab/gitlab.go +++ b/pkg/provider/gitlab/gitlab.go @@ -312,6 +312,7 @@ func (v *Provider) GetFiles(_ context.Context, runevent *info.Event) ([]string, "exiting... 
(hint: did you forget setting a secret on your repo?)") } if runevent.TriggerTarget == "pull_request" { + //nolint: staticcheck mrchanges, _, err := v.Client.MergeRequests.GetMergeRequestChanges(v.sourceProjectID, runevent.PullRequestNumber, &gitlab.GetMergeRequestChangesOptions{}) if err != nil { return []string{}, err diff --git a/pkg/provider/gitlab/gitlab_test.go b/pkg/provider/gitlab/gitlab_test.go index c84640424..a9babe80e 100644 --- a/pkg/provider/gitlab/gitlab_test.go +++ b/pkg/provider/gitlab/gitlab_test.go @@ -462,17 +462,7 @@ func TestGetFiles(t *testing.T) { PullRequestNumber: 10, }, mrchanges: &gitlab.MergeRequest{ - Changes: append(commitFiles.Changes, - struct { - OldPath string `json:"old_path"` - NewPath string `json:"new_path"` - AMode string `json:"a_mode"` - BMode string `json:"b_mode"` - Diff string `json:"diff"` - NewFile bool `json:"new_file"` - RenamedFile bool `json:"renamed_file"` - DeletedFile bool `json:"deleted_file"` - }{NewPath: "test.txt"}), + Changes: append(commitFiles.Changes, &gitlab.MergeRequestDiff{NewPath: "test.txt"}), }, }, { @@ -498,17 +488,7 @@ func TestGetFiles(t *testing.T) { fakeclient, mux, teardown := thelp.Setup(t) defer teardown() mergeFileChanges := &gitlab.MergeRequest{ - Changes: append(commitFiles.Changes, - struct { - OldPath string `json:"old_path"` - NewPath string `json:"new_path"` - AMode string `json:"a_mode"` - BMode string `json:"b_mode"` - Diff string `json:"diff"` - NewFile bool `json:"new_file"` - RenamedFile bool `json:"renamed_file"` - DeletedFile bool `json:"deleted_file"` - }{NewPath: "test.txt"}), + Changes: append(commitFiles.Changes, &gitlab.MergeRequestDiff{NewPath: "test.txt"}), } if tt.event.TriggerTarget == "pull_request" { mux.HandleFunc(fmt.Sprintf("/projects/0/merge_requests/%d/changes", @@ -526,12 +506,12 @@ func TestGetFiles(t *testing.T) { }, } if tt.event.TriggerTarget == "push" { - mux.HandleFunc(fmt.Sprintf("/projects/0/repository/commits/%s/diff", - tt.event.SHA), func(rw http.ResponseWriter, r *http.Request) { - jeez, err := json.Marshal(pushFileChanges) - assert.NilError(t, err) - _, _ = rw.Write(jeez) - }) + mux.HandleFunc(fmt.Sprintf("/projects/0/repository/commits/%s/diff", tt.event.SHA), + func(rw http.ResponseWriter, r *http.Request) { + jeez, err := json.Marshal(pushFileChanges) + assert.NilError(t, err) + _, _ = rw.Write(jeez) + }) } providerInfo := &Provider{Client: fakeclient} diff --git a/pkg/reconciler/reconciler_test.go b/pkg/reconciler/reconciler_test.go index 9d881ef69..f5ec51f0e 100644 --- a/pkg/reconciler/reconciler_test.go +++ b/pkg/reconciler/reconciler_test.go @@ -7,7 +7,7 @@ import ( "net/http" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/jonboulle/clockwork" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" diff --git a/pkg/reconciler/status.go b/pkg/reconciler/status.go index 58561e74c..034467973 100644 --- a/pkg/reconciler/status.go +++ b/pkg/reconciler/status.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" apipac "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" pacv1a1 "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/formatting" diff --git a/pkg/test/github/github.go b/pkg/test/github/github.go index 
44a115b52..01ca1907e 100644 --- a/pkg/test/github/github.go +++ b/pkg/test/github/github.go @@ -13,7 +13,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "gotest.tools/v3/assert" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" diff --git a/test/gitea_test.go b/test/gitea_test.go index 8202ee924..ea2d79706 100644 --- a/test/gitea_test.go +++ b/test/gitea_test.go @@ -14,7 +14,7 @@ import ( "time" "code.gitea.io/sdk/gitea" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" "github.com/tektoncd/pipeline/pkg/names" "gopkg.in/yaml.v2" diff --git a/test/github_config_maxkeepruns_test.go b/test/github_config_maxkeepruns_test.go index c30147cc0..a37204875 100644 --- a/test/github_config_maxkeepruns_test.go +++ b/test/github_config_maxkeepruns_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - ghlib "github.com/google/go-github/v53/github" + ghlib "github.com/google/go-github/v55/github" "gotest.tools/v3/assert" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" diff --git a/test/github_pullrequest_concurrency_test.go b/test/github_pullrequest_concurrency_test.go index 44c0d98c8..eebba0a9b 100644 --- a/test/github_pullrequest_concurrency_test.go +++ b/test/github_pullrequest_concurrency_test.go @@ -11,7 +11,7 @@ import ( "testing" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" tgithub "github.com/openshift-pipelines/pipelines-as-code/test/pkg/github" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/options" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/payload" diff --git a/test/github_pullrequest_oktotest_test.go b/test/github_pullrequest_oktotest_test.go index ea831109a..00f8af1ae 100644 --- a/test/github_pullrequest_oktotest_test.go +++ b/test/github_pullrequest_oktotest_test.go @@ -11,7 +11,7 @@ import ( "strconv" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" tgithub "github.com/openshift-pipelines/pipelines-as-code/test/pkg/github" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/payload" diff --git a/test/github_pullrequest_rerequest_test.go b/test/github_pullrequest_rerequest_test.go index e317c8d09..79c3110dd 100644 --- a/test/github_pullrequest_rerequest_test.go +++ b/test/github_pullrequest_rerequest_test.go @@ -10,7 +10,7 @@ import ( "strconv" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params/info" tgithub "github.com/openshift-pipelines/pipelines-as-code/test/pkg/github" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/payload" diff --git a/test/github_pullrequest_retest_test.go b/test/github_pullrequest_retest_test.go index 11e015a5c..19abc824c 100644 --- a/test/github_pullrequest_retest_test.go +++ b/test/github_pullrequest_retest_test.go @@ -9,7 +9,7 @@ import ( "os" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/keys" tgithub "github.com/openshift-pipelines/pipelines-as-code/test/pkg/github" twait "github.com/openshift-pipelines/pipelines-as-code/test/pkg/wait" diff --git a/test/github_pullrequest_test_comment_test.go b/test/github_pullrequest_test_comment_test.go index 
704a819db..f55d44510 100644 --- a/test/github_pullrequest_test_comment_test.go +++ b/test/github_pullrequest_test_comment_test.go @@ -8,7 +8,7 @@ import ( "os" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" tgithub "github.com/openshift-pipelines/pipelines-as-code/test/pkg/github" twait "github.com/openshift-pipelines/pipelines-as-code/test/pkg/wait" "gotest.tools/v3/assert" diff --git a/test/invalid_event_test.go b/test/invalid_event_test.go index e36e29e5a..370c54c20 100644 --- a/test/invalid_event_test.go +++ b/test/invalid_event_test.go @@ -11,7 +11,7 @@ import ( "os" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "gotest.tools/v3/assert" ) diff --git a/test/pkg/bitbucketcloud/pr.go b/test/pkg/bitbucketcloud/pr.go index 68b33cd44..e5d6b79c9 100644 --- a/test/pkg/bitbucketcloud/pr.go +++ b/test/pkg/bitbucketcloud/pr.go @@ -29,11 +29,15 @@ func MakePR(t *testing.T, bprovider bitbucketcloud.Provider, runcnx *params.Run, err = bprovider.Client.Workspaces.Repositories.Repository.WriteFileBlob(&bitbucket.RepositoryBlobWriteOptions{ Owner: opts.Organization, RepoSlug: opts.Repo, - FileName: ".tekton/sub/dir/pr.yaml", - FilePath: tmpfile.Path(), - Message: title, - Branch: targetRefName, - Author: fmt.Sprintf("%s <%s>", commitAuthor, commitEmail), + Files: []bitbucket.File{ + { + Name: ".tekton/sub/dir/pr.yaml", + Path: tmpfile.Path(), + }, + }, + Message: title, + Branch: targetRefName, + Author: fmt.Sprintf("%s <%s>", commitAuthor, commitEmail), }) assert.NilError(t, err) runcnx.Clients.Log.Infof("Using repo %s branch %s", bcrepo.Full_name, targetRefName) diff --git a/test/pkg/gitea/scm.go b/test/pkg/gitea/scm.go index c5da3a374..537bbbb09 100644 --- a/test/pkg/gitea/scm.go +++ b/test/pkg/gitea/scm.go @@ -10,7 +10,7 @@ import ( "testing" "code.gitea.io/sdk/gitea" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "go.uber.org/zap" "gotest.tools/v3/assert" "gotest.tools/v3/env" diff --git a/test/pkg/gitea/setup.go b/test/pkg/gitea/setup.go index f5cd188ad..70697af8e 100644 --- a/test/pkg/gitea/setup.go +++ b/test/pkg/gitea/setup.go @@ -7,7 +7,7 @@ import ( "strings" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "gotest.tools/v3/assert" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/test/pkg/github/crd.go b/test/pkg/github/crd.go index 16017694f..47941ca4f 100644 --- a/test/pkg/github/crd.go +++ b/test/pkg/github/crd.go @@ -5,7 +5,7 @@ import ( "os" "testing" - ghlib "github.com/google/go-github/v53/github" + ghlib "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/apis/pipelinesascode/v1alpha1" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/options" diff --git a/test/pkg/github/pr.go b/test/pkg/github/pr.go index 4c8bb55b5..416d6dc2f 100644 --- a/test/pkg/github/pr.go +++ b/test/pkg/github/pr.go @@ -8,7 +8,7 @@ import ( "path/filepath" "testing" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" ghprovider "github.com/openshift-pipelines/pipelines-as-code/pkg/provider/github" "github.com/openshift-pipelines/pipelines-as-code/test/pkg/options" diff --git a/test/pkg/github/setup.go b/test/pkg/github/setup.go index 91e62bbc0..461890d2d 100644 --- a/test/pkg/github/setup.go +++ 
b/test/pkg/github/setup.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - ghlib "github.com/google/go-github/v53/github" + ghlib "github.com/google/go-github/v55/github" "gotest.tools/v3/assert" "github.com/openshift-pipelines/pipelines-as-code/pkg/params" diff --git a/test/pkg/logs/pod.go b/test/pkg/logs/pod.go index 26d591c74..7e2a15c5b 100644 --- a/test/pkg/logs/pod.go +++ b/test/pkg/logs/pod.go @@ -5,7 +5,7 @@ import ( "fmt" "io" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" corev1i "k8s.io/client-go/kubernetes/typed/core/v1" diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/x448.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/x448.go index ffdd51513..df04262e9 100644 --- a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/x448.go +++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/x448.go @@ -73,7 +73,9 @@ func (c *x448) GenerateECDH(rand io.Reader) (point []byte, secret []byte, err er func (c *x448) Encaps(rand io.Reader, point []byte) (ephemeral, sharedSecret []byte, err error) { var pk, ss x448lib.Key seed, e, err := c.generateKeyPairBytes(rand) - + if err != nil { + return nil, nil, err + } copy(pk[:], point) x448lib.Shared(&ss, &seed, &pk) diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go index 609e9fc1f..e9bbf0327 100644 --- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go +++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go @@ -24,10 +24,10 @@ type SymmetricallyEncrypted struct { prefix []byte // Specific to version 2 - cipher CipherFunction - mode AEADMode - chunkSizeByte byte - salt [aeadSaltSize]byte + Cipher CipherFunction + Mode AEADMode + ChunkSizeByte byte + Salt [aeadSaltSize]byte } const ( diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go index 969c42236..e96252c19 100644 --- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go +++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go @@ -7,9 +7,10 @@ package packet import ( "crypto/cipher" "crypto/sha256" + "io" + "github.com/ProtonMail/go-crypto/openpgp/errors" "golang.org/x/crypto/hkdf" - "io" ) // parseAead parses a V2 SEIPD packet (AEAD) as specified in @@ -21,26 +22,26 @@ func (se *SymmetricallyEncrypted) parseAead(r io.Reader) error { } // Cipher - se.cipher = CipherFunction(headerData[0]) + se.Cipher = CipherFunction(headerData[0]) // cipherFunc must have block size 16 to use AEAD - if se.cipher.blockSize() != 16 { - return errors.UnsupportedError("invalid aead cipher: " + string(se.cipher)) + if se.Cipher.blockSize() != 16 { + return errors.UnsupportedError("invalid aead cipher: " + string(se.Cipher)) } // Mode - se.mode = AEADMode(headerData[1]) - if se.mode.TagLength() == 0 { - return errors.UnsupportedError("unknown aead mode: " + string(se.mode)) + se.Mode = AEADMode(headerData[1]) + if se.Mode.TagLength() == 0 { + return errors.UnsupportedError("unknown aead mode: " + string(se.Mode)) } // Chunk size - se.chunkSizeByte = headerData[2] - if se.chunkSizeByte > 16 { - return errors.UnsupportedError("invalid aead chunk size byte: " + 
string(se.chunkSizeByte)) + se.ChunkSizeByte = headerData[2] + if se.ChunkSizeByte > 16 { + return errors.UnsupportedError("invalid aead chunk size byte: " + string(se.ChunkSizeByte)) } // Salt - if n, err := io.ReadFull(r, se.salt[:]); n < aeadSaltSize { + if n, err := io.ReadFull(r, se.Salt[:]); n < aeadSaltSize { return errors.StructuralError("could not read aead salt: " + err.Error()) } @@ -52,19 +53,19 @@ func (se *SymmetricallyEncrypted) associatedData() []byte { return []byte{ 0xD2, symmetricallyEncryptedVersionAead, - byte(se.cipher), - byte(se.mode), - se.chunkSizeByte, + byte(se.Cipher), + byte(se.Mode), + se.ChunkSizeByte, } } // decryptAead decrypts a V2 SEIPD packet (AEAD) as specified in // https://www.ietf.org/archive/id/draft-ietf-openpgp-crypto-refresh-07.html#section-5.13.2 func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, error) { - aead, nonce := getSymmetricallyEncryptedAeadInstance(se.cipher, se.mode, inputKey, se.salt[:], se.associatedData()) + aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData()) // Carry the first tagLen bytes - tagLen := se.mode.TagLength() + tagLen := se.Mode.TagLength() peekedBytes := make([]byte, tagLen) n, err := io.ReadFull(se.Contents, peekedBytes) if n < tagLen || (err != nil && err != io.EOF) { @@ -74,7 +75,7 @@ func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, e return &aeadDecrypter{ aeadCrypter: aeadCrypter{ aead: aead, - chunkSize: decodeAEADChunkSize(se.chunkSizeByte), + chunkSize: decodeAEADChunkSize(se.ChunkSizeByte), initialNonce: nonce, associatedData: se.associatedData(), chunkIndex: make([]byte, 8), diff --git a/vendor/github.com/benbjohnson/clock/LICENSE b/vendor/github.com/benbjohnson/clock/LICENSE deleted file mode 100644 index ce212cb1c..000000000 --- a/vendor/github.com/benbjohnson/clock/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Ben Johnson - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/benbjohnson/clock/README.md b/vendor/github.com/benbjohnson/clock/README.md deleted file mode 100644 index 4f1f82fc6..000000000 --- a/vendor/github.com/benbjohnson/clock/README.md +++ /dev/null @@ -1,105 +0,0 @@ -clock -===== - -[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white&style=flat-square)](https://pkg.go.dev/mod/github.com/benbjohnson/clock) - -Clock is a small library for mocking time in Go. 
It provides an interface -around the standard library's [`time`][time] package so that the application -can use the realtime clock while tests can use the mock clock. - -The module is currently maintained by @djmitche. - -[time]: https://pkg.go.dev/github.com/benbjohnson/clock - -## Usage - -### Realtime Clock - -Your application can maintain a `Clock` variable that will allow realtime and -mock clocks to be interchangeable. For example, if you had an `Application` type: - -```go -import "github.com/benbjohnson/clock" - -type Application struct { - Clock clock.Clock -} -``` - -You could initialize it to use the realtime clock like this: - -```go -var app Application -app.Clock = clock.New() -... -``` - -Then all timers and time-related functionality should be performed from the -`Clock` variable. - - -### Mocking time - -In your tests, you will want to use a `Mock` clock: - -```go -import ( - "testing" - - "github.com/benbjohnson/clock" -) - -func TestApplication_DoSomething(t *testing.T) { - mock := clock.NewMock() - app := Application{Clock: mock} - ... -} -``` - -Now that you've initialized your application to use the mock clock, you can -adjust the time programmatically. The mock clock always starts from the Unix -epoch (midnight UTC on Jan 1, 1970). - - -### Controlling time - -The mock clock provides the same functions that the standard library's `time` -package provides. For example, to find the current time, you use the `Now()` -function: - -```go -mock := clock.NewMock() - -// Find the current time. -mock.Now().UTC() // 1970-01-01 00:00:00 +0000 UTC - -// Move the clock forward. -mock.Add(2 * time.Hour) - -// Check the time again. It's 2 hours later! -mock.Now().UTC() // 1970-01-01 02:00:00 +0000 UTC -``` - -Timers and Tickers are also controlled by this same mock clock. They will only -execute when the clock is moved forward: - -```go -mock := clock.NewMock() -count := 0 - -// Kick off a timer to increment every 1 mock second. -go func() { - ticker := mock.Ticker(1 * time.Second) - for { - <-ticker.C - count++ - } -}() -runtime.Gosched() - -// Move the clock forward 10 seconds. -mock.Add(10 * time.Second) - -// This prints 10. -fmt.Println(count) -``` diff --git a/vendor/github.com/benbjohnson/clock/clock.go b/vendor/github.com/benbjohnson/clock/clock.go deleted file mode 100644 index 40555b303..000000000 --- a/vendor/github.com/benbjohnson/clock/clock.go +++ /dev/null @@ -1,378 +0,0 @@ -package clock - -import ( - "context" - "sort" - "sync" - "time" -) - -// Re-export of time.Duration -type Duration = time.Duration - -// Clock represents an interface to the functions in the standard library time -// package. Two implementations are available in the clock package. The first -// is a real-time clock which simply wraps the time package's functions. The -// second is a mock clock which will only change when -// programmatically adjusted. -type Clock interface { - After(d time.Duration) <-chan time.Time - AfterFunc(d time.Duration, f func()) *Timer - Now() time.Time - Since(t time.Time) time.Duration - Until(t time.Time) time.Duration - Sleep(d time.Duration) - Tick(d time.Duration) <-chan time.Time - Ticker(d time.Duration) *Ticker - Timer(d time.Duration) *Timer - WithDeadline(parent context.Context, d time.Time) (context.Context, context.CancelFunc) - WithTimeout(parent context.Context, t time.Duration) (context.Context, context.CancelFunc) -} - -// New returns an instance of a real-time clock. 
-func New() Clock { - return &clock{} -} - -// clock implements a real-time clock by simply wrapping the time package functions. -type clock struct{} - -func (c *clock) After(d time.Duration) <-chan time.Time { return time.After(d) } - -func (c *clock) AfterFunc(d time.Duration, f func()) *Timer { - return &Timer{timer: time.AfterFunc(d, f)} -} - -func (c *clock) Now() time.Time { return time.Now() } - -func (c *clock) Since(t time.Time) time.Duration { return time.Since(t) } - -func (c *clock) Until(t time.Time) time.Duration { return time.Until(t) } - -func (c *clock) Sleep(d time.Duration) { time.Sleep(d) } - -func (c *clock) Tick(d time.Duration) <-chan time.Time { return time.Tick(d) } - -func (c *clock) Ticker(d time.Duration) *Ticker { - t := time.NewTicker(d) - return &Ticker{C: t.C, ticker: t} -} - -func (c *clock) Timer(d time.Duration) *Timer { - t := time.NewTimer(d) - return &Timer{C: t.C, timer: t} -} - -func (c *clock) WithDeadline(parent context.Context, d time.Time) (context.Context, context.CancelFunc) { - return context.WithDeadline(parent, d) -} - -func (c *clock) WithTimeout(parent context.Context, t time.Duration) (context.Context, context.CancelFunc) { - return context.WithTimeout(parent, t) -} - -// Mock represents a mock clock that only moves forward programmically. -// It can be preferable to a real-time clock when testing time-based functionality. -type Mock struct { - mu sync.Mutex - now time.Time // current time - timers clockTimers // tickers & timers -} - -// NewMock returns an instance of a mock clock. -// The current time of the mock clock on initialization is the Unix epoch. -func NewMock() *Mock { - return &Mock{now: time.Unix(0, 0)} -} - -// Add moves the current time of the mock clock forward by the specified duration. -// This should only be called from a single goroutine at a time. -func (m *Mock) Add(d time.Duration) { - // Calculate the final current time. - t := m.now.Add(d) - - // Continue to execute timers until there are no more before the new time. - for { - if !m.runNextTimer(t) { - break - } - } - - // Ensure that we end with the new time. - m.mu.Lock() - m.now = t - m.mu.Unlock() - - // Give a small buffer to make sure that other goroutines get handled. - gosched() -} - -// Set sets the current time of the mock clock to a specific one. -// This should only be called from a single goroutine at a time. -func (m *Mock) Set(t time.Time) { - // Continue to execute timers until there are no more before the new time. - for { - if !m.runNextTimer(t) { - break - } - } - - // Ensure that we end with the new time. - m.mu.Lock() - m.now = t - m.mu.Unlock() - - // Give a small buffer to make sure that other goroutines get handled. - gosched() -} - -// runNextTimer executes the next timer in chronological order and moves the -// current time to the timer's next tick time. The next time is not executed if -// its next time is after the max time. Returns true if a timer was executed. -func (m *Mock) runNextTimer(max time.Time) bool { - m.mu.Lock() - - // Sort timers by time. - sort.Sort(m.timers) - - // If we have no more timers then exit. - if len(m.timers) == 0 { - m.mu.Unlock() - return false - } - - // Retrieve next timer. Exit if next tick is after new time. - t := m.timers[0] - if t.Next().After(max) { - m.mu.Unlock() - return false - } - - // Move "now" forward and unlock clock. - m.now = t.Next() - m.mu.Unlock() - - // Execute timer. 
- t.Tick(m.now) - return true -} - -// After waits for the duration to elapse and then sends the current time on the returned channel. -func (m *Mock) After(d time.Duration) <-chan time.Time { - return m.Timer(d).C -} - -// AfterFunc waits for the duration to elapse and then executes a function. -// A Timer is returned that can be stopped. -func (m *Mock) AfterFunc(d time.Duration, f func()) *Timer { - t := m.Timer(d) - t.C = nil - t.fn = f - return t -} - -// Now returns the current wall time on the mock clock. -func (m *Mock) Now() time.Time { - m.mu.Lock() - defer m.mu.Unlock() - return m.now -} - -// Since returns time since `t` using the mock clock's wall time. -func (m *Mock) Since(t time.Time) time.Duration { - return m.Now().Sub(t) -} - -// Until returns time until `t` using the mock clock's wall time. -func (m *Mock) Until(t time.Time) time.Duration { - return t.Sub(m.Now()) -} - -// Sleep pauses the goroutine for the given duration on the mock clock. -// The clock must be moved forward in a separate goroutine. -func (m *Mock) Sleep(d time.Duration) { - <-m.After(d) -} - -// Tick is a convenience function for Ticker(). -// It will return a ticker channel that cannot be stopped. -func (m *Mock) Tick(d time.Duration) <-chan time.Time { - return m.Ticker(d).C -} - -// Ticker creates a new instance of Ticker. -func (m *Mock) Ticker(d time.Duration) *Ticker { - m.mu.Lock() - defer m.mu.Unlock() - ch := make(chan time.Time, 1) - t := &Ticker{ - C: ch, - c: ch, - mock: m, - d: d, - next: m.now.Add(d), - } - m.timers = append(m.timers, (*internalTicker)(t)) - return t -} - -// Timer creates a new instance of Timer. -func (m *Mock) Timer(d time.Duration) *Timer { - m.mu.Lock() - defer m.mu.Unlock() - ch := make(chan time.Time, 1) - t := &Timer{ - C: ch, - c: ch, - mock: m, - next: m.now.Add(d), - stopped: false, - } - m.timers = append(m.timers, (*internalTimer)(t)) - return t -} - -func (m *Mock) removeClockTimer(t clockTimer) { - for i, timer := range m.timers { - if timer == t { - copy(m.timers[i:], m.timers[i+1:]) - m.timers[len(m.timers)-1] = nil - m.timers = m.timers[:len(m.timers)-1] - break - } - } - sort.Sort(m.timers) -} - -// clockTimer represents an object with an associated start time. -type clockTimer interface { - Next() time.Time - Tick(time.Time) -} - -// clockTimers represents a list of sortable timers. -type clockTimers []clockTimer - -func (a clockTimers) Len() int { return len(a) } -func (a clockTimers) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a clockTimers) Less(i, j int) bool { return a[i].Next().Before(a[j].Next()) } - -// Timer represents a single event. -// The current time will be sent on C, unless the timer was created by AfterFunc. -type Timer struct { - C <-chan time.Time - c chan time.Time - timer *time.Timer // realtime impl, if set - next time.Time // next tick time - mock *Mock // mock clock, if set - fn func() // AfterFunc function, if set - stopped bool // True if stopped, false if running -} - -// Stop turns off the ticker. 
-func (t *Timer) Stop() bool { - if t.timer != nil { - return t.timer.Stop() - } - - t.mock.mu.Lock() - registered := !t.stopped - t.mock.removeClockTimer((*internalTimer)(t)) - t.stopped = true - t.mock.mu.Unlock() - return registered -} - -// Reset changes the expiry time of the timer -func (t *Timer) Reset(d time.Duration) bool { - if t.timer != nil { - return t.timer.Reset(d) - } - - t.mock.mu.Lock() - t.next = t.mock.now.Add(d) - defer t.mock.mu.Unlock() - - registered := !t.stopped - if t.stopped { - t.mock.timers = append(t.mock.timers, (*internalTimer)(t)) - } - - t.stopped = false - return registered -} - -type internalTimer Timer - -func (t *internalTimer) Next() time.Time { return t.next } -func (t *internalTimer) Tick(now time.Time) { - // a gosched() after ticking, to allow any consequences of the - // tick to complete - defer gosched() - - t.mock.mu.Lock() - if t.fn != nil { - // defer function execution until the lock is released, and - defer t.fn() - } else { - t.c <- now - } - t.mock.removeClockTimer((*internalTimer)(t)) - t.stopped = true - t.mock.mu.Unlock() -} - -// Ticker holds a channel that receives "ticks" at regular intervals. -type Ticker struct { - C <-chan time.Time - c chan time.Time - ticker *time.Ticker // realtime impl, if set - next time.Time // next tick time - mock *Mock // mock clock, if set - d time.Duration // time between ticks -} - -// Stop turns off the ticker. -func (t *Ticker) Stop() { - if t.ticker != nil { - t.ticker.Stop() - } else { - t.mock.mu.Lock() - t.mock.removeClockTimer((*internalTicker)(t)) - t.mock.mu.Unlock() - } -} - -// Reset resets the ticker to a new duration. -func (t *Ticker) Reset(dur time.Duration) { - if t.ticker != nil { - t.ticker.Reset(dur) - return - } - - t.mock.mu.Lock() - defer t.mock.mu.Unlock() - - t.d = dur - t.next = t.mock.now.Add(dur) -} - -type internalTicker Ticker - -func (t *internalTicker) Next() time.Time { return t.next } -func (t *internalTicker) Tick(now time.Time) { - select { - case t.c <- now: - default: - } - t.next = now.Add(t.d) - gosched() -} - -// Sleep momentarily so that other goroutines can process. -func gosched() { time.Sleep(1 * time.Millisecond) } - -var ( - // type checking - _ Clock = &Mock{} -) diff --git a/vendor/github.com/benbjohnson/clock/context.go b/vendor/github.com/benbjohnson/clock/context.go deleted file mode 100644 index eb67594f2..000000000 --- a/vendor/github.com/benbjohnson/clock/context.go +++ /dev/null @@ -1,86 +0,0 @@ -package clock - -import ( - "context" - "fmt" - "sync" - "time" -) - -func (m *Mock) WithTimeout(parent context.Context, timeout time.Duration) (context.Context, context.CancelFunc) { - return m.WithDeadline(parent, m.Now().Add(timeout)) -} - -func (m *Mock) WithDeadline(parent context.Context, deadline time.Time) (context.Context, context.CancelFunc) { - if cur, ok := parent.Deadline(); ok && cur.Before(deadline) { - // The current deadline is already sooner than the new one. - return context.WithCancel(parent) - } - ctx := &timerCtx{clock: m, parent: parent, deadline: deadline, done: make(chan struct{})} - propagateCancel(parent, ctx) - dur := m.Until(deadline) - if dur <= 0 { - ctx.cancel(context.DeadlineExceeded) // deadline has already passed - return ctx, func() {} - } - ctx.Lock() - defer ctx.Unlock() - if ctx.err == nil { - ctx.timer = m.AfterFunc(dur, func() { - ctx.cancel(context.DeadlineExceeded) - }) - } - return ctx, func() { ctx.cancel(context.Canceled) } -} - -// propagateCancel arranges for child to be canceled when parent is. 
-func propagateCancel(parent context.Context, child *timerCtx) { - if parent.Done() == nil { - return // parent is never canceled - } - go func() { - select { - case <-parent.Done(): - child.cancel(parent.Err()) - case <-child.Done(): - } - }() -} - -type timerCtx struct { - sync.Mutex - - clock Clock - parent context.Context - deadline time.Time - done chan struct{} - - err error - timer *Timer -} - -func (c *timerCtx) cancel(err error) { - c.Lock() - defer c.Unlock() - if c.err != nil { - return // already canceled - } - c.err = err - close(c.done) - if c.timer != nil { - c.timer.Stop() - c.timer = nil - } -} - -func (c *timerCtx) Deadline() (deadline time.Time, ok bool) { return c.deadline, true } - -func (c *timerCtx) Done() <-chan struct{} { return c.done } - -func (c *timerCtx) Err() error { return c.err } - -func (c *timerCtx) Value(key interface{}) interface{} { return c.parent.Value(key) } - -func (c *timerCtx) String() string { - return fmt.Sprintf("clock.WithDeadline(%s [%s])", c.deadline, c.deadline.Sub(c.clock.Now())) -} diff --git a/vendor/github.com/bradleyfalzon/ghinstallation/v2/README.md b/vendor/github.com/bradleyfalzon/ghinstallation/v2/README.md index 9c3071ec6..cf5ea50b0 100644 --- a/vendor/github.com/bradleyfalzon/ghinstallation/v2/README.md +++ b/vendor/github.com/bradleyfalzon/ghinstallation/v2/README.md @@ -1,6 +1,6 @@ # ghinstallation -[![GoDoc](https://godoc.org/github.com/bradleyfalzon/ghinstallation?status.svg)](https://godoc.org/github.com/bradleyfalzon/ghinstallation) +[![GoDoc](https://godoc.org/github.com/bradleyfalzon/ghinstallation?status.svg)](https://godoc.org/github.com/bradleyfalzon/ghinstallation/v2) `ghinstallation` provides `Transport`, which implements `http.RoundTripper` to provide authentication as an installation for GitHub Apps. @@ -39,6 +39,8 @@ func main() { } ``` +You can also use [`New()`](https://pkg.go.dev/github.com/bradleyfalzon/ghinstallation/v2#New) to load a key directly from a `[]byte`. + # GitHub Enterprise Example For clients using GitHub Enterprise, set the base URL as follows: diff --git a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go index 84012a00d..33229ada3 100644 --- a/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go +++ b/vendor/github.com/bradleyfalzon/ghinstallation/v2/transport.go @@ -13,7 +13,7 @@ import ( "sync" "time" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v55/github" ) const ( diff --git a/vendor/github.com/emicklei/go-restful/v3/CHANGES.md b/vendor/github.com/emicklei/go-restful/v3/CHANGES.md index 352018e70..5edd5a7ca 100644 --- a/vendor/github.com/emicklei/go-restful/v3/CHANGES.md +++ b/vendor/github.com/emicklei/go-restful/v3/CHANGES.md @@ -1,11 +1,15 @@ # Change history of go-restful -## [v3.10.2] - 2023-03-09 +## [v3.11.0] - 2023-08-19 + +- restored behavior as <= v3.9.0 with option to change path strategy using TrimRightSlashEnabled. + +## [v3.10.2] - 2023-03-09 - DO NOT USE - introduced MergePathStrategy to be able to revert behaviour of path concatenation to 3.9.0 see comment in Readme how to customize this behaviour. 
-## [v3.10.1] - 2022-11-19 +## [v3.10.1] - 2022-11-19 - DO NOT USE - fix broken 3.10.0 by using path package for joining paths diff --git a/vendor/github.com/emicklei/go-restful/v3/README.md b/vendor/github.com/emicklei/go-restful/v3/README.md index 85da90128..e3e30080e 100644 --- a/vendor/github.com/emicklei/go-restful/v3/README.md +++ b/vendor/github.com/emicklei/go-restful/v3/README.md @@ -79,7 +79,7 @@ func (u UserResource) findUser(request *restful.Request, response *restful.Respo - Content encoding (gzip,deflate) of request and response payloads - Automatic responses on OPTIONS (using a filter) - Automatic CORS request handling (using a filter) -- API declaration for Swagger UI ([go-restful-openapi](https://github.com/emicklei/go-restful-openapi), see [go-restful-swagger12](https://github.com/emicklei/go-restful-swagger12)) +- API declaration for Swagger UI ([go-restful-openapi](https://github.com/emicklei/go-restful-openapi)) - Panic recovery to produce HTTP 500, customizable using RecoverHandler(...) - Route errors produce HTTP 404/405/406/415 errors, customizable using ServiceErrorHandler(...) - Configurable (trace) logging @@ -96,10 +96,7 @@ There are several hooks to customize the behavior of the go-restful package. - Compression - Encoders for other serializers - Use [jsoniter](https://github.com/json-iterator/go) by building this package using a build tag, e.g. `go build -tags=jsoniter .` -- Use the variable `MergePathStrategy` to change the behaviour of composing the Route path given a root path and a local route path - - versions >= 3.10.1 has set the value to `PathJoinStrategy` that fixes a reported [security issue](https://github.com/advisories/GHSA-r48q-9g5r-8q2h) but may cause your services not to work correctly anymore. - - versions <= 3.9 had the behaviour that can be restored in newer versions by setting the value to `TrimSlashStrategy`. - - you can set value to a custom implementation (must implement MergePathStrategyFunc) +- Use the package variable `TrimRightSlashEnabled` (default true) to control the behavior of matching routes that end with a slash `/` ## Resources @@ -112,4 +109,4 @@ There are several hooks to customize the behavior of the go-restful package. Type ```git shortlog -s``` for a full list of contributors. -© 2012 - 2022, http://ernestmicklei.com. MIT License. Contributions are welcome. +© 2012 - 2023, http://ernestmicklei.com. MIT License. Contributions are welcome. diff --git a/vendor/github.com/emicklei/go-restful/v3/route.go b/vendor/github.com/emicklei/go-restful/v3/route.go index ea05b3da8..306c44be7 100644 --- a/vendor/github.com/emicklei/go-restful/v3/route.go +++ b/vendor/github.com/emicklei/go-restful/v3/route.go @@ -40,7 +40,8 @@ type Route struct { ParameterDocs []*Parameter ResponseErrors map[int]ResponseError DefaultResponse *ResponseError - ReadSample, WriteSample interface{} // structs that model an example request or response payload + ReadSample, WriteSample interface{} // structs that model an example request or response payload + WriteSamples []interface{} // if more than one return types is possible (oneof) then this will contain multiple values // Extra information used to store custom information about the route. 
Metadata map[string]interface{} @@ -164,7 +165,13 @@ func tokenizePath(path string) []string { if "/" == path { return nil } - return strings.Split(strings.TrimLeft(path, "/"), "/") + if TrimRightSlashEnabled { + // 3.9.0 + return strings.Split(strings.Trim(path, "/"), "/") + } else { + // 3.10.2 + return strings.Split(strings.TrimLeft(path, "/"), "/") + } } // for debugging @@ -177,4 +184,8 @@ func (r *Route) EnableContentEncoding(enabled bool) { r.contentEncodingEnabled = &enabled } -var TrimRightSlashEnabled = false +// TrimRightSlashEnabled controls whether +// - path on route building is using path.Join +// - the path of the incoming request is trimmed of its slash suffux. +// Value of true matches the behavior of <= 3.9.0 +var TrimRightSlashEnabled = true diff --git a/vendor/github.com/emicklei/go-restful/v3/route_builder.go b/vendor/github.com/emicklei/go-restful/v3/route_builder.go index 827f471de..75168c12e 100644 --- a/vendor/github.com/emicklei/go-restful/v3/route_builder.go +++ b/vendor/github.com/emicklei/go-restful/v3/route_builder.go @@ -31,17 +31,18 @@ type RouteBuilder struct { typeNameHandleFunc TypeNameHandleFunction // required // documentation - doc string - notes string - operation string - readSample, writeSample interface{} - parameters []*Parameter - errorMap map[int]ResponseError - defaultResponse *ResponseError - metadata map[string]interface{} - extensions map[string]interface{} - deprecated bool - contentEncodingEnabled *bool + doc string + notes string + operation string + readSample interface{} + writeSamples []interface{} + parameters []*Parameter + errorMap map[int]ResponseError + defaultResponse *ResponseError + metadata map[string]interface{} + extensions map[string]interface{} + deprecated bool + contentEncodingEnabled *bool } // Do evaluates each argument with the RouteBuilder itself. @@ -135,9 +136,9 @@ func (b RouteBuilder) ParameterNamed(name string) (p *Parameter) { return p } -// Writes tells what resource type will be written as the response payload. Optional. -func (b *RouteBuilder) Writes(sample interface{}) *RouteBuilder { - b.writeSample = sample +// Writes tells which one of the resource types will be written as the response payload. Optional. +func (b *RouteBuilder) Writes(samples ...interface{}) *RouteBuilder { + b.writeSamples = samples // oneof return b } @@ -342,39 +343,29 @@ func (b *RouteBuilder) Build() Route { ResponseErrors: b.errorMap, DefaultResponse: b.defaultResponse, ReadSample: b.readSample, - WriteSample: b.writeSample, + WriteSamples: b.writeSamples, Metadata: b.metadata, Deprecated: b.deprecated, contentEncodingEnabled: b.contentEncodingEnabled, allowedMethodsWithoutContentType: b.allowedMethodsWithoutContentType, } + // set WriteSample if one specified + if len(b.writeSamples) == 1 { + route.WriteSample = b.writeSamples[0] + } route.Extensions = b.extensions route.postBuild() return route } -type MergePathStrategyFunc func(rootPath, routePath string) string - -var ( - // behavior >= 3.10 - PathJoinStrategy = func(rootPath, routePath string) string { - return path.Join(rootPath, routePath) - } +// merge two paths using the current (package global) merge path strategy. 
+func concatPath(rootPath, routePath string) string { - // behavior <= 3.9 - TrimSlashStrategy = func(rootPath, routePath string) string { + if TrimRightSlashEnabled { return strings.TrimRight(rootPath, "/") + "/" + strings.TrimLeft(routePath, "/") + } else { + return path.Join(rootPath, routePath) } - - // MergePathStrategy is the active strategy for merging a Route path when building the routing of all WebServices. - // The value is set to PathJoinStrategy - // PathJoinStrategy is a strategy that is more strict [Security - PRISMA-2022-0227] - MergePathStrategy = PathJoinStrategy -) - -// merge two paths using the current (package global) merge path strategy. -func concatPath(rootPath, routePath string) string { - return MergePathStrategy(rootPath, routePath) } var anonymousFuncCount int32 diff --git a/vendor/github.com/evanphx/json-patch/README.md b/vendor/github.com/evanphx/json-patch/README.md index 28e351693..97e319b21 100644 --- a/vendor/github.com/evanphx/json-patch/README.md +++ b/vendor/github.com/evanphx/json-patch/README.md @@ -4,7 +4,7 @@ well as for calculating & applying [RFC7396 JSON merge patches](https://tools.ietf.org/html/rfc7396). [![GoDoc](https://godoc.org/github.com/evanphx/json-patch?status.svg)](http://godoc.org/github.com/evanphx/json-patch) -[![Build Status](https://travis-ci.org/evanphx/json-patch.svg?branch=master)](https://travis-ci.org/evanphx/json-patch) +[![Build Status](https://github.com/evanphx/json-patch/actions/workflows/go.yml/badge.svg)](https://github.com/evanphx/json-patch/actions/workflows/go.yml) [![Report Card](https://goreportcard.com/badge/github.com/evanphx/json-patch)](https://goreportcard.com/report/github.com/evanphx/json-patch) # Get It! @@ -314,4 +314,4 @@ go test -cover ./... ``` Builds for pull requests are tested automatically -using [TravisCI](https://travis-ci.org/evanphx/json-patch). +using [GitHub Actions](https://github.com/evanphx/json-patch/actions/workflows/go.yml). 
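The go-restful route_builder.go hunk above drops the MergePathStrategy hook in favor of the package-level TrimRightSlashEnabled flag (now defaulting to true), which restores the <= 3.9.0 concatenation that keeps a route's trailing slash instead of letting path.Join strip it. A minimal sketch of the two merge behaviors, using only the standard library and hypothetical root/route values rather than the library's internal concatPath:

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// trimSlashConcat mirrors the <= 3.9.0 strategy that TrimRightSlashEnabled=true restores:
// trim the root's trailing slash, trim the route's leading slash, and join with "/".
func trimSlashConcat(rootPath, routePath string) string {
	return strings.TrimRight(rootPath, "/") + "/" + strings.TrimLeft(routePath, "/")
}

// joinConcat mirrors the 3.10.x strategy: path.Join cleans the result,
// which also removes any trailing slash.
func joinConcat(rootPath, routePath string) string {
	return path.Join(rootPath, routePath)
}

func main() {
	// A route registered with a trailing slash keeps it under the restored strategy,
	// but loses it under path.Join.
	fmt.Println(trimSlashConcat("/api", "/users/")) // /api/users/
	fmt.Println(joinConcat("/api", "/users/"))      // /api/users
}
```

Under the restored default, routes that end with a slash stay distinct from their slash-less counterparts, matching the pre-3.10 routing behavior that the TrimRightSlashEnabled comment in the hunk describes.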
diff --git a/vendor/github.com/evanphx/json-patch/patch.go b/vendor/github.com/evanphx/json-patch/patch.go index 4bce5936d..cd0274e1e 100644 --- a/vendor/github.com/evanphx/json-patch/patch.go +++ b/vendor/github.com/evanphx/json-patch/patch.go @@ -359,7 +359,7 @@ func findObject(pd *container, path string) (container, string) { next, ok := doc.get(decodePatchKey(part)) - if next == nil || ok != nil { + if next == nil || ok != nil || next.raw == nil { return nil, "" } @@ -568,6 +568,29 @@ func (p Patch) replace(doc *container, op Operation) error { return errors.Wrapf(err, "replace operation failed to decode path") } + if path == "" { + val := op.value() + + if val.which == eRaw { + if !val.tryDoc() { + if !val.tryAry() { + return errors.Wrapf(err, "replace operation value must be object or array") + } + } + } + + switch val.which { + case eAry: + *doc = &val.ary + case eDoc: + *doc = &val.doc + case eRaw: + return errors.Wrapf(err, "replace operation hit impossible case") + } + + return nil + } + con, key := findObject(doc, path) if con == nil { @@ -634,6 +657,25 @@ func (p Patch) test(doc *container, op Operation) error { return errors.Wrapf(err, "test operation failed to decode path") } + if path == "" { + var self lazyNode + + switch sv := (*doc).(type) { + case *partialDoc: + self.doc = *sv + self.which = eDoc + case *partialArray: + self.ary = *sv + self.which = eAry + } + + if self.equal(op.value()) { + return nil + } + + return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) + } + con, key := findObject(doc, path) if con == nil { @@ -646,7 +688,7 @@ func (p Patch) test(doc *container, op Operation) error { } if val == nil { - if op.value().raw == nil { + if op.value() == nil || op.value().raw == nil { return nil } return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) diff --git a/vendor/github.com/evanphx/json-patch/v5/patch.go b/vendor/github.com/evanphx/json-patch/v5/patch.go index 117f2c00d..73ff2c515 100644 --- a/vendor/github.com/evanphx/json-patch/v5/patch.go +++ b/vendor/github.com/evanphx/json-patch/v5/patch.go @@ -180,7 +180,7 @@ func (n *partialDoc) UnmarshalJSON(data []byte) error { if t, err := d.Token(); err != nil { return err } else if t != startObject { - return &syntaxError{fmt.Sprintf("unexpected JSON token in document node: %s", t)} + return &syntaxError{fmt.Sprintf("unexpected JSON token in document node: %v", t)} } for d.More() { k, err := d.Token() @@ -454,7 +454,11 @@ func (o Operation) value() *lazyNode { // ValueInterface decodes the operation value into an interface. 
func (o Operation) ValueInterface() (interface{}, error) { - if obj, ok := o["value"]; ok && obj != nil { + if obj, ok := o["value"]; ok { + if obj == nil { + return nil, nil + } + var v interface{} err := json.Unmarshal(*obj, &v) @@ -816,6 +820,43 @@ func ensurePathExists(pd *container, path string, options *ApplyOptions) error { return nil } +func validateOperation(op Operation) error { + switch op.Kind() { + case "add", "replace": + if _, err := op.ValueInterface(); err != nil { + return errors.Wrapf(err, "failed to decode 'value'") + } + case "move", "copy": + if _, err := op.From(); err != nil { + return errors.Wrapf(err, "failed to decode 'from'") + } + case "remove", "test": + default: + return fmt.Errorf("unsupported operation") + } + + if _, err := op.Path(); err != nil { + return errors.Wrapf(err, "failed to decode 'path'") + } + + return nil +} + +func validatePatch(p Patch) error { + for _, op := range p { + if err := validateOperation(op); err != nil { + opData, infoErr := json.Marshal(op) + if infoErr != nil { + return errors.Wrapf(err, "invalid operation") + } + + return errors.Wrapf(err, "invalid operation %s", opData) + } + } + + return nil +} + func (p Patch) remove(doc *container, op Operation, options *ApplyOptions) error { path, err := op.Path() if err != nil { @@ -965,7 +1006,7 @@ func (p Patch) test(doc *container, op Operation, options *ApplyOptions) error { } if val == nil { - if op.value().raw == nil { + if op.value() == nil || op.value().raw == nil { return nil } return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) @@ -1044,6 +1085,10 @@ func DecodePatch(buf []byte) (Patch, error) { return nil, err } + if err := validatePatch(p); err != nil { + return nil, err + } + return p, nil } diff --git a/vendor/github.com/gfleury/go-bitbucket-v1/.swagger-codegen-ignore b/vendor/github.com/gfleury/go-bitbucket-v1/.swagger-codegen-ignore deleted file mode 100644 index c5fa491b4..000000000 --- a/vendor/github.com/gfleury/go-bitbucket-v1/.swagger-codegen-ignore +++ /dev/null @@ -1,23 +0,0 @@ -# Swagger Codegen Ignore -# Generated by swagger-codegen https://github.com/swagger-api/swagger-codegen - -# Use this file to prevent files from being overwritten by the generator. -# The patterns follow closely to .gitignore or .dockerignore. - -# As an example, the C# client generator defines ApiClient.cs. -# You can make changes and tell Swagger Codgen to ignore just this file by uncommenting the following line: -#ApiClient.cs - -# You can match any string of characters against a directory, file or extension with a single asterisk (*): -#foo/*/qux -# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux - -# You can recursively match patterns against a directory, file or extension with a double asterisk (**): -#foo/**/qux -# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux - -# You can also negate patterns with an exclamation (!). 
-# For example, you can ignore all files in a docs folder with the file extension .md: -#docs/*.md -# Then explicitly reverse the ignore rule for a single file: -#!docs/README.md diff --git a/vendor/github.com/gfleury/go-bitbucket-v1/.travis.yml b/vendor/github.com/gfleury/go-bitbucket-v1/.travis.yml deleted file mode 100644 index 7541030c7..000000000 --- a/vendor/github.com/gfleury/go-bitbucket-v1/.travis.yml +++ /dev/null @@ -1,26 +0,0 @@ -language: go -go: - - 1.11.x - - 1.13.x - - 1.14.x - - master -env: - - GO111MODULE=on - GOLANGCI_LINT_VERSION=1.35.2 - -services: - - docker - -before_install: - - docker pull gfleury/bb-demo:demo - - docker run -e BITBUCKET_HOME=/mnt --rm --name="bitbucket" -d -p 7990:7990 -p 7999:7999 gfleury/bb-demo:demo - - docker ps -a - - curl http://localhost:7990/mvc/login?nextUrl=/dashboard |grep "Log in"; while [ $? != 0 ]; do sleep 2; curl http://localhost:7990/mvc/login?nextUrl=/dashboard |grep "Log in"; done - -script: - - go test -coverprofile=coverage.txt -covermode=atomic - - INTEGRATION=TRUE go test - - go build -v ./ - - if [[ "$(go version)" =~ "go version go1.11" ]]; then exit 0; else curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin "v${GOLANGCI_LINT_VERSION}" && golangci-lint run -v; fi -after_success: - - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/gfleury/go-bitbucket-v1/api_response.go b/vendor/github.com/gfleury/go-bitbucket-v1/api_response.go index 6d7cfcd78..cecc35711 100644 --- a/vendor/github.com/gfleury/go-bitbucket-v1/api_response.go +++ b/vendor/github.com/gfleury/go-bitbucket-v1/api_response.go @@ -8,7 +8,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "net/http" "strings" @@ -539,12 +538,35 @@ func (p PermissionRepository) String() string { return string(p) } +type AccessTokenResponse struct { + ID string `json:"id"` + CreatedDate int64 `json:"createdDate"` + LastAuthenticated int64 `json:"lastAuthenticated"` + Name string `json:"name"` + Permissions []string `json:"permissions"` + User User `json:"user"` + Token string `json:"token"` +} + +// GetAccessTokenResponse cast AccessTokenResponse into structure +func GetAccessTokenResponse(r *APIResponse) (AccessTokenResponse, error) { + var m AccessTokenResponse + err := mapstructure.Decode(r.Values, &m) + return m, err +} + func (k *SSHKey) String() string { parts := make([]string, 1, 2) parts[0] = strings.TrimSpace(k.Text) return strings.Join(parts, " ") } +func GetProjectsResponse(r *APIResponse) ([]Project, error) { + var m []Project + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + // GetCommitsResponse cast Commits into structure func GetCommitsResponse(r *APIResponse) ([]Commit, error) { var m []Commit @@ -708,7 +730,7 @@ func NewBitbucketAPIResponse(r *http.Response) (*APIResponse, error) { if decoder.More() { // there's more data in the stream, so discard whatever is left - _, _ = io.Copy(ioutil.Discard, r.Body) + _, _ = io.Copy(io.Discard, r.Body) } return response, err @@ -717,7 +739,7 @@ func NewBitbucketAPIResponse(r *http.Response) (*APIResponse, error) { // NewRawAPIResponse create new API response from http.response with raw data func NewRawAPIResponse(r *http.Response) (*APIResponse, error) { response := &APIResponse{Response: r} - raw, err := ioutil.ReadAll(r.Body) + raw, err := io.ReadAll(r.Body) if err != nil { return nil, err } diff --git a/vendor/github.com/gfleury/go-bitbucket-v1/default_api.go 
b/vendor/github.com/gfleury/go-bitbucket-v1/default_api.go index 65bb4239b..e664649f1 100644 --- a/vendor/github.com/gfleury/go-bitbucket-v1/default_api.go +++ b/vendor/github.com/gfleury/go-bitbucket-v1/default_api.go @@ -7,7 +7,7 @@ package bitbucketv1 import ( "encoding/json" "fmt" - "io/ioutil" + "io" "net/url" "strings" @@ -17,9 +17,11 @@ import ( // DefaultAPIService default service type DefaultApiService service -/*AddGroupToUser <strong>Deprecated since 2.10</strong>. Use /rest/users/add-groups instead. <p> Add a user to a group. This is very similar to <code>groups/add-user</code>, but with the <em>context</em> and <em>itemName</em> attributes of the supplied request entity reversed. On the face of it this may appear redundant, but it facilitates a specific UI component in Stash. <p> In the request entity, the <em>context</em> attribute is the user and the <em>itemName</em> is the group. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. +/* +AddGroupToUser <strong>Deprecated since 2.10</strong>. Use /rest/users/add-groups instead. <p> Add a user to a group. This is very similar to <code>groups/add-user</code>, but with the <em>context</em> and <em>itemName</em> attributes of the supplied request entity reversed. On the face of it this may appear redundant, but it facilitates a specific UI component in Stash. <p> In the request entity, the <em>context</em> attribute is the user and the <em>itemName</em> is the group. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) AddGroupToUser() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -63,17 +65,20 @@ func (a *DefaultApiService) AddGroupToUser() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* AddUserToGroup +/* + AddUserToGroup + <strong>Deprecated since 2.10</strong>. Use /rest/users/add-groups instead. <p> Add a user to a group. <p> In the request entity, the <em>context</em> attribute is the group and the <em>itemName</em> is the user. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) AddUserToGroup() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -117,17 +122,20 @@ func (a *DefaultApiService) AddUserToGroup() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* AddUserToGroups +/* + AddUserToGroups + Add a user to one or more groups. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. 
-@return */ +@return +*/ func (a *DefaultApiService) AddUserToGroups(name string, groups []string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -176,17 +184,20 @@ func (a *DefaultApiService) AddUserToGroups(name string, groups []string) (*APIR } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService +/* + DefaultApiService + Add multiple users to a group. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) AddUsersToGroup() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -230,19 +241,22 @@ func (a *DefaultApiService) AddUsersToGroup() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Approve a pull request as the current user. Implicitly adds the user as a participant if they are not already. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. <p> <strong>Deprecated since 4.2</strong>. Use /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/participants/{userSlug} instead @param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) Approve(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Approve(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") localVarPostBody interface{} @@ -288,19 +302,22 @@ func (a *DefaultApiService) Approve(projectKey, repositorySlug string, pullReque } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Assigns a participant to an explicit role in pull request. Currently only the REVIEWER role may be assigned. <p> If the user is not yet a participant in the pull request, they are made one and assigned the supplied role. <p> If the user is already a participant in the pull request, their previous role is replaced with the supplied role unless they are already assigned the AUTHOR role which cannot be changed and will result in a Bad Request (400) response code. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. 
@param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) AssignParticipantRole(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) AssignParticipantRole(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") localVarPostBody interface{} @@ -346,19 +363,22 @@ func (a *DefaultApiService) AssignParticipantRole(projectKey, repositorySlug str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Test whether a pull request can be merged. <p> A pull request may not be merged if: <ul> <li>there are conflicts that need to be manually resolved before merging; and/or</li> <li>one or more merge checks have vetoed the merge.</li> </ul> <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) CanMerge(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) CanMerge(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -404,17 +424,20 @@ func (a *DefaultApiService) CanMerge(projectKey, repositorySlug string, pullRequ } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Clears the server email address. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) ClearSenderAddress() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -458,19 +481,21 @@ func (a *DefaultApiService) ClearSenderAddress() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Clears any CAPTCHA challenge that may constrain the user with the supplied username when they authenticate. Additionally any counter or metric that contributed towards the user being issued the CAPTCHA challenge (for instance too many consecutive failed logins) will also be reset. 
<p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource, and may not clear the CAPTCHA of a user with greater permissions than themselves. +/* + DefaultApiService + Clears any CAPTCHA challenge that may constrain the user with the supplied username when they authenticate. Additionally any counter or metric that contributed towards the user being issued the CAPTCHA challenge (for instance too many consecutive failed logins) will also be reset. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource, and may not clear the CAPTCHA of a user with greater permissions than themselves. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) + @return +*/ func (a *DefaultApiService) ClearUserCaptchaChallenge(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -521,18 +546,21 @@ func (a *DefaultApiService) ClearUserCaptchaChallenge(localVarOptionals map[stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the total number of {@link TaskState#OPEN open} and {@link TaskState#RESOLVED resolved} tasks associated with a pull request. -@return */ -func (a *DefaultApiService) CountPullRequestTasks(projectKey, repositorySlug string, pullRequestId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) CountPullRequestTasks(projectKey, repositorySlug string, pullRequestId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -578,17 +606,20 @@ func (a *DefaultApiService) CountPullRequestTasks(projectKey, repositorySlug str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Create a new pull request between two branches. The branches may be in the same repository, or different ones. When using different repositories, they must still be in the same {@link Repository#getHierarchyId() hierarchy}. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the \"from\" and \"to\"repositories to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) Create(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var pullRequest PullRequest @@ -600,10 +631,13 @@ func (a *DefaultApiService) Create(projectKey, repositorySlug string, localVarOp return a.CreatePullRequest(projectKey, repositorySlug, pullRequest) } -/* DefaultApiService +/* + DefaultApiService + CreatePullRequest a new pull request between two branches. The branches may be in the same repository, or different ones. 
When using different repositories, they must still be in the same {@link Repository#getHierarchyId() hierarchy}. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the \"from\" and \"to\"repositories to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) CreatePullRequest(projectKey, repositorySlug string, pullRequest PullRequest) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -654,7 +688,7 @@ func (a *DefaultApiService) CreatePullRequest(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -716,17 +750,20 @@ func (a *DefaultApiService) UpdatePullRequest(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Description: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Creates a branch using the information provided in the {@link RestCreateBranchRequest request} <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the context repository to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) CreateBranch(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -772,20 +809,22 @@ func (a *DefaultApiService) CreateBranch(projectKey, repositorySlug string) (*AP } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Add a new comment. 
<p> Comments can be added in a few places by setting different attributes: <p> General commit comment: <pre> { \"text\": \"An insightful general comment on a commit.\" } </pre> Reply to a comment: <pre> { \"text\": \"A measured reply.\", \"parent\": { \"id\": 1 } } </pre> General file comment: <pre> { \"text\": \"An insightful general comment on a file.\", \"anchor\": { \"diffType\": \"COMMIT\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> File line comment: <pre> { \"text\": \"A pithy comment on a particular line within a file.\", \"anchor\": { \"diffType\": \"COMMIT\", \"line\": 1, \"lineType\": \"CONTEXT\", \"fileType\": \"FROM\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> <strong>Note: general file comments are an experimental feature and may change in the near future!</strong> <p> For file and line comments, 'path' refers to the path of the file to which the comment should be applied and 'srcPath' refers to the path the that file used to have (only required for copies and moves). Also, fromHash and toHash refer to the sinceId / untilId (respectively) used to produce the diff on which the comment was added. Finally diffType refers to the type of diff the comment was added on. <p> For line comments, 'line' refers to the line in the diff that the comment should apply to. 'lineType' refers to the type of diff hunk, which can be: <ul> <li>'ADDED' - for an added line;</li> <li>'REMOVED' - for a removed line; or</li> <li>'CONTEXT' - for a line that was unmodified but is in the vicinity of the diff.</li> </ul> 'fileType' refers to the file of the diff to which the anchor should be attached - which is of relevance when displaying the diff in a side-by-side way. Currently the supported values are: <ul> <li>'FROM' - the source file of the diff</li> <li>'TO' - the destination file of the diff</li> </ul> If the current user is not a participant the user is added as one and updated to watch the commit. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. +/* + DefaultApiService + Add a new comment. 
<p> Comments can be added in a few places by setting different attributes: <p> General commit comment: <pre> { \"text\": \"An insightful general comment on a commit.\" } </pre> Reply to a comment: <pre> { \"text\": \"A measured reply.\", \"parent\": { \"id\": 1 } } </pre> General file comment: <pre> { \"text\": \"An insightful general comment on a file.\", \"anchor\": { \"diffType\": \"COMMIT\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> File line comment: <pre> { \"text\": \"A pithy comment on a particular line within a file.\", \"anchor\": { \"diffType\": \"COMMIT\", \"line\": 1, \"lineType\": \"CONTEXT\", \"fileType\": \"FROM\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> <strong>Note: general file comments are an experimental feature and may change in the near future!</strong> <p> For file and line comments, 'path' refers to the path of the file to which the comment should be applied and 'srcPath' refers to the path the that file used to have (only required for copies and moves). Also, fromHash and toHash refer to the sinceId / untilId (respectively) used to produce the diff on which the comment was added. Finally diffType refers to the type of diff the comment was added on. <p> For line comments, 'line' refers to the line in the diff that the comment should apply to. 'lineType' refers to the type of diff hunk, which can be: <ul> <li>'ADDED' - for an added line;</li> <li>'REMOVED' - for a removed line; or</li> <li>'CONTEXT' - for a line that was unmodified but is in the vicinity of the diff.</li> </ul> 'fileType' refers to the file of the diff to which the anchor should be attached - which is of relevance when displaying the diff in a side-by-side way. Currently the supported values are: <ul> <li>'FROM' - the source file of the diff</li> <li>'TO' - the destination file of the diff</li> </ul> If the current user is not a participant the user is added as one and updated to watch the commit. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. - @param commitId the commit to which the comments must be anchored - @param optional (nil or map[string]interface{}) with one or more of: - @param "since" (string) For a merge commit, a parent can be provided to specify which diff the comments should be on. For a commit range, a {@code sinceId} can be provided to specify where the comments should be anchored from. - @return */ + @param commitId the commit to which the comments must be anchored + @param optional (nil or map[string]interface{}) with one or more of: + @param "since" (string) For a merge commit, a parent can be provided to specify which diff the comments should be on. For a commit range, a {@code sinceId} can be provided to specify where the comments should be anchored from. 
+ @return +*/ func (a *DefaultApiService) CreateComment(projectKey, repositorySlug string, commitId string, localVarOptionals map[string]interface{}) (*APIResponse, error) { return a.CreateCommentWithComment(projectKey, repositorySlug, commitId, Comment{}, localVarOptionals) } @@ -849,17 +888,20 @@ func (a *DefaultApiService) CreateCommentWithComment(projectKey, repositorySlug } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Add a new comment. <p> Comments can be added in a few places by setting different attributes: <p> General pull request comment: <pre> { \"text\": \"An insightful general comment on a pull request.\" } </pre> Reply to a comment: <pre> { \"text\": \"A measured reply.\", \"parent\": { \"id\": 1 } } </pre> General file comment: <pre> { \"text\": \"An insightful general comment on a file.\", \"anchor\": { \"diffType\": \"RANGE\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> File line comment: <pre> { \"text\": \"A pithy comment on a particular line within a file.\", \"anchor\": { \"diffType\": \"COMMIT\", \"line\": 1, \"lineType\": \"CONTEXT\", \"fileType\": \"FROM\", \"fromHash\": \"6df3858eeb9a53a911cd17e66a9174d44ffb02cd\", \"path\": \"path/to/file\", \"srcPath\": \"path/to/file\", \"toHash\": \"04c7c5c931b9418ca7b66f51fe934d0bd9b2ba4b\" } } </pre> <p> For file and line comments, 'path' refers to the path of the file to which the comment should be applied and 'srcPath' refers to the path the that file used to have (only required for copies and moves). Also, fromHash and toHash refer to the sinceId / untilId (respectively) used to produce the diff on which the comment was added. Finally diffType refers to the type of diff the comment was added on. For backwards compatibility purposes if no diffType is provided and no fromHash/toHash pair is provided the diffType will be resolved to 'EFFECTIVE'. In any other cases the diffType is REQUIRED. <p> For line comments, 'line' refers to the line in the diff that the comment should apply to. 'lineType' refers to the type of diff hunk, which can be: <ul> <li>'ADDED' - for an added line;</li> <li>'REMOVED' - for a removed line; or</li> <li>'CONTEXT' - for a line that was unmodified but is in the vicinity of the diff.</li> </ul> 'fileType' refers to the file of the diff to which the anchor should be attached - which is of relevance when displaying the diff in a side-by-side way. Currently the supported values are: <ul> <li>'FROM' - the source file of the diff</li> <li>'TO' - the destination file of the diff</li> </ul> If the current user is not a participant the user is added as a watcher of the pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
-@return */ +@return +*/ func (a *DefaultApiService) CreatePullRequestComment(projectKey, repositorySlug string, pullRequestID int, comment Comment, localVarHTTPContentTypes []string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -907,19 +949,21 @@ func (a *DefaultApiService) CreatePullRequestComment(projectKey, repositorySlug } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Create a new group. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Create a new group. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) Name of the group. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) Name of the group. + @return +*/ func (a *DefaultApiService) CreateGroup(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -970,17 +1014,20 @@ func (a *DefaultApiService) CreateGroup(localVarOptionals map[string]interface{} } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Create a new project. <p> To include a custom avatar for the project, the project definition should contain an additional attribute with the key <code>avatar</code> and the value a data URI containing Base64-encoded image data. The URI should be in the following format: <pre> data:(content type, e.g. image/png);base64,(data) </pre> If the data is not Base64-encoded, or if a character set is defined in the URI, or the URI is otherwise invalid, <em>project creation will fail</em>. <p> The authenticated user must have <strong>PROJECT_CREATE</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) CreateProject(localVarPostBody Project) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -1023,18 +1070,21 @@ func (a *DefaultApiService) CreateProject(localVarPostBody Project) (*APIRespons } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Create a new repository. Requires an existing project in which this repository will be created. The only parameters which will be used are name and scmId. 
<p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the context project to call this resource. @param projectKey the parent project key -@return */ +@return +*/ func (a *DefaultApiService) CreateRepository(projectKey string, repository Repository) (*APIResponse, error) { localVarPostBody, err := json.Marshal(repository) @@ -1085,7 +1135,7 @@ func (a *DefaultApiService) CreateRepositoryWithOptions(projectKey string, local } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -1134,17 +1184,20 @@ func (a *DefaultApiService) CreatePullRequestWithOptions(projectKey, repo string } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Creates a tag using the information provided in the {@link RestCreateTagRequest request} <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the context repository to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) CreateTag(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -1190,17 +1243,20 @@ func (a *DefaultApiService) CreateTag(projectKey, repositorySlug string) (*APIRe } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Create a new task. -@return */ +@return +*/ func (a *DefaultApiService) CreateTask() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -1244,24 +1300,26 @@ func (a *DefaultApiService) CreateTask() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Creates a new user from the assembled query parameters. <p> The default group can be used to control initial permissions for new users, such as granting users the ability to login or providing read access to certain projects or repositories. If the user is not added to the default group, they may not be able to login after their account is created until explicit permissions are configured. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. +/* + DefaultApiService + Creates a new user from the assembled query parameters. 
<p> The default group can be used to control initial permissions for new users, such as granting users the ability to login or providing read access to certain projects or repositories. If the user is not added to the default group, they may not be able to login after their account is created until explicit permissions are configured. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the username for the new user - @param "password" (string) the password for the new user - @param "displayName" (string) the display name for the new user - @param "emailAddress" (string) the e-mail address for the new user - @param "addToDefaultGroup" (bool) <code>true</code> to add the user to the default group, which can be used to grant them a set of initial permissions; otherwise, <code>false</code> to not add them to a group - @param "notify" (string) if present and not <code>false</code> instead of requiring a password, the create user will be notified via email their account has been created and requires a password to be reset. This option can only be used if a mail server has been configured - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the username for the new user + @param "password" (string) the password for the new user + @param "displayName" (string) the display name for the new user + @param "emailAddress" (string) the e-mail address for the new user + @param "addToDefaultGroup" (bool) <code>true</code> to add the user to the default group, which can be used to grant them a set of initial permissions; otherwise, <code>false</code> to not add them to a group + @param "notify" (string) if present and not <code>false</code> instead of requiring a password, the create user will be notified via email their account has been created and requires a password to be reset. This option can only be used if a mail server has been configured + @return +*/ func (a *DefaultApiService) CreateUser(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -1342,7 +1400,7 @@ func (a *DefaultApiService) CreateUser(localVarOptionals map[string]interface{}) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -1350,10 +1408,13 @@ func (a *DefaultApiService) CreateUser(localVarOptionals map[string]interface{}) return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService +/* + DefaultApiService + Create a webhook for the repository specified via the URL. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. 
-@return */ +@return +*/ func (a *DefaultApiService) CreateWebhook(projectKey, repositorySlug string, localVarPostBody interface{}, localVarHTTPContentTypes []string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -1395,21 +1456,23 @@ func (a *DefaultApiService) CreateWebhook(projectKey, repositorySlug string, loc } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Decline a pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Decline a pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param pullRequestId - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. - @return */ -func (a *DefaultApiService) Decline(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param pullRequestId + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. + @return +*/ +func (a *DefaultApiService) Decline(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") localVarPostBody interface{} @@ -1462,37 +1525,46 @@ func (a *DefaultApiService) Decline(projectKey, repositorySlug string, pullReque } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Deletes a pull request. <p> To call this resource, users must be authenticated and have permission to view the pull request. Additionally, they must: <ul> <li> be the pull request author, if the system is configured to allow authors to delete their own pull requests (this is the default) OR </li> <li>have repository administrator permission for the repository the pull request is targeting</li> </ul> A body containing the version of the pull request must be provided with this request. 
<pre><code>{ \"version\": 1 }</code></pre> @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) DeletePullRequest(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) DeletePullRequest(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { return a.Delete(projectKey, repositorySlug, pullRequestID) } -/* DefaultApiService +/* + DefaultApiService + Deletes a pull request. <p> To call this resource, users must be authenticated and have permission to view the pull request. Additionally, they must: <ul> <li> be the pull request author, if the system is configured to allow authors to delete their own pull requests (this is the default) OR </li> <li>have repository administrator permission for the repository the pull request is targeting</li> </ul> A body containing the version of the pull request must be provided with this request. <pre><code>{ \"version\": 1 }</code></pre> @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) Delete(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Delete(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { return a.DeleteWithVersion(projectKey, repositorySlug, pullRequestID, 0) } -/* DefaultApiService +/* + DefaultApiService + Deletes a pull request. <p> To call this resource, users must be authenticated and have permission to view the pull request. Additionally, they must: <ul> <li> be the pull request author, if the system is configured to allow authors to delete their own pull requests (this is the default) OR </li> <li>have repository administrator permission for the repository the pull request is targeting</li> </ul> A body containing the version of the pull request must be provided with this request. <pre><code>{ \"version\": 1 }</code></pre> @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) DeleteWithVersion(projectKey, repositorySlug string, pullRequestID, version int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) DeleteWithVersion(projectKey, repositorySlug string, pullRequestID, version int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -1542,17 +1614,20 @@ func (a *DefaultApiService) DeleteWithVersion(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService +/* + DefaultApiService + Delete the avatar associated to a user. <p> Users are always allowed to delete their own avatar. To delete someone else's avatar the authenticated user must have global <strong>ADMIN</strong> permission, or global <strong>SYS_ADMIN</strong> permission to update a <strong>SYS_ADMIN</strong> user's avatar. 
-@return */ +@return +*/ func (a *DefaultApiService) DeleteAvatar(userSlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -1597,23 +1672,25 @@ func (a *DefaultApiService) DeleteAvatar(userSlug string) (*APIResponse, error) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Delete a commit comment. Anyone can delete their own comment. Only users with <strong>REPO_ADMIN</strong> and above may delete comments created by other users. Comments which have replies <i>may not be deleted</i>, regardless of the user's granted permissions. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. +/* + DefaultApiService + Delete a commit comment. Anyone can delete their own comment. Only users with <strong>REPO_ADMIN</strong> and above may delete comments created by other users. Comments which have replies <i>may not be deleted</i>, regardless of the user's granted permissions. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. - @param commitId the commit to which the comments must be anchored - @param commentId the ID of the comment to retrieve - @param commitId2 the <i>full {@link Commit#getId() ID}</i> of the commit within the repository - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (int32) The expected version of the comment. This must match the server's version of the comment or the delete will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the delete. Look for the 'version' attribute in the returned JSON structure. - @return */ -func (a *DefaultApiService) DeleteComment(projectKey, repositorySlug string, commitId string, commentId int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param commitId the commit to which the comments must be anchored + @param commentId the ID of the comment to retrieve + @param commitId2 the <i>full {@link Commit#getId() ID}</i> of the commit within the repository + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (int32) The expected version of the comment. This must match the server's version of the comment or the delete will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the delete. Look for the 'version' attribute in the returned JSON structure. 
+ @return +*/ +func (a *DefaultApiService) DeleteComment(projectKey, repositorySlug string, commitId string, commentId int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -1667,21 +1744,23 @@ func (a *DefaultApiService) DeleteComment(projectKey, repositorySlug string, com } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Delete a pull request comment. Anyone can delete their own comment. Only users with <strong>REPO_ADMIN</strong> and above may delete comments created by other users. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Delete a pull request comment. Anyone can delete their own comment. Only users with <strong>REPO_ADMIN</strong> and above may delete comments created by other users. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param commentId the id of the comment to retrieve - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (int32) The expected version of the comment. This must match the server's version of the comment or the delete will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the delete. Look for the 'version' attribute in the returned JSON structure. - @return */ -func (a *DefaultApiService) DeleteComment_2(projectKey, repositorySlug string, pullRequestId, commentId int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param commentId the id of the comment to retrieve + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (int32) The expected version of the comment. This must match the server's version of the comment or the delete will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the delete. Look for the 'version' attribute in the returned JSON structure. + @return +*/ +func (a *DefaultApiService) DeleteComment_2(projectKey, repositorySlug string, pullRequestId, commentId int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -1735,19 +1814,21 @@ func (a *DefaultApiService) DeleteComment_2(projectKey, repositorySlug string, p } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Deletes the specified group, removing them from the system. This also removes any permissions that may have been granted to the group. 
<p> A user may not delete the last group that is granting them administrative permissions, or a group with greater permissions than themselves. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. +/* + DefaultApiService + Deletes the specified group, removing them from the system. This also removes any permissions that may have been granted to the group. <p> A user may not delete the last group that is granting them administrative permissions, or a group with greater permissions than themselves. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name identifying the group to delete - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name identifying the group to delete + @return +*/ func (a *DefaultApiService) DeleteGroup(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -1798,17 +1879,20 @@ func (a *DefaultApiService) DeleteGroup(localVarOptionals map[string]interface{} } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Deletes the current mail configuration. <p> The authenticated user must have the <strong>SYS_ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) DeleteMailConfig() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -1852,17 +1936,20 @@ func (a *DefaultApiService) DeleteMailConfig() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Delete the project matching the supplied <strong>projectKey</strong>. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) DeleteProject(projectKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -1907,20 +1994,23 @@ func (a *DefaultApiService) DeleteProject(projectKey string) (*APIResponse, erro } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Schedule the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong> to be deleted. 
If the request repository is not present <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param projectKey the parent project key @param projectKey2 the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) DeleteRepository(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -1966,18 +2056,21 @@ func (a *DefaultApiService) DeleteRepository(projectKey, repositorySlug string) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Delete repository hook configuration for the supplied <strong>hookKey</strong> and <strong>repositorySlug</strong> <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) DeleteRepositoryHook(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -2024,19 +2117,22 @@ func (a *DefaultApiService) DeleteRepositoryHook(projectKey, repositorySlug stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Delete a task. <p> Note that only the task's creator, the context's author or an admin of the context's repository can delete a task. (For a pull request task, those are the task's creator, the pull request's author or an admin on the repository containing the pull request). Additionally a task cannot be deleted if it has already been resolved. @param taskId the id identifying the task to delete -@return */ -func (a *DefaultApiService) DeleteTask(taskId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) DeleteTask(taskId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -2080,19 +2176,21 @@ func (a *DefaultApiService) DeleteTask(taskId int64) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Deletes the specified user, removing them from the system. This also removes any permissions that may have been granted to the user. <p> A user may not delete themselves, and a user with <strong>ADMIN</strong> permissions may not delete a user with <strong>SYS_ADMIN</strong>permissions. 
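DeleteRepository, DeleteRepositoryHook and DeleteTask above follow the same delete-and-check pattern, with DeleteTask now taking a plain int. A hedged sketch, assuming api is the DefaultApi service of a client configured as in the DeleteComment_2 example; the helper name, project key, slug and task ID are illustrative:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// cleanUpRepo schedules a repository for deletion and removes an open task.
func cleanUpRepo(api *bitbucketv1.DefaultApiService) {
	if _, err := api.DeleteRepository("PRJ", "old-repo"); err != nil {
		log.Printf("delete repository: %v", err)
	}
	// taskId is an int after the signature change in the hunk above.
	if _, err := api.DeleteTask(1234); err != nil {
		log.Printf("delete task: %v", err)
	}
}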
<p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. +/* + DefaultApiService + Deletes the specified user, removing them from the system. This also removes any permissions that may have been granted to the user. <p> A user may not delete themselves, and a user with <strong>ADMIN</strong> permissions may not delete a user with <strong>SYS_ADMIN</strong>permissions. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the username identifying the user to delete - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the username identifying the user to delete + @return +*/ func (a *DefaultApiService) DeleteUser(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -2143,18 +2241,21 @@ func (a *DefaultApiService) DeleteUser(localVarOptionals map[string]interface{}) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Delete a webhook for the repository specified via the URL. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param webhookId the existing webhook id -@return */ +@return +*/ func (a *DefaultApiService) DeleteWebhook(projectKey, repositorySlug string, webhookId int32) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -2201,18 +2302,21 @@ func (a *DefaultApiService) DeleteWebhook(projectKey, repositorySlug string, web } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService +/* + DefaultApiService + Disable a repository hook for this repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) DisableHook(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -2259,18 +2363,21 @@ func (a *DefaultApiService) DisableHook(projectKey, repositorySlug string, hookK } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Disable a repository hook for this project. 
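DeleteWebhook (which still takes an int32 id) and DisableHook above pair naturally when retiring a repository integration. A hedged sketch against an already-configured DefaultApi service; the hook key and webhook id are placeholders:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// retireIntegration removes a webhook and disables a repository-level hook.
func retireIntegration(api *bitbucketv1.DefaultApiService) {
	// The webhook id parameter is declared int32 in the signature above.
	if _, err := api.DeleteWebhook("PRJ", "my-repo", 11); err != nil {
		log.Printf("delete webhook: %v", err)
	}
	// Placeholder hook key; substitute the module key of the hook to disable.
	if _, err := api.DisableHook("PRJ", "my-repo", "com.example.hooks:sample-hook"); err != nil {
		log.Printf("disable hook: %v", err)
	}
}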
<p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) DisableHook_3(projectKey, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -2316,18 +2423,21 @@ func (a *DefaultApiService) DisableHook_3(projectKey, hookKey string) (*APIRespo } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the content of <code>path</code>, on the given <code>repository</code> and <code>branch</code>. <p> This resource accepts PUT multipart form data, containing the file in a form-field named <code>content</code>. <p> An example <a href=\"http://curl.haxx.se/\">curl</a> request to update 'README.md' would be: <pre> curl -X PUT -u username:password -F content=@README.md -F 'message=Updated using file-edit REST API' -F branch=master -F sourceCommitId=5636641a50b http://example.com/rest/api/latest/projects/PROJECT_1/repos/repo_1/browse/README.md </pre> <p> <ui> <li>branch: the branch on which the <code>path</code> should be modified or created</li> <li>content: the full content of the file at <code>path</code> </li> <li>message: the message associated with this change, to be used as the commit message. Or null if the default message should be used.</li> <li>sourceCommitId: the commit ID of the file before it was edited, used to identify if content has changed. Or null if this is a new file</li> </ui> <p> The file can be updated or created on a new branch. In this case, the <code>sourceBranch</code> parameter should be provided to identify the starting point for the new branch and the <code>branch</code> parameter identifies the branch to create the new commit on. @param path the file path to retrieve content from -@return */ +@return +*/ func (a *DefaultApiService) EditFile(projectKey, repositorySlug, path string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -2374,20 +2484,22 @@ func (a *DefaultApiService) EditFile(projectKey, repositorySlug, path string) (* } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Enable a repository hook for this repository and optionally apply new configuration. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. <p> A JSON document may be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. +/* + DefaultApiService + Enable a repository hook for this repository and optionally apply new configuration. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. 
<p> A JSON document may be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. - @param hookKey - @param optional (nil or map[string]interface{}) with one or more of: - @param "contentLength" (int32) - @return */ + @param hookKey + @param optional (nil or map[string]interface{}) with one or more of: + @param "contentLength" (int32) + @return +*/ func (a *DefaultApiService) EnableHook(projectKey, repositorySlug, hookKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -2441,29 +2553,34 @@ func (a *DefaultApiService) EnableHook(projectKey, repositorySlug, hookKey strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Enable a repository hook for this project and optionally apply new configuration. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. <p> A JSON document may be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. +/* + DefaultApiService + Enable a repository hook for this project and optionally apply new configuration. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. <p> A JSON document may be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. - @param hookKey - @param optional (nil or map[string]interface{}) with one or more of: - @param "contentLength" (int32) - @return */ + @param hookKey + @param optional (nil or map[string]interface{}) with one or more of: + @param "contentLength" (int32) + @return +*/ func (a *DefaultApiService) EnableHook_4(projectKey, repositorySlug, hookKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { return a.EnableHook_4_WithOptions(projectKey, hookKey, localVarOptionals, nil) } -/* DefaultApiService +/* + DefaultApiService + For certain hooks (like "com.atlassian.bitbucket.server.bitbucket-bundled-hooks:requiredApproversMergeHook"), a request body with "requiredCount" and an integer value is necessary. 
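The new doc comment above for EnableHook_4_WithOptions notes that certain bundled hooks, such as the requiredApproversMergeHook, need a request body like {"requiredCount": n}; its full signature follows below. A hedged sketch of that call against an already-configured DefaultApi service; only the hook key and body come from the doc text, the project key and helper name are illustrative:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// requireTwoApprovers enables the bundled required-approvers merge check on a
// project and sends the {"requiredCount": 2} body described above.
func requireTwoApprovers(api *bitbucketv1.DefaultApiService) {
	body := map[string]interface{}{"requiredCount": 2}
	_, err := api.EnableHook_4_WithOptions("PRJ",
		"com.atlassian.bitbucket.server.bitbucket-bundled-hooks:requiredApproversMergeHook",
		nil, body)
	if err != nil {
		log.Printf("enable hook: %v", err)
	}
}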
@param localVarPostBody (nil or map[string]interface{}) -@return */ +@return +*/ func (a *DefaultApiService) EnableHook_4_WithOptions(projectKey, hookKey string, localVarOptionals map[string]interface{}, localVarPostBody interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -2516,20 +2633,22 @@ func (a *DefaultApiService) EnableHook_4_WithOptions(projectKey, hookKey string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves a list of groups the specified user is a member of. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. +/* + DefaultApiService + Retrieves a list of groups the specified user is a member of. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "context" (string) the user which should be used to locate groups - @param "filter" (string) if specified only groups with names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "context" (string) the user which should be used to locate groups + @param "filter" (string) if specified only groups with names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) FindGroupsForUser(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2586,20 +2705,22 @@ func (a *DefaultApiService) FindGroupsForUser(localVarOptionals map[string]inter } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves a list of groups the specified user is <em>not</em> a member of. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. +/* + DefaultApiService + Retrieves a list of groups the specified user is <em>not</em> a member of. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. 
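FindGroupsForUser above takes only an optionals map with the documented "context" and "filter" keys. A hedged sketch against an already-configured DefaultApi service; the user and filter values are placeholders, and the Values field read at the end is assumed to be the page container this generated client's APIResponse exposes:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// groupsFor lists groups the given user belongs to whose names contain "dev".
func groupsFor(api *bitbucketv1.DefaultApiService, user string) {
	resp, err := api.FindGroupsForUser(map[string]interface{}{
		"context": user,  // the user whose groups should be located
		"filter":  "dev", // only groups whose names contain this string
	})
	if err != nil {
		log.Printf("find groups: %v", err)
		return
	}
	log.Printf("groups page: %v", resp.Values) // Values: assumed APIResponse field
}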
- @param optional (nil or map[string]interface{}) with one or more of: - @param "context" (string) the user which should be used to locate groups - @param "filter" (string) if specified only groups with names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "context" (string) the user which should be used to locate groups + @param "filter" (string) if specified only groups with names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) FindOtherGroupsForUser(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2656,20 +2777,22 @@ func (a *DefaultApiService) FindOtherGroupsForUser(localVarOptionals map[string] } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves a list of users that are members of a specified group. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. +/* + DefaultApiService + Retrieves a list of users that are members of a specified group. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "context" (string) the group which should be used to locate members - @param "filter" (string) if specified only users with usernames, display names or email addresses containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "context" (string) the group which should be used to locate members + @param "filter" (string) if specified only users with usernames, display names or email addresses containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) FindUsersInGroup(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2726,20 +2849,22 @@ func (a *DefaultApiService) FindUsersInGroup(localVarOptionals map[string]interf } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves a list of users that are <em>not</em> members of a specified group. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. +/* + DefaultApiService + Retrieves a list of users that are <em>not</em> members of a specified group. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "context" (string) the group which should be used to locate non-members - @param "filter" (string) if specified only users with usernames, display names or email addresses containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "context" (string) the group which should be used to locate non-members + @param "filter" (string) if specified only users with usernames, display names or email addresses containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) FindUsersNotInGroup(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2796,20 +2921,22 @@ func (a *DefaultApiService) FindUsersNotInGroup(localVarOptionals map[string]int } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Find webhooks in this repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Find webhooks in this repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "event" (string) list of {@link com.atlassian.webhooks.WebhookEvent} ids to filter for - @param "statistics" (bool) {@code true} if statistics should be provided for all found webhooks - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "event" (string) list of {@link com.atlassian.webhooks.WebhookEvent} ids to filter for + @param "statistics" (bool) {@code true} if statistics should be provided for all found webhooks + @return +*/ func (a *DefaultApiService) FindWebhooks(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2868,19 +2995,22 @@ func (a *DefaultApiService) FindWebhooks(projectKey, repositorySlug string, loca } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Create a new repository forked from an existing repository. <p> The JSON body for this {@code POST} is not required to contain <i>any</i> properties. Even the name may be omitted. 
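FindWebhooks above accepts the documented "event" and "statistics" optionals. A hedged sketch against an already-configured DefaultApi service; the event id shown is only an example value:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// listPushWebhooks finds webhooks filtered by event id and asks the server to
// include invocation statistics for each one.
func listPushWebhooks(api *bitbucketv1.DefaultApiService) {
	_, err := api.FindWebhooks("PRJ", "my-repo", map[string]interface{}{
		"event":      "repo:refs_changed", // example webhook event id
		"statistics": true,
	})
	if err != nil {
		log.Printf("find webhooks: %v", err)
	}
}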
The following properties will be used, if provided: <ul> <li>{@code \"name\":\"Fork name\"} - Specifies the forked repository's name <ul> <li>Defaults to the name of the origin repository if not specified</li> </ul> </li> <li>{@code \"project\":{\"key\":\"TARGET_KEY\"}} - Specifies the forked repository's target project by key <ul> <li>Defaults to the current user's personal project if not specified</li> </ul> </li> </ul> <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository and <strong>PROJECT_ADMIN</strong> on the target project to call this resource. Note that users <i>always</i> have <b>PROJECT_ADMIN</b> permission on their personal projects. @param projectKey the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) ForkRepository(projectKey, repositorySlug string, localVarPostBody interface{}, localVarHTTPContentTypes []string, @@ -2925,17 +3055,20 @@ func (a *DefaultApiService) ForkRepository(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) - return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %s, Url: %s, Body: %s", localVarHTTPResponse.Status, localVarPath, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieves details about the current license, as well as the current status of the system with regards to the installed license. The status includes the current number of users applied toward the license limit, as well as any status messages about the license (warnings about expiry or user counts exceeding license limits). <p> The authenticated user must have <b>ADMIN</b> permission. Unauthenticated users, and non-administrators, are not permitted to access license details. -@return */ +@return +*/ func (a *DefaultApiService) GetLicense() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -2979,22 +3112,24 @@ func (a *DefaultApiService) GetLicense() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of activity associated with a pull request. <p> Activity items include comments, approvals, rescopes (i.e. adding and removing of commits), merges and more. <p> Different types of activity items may be introduced in newer versions of Stash or by user installed plugins, so clients should be flexible enough to handle unexpected entity shapes in the returned page. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Retrieve a page of activity associated with a pull request. <p> Activity items include comments, approvals, rescopes (i.e. adding and removing of commits), merges and more. 
<p> Different types of activity items may be introduced in newer versions of Stash or by user installed plugins, so clients should be flexible enough to handle unexpected entity shapes in the returned page. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param pullRequestId the id of the pull request within the repository - @param optional (nil or map[string]interface{}) with one or more of: - @param "fromId" (int64) (optional) the id of the activity item to use as the first item in the returned page - @param "fromType" (string) (required if <strong>fromId</strong> is present) the type of the activity item specified by <strong>fromId</strong> (either <strong>COMMENT</strong> or <strong>ACTIVITY</strong>) - @return */ -func (a *DefaultApiService) GetActivities(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param pullRequestId the id of the pull request within the repository + @param optional (nil or map[string]interface{}) with one or more of: + @param "fromId" (int) (optional) the id of the activity item to use as the first item in the returned page + @param "fromType" (string) (required if <strong>fromId</strong> is present) the type of the activity item specified by <strong>fromId</strong> (either <strong>COMMENT</strong> or <strong>ACTIVITY</strong>) + @return +*/ +func (a *DefaultApiService) GetActivities(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3012,7 +3147,7 @@ func (a *DefaultApiService) GetActivities(projectKey, repositorySlug string, pul localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if err := typeCheckParameter(localVarOptionals["fromId"], "int64", "fromId"); err != nil { + if err := typeCheckParameter(localVarOptionals["fromId"], "int", "fromId"); err != nil { return nil, err } if err := typeCheckParameter(localVarOptionals["fromType"], "string", "fromType"); err != nil { @@ -3031,7 +3166,7 @@ func (a *DefaultApiService) GetActivities(projectKey, repositorySlug string, pul if localVarTempParam, localVarOk := localVarOptionals["start"].(int); localVarOk { localVarQueryParams.Add("start", parameterToString(localVarTempParam, "")) } - if localVarTempParam, localVarOk := localVarOptionals["fromId"].(int64); localVarOk { + if localVarTempParam, localVarOk := localVarOptionals["fromId"].(int); localVarOk { localVarQueryParams.Add("fromId", parameterToString(localVarTempParam, "")) } if localVarTempParam, localVarOk := localVarOptionals["fromType"].(string); localVarOk { @@ -3065,17 +3200,20 @@ func (a *DefaultApiService) GetActivities(projectKey, repositorySlug string, pul } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewRawAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve version information and other application properties. <p> No authentication is required to call this resource. 
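GetActivities above now takes a plain int pull-request ID and type-checks the optional "fromId" as int rather than int64. A hedged sketch against an already-configured DefaultApi service; the ids are placeholders:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// pageActivities fetches pull-request activity starting from a known item.
func pageActivities(api *bitbucketv1.DefaultApiService, prID int) {
	_, err := api.GetActivities("PRJ", "my-repo", prID, map[string]interface{}{
		"fromId":   101,       // now an int, per the type change above
		"fromType": "COMMENT", // required whenever fromId is supplied
		"start":    0,         // paging offset, handled in the hunk above
	})
	if err != nil {
		log.Printf("get activities: %v", err)
	}
}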
-@return */ +@return +*/ func (a *DefaultApiService) GetApplicationProperties() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3119,24 +3257,30 @@ func (a *DefaultApiService) GetApplicationProperties() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams an archive of the repository's contents at the requested commit. If no <code>at=</code> commit is requested, an archive of the default branch is streamed. <p> The <code>filename=</code> query parameter may be used to specify the exact filename to include in the <code>\"Content-Disposition\"</code> header. If an explicit filename is not provided, one will be automatically generated based on what is being archived. Its format depends on the <code>at=</code> value: <ul> <li>No <code>at=</code> commit: <code>&lt;slug&gt;-&lt;default-branch-name&gt;@&lt;commit&gt;.&lt;format&gt;</code>; e.g. example-master@43c2f8a0fe8.zip</li> <li><code>at=sha</code>: <code>&lt;slug&gt;-&lt;at&gt;.&lt;format&gt;</code>; e.g. example-09bcbb00100cfbb5310fb6834a1d5ce6cac253e9.tar.gz</li> <li><code>at=branchOrTag</code>: <code>&lt;slug&gt;-&lt;branchOrTag&gt;@&lt;commit&gt;.&lt;format&gt;</code>; e.g. example-feature@bbb225f16e1.tar <ul> <li>If the branch or tag is qualified (e.g. <code>refs/heads/master</code>, the short name (<code>master</code>) will be included in the filename</li> <li>If the branch or tag's <i>short name</i> includes slashes (e.g. <code>release/4.6</code>), they will be converted to hyphens in the filename (<code>release-4.5</code>)</li> </ul> </li> </ul> <p> Archives may be requested in the following formats by adding the <code>format=</code> query parameter: <ul> <li><code>zip</code>: A zip file using standard compression (Default)</li> <li><code>tar</code>: An uncompressed tarball</li> <li><code>tar.gz</code> or <code>tgz</code>: A GZip-compressed tarball</li> </ul> The contents of the archive may be filtered by using the <code>path=</code> query parameter to specify paths to include. <code>path=</code> may be specified multiple times to include multiple paths. <p> The <code>prefix=</code> query parameter may be used to define a directory (or multiple directories) where the archive's contents should be placed. If the prefix does not end with <code>/</code>, one will be added automatically. The prefix is <i>always</i> treated as a directory; it is not possible to use it to prepend characters to the entries in the archive. <p> Archives of public repositories may be streamed by any authenticated or anonymous user. Streaming archives for non-public repositories requires an <i>authenticated user</i> with at least <b>REPO_READ</b> permission. +/* + DefaultApiService + Streams an archive of the repository's contents at the requested commit. If no <code>at=</code> commit is requested, an archive of the default branch is streamed. <p> The <code>filename=</code> query parameter may be used to specify the exact filename to include in the <code>\"Content-Disposition\"</code> header. If an explicit filename is not provided, one will be automatically generated based on what is being archived. 
Its format depends on the <code>at=</code> value: <ul> <li>No <code>at=</code> commit: <code>&lt;slug&gt;-&lt;default-branch-name&gt;@&lt;commit&gt;.&lt;format&gt;</code>; e.g. example-master@43c2f8a0fe8.zip</li> <li><code>at=sha</code>: <code>&lt;slug&gt;-&lt;at&gt;.&lt;format&gt;</code>; e.g. example-09bcbb00100cfbb5310fb6834a1d5ce6cac253e9.tar.gz</li> <li><code>at=branchOrTag</code>: <code>&lt;slug&gt;-&lt;branchOrTag&gt;@&lt;commit&gt;.&lt;format&gt;</code>; e.g. example-feature@bbb225f16e1.tar <ul> <li>If the branch or tag is qualified (e.g. <code>refs/heads/master</code>, the short name (<code>master</code>) will be included in the filename</li> <li>If the branch or tag's <i>short name</i> includes slashes (e.g. <code>release/4.6</code>), they will be converted to hyphens in the filename (<code>release-4.5</code>)</li> </ul> </li> </ul> <p> Archives may be requested in the following formats by adding the <code>format=</code> query parameter: <ul> <li><code>zip</code>: A zip file using standard compression (Default)</li> <li><code>tar</code>: An uncompressed tarball</li> <li><code>tar.gz</code> or <code>tgz</code>: A GZip-compressed tarball</li> </ul> The contents of the archive may be filtered by using the <code>path=</code> query parameter to specify paths to include. <code>path=</code> may be specified multiple times to include multiple paths. <p> The <code>prefix=</code> query parameter may be used to define a directory (or multiple directories) where the archive's contents should be placed. If the prefix does not end with <code>/</code>, one will be added automatically. The prefix is <i>always</i> treated as a directory; it is not possible to use it to prepend characters to the entries in the archive. <p> Archives of public repositories may be streamed by any authenticated or anonymous user. Streaming archives for non-public repositories requires an <i>authenticated user</i> with at least <b>REPO_READ</b> permission. + * @param ctx context.Context for authentication, logging, tracing, etc. + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit to stream an archive of; if not supplied, an archive of the default branch is streamed + @param "filename" (string) a filename to include the \"Content-Disposition\" header + @param "format" (string) the format to stream the archive in; must be one of: zip, tar, tar.gz or tgz + @param "path" (string) paths to include in the streamed archive; may be repeated to include multiple paths + @param "prefix" (string) a prefix to apply to all entries in the streamed archive; if the supplied prefix does not end with a trailing <code>/</code>, one will be added automatically + @param "apibasepath" (string) defaults to "/api/1.0" - in some installations, the value is "/archive/latest". Used as part of the URL endpoint. Not used as an actual parameter + + @param writer Where the archive's content is streamed out. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit to stream an archive of; if not supplied, an archive of the default branch is streamed - @param "filename" (string) a filename to include the \"Content-Disposition\" header - @param "format" (string) the format to stream the archive in; must be one of: zip, tar, tar.gz or tgz - @param "path" (string) paths to include in the streamed archive; may be repeated to include multiple paths - @param "prefix" (string) a prefix to apply to all entries in the streamed archive; if the supplied prefix does not end with a trailing <code>/</code>, one will be added automatically - @return */ -func (a *DefaultApiService) GetArchive(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { +@return the size of the archive written out +*/ +func (a *DefaultApiService) GetArchive(project, repository string, localVarOptionals map[string]interface{}, writer io.Writer) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3145,7 +3289,16 @@ func (a *DefaultApiService) GetArchive(project, repository string, localVarOptio ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/archive" + if err := typeCheckParameter(localVarOptionals["apibasepath"], "string", "apibasepath"); err != nil { + return nil, err + } + apibasepath, _ := localVarOptionals["apibasepath"].(string) + if apibasepath == "" { + apibasepath = "/api/1.0" + } + + //localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/archive" + localVarPath := a.client.cfg.BasePath + apibasepath + "/projects/{projectKey}/repos/{repositorySlug}/archive" localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", project), -1) localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repository), -1) @@ -3179,7 +3332,9 @@ func (a *DefaultApiService) GetArchive(project, repository string, localVarOptio localVarQueryParams.Add("format", parameterToString(localVarTempParam, "")) } if localVarTempParam, localVarOk := localVarOptionals["path"].(string); localVarOk { - localVarQueryParams.Add("path", parameterToString(localVarTempParam, "")) + for _, path := range strings.Split(parameterToString(localVarTempParam, ""), ",") { + localVarQueryParams.Add("path", path) + } } if localVarTempParam, localVarOk := localVarOptionals["prefix"].(string); localVarOk { localVarQueryParams.Add("prefix", parameterToString(localVarTempParam, "")) @@ -3212,20 +3367,27 @@ func (a *DefaultApiService) GetArchive(project, repository string, localVarOptio } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } + _, err = io.Copy(writer, localVarHTTPResponse.Body) + if err != nil { + return NewAPIResponseWithError(localVarHTTPResponse, nil, err) + } + return NewRawAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the avatar for the project matching the supplied <strong>moduleKey</strong>. +/* + DefaultApiService + Retrieve the avatar for the project matching the supplied <strong>moduleKey</strong>. 
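The reworked GetArchive above streams the archive into a caller-supplied io.Writer, splits a comma-separated "path" optional into repeated path= query parameters, and lets "apibasepath" override the default /api/1.0 prefix. A hedged sketch against an already-configured DefaultApi service; the file name, ref and paths are placeholders:

package examples

import (
	"os"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// downloadArchive streams a tar.gz of the repository at the given ref to disk.
func downloadArchive(api *bitbucketv1.DefaultApiService) error {
	out, err := os.Create("my-repo.tar.gz")
	if err != nil {
		return err
	}
	defer out.Close()

	_, err = api.GetArchive("PRJ", "my-repo", map[string]interface{}{
		"at":          "refs/heads/main",
		"format":      "tar.gz",
		"path":        "docs,cmd", // split into repeated path= params by the new code
		"prefix":      "my-repo/",
		"apibasepath": "/api/1.0", // default; some installs need "/archive/latest"
	}, out)
	return err
}

Because the new implementation copies the response body straight into the writer with io.Copy, the archive is never buffered whole in memory, which matters for large repositories.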
- @param hookKey the complete module key of the hook module - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (string) optional version used for HTTP caching only - any non-blank version will result in a large max-age Cache-Control header. Note that this does not affect the Last-Modified header. - @return */ + @param hookKey the complete module key of the hook module + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (string) optional version used for HTTP caching only - any non-blank version will result in a large max-age Cache-Control header. Note that this does not affect the Last-Modified header. + @return +*/ func (a *DefaultApiService) GetAvatar(hookKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3277,22 +3439,24 @@ func (a *DefaultApiService) GetAvatar(hookKey string, localVarOptionals map[stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the branches matching the supplied <strong>filterText</strong> param. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the branches matching the supplied <strong>filterText</strong> param. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "base" (string) base branch or tag to compare each branch to (for the metadata providers that uses that information) - @param "details" (bool) whether to retrieve plugin-provided metadata about each branch - @param "filterText" (string) the text to match on - @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "base" (string) base branch or tag to compare each branch to (for the metadata providers that uses that information) + @param "details" (bool) whether to retrieve plugin-provided metadata about each branch + @param "filterText" (string) the text to match on + @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) + @return +*/ func (a *DefaultApiService) GetBranches(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3375,20 +3539,22 @@ func (a *DefaultApiService) GetBranches(project, repository string, localVarOpti } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of changes made in a specified commit. 
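GetBranches above supports the documented "base", "details", "filterText" and "orderBy" optionals. A hedged sketch against an already-configured DefaultApi service; the project key, slug and filter values are placeholders:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// recentReleaseBranches lists branches matching "release/", most recently
// modified first, without plugin-provided metadata.
func recentReleaseBranches(api *bitbucketv1.DefaultApiService) {
	_, err := api.GetBranches("PRJ", "my-repo", map[string]interface{}{
		"filterText": "release/",
		"orderBy":    "MODIFICATION",
		"details":    false,
	})
	if err != nil {
		log.Printf("get branches: %v", err)
	}
}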
<p> <strong>Note:</strong> The implementation will apply a hard cap ({@code page.max.changes}) and it is not possible to request subsequent content when that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of changes made in a specified commit. <p> <strong>Note:</strong> The implementation will apply a hard cap ({@code page.max.changes}) and it is not possible to request subsequent content when that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "since" (string) the commit to which <code>until</code> should be compared to produce a page of changes. If not specified the commit's first parent is assumed (if one exists) - @param "until" (string) the commit to retrieve changes for - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "since" (string) the commit to which <code>until</code> should be compared to produce a page of changes. If not specified the commit's first parent is assumed (if one exists) + @param "until" (string) the commit to retrieve changes for + @return +*/ func (a *DefaultApiService) GetChanges(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3459,21 +3625,23 @@ func (a *DefaultApiService) GetChanges(projectKey, repositorySlug string, localV } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of changes made in a specified commit. <p> <strong>Note:</strong> The implementation will apply a hard cap ({@code page.max.changes}) and it is not possible to request subsequent content when that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of changes made in a specified commit. <p> <strong>Note:</strong> The implementation will apply a hard cap ({@code page.max.changes}) and it is not possible to request subsequent content when that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param commitId the commit to retrieve changes for - @param optional (nil or map[string]interface{}) with one or more of: - @param "since" (string) the commit to which <code>until</code> should be compared to produce a page of changes. If not specified the commit's first parent is assumed (if one exists) - @param "withComments" (bool) {@code true} to apply comment counts in the changes (the default); otherwise, {@code false} to stream changes without comment counts - @return */ + @param commitId the commit to retrieve changes for + @param optional (nil or map[string]interface{}) with one or more of: + @param "since" (string) the commit to which <code>until</code> should be compared to produce a page of changes. 
If not specified the commit's first parent is assumed (if one exists) + @param "withComments" (bool) {@code true} to apply comment counts in the changes (the default); otherwise, {@code false} to stream changes without comment counts + @return +*/ func (a *DefaultApiService) GetChanges_5(projectKey, repositorySlug, commitId string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3545,21 +3713,24 @@ func (a *DefaultApiService) GetChanges_5(projectKey, repositorySlug, commitId st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieves a commit discussion comment. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. @param commitId the commit to which the comments must be anchored @param commentId the ID of the comment to retrieve @param commitId2 the <i>full {@link Commit#getId() ID}</i> of the commit within the repository -@return */ -func (a *DefaultApiService) GetComment(projectKey, repositorySlug, commitId string, commentId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) GetComment(projectKey, repositorySlug, commitId string, commentId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3606,19 +3777,22 @@ func (a *DefaultApiService) GetComment(projectKey, repositorySlug, commitId stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieves a pull request comment. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. @param commentId the id of the comment to retrieve -@return */ -func (a *DefaultApiService) GetComment_6(projectKey, repositorySlug string, pullRequestId, commentId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) GetComment_6(projectKey, repositorySlug string, pullRequestId, commentId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3665,21 +3839,23 @@ func (a *DefaultApiService) GetComment_6(projectKey, repositorySlug string, pull } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves the commit discussion comments that match the specified search criteria. 
<p> It is possible to retrieve commit discussion comments that are anchored to a range of commits by providing the {@code sinceId} that the comments anchored from. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. +/* + DefaultApiService + Retrieves the commit discussion comments that match the specified search criteria. <p> It is possible to retrieve commit discussion comments that are anchored to a range of commits by providing the {@code sinceId} that the comments anchored from. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. - @param commitId the commit to which the comments must be anchored - @param optional (nil or map[string]interface{}) with one or more of: - @param "path" (string) the path to the file on which comments were made - @param "since" (string) For a merge commit, a parent can be provided to specify which diff the comments are on. For a commit range, a {@code sinceId} can be provided to specify where the comments are anchored from. - @return */ + @param commitId the commit to which the comments must be anchored + @param optional (nil or map[string]interface{}) with one or more of: + @param "path" (string) the path to the file on which comments were made + @param "since" (string) For a merge commit, a parent can be provided to specify which diff the comments are on. For a commit range, a {@code sinceId} can be provided to specify where the comments are anchored from. + @return +*/ func (a *DefaultApiService) GetComments(projectKey, repositorySlug, commitId string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3739,24 +3915,26 @@ func (a *DefaultApiService) GetComments(projectKey, repositorySlug, commitId str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Gets comments for the specified PullRequest. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Gets comments for the specified PullRequest. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "anchorState" (string) {@code ACTIVE} to stream the active comments; {@code ORPHANED} to stream the orphaned comments; {@code ALL} to stream both the active and the orphaned comments; - @param "diffType" (string) {@code EFFECTIVE} to stream the comments related to the effective diff of the pull request; {@code RANGE} to stream comments related to a commit range between two arbitrary commits (requires {@code fromHash} and {@code toHash}); {@code COMMIT} to stream comments related to a commit between two arbitrary commits (requires {@code fromHash} and {@code toHash}) - @param "fromHash" (string) the from commit hash to stream comments for a {@code RANGE} or {@code COMMIT} arbitrary change scope - @param "path" (string) the path to stream comments for a given path - @param "toHash" (string) the to commit hash to stream comments for a {@code RANGE} or {@code COMMIT} arbitrary change scope - @return */ -func (a *DefaultApiService) GetComments_7(projectKey, repositorySlug string, pullRequestId int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param optional (nil or map[string]interface{}) with one or more of: + @param "anchorState" (string) {@code ACTIVE} to stream the active comments; {@code ORPHANED} to stream the orphaned comments; {@code ALL} to stream both the active and the orphaned comments; + @param "diffType" (string) {@code EFFECTIVE} to stream the comments related to the effective diff of the pull request; {@code RANGE} to stream comments related to a commit range between two arbitrary commits (requires {@code fromHash} and {@code toHash}); {@code COMMIT} to stream comments related to a commit between two arbitrary commits (requires {@code fromHash} and {@code toHash}) + @param "fromHash" (string) the from commit hash to stream comments for a {@code RANGE} or {@code COMMIT} arbitrary change scope + @param "path" (string) the path to stream comments for a given path + @param "toHash" (string) the to commit hash to stream comments for a {@code RANGE} or {@code COMMIT} arbitrary change scope + @return +*/ +func (a *DefaultApiService) GetComments_7(projectKey, repositorySlug string, pullRequestId int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3833,20 +4011,22 @@ func (a *DefaultApiService) GetComments_7(projectKey, repositorySlug string, pul } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a single commit <i>identified by its ID</i>>. In general, that ID is a SHA1. <u>From 2.11, ref names like \"refs/heads/master\" are no longer accepted by this resource.</u> <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a single commit <i>identified by its ID</i>>. In general, that ID is a SHA1. 
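GetComments_7 above, which now takes a plain int pull-request ID, filters comments with the documented "anchorState", "diffType", "fromHash", "path" and "toHash" optionals. A hedged sketch against an already-configured DefaultApi service; the file path is a placeholder:

package examples

import (
	"log"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// activeFileComments streams the active comments anchored to one file of a
// pull request.
func activeFileComments(api *bitbucketv1.DefaultApiService, prID int) {
	_, err := api.GetComments_7("PRJ", "my-repo", prID, map[string]interface{}{
		"anchorState": "ACTIVE",                 // skip orphaned comments
		"path":        "pkg/service/service.go", // placeholder path
	})
	if err != nil {
		log.Printf("get pull-request comments: %v", err)
	}
}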
<u>From 2.11, ref names like \"refs/heads/master\" are no longer accepted by this resource.</u> <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param commitId the commit ID to retrieve - @param optional (nil or map[string]interface{}) with one or more of: - @param "path" (string) an optional path to filter the commit by. If supplied the details returned <i>may not</i> be for the specified commit. Instead, starting from the specified commit, they will be the details for the first commit affecting the specified path. - @return */ + @param commitId the commit ID to retrieve + @param optional (nil or map[string]interface{}) with one or more of: + @param "path" (string) an optional path to filter the commit by. If supplied the details returned <i>may not</i> be for the specified commit. Instead, starting from the specified commit, they will be the details for the first commit affecting the specified path. + @return +*/ func (a *DefaultApiService) GetCommit(projectKey, repositorySlug, commitId string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -3900,25 +4080,27 @@ func (a *DefaultApiService) GetCommit(projectKey, repositorySlug, commitId strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of commits from a given starting commit or \"between\" two commits. If no explicit commit is specified, the tip of the repository's default branch is assumed. commits may be identified by branch or tag name or by ID. A path may be supplied to restrict the returned commits to only those which affect that path. <p> The authenticated user must have <b>REPO_READ</b> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of commits from a given starting commit or \"between\" two commits. If no explicit commit is specified, the tip of the repository's default branch is assumed. commits may be identified by branch or tag name or by ID. A path may be supplied to restrict the returned commits to only those which affect that path. <p> The authenticated user must have <b>REPO_READ</b> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "followRenames" (bool) if <code>true</code>, the commit history of the specified file will be followed past renames. Only valid for a path to a single file. - @param "ignoreMissing" (bool) <code>true</code> to ignore missing commits, <code>false</code> otherwise - @param "merges" (string) if present, controls how merge commits should be filtered. Can be either <code>exclude</code>, to exclude merge commits, <code>include</code>, to include both merge commits and non-merge commits or <code>only</code>, to only return merge commits. 
- @param "path" (string) an optional path to filter commits by - @param "since" (string) the commit ID or ref (exclusively) to retrieve commits after - @param "until" (string) the commit ID (SHA1) or ref (inclusively) to retrieve commits before - @param "withCounts" (bool) optionally include the total number of commits and total number of unique authors - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "followRenames" (bool) if <code>true</code>, the commit history of the specified file will be followed past renames. Only valid for a path to a single file. + @param "ignoreMissing" (bool) <code>true</code> to ignore missing commits, <code>false</code> otherwise + @param "merges" (string) if present, controls how merge commits should be filtered. Can be either <code>exclude</code>, to exclude merge commits, <code>include</code>, to include both merge commits and non-merge commits or <code>only</code>, to only return merge commits. + @param "path" (string) an optional path to filter commits by + @param "since" (string) the commit ID or ref (exclusively) to retrieve commits after + @param "until" (string) the commit ID (SHA1) or ref (inclusively) to retrieve commits before + @param "withCounts" (bool) optionally include the total number of commits and total number of unique authors + @return +*/ func (a *DefaultApiService) GetCommits(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4020,21 +4202,23 @@ func (a *DefaultApiService) GetCommits(project, repository string, localVarOptio } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve commits for the specified pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Retrieve commits for the specified pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param pullRequestId - @param optional (nil or map[string]interface{}) with one or more of: - @param "withCounts" (bool) if set to true, the service will add \"authorCount\" and \"totalCount\" at the end of the page. \"authorCount\" is the number of different authors and \"totalCount\" is the total number of commits. - @return */ -func (a *DefaultApiService) GetCommits_8(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param pullRequestId + @param optional (nil or map[string]interface{}) with one or more of: + @param "withCounts" (bool) if set to true, the service will add \"authorCount\" and \"totalCount\" at the end of the page. \"authorCount\" is the number of different authors and \"totalCount\" is the total number of commits. 
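// Sketch of a call to the GetCommits wrapper above, combining a few of the
// documented optional filters. The client is assumed to be built as in the
// earlier sketch; the project key, slug, ref and path are placeholders.
func listBranchCommits(client *bitbucketv1.APIClient) (*bitbucketv1.APIResponse, error) {
    opts := map[string]interface{}{
        "until":      "refs/heads/main", // commit ID or ref (inclusive) to stop at
        "merges":     "exclude",         // drop merge commits
        "path":       "docs/README.md",  // only commits touching this path
        "withCounts": true,              // include totalCount / authorCount
    }
    return client.DefaultApi.GetCommits("PROJ", "my-repo", opts)
}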
+ @return +*/ +func (a *DefaultApiService) GetCommits_8(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -4087,22 +4271,24 @@ func (a *DefaultApiService) GetCommits_8(projectKey, repositorySlug string, pull } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref to retrieve the content for. - @param "type_" (bool) if true only the type will be returned for the file path instead of the contents. - @param "blame" (string) if present the blame will be returned for the file as well. - @param "noContent" (string) if present and used with blame only the blame is retrieved instead of the contents. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref to retrieve the content for. + @param "type_" (bool) if true only the type will be returned for the file path instead of the contents. + @param "blame" (string) if present the blame will be returned for the file as well. + @param "noContent" (string) if present and used with blame only the blame is retrieved instead of the contents. + @return +*/ func (a *DefaultApiService) GetContent(projectKey string, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { return a.getContentWithPath(projectKey, repositorySlug, "", localVarOptionals) } @@ -4196,23 +4382,25 @@ func (a *DefaultApiService) getContentWithPath(projectKey string, repositorySlug } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param path the file path to retrieve content from - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref to retrieve the content for. 
- @param "type_" (bool) if true only the type will be returned for the file path instead of the contents. - @param "blame" (string) if present the blame will be returned for the file as well. - @param "noContent" (string) if present and used with blame only the blame is retrieved instead of the contents. - @return */ + @param path the file path to retrieve content from + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref to retrieve the content for. + @param "type_" (bool) if true only the type will be returned for the file path instead of the contents. + @param "blame" (string) if present the blame will be returned for the file as well. + @param "noContent" (string) if present and used with blame only the blame is retrieved instead of the contents. + @return +*/ func (a *DefaultApiService) GetContent_9(projectKey, repositorySlug, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4296,22 +4484,24 @@ func (a *DefaultApiService) GetContent_9(projectKey, repositorySlug, path string } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the raw content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the raw content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref to retrieve the content for. - @param "markup" (string) if present or <code>\"true\"</code>, triggers the raw content to be markup-rendered and returned as HTML; otherwise, if not specified, or any value other than <code>\"true\"</code>, the content is streamed without markup - @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. If not specified, {@link MarkupService} will use the value of the <code>markup.render.hardwrap</code> property, which is <code>true</code> by default - @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. If not specified, {@link MarkupService} will use the value of the <code>markup.render.html.escape</code> property, which is <code>true</code> by default - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref to retrieve the content for. + @param "markup" (string) if present or <code>\"true\"</code>, triggers the raw content to be markup-rendered and returned as HTML; otherwise, if not specified, or any value other than <code>\"true\"</code>, the content is streamed without markup + @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. 
If not specified, {@link MarkupService} will use the value of the <code>markup.render.hardwrap</code> property, which is <code>true</code> by default + @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. If not specified, {@link MarkupService} will use the value of the <code>markup.render.html.escape</code> property, which is <code>true</code> by default + @return +*/ func (a *DefaultApiService) GetContent_10(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4382,38 +4572,37 @@ func (a *DefaultApiService) GetContent_10(projectKey, repositorySlug string, loc } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the raw content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - - @param path the file path to retrieve content from - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref to retrieve the content for. - @param "markup" (string) if present or <code>\"true\"</code>, triggers the raw content to be markup-rendered and returned as HTML; otherwise, if not specified, or any value other than <code>\"true\"</code>, the content is streamed without markup - @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. If not specified, {@link MarkupService} will use the value of the <code>markup.render.hardwrap</code> property, which is <code>true</code> by default - @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. If not specified, {@link MarkupService} will use the value of the <code>markup.render.html.escape</code> property, which is <code>true</code> by default - @return */ -func (a *DefaultApiService) GetContent_11(projectKey, repositorySlug, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { +/* + DefaultApiService + Retrieve the raw content for a file path at a specified revision. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. + * @param ctx context.Context for authentication, logging, tracing, etc. + @param path the file path to retrieve content from + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref to retrieve the content for. + @param "markup" (string) if present or <code>\"true\"</code>, triggers the raw content to be markup-rendered and returned as HTML; otherwise, if not specified, or any value other than <code>\"true\"</code>, the content is streamed without markup + @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. 
If not specified, {@link MarkupService} will use the value of the <code>markup.render.hardwrap</code> property, which is <code>true</code> by default + @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. If not specified, {@link MarkupService} will use the value of the <code>markup.render.html.escape</code> property, which is <code>true</code> by default + @return +*/ +func (a *DefaultApiService) GetRawContent(projectKey, repositorySlug, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte ) - // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/raw/{path}" localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) localVarPath = strings.Replace(localVarPath, "{"+"path"+"}", fmt.Sprintf("%v", path), -1) - localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) - localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -4472,17 +4661,22 @@ func (a *DefaultApiService) GetContent_11(projectKey, repositorySlug, path strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } - return NewRawAPIResponse(localVarHTTPResponse) + result := NewAPIResponse(localVarHTTPResponse) + result.Payload, err = io.ReadAll(localVarHTTPResponse.Body) + return result, err } -/* DefaultApiService +/* + DefaultApiService + Get the default branch of the repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) GetDefaultBranch(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4528,18 +4722,21 @@ func (a *DefaultApiService) GetDefaultBranch(projectKey, repositorySlug string) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve repositories which have been forked from this one. Unlike {@link #getRelatedRepositories(Repository, PageRequest) related repositories}, this only looks at a given repository's direct forks. If those forks have themselves been the origin of more forks, such \"grandchildren\" repositories will not be retrieved. <p> Only repositories to which the authenticated user has <b>REPO_READ</b> permission will be included, even if other repositories have been forked from this one. 
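// Sketch of the renamed GetRawContent wrapper above: unlike the JSON
// endpoints, the raw file bytes are copied into the Payload field of the
// returned APIResponse (see the io.ReadAll call in the diff). The client is
// assumed to be built as in the earlier sketch; identifiers are placeholders.
func readRawFile(client *bitbucketv1.APIClient) ([]byte, error) {
    opts := map[string]interface{}{
        "at": "refs/heads/main", // commit ID or ref to read the file at
    }
    resp, err := client.DefaultApi.GetRawContent("PROJ", "my-repo", "go.mod", opts)
    if err != nil {
        return nil, err
    }
    return resp.Payload, nil
}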
@param projectKey the parent project key -@return */ +@return +*/ func (a *DefaultApiService) GetForkedRepositories(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4585,19 +4782,21 @@ func (a *DefaultApiService) GetForkedRepositories(projectKey, repositorySlug str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups. <p> The authenticated user must have <strong>LICENSED_USER</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of groups. <p> The authenticated user must have <strong>LICENSED_USER</strong> permission or higher to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroups(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4660,19 +4859,21 @@ func (a *DefaultApiService) GetGroups(localVarOptionals map[string]interface{}) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have been granted at least one global permission. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of groups that have been granted at least one global permission. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithAnyPermission(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4735,19 +4936,21 @@ func (a *DefaultApiService) GetGroupsWithAnyPermission(localVarOptionals map[str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have been granted at least one permission for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. +/* + DefaultApiService + Retrieve a page of groups that have been granted at least one permission for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithAnyPermission_12(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4811,19 +5014,21 @@ func (a *DefaultApiService) GetGroupsWithAnyPermission_12(projectKey string, loc } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have been granted at least one permission for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. +/* + DefaultApiService + Retrieve a page of groups that have been granted at least one permission for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithAnyPermission_13(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4888,19 +5093,21 @@ func (a *DefaultApiService) GetGroupsWithAnyPermission_13(projectKey, repository } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have no granted global permissions. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of groups that have no granted global permissions. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithoutAnyPermission(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -4963,19 +5170,21 @@ func (a *DefaultApiService) GetGroupsWithoutAnyPermission(localVarOptionals map[ } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have no granted permissions for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. +/* + DefaultApiService + Retrieve a page of groups that have no granted permissions for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithoutAnyPermission_14(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5039,19 +5248,83 @@ func (a *DefaultApiService) GetGroupsWithoutAnyPermission_14(projectKey string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of groups that have no granted permissions for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. +func (a *DefaultApiService) SetRepositoryPermissionGroups(projectKey, repositorySlug, permission string, groupNames []string, localVarHTTPContentTypes []string) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Put") + localVarFileName string + localVarFileBytes []byte + localVarPostBody interface{} + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/permissions/groups" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{ + "name": groupNames, + "permission": []string{permission}, + } + localVarFormParams := url.Values{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil { + if localVarHTTPResponse != nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) + } + return nil, err + } + if localVarHTTPResponse == nil { + return nil, nil + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + if 
localVarHTTPResponse.StatusCode == 204 { + return nil, nil + } + + return NewBitbucketAPIResponse(localVarHTTPResponse) +} - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ +/* + DefaultApiService + Retrieve a page of groups that have no granted permissions for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. + * @param ctx context.Context for authentication, logging, tracing, etc. + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroupsWithoutAnyPermission_15(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5116,19 +5389,21 @@ func (a *DefaultApiService) GetGroupsWithoutAnyPermission_15(projectKey, reposit } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of group names. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of group names. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission or higher to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetGroups_16(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5191,17 +5466,20 @@ func (a *DefaultApiService) GetGroups_16(localVarOptionals map[string]interface{ } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Gets information about the nodes that currently make up the stash cluster. <p> The authenticated user must have the <strong>SYS_ADMIN</strong> permission to call this resource. 
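// Sketch of the new SetRepositoryPermissionGroups helper above. Note the
// behaviour visible in the diff: on a 204 No Content response the method
// returns (nil, nil), so a nil *APIResponse alone does not indicate failure.
// The group name and permission level are placeholders; passing nil content
// types simply leaves the Content-Type header unset, which this PUT does not
// need.
func grantGroupWrite(client *bitbucketv1.APIClient) error {
    resp, err := client.DefaultApi.SetRepositoryPermissionGroups(
        "PROJ", "my-repo", "REPO_WRITE", []string{"dev-team"}, nil)
    if err != nil {
        return err
    }
    if resp == nil {
        // 204 No Content: the permission was applied and there is no body to
        // decode (see the explicit status-code check in the diff).
        return nil
    }
    return nil
}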
-@return */ +@return +*/ func (a *DefaultApiService) GetInformation() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5245,21 +5523,23 @@ func (a *DefaultApiService) GetInformation() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Get the latest invocations for a specific webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Get the latest invocations for a specific webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. - @param webhookId id of the webhook - @param optional (nil or map[string]interface{}) with one or more of: - @param "event" (string) the string id of a specific event to retrieve the last invocation for. - @param "outcome" (string) the outcome to filter for. Can be SUCCESS, FAILURE, ERROR. None specified means that the all will be considered - @return */ + @param webhookId id of the webhook + @param optional (nil or map[string]interface{}) with one or more of: + @param "event" (string) the string id of a specific event to retrieve the last invocation for. + @param "outcome" (string) the outcome to filter for. Can be SUCCESS, FAILURE, ERROR. None specified means that the all will be considered + @return +*/ func (a *DefaultApiService) GetLatestInvocation(projectKey, repositorySlug string, webhookId int32, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5319,18 +5599,21 @@ func (a *DefaultApiService) GetLatestInvocation(projectKey, repositorySlug strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the current log level for a given logger. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. @param loggerName the name of the logger. -@return */ +@return +*/ func (a *DefaultApiService) GetLevel(loggerName string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5375,17 +5658,20 @@ func (a *DefaultApiService) GetLevel(loggerName string) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieves the current mail configuration. 
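// Sketch of GetLatestInvocation above: fetch the most recent delivery of one
// webhook, optionally narrowed to an event key and an outcome. The webhook ID
// is an int32 per the signature; the event key and all identifiers are
// placeholders.
func lastFailedDelivery(client *bitbucketv1.APIClient) (*bitbucketv1.APIResponse, error) {
    opts := map[string]interface{}{
        "event":   "repo:refs_changed", // placeholder event key
        "outcome": "FAILURE",           // SUCCESS, FAILURE or ERROR
    }
    return client.DefaultApi.GetLatestInvocation("PROJ", "my-repo", int32(7), opts)
}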
The authenticated user must have the <strong>SYS_ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) GetMailConfig() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5429,18 +5715,21 @@ func (a *DefaultApiService) GetMailConfig() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the merge strategies available for this instance. <p> The user must be authenticated to call this resource. @param scmId the id of the scm to get strategies for -@return */ +@return +*/ func (a *DefaultApiService) GetMergeConfig(scmId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5485,24 +5774,26 @@ func (a *DefaultApiService) GetMergeConfig(scmId string) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of pull requests to or from the specified repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. Optionally clients can specify PR participant filters. Each filter has a mandatory {@code username.N} parameter, and the optional {@code role.N} and {@code approved.N} parameters. <ul> <li> {@code username.N} - the \"root\" of a single participant filter, where \"N\" is a natural number starting from 1. This allows clients to specify multiple participant filters, by providing consecutive filters as {@code username.1}, {@code username.2} etc. Note that the filters numbering has to start with 1 and be continuous for all filters to be processed. The total allowed number of participant filters is 10 and all filters exceeding that limit will be dropped. </li> <li> {@code role.N}(optional) the role associated with {@code username.N}. This must be one of {@code AUTHOR}, {@code REVIEWER}, or{@code PARTICIPANT} </li> <li> {@code approved.N}(optional) the approved status associated with {@code username.N}. That is whether {@code username.N} has approved the PR. Either {@code true}, or {@code false} </li> </ul> +/* + DefaultApiService + Retrieve a page of pull requests to or from the specified repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. Optionally clients can specify PR participant filters. Each filter has a mandatory {@code username.N} parameter, and the optional {@code role.N} and {@code approved.N} parameters. <ul> <li> {@code username.N} - the \"root\" of a single participant filter, where \"N\" is a natural number starting from 1. This allows clients to specify multiple participant filters, by providing consecutive filters as {@code username.1}, {@code username.2} etc. 
Note that the filters numbering has to start with 1 and be continuous for all filters to be processed. The total allowed number of participant filters is 10 and all filters exceeding that limit will be dropped. </li> <li> {@code role.N}(optional) the role associated with {@code username.N}. This must be one of {@code AUTHOR}, {@code REVIEWER}, or{@code PARTICIPANT} </li> <li> {@code approved.N}(optional) the approved status associated with {@code username.N}. That is whether {@code username.N} has approved the PR. Either {@code true}, or {@code false} </li> </ul> - @param optional (nil or map[string]interface{}) with one or more of: - @param "direction" (string) (optional, defaults to <strong>INCOMING</strong>) the direction relative to the specified repository. Either <strong>INCOMING</strong> or <strong>OUTGOING</strong>. - @param "at" (string) (optional) a <i>fully-qualified</i> branch ID to find pull requests to or from, such as {@code refs/heads/master} - @param "state" (string) (optional, defaults to <strong>OPEN</strong>). Supply <strong>ALL</strong> to return pull request in any state. If a state is supplied only pull requests in the specified state will be returned. Either <strong>OPEN</strong>, <strong>DECLINED</strong> or <strong>MERGED</strong>. - @param "order" (string) (optional, defaults to <strong>NEWEST</strong>) the order to return pull requests in, either <strong>OLDEST</strong> (as in: \"oldest first\") or <strong>NEWEST</strong>. - @param "withAttributes" (bool) (optional) defaults to true, whether to return additional pull request attributes - @param "withProperties" (bool) (optional) defaults to true, whether to return additional pull request properties - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "direction" (string) (optional, defaults to <strong>INCOMING</strong>) the direction relative to the specified repository. Either <strong>INCOMING</strong> or <strong>OUTGOING</strong>. + @param "at" (string) (optional) a <i>fully-qualified</i> branch ID to find pull requests to or from, such as {@code refs/heads/master} + @param "state" (string) (optional, defaults to <strong>OPEN</strong>). Supply <strong>ALL</strong> to return pull request in any state. If a state is supplied only pull requests in the specified state will be returned. Either <strong>OPEN</strong>, <strong>DECLINED</strong> or <strong>MERGED</strong>. + @param "order" (string) (optional, defaults to <strong>NEWEST</strong>) the order to return pull requests in, either <strong>OLDEST</strong> (as in: \"oldest first\") or <strong>NEWEST</strong>. 
+ @param "withAttributes" (bool) (optional) defaults to true, whether to return additional pull request attributes + @param "withProperties" (bool) (optional) defaults to true, whether to return additional pull request properties + @return +*/ func (a *DefaultApiService) GetPullRequestsPage(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5597,17 +5888,20 @@ func (a *DefaultApiService) GetPullRequestsPage(projectKey, repositorySlug strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the project matching the supplied <strong>projectKey</strong>. <p> The authenticated user must have <strong>PROJECT_VIEW</strong> permission for the specified project to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) GetProject(projectKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5652,19 +5946,21 @@ func (a *DefaultApiService) GetProject(projectKey string) (*APIResponse, error) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the avatar for the project matching the supplied <strong>projectKey</strong>. <p> The authenticated user must have <strong>PROJECT_VIEW</strong> permission for the specified project to call this resource. +/* + DefaultApiService + Retrieve the avatar for the project matching the supplied <strong>projectKey</strong>. <p> The authenticated user must have <strong>PROJECT_VIEW</strong> permission for the specified project to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "s" (int32) The desired size of the image. The server will return an image as close as possible to the specified size. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "s" (int32) The desired size of the image. The server will return an image as close as possible to the specified size. + @return +*/ func (a *DefaultApiService) GetProjectAvatar(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5716,20 +6012,22 @@ func (a *DefaultApiService) GetProjectAvatar(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of projects. 
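// Sketch of GetPullRequestsPage above: open pull requests targeting a given
// branch, newest first, with the extra attributes and properties suppressed.
// Identifiers are placeholders and the option keys mirror the documented
// query parameters.
func openPRsIntoMain(client *bitbucketv1.APIClient) (*bitbucketv1.APIResponse, error) {
    opts := map[string]interface{}{
        "direction":      "INCOMING",
        "at":             "refs/heads/main",
        "state":          "OPEN",
        "order":          "NEWEST",
        "withAttributes": false,
        "withProperties": false,
    }
    return client.DefaultApi.GetPullRequestsPage("PROJ", "my-repo", opts)
}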
<p> Only projects for which the authenticated user has the <strong>PROJECT_VIEW</strong> permission will be returned. +/* + DefaultApiService + Retrieve a page of projects. <p> Only projects for which the authenticated user has the <strong>PROJECT_VIEW</strong> permission will be returned. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) - @param "permission" (string) - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) + @param "permission" (string) + @return +*/ func (a *DefaultApiService) GetProjects(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5800,16 +6098,18 @@ func (a *DefaultApiService) GetProjects(localVarOptionals map[string]interface{} } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestCount() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5853,17 +6153,20 @@ func (a *DefaultApiService) GetPullRequestCount() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the pull request settings for the context repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the context repository to call this resource. <p> This resource will call all RestFragments that are registered with the key <strong>bitbucket.repository.settings.pullRequests</strong>. If any fragment fails validations by returning a non-empty Map of errors, then no fragments will execute. 
<p> The property keys for the settings that are bundled with the application are <ul> <li>mergeConfig - the merge strategy configuration for pull requests</li> <li>requiredApprovers - (Deprecated, please use com.atlassian.bitbucket.server.bundled-hooks.requiredApproversMergeHook instead) the number of approvals required on a pull request for it to be mergeable, or 0 if the merge check is disabled</li> <li>com.atlassian.bitbucket.server.bundled-hooks.requiredApproversMergeHook - the merge check configuration for required approvers</li> <li>requiredAllApprovers - whether or not all approvers must approve a pull request for it to be mergeable</li> <li>requiredAllTasksComplete - whether or not all tasks on a pull request need to be completed for it to be mergeable</li> <li>requiredSuccessfulBuilds - (Deprecated, please use com.atlassian.bitbucket.server.bitbucket-build.requiredBuildsMergeCheck instead) the number of successful builds on a pull request for it to be mergeable, or 0 if the merge check is disabled</li> <li>com.atlassian.bitbucket.server.bitbucket-build.requiredBuildsMergeCheck - the merge check configuration for required builds</li> </ul> -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestSettings(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5909,18 +6212,21 @@ func (a *DefaultApiService) GetPullRequestSettings(projectKey, repositorySlug st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the merge strategy configuration for this project and SCM. <p> The authenticated user must have <strong>PROJECT_READ</strong> permission for the context repository to call this resource. @param scmId the SCM to get strategies for -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestSettings_17(projectKey, repositorySlug string, scmId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -5967,20 +6273,22 @@ func (a *DefaultApiService) GetPullRequestSettings_17(projectKey, repositorySlug } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieves a page of suggestions for pull requests that the currently authenticated user may wish to raise. Such suggestions are based on ref changes occurring and so contain the ref change that prompted the suggestion plus the time the change event occurred. Changes will be returned in descending order based on the time the change that prompted the suggestion occurred. <p> Note that although the response is a page object, the interface does not support paging, however a limit can be applied to the size of the returned page. +/* + DefaultApiService + Retrieves a page of suggestions for pull requests that the currently authenticated user may wish to raise. 
Such suggestions are based on ref changes occurring and so contain the ref change that prompted the suggestion plus the time the change event occurred. Changes will be returned in descending order based on the time the change that prompted the suggestion occurred. <p> Note that although the response is a page object, the interface does not support paging, however a limit can be applied to the size of the returned page. - @param optional (nil or map[string]interface{}) with one or more of: - @param "changesSince" (string) restrict pull request suggestions to be based on events that occurred since some time in the past. This is expressed in seconds since \"now\". So to return suggestions based only on activity within the past 48 hours, pass a value of 172800. - @param "limit" (int32) restricts the result set to return at most this many suggestions. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "changesSince" (string) restrict pull request suggestions to be based on events that occurred since some time in the past. This is expressed in seconds since \"now\". So to return suggestions based only on activity within the past 48 hours, pass a value of 172800. + @param "limit" (int32) restricts the result set to return at most this many suggestions. + @return +*/ func (a *DefaultApiService) GetPullRequestSuggestions(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6037,18 +6345,21 @@ func (a *DefaultApiService) GetPullRequestSuggestions(localVarOptionals map[stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the tasks associated with a pull request. -@return */ -func (a *DefaultApiService) GetPullRequestTasks(projectKey, repositorySlug string, pullRequestId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) GetPullRequestTasks(projectKey, repositorySlug string, pullRequestId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -6094,23 +6405,25 @@ func (a *DefaultApiService) GetPullRequestTasks(projectKey, repositorySlug strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of pull requests where the current authenticated user is involved as either a reviewer, author or a participant. The request may be filtered by pull request state, role or participant status. +/* + DefaultApiService + Retrieve a page of pull requests where the current authenticated user is involved as either a reviewer, author or a participant. The request may be filtered by pull request state, role or participant status. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "state" (string) (optional, defaults to returning pull requests in any state). If a state is supplied only pull requests in the specified state will be returned. Either <strong>OPEN</strong>, <strong>DECLINED</strong> or <strong>MERGED</strong>. Omit this parameter to return pull request in any state. - @param "role" (string) (optional, defaults to returning pull requests for any role). If a role is supplied only pull requests where the authenticated user is a participant in the given role will be returned. Either <strong>REVIEWER</strong>, <strong>AUTHOR</strong> or <strong>PARTICIPANT</strong>. - @param "participantStatus" (string) (optional, defaults to returning pull requests with any participant status). A comma separated list of participant status. That is, one or more of <strong>UNAPPROVED</strong>, <strong>NEEDS_WORK</strong>, or <strong>APPROVED</strong>. - @param "order" (string) (optional, defaults to <strong>NEWEST</strong>) the order to return pull requests in, either <strong>OLDEST</strong> (as in: \"oldest first\"), <strong>NEWEST</strong>, <strong>PARTICIPANT_STATUS</strong>, or <strong>CLOSED_DATE</strong>. Where <strong>CLOSED_DATE</strong> is specified and the result set includes pull requests that are not in the closed state, these pull requests will appear first in the result set, followed by most recently closed pull requests. - @param "closedSince" (string) (optional, defaults to returning pull requests regardless of closed since date). Permits returning only pull requests with a closed timestamp set more recently that (now - closedSince). Units are in seconds. So for example if closed since 86400 is set only pull requests closed in the previous 24 hours will be returned. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "state" (string) (optional, defaults to returning pull requests in any state). If a state is supplied only pull requests in the specified state will be returned. Either <strong>OPEN</strong>, <strong>DECLINED</strong> or <strong>MERGED</strong>. Omit this parameter to return pull request in any state. + @param "role" (string) (optional, defaults to returning pull requests for any role). If a role is supplied only pull requests where the authenticated user is a participant in the given role will be returned. Either <strong>REVIEWER</strong>, <strong>AUTHOR</strong> or <strong>PARTICIPANT</strong>. + @param "participantStatus" (string) (optional, defaults to returning pull requests with any participant status). A comma separated list of participant status. That is, one or more of <strong>UNAPPROVED</strong>, <strong>NEEDS_WORK</strong>, or <strong>APPROVED</strong>. + @param "order" (string) (optional, defaults to <strong>NEWEST</strong>) the order to return pull requests in, either <strong>OLDEST</strong> (as in: \"oldest first\"), <strong>NEWEST</strong>, <strong>PARTICIPANT_STATUS</strong>, or <strong>CLOSED_DATE</strong>. Where <strong>CLOSED_DATE</strong> is specified and the result set includes pull requests that are not in the closed state, these pull requests will appear first in the result set, followed by most recently closed pull requests. + @param "closedSince" (string) (optional, defaults to returning pull requests regardless of closed since date). Permits returning only pull requests with a closed timestamp set more recently that (now - closedSince). Units are in seconds. 
So for example if closed since 86400 is set only pull requests closed in the previous 24 hours will be returned. + @return +*/ func (a *DefaultApiService) GetPullRequests(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6197,20 +6510,22 @@ func (a *DefaultApiService) GetPullRequests(localVarOptionals map[string]interfa } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService - @param optional (nil or map[string]interface{}) with one or more of: - @param "start" (int32) - @param "limit" (int32) - @param "role" (string) - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "start" (int32) + @param "limit" (int32) + @param "role" (string) + @return +*/ func (a *DefaultApiService) GetPullRequests_18(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6273,18 +6588,21 @@ func (a *DefaultApiService) GetPullRequests_18(localVarOptionals map[string]inte } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve repositories which are related to this one. Related repositories are from the same {@link Repository#getHierarchyId() hierarchy} as this repository. <p> Only repositories to which the authenticated user has <b>REPO_READ</b> permission will be included, even if more repositories are part of this repository's hierarchy. @param projectKey the parent project key -@return */ +@return +*/ func (a *DefaultApiService) GetRelatedRepositories(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6330,18 +6648,21 @@ func (a *DefaultApiService) GetRelatedRepositories(projectKey, repositorySlug st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve repositories from the project corresponding to the supplied <strong>projectKey</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified project to call this resource. 
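The two listing calls above (GetPullRequestSuggestions and GetPullRequests) take no positional arguments; everything goes through the optional map, keyed by the parameter names in the doc comments. A minimal sketch of driving them, assuming the package's usual NewConfiguration/NewAPIClient constructors, the ContextBasicAuth context value and the Values field on APIResponse (none of which appear in this hunk); bbv1 is just the import alias used here and in the later sketches, and the URL and credentials are placeholders:

package main

import (
	"context"
	"fmt"

	bbv1 "github.com/gfleury/go-bitbucket-v1"
)

func main() {
	// Assumed setup: basic-auth credentials travel in the context, and the
	// configuration points at the server's /rest base path.
	ctx := context.WithValue(context.Background(), bbv1.ContextBasicAuth,
		bbv1.BasicAuth{UserName: "user", Password: "secret"})
	client := bbv1.NewAPIClient(ctx, bbv1.NewConfiguration("https://bitbucket.example.com/rest"))

	// Suggestions based on ref changes from the last 48 hours (seconds since "now").
	if _, err := client.DefaultApi.GetPullRequestSuggestions(map[string]interface{}{
		"changesSince": "172800",
	}); err != nil {
		fmt.Println("suggestions:", err)
	}

	// Open pull requests the caller authored, newest first.
	resp, err := client.DefaultApi.GetPullRequests(map[string]interface{}{
		"state": "OPEN",
		"role":  "AUTHOR",
		"order": "NEWEST",
	})
	if err != nil {
		fmt.Println("pull requests:", err)
		return
	}
	fmt.Printf("decoded page: %v\n", resp.Values) // Values is assumed to hold the decoded JSON page.
}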
@param projectKey the parent project key -@return */ +@return +*/ func (a *DefaultApiService) GetRepositories(projectKey string) (*APIResponse, error) { return a.GetRepositoriesWithOptions(projectKey, make(map[string]interface{})) } @@ -6404,19 +6725,21 @@ func (a *DefaultApiService) GetRepositoriesWithOptions(projectKey string, localV } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of recently accessed repositories for the currently authenticated user. <p> Repositories are ordered from most recently to least recently accessed. <p> Only authenticated users may call this resource. +/* + DefaultApiService + Retrieve a page of recently accessed repositories for the currently authenticated user. <p> Repositories are ordered from most recently to least recently accessed. <p> Only authenticated users may call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "permission" (string) (optional) if specified, it must be a valid repository permission level name and will limit the resulting repository list to ones that the requesting user has the specified permission level to. If not specified, the default <code>REPO_READ</code> permission level will be assumed. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "permission" (string) (optional) if specified, it must be a valid repository permission level name and will limit the resulting repository list to ones that the requesting user has the specified permission level to. If not specified, the default <code>REPO_READ</code> permission level will be assumed. + @return +*/ func (a *DefaultApiService) GetRepositoriesRecentlyAccessed(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6479,22 +6802,24 @@ func (a *DefaultApiService) GetRepositoriesRecentlyAccessed(localVarOptionals ma } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of repositories based on query parameters that control the search. See the documentation of the parameters for more details. <p> This resource is anonymously accessible. <p> <b>Note on permissions.</b> In absence of the {@code permission} query parameter the implicit 'read' permission is assumed. Please note that this permission is lower than the REPO_READ permission rather than being equal to it. The implicit 'read' permission for a given repository is assigned to any user that has any of the higher permissions, such as <tt>REPO_READ</tt>, as well as to anonymous users if the repository is marked as public. The important implication of the above is that an anonymous request to this resource with a permission level <tt>REPO_READ</tt> is guaranteed to receive an empty list of repositories as a result. 
For anonymous requests it is therefore recommended to not specify the <tt>permission</tt> parameter at all. - * @param for authentication, logging, tracing, etc. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) (optional) if specified, this will limit the resulting repository list to ones whose name matches this parameter's value. The match will be done case-insensitive and any leading and/or trailing whitespace characters on the <code>name</code> parameter will be stripped. - @param "projectname" (string) (optional) if specified, this will limit the resulting repository list to ones whose project's name matches this parameter's value. The match will be done case-insensitive and any leading and/or trailing whitespace characters on the <code>projectname</code> parameter will be stripped. - @param "permission" (string) (optional) if specified, it must be a valid repository permission level name and will limit the resulting repository list to ones that the requesting user has the specified permission level to. If not specified, the default implicit 'read' permission level will be assumed. The currently supported explicit permission values are <tt>REPO_READ</tt>, <tt>REPO_WRITE</tt> and <tt>REPO_ADMIN</tt>. - @param "visibility" (string) (optional) if specified, this will limit the resulting repository list based on the repositories visibility. Valid values are <em>public</em> or <em>private</em>. - @return */ +/* + DefaultApiService + Retrieve a page of repositories based on query parameters that control the search. See the documentation of the parameters for more details. <p> This resource is anonymously accessible. <p> <b>Note on permissions.</b> In absence of the {@code permission} query parameter the implicit 'read' permission is assumed. Please note that this permission is lower than the REPO_READ permission rather than being equal to it. The implicit 'read' permission for a given repository is assigned to any user that has any of the higher permissions, such as <tt>REPO_READ</tt>, as well as to anonymous users if the repository is marked as public. The important implication of the above is that an anonymous request to this resource with a permission level <tt>REPO_READ</tt> is guaranteed to receive an empty list of repositories as a result. For anonymous requests it is therefore recommended to not specify the <tt>permission</tt> parameter at all. + * @param for authentication, logging, tracing, etc. + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) (optional) if specified, this will limit the resulting repository list to ones whose name matches this parameter's value. The match will be done case-insensitive and any leading and/or trailing whitespace characters on the <code>name</code> parameter will be stripped. + @param "projectname" (string) (optional) if specified, this will limit the resulting repository list to ones whose project's name matches this parameter's value. The match will be done case-insensitive and any leading and/or trailing whitespace characters on the <code>projectname</code> parameter will be stripped. + @param "permission" (string) (optional) if specified, it must be a valid repository permission level name and will limit the resulting repository list to ones that the requesting user has the specified permission level to. If not specified, the default implicit 'read' permission level will be assumed. 
The currently supported explicit permission values are <tt>REPO_READ</tt>, <tt>REPO_WRITE</tt> and <tt>REPO_ADMIN</tt>. + @param "visibility" (string) (optional) if specified, this will limit the resulting repository list based on the repositories visibility. Valid values are <em>public</em> or <em>private</em>. + @return +*/ func (a *DefaultApiService) GetRepositories_19(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6575,20 +6900,23 @@ func (a *DefaultApiService) GetRepositories_19(localVarOptionals map[string]inte } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. @param projectKey the parent project key @param projectKey2 the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetRepository(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6634,20 +6962,23 @@ func (a *DefaultApiService) GetRepository(projectKey, repositorySlug string) (*A } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. @param projectKey the parent project key @param projectKey2 the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetUserRepository(username, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6693,18 +7024,21 @@ func (a *DefaultApiService) GetUserRepository(username, repositorySlug string) ( } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. 
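GetRepositories_19 is the generic repository search, while the positional getters below it (GetRepository, GetUserRepository) take the project key and slug directly. A small sketch of both, reusing the bbv1 alias and client from the earlier sketch; "PROJ" and "my-repo" are placeholder coordinates:

// findWritablePrivateRepos searches repositories by name, limited to ones the
// caller has REPO_WRITE on; the map keys mirror the documented query parameters.
func findWritablePrivateRepos(api *bbv1.DefaultApiService, name string) (*bbv1.APIResponse, error) {
	return api.GetRepositories_19(map[string]interface{}{
		"name":       name,
		"permission": "REPO_WRITE",
		"visibility": "private",
	})
}

// fetchRepo looks a single repository up by project key and slug.
func fetchRepo(api *bbv1.DefaultApiService) (*bbv1.APIResponse, error) {
	return api.GetRepository("PROJ", "my-repo")
}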
@param projectKey the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequest(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6751,18 +7085,21 @@ func (a *DefaultApiService) GetPullRequest(projectKey, repositorySlug string, pu } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. @param projectKey the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestActivity(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6809,18 +7146,21 @@ func (a *DefaultApiService) GetPullRequestActivity(projectKey, repositorySlug st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. @param projectKey the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestActivityWithOptions(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -6881,18 +7221,21 @@ func (a *DefaultApiService) GetPullRequestActivityWithOptions(projectKey, reposi } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the repository matching the supplied <strong>projectKey</strong> and <strong>repositorySlug</strong>. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. 
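The pull-request getters above all follow the same positional pattern: project key, repository slug, numeric pull request ID (now a plain int throughout this change). A hedged sketch on the same assumed client, with placeholder coordinates:

// pullRequestWithActivity fetches a pull request and then its activity stream.
func pullRequestWithActivity(api *bbv1.DefaultApiService, id int) error {
	if _, err := api.GetPullRequest("PROJ", "my-repo", id); err != nil {
		return err
	}
	_, err := api.GetPullRequestActivity("PROJ", "my-repo", id)
	return err
}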
@param projectKey the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) GetPullRequestCommits(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { return a.GetPullRequestCommitsWithOptions(projectKey, repositorySlug, pullRequestID, make(map[string]interface{})) } @@ -6958,7 +7301,7 @@ func (a *DefaultApiService) GetPullRequestCommitsWithOptions(projectKey, reposit } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -7009,18 +7352,21 @@ func (a *DefaultApiService) GetCommitBuildStatuses(commitSHA string) (*APIRespon } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a repository hook for this repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) GetRepositoryHook(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7067,18 +7413,21 @@ func (a *DefaultApiService) GetRepositoryHook(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a repository hook for this project. <p> The authenticated user must have <strong>PROJECT_READ</strong> permission for the specified project to call this resource. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) GetRepositoryHook_20(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7125,19 +7474,21 @@ func (a *DefaultApiService) GetRepositoryHook_20(projectKey, repositorySlug stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of repository hooks for this repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of repository hooks for this repository. 
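GetCommitBuildStatuses keys off the commit SHA alone, while the hook getters take the hook's full key. A short sketch under the same assumptions as the first one (the SHA and hook key are placeholders; hook keys are typically of the form plugin-key:hook-name):

// commitAndHook fetches build statuses for a commit and one repository hook.
func commitAndHook(api *bbv1.DefaultApiService, sha string) error {
	if _, err := api.GetCommitBuildStatuses(sha); err != nil {
		return err
	}
	_, err := api.GetRepositoryHook("PROJ", "my-repo", "com.example.plugin:example-hook")
	return err
}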
<p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "type_" (string) the optional type to filter by. Valid values are <code>PRE_RECEIVE</code> or <code>POST_RECEIVE</code> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "type_" (string) the optional type to filter by. Valid values are <code>PRE_RECEIVE</code> or <code>POST_RECEIVE</code> + @return +*/ func (a *DefaultApiService) GetRepositoryHooks(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7202,19 +7553,21 @@ func (a *DefaultApiService) GetRepositoryHooks(projectKey, repositorySlug string } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of repository hooks for this project. <p> The authenticated user must have <strong>PROJECT_READ</strong> permission for the specified project to call this resource. +/* + DefaultApiService + Retrieve a page of repository hooks for this project. <p> The authenticated user must have <strong>PROJECT_READ</strong> permission for the specified project to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "type_" (string) the optional type to filter by. Valid values are <code>PRE_RECEIVE</code> or <code>POST_RECEIVE</code> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "type_" (string) the optional type to filter by. Valid values are <code>PRE_RECEIVE</code> or <code>POST_RECEIVE</code> + @return +*/ func (a *DefaultApiService) GetRepositoryHooks_21(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7278,17 +7631,20 @@ func (a *DefaultApiService) GetRepositoryHooks_21(projectKey string, localVarOpt } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the current log level for the root logger. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. 
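The only documented filter for the hook listings is type_, restricted to PRE_RECEIVE or POST_RECEIVE. For instance, with the same assumed client and placeholder coordinates:

// postReceiveHooks lists only the repository's post-receive hooks.
func postReceiveHooks(api *bbv1.DefaultApiService) (*bbv1.APIResponse, error) {
	return api.GetRepositoryHooks("PROJ", "my-repo", map[string]interface{}{
		"type_": "POST_RECEIVE",
	})
}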
-@return */ +@return +*/ func (a *DefaultApiService) GetRootLevel() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7332,17 +7688,20 @@ func (a *DefaultApiService) GetRootLevel() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService -Retrieves the server email address +/* + DefaultApiService + +# Retrieves the server email address -@return */ +@return +*/ func (a *DefaultApiService) GetSenderAddress() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7386,18 +7745,21 @@ func (a *DefaultApiService) GetSenderAddress() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService -Retrieve the settings for a repository hook for this repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - +/* + DefaultApiService + +Retrieve the settings for a repository hook for this repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. + @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) GetSettings(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7444,18 +7806,21 @@ func (a *DefaultApiService) GetSettings(projectKey, repositorySlug string, hookK } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the settings for a repository hook for this project. <p> The authenticated user must have <strong>PROJECT_READ</strong> permission for the specified project to call this resource. 
@param hookKey -@return */ +@return +*/ func (a *DefaultApiService) GetSettings_22(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7502,20 +7867,22 @@ func (a *DefaultApiService) GetSettings_22(projectKey, repositorySlug string, ho } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Get the statistics for a specific webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Get the statistics for a specific webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. - @param webhookId id of the webhook - @param optional (nil or map[string]interface{}) with one or more of: - @param "event" (string) the string id of a specific event to retrieve the last invocation for. May be empty, in which case all events are considered - @return */ + @param webhookId id of the webhook + @param optional (nil or map[string]interface{}) with one or more of: + @param "event" (string) the string id of a specific event to retrieve the last invocation for. May be empty, in which case all events are considered + @return +*/ func (a *DefaultApiService) GetStatistics(projectKey, repositorySlug string, webhookId int32, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7569,18 +7936,21 @@ func (a *DefaultApiService) GetStatistics(projectKey, repositorySlug string, web } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Get the statistics summary for a specific webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param webhookId id of the webhook -@return */ +@return +*/ func (a *DefaultApiService) GetStatisticsSummary(projectKey, repositorySlug string, webhookId int32) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7627,18 +7997,21 @@ func (a *DefaultApiService) GetStatisticsSummary(projectKey, repositorySlug stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a tag in the specified repository. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the context repository to call this resource. 
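Webhook statistics are addressed by the numeric webhook id (int32 here), optionally narrowed to a single event. A sketch reusing the earlier client; the event id and coordinates are placeholders:

// webhookStats pulls the last-invocation statistics and the summary for one webhook.
func webhookStats(api *bbv1.DefaultApiService, webhookID int32) error {
	if _, err := api.GetStatistics("PROJ", "my-repo", webhookID, map[string]interface{}{
		"event": "repo:refs_changed", // empty string would mean "all events"
	}); err != nil {
		return err
	}
	_, err := api.GetStatisticsSummary("PROJ", "my-repo", webhookID)
	return err
}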
@param name the name of the tag to be retrieved -@return */ +@return +*/ func (a *DefaultApiService) GetTag(projectKey, repositorySlug string, name string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7685,20 +8058,22 @@ func (a *DefaultApiService) GetTag(projectKey, repositorySlug string, name strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the tags matching the supplied <strong>filterText</strong> param. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the context repository to call this resource. +/* + DefaultApiService + Retrieve the tags matching the supplied <strong>filterText</strong> param. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the context repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filterText" (string) the text to match on - @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filterText" (string) the text to match on + @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) + @return +*/ func (a *DefaultApiService) GetTags(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7757,19 +8132,22 @@ func (a *DefaultApiService) GetTags(project, repository string, localVarOptional } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a existing task. 
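Tag listing supports the two documented options, filterText and orderBy (ALPHABETICAL or MODIFICATION). For example, under the same assumptions:

// releaseTags lists tags whose names contain "release", most recently updated first.
func releaseTags(api *bbv1.DefaultApiService) (*bbv1.APIResponse, error) {
	return api.GetTags("PROJ", "my-repo", map[string]interface{}{
		"filterText": "release",
		"orderBy":    "MODIFICATION",
	})
}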
@param taskId the id identifying the task to delete -@return */ -func (a *DefaultApiService) GetTask(taskId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) GetTask(taskId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -7813,7 +8191,7 @@ func (a *DefaultApiService) GetTask(taskId int64) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -7868,7 +8246,132 @@ func (a *DefaultApiService) GetSSHKeys(user string) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + + return NewBitbucketAPIResponse(localVarHTTPResponse) +} + +func (a *DefaultApiService) GetSSHRepoKeys(projectKey, repositorySlug, sshKeyFilter string, repoKeysOnly, writeKeysOnly bool) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/keys/1.0/projects/{projectKey}/repos/{repositorySlug}/ssh" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if writeKeysOnly { + localVarQueryParams.Set("permission", "REPO_WRITE") + } + if repoKeysOnly { + localVarQueryParams.Add("effective", "true") + } + if sshKeyFilter != "" { + localVarQueryParams.Add("filter", sshKeyFilter) + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewAPIResponseWithError(localVarHTTPResponse, nil, err) + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + 
+ return NewBitbucketAPIResponse(localVarHTTPResponse) +} + +func (a *DefaultApiService) CreateRepoSSHKey(projectKey, repositorySlug, sshPubKey string, isWrite bool) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/keys/1.0/projects/{projectKey}/repos/{repositorySlug}/ssh" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + permission := "REPO_READ" + if isWrite { + permission = "REPO_WRITE" + } + localVarPostBody = map[string]interface{}{ + "key": map[string]interface{}{ + "text": sshPubKey, + }, + "permission": permission, + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -7902,7 +8405,9 @@ func (a *DefaultApiService) CreateSSHKey(localVarOptionals map[string]interface{ localVarQueryParams.Add("user", parameterToString(localVarTempParam, "")) } if localVarTempParam, localVarOk := localVarOptionals["text"].(string); localVarOk { - localVarFormParams.Add("text", parameterToString(localVarTempParam, "")) + localVarPostBody = map[string]interface{}{ + "text": localVarTempParam, + } } // to determine the Content-Type header @@ -7933,17 +8438,20 @@ func (a *DefaultApiService) CreateSSHKey(localVarOptionals map[string]interface{ } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve the user matching the supplied <strong>userSlug</strong>. 
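GetSSHRepoKeys and CreateRepoSSHKey are the genuinely new additions in this hunk: the first maps its boolean flags onto the permission/effective query parameters, the second posts a JSON body carrying the key text and either REPO_READ or REPO_WRITE. A usage sketch, reusing the client from the first example; the coordinates and key material are placeholders:

// repoSSHKeys lists the repository's SSH access keys and then adds a read-only one.
func repoSSHKeys(api *bbv1.DefaultApiService, pubKey string) error {
	// sshKeyFilter="" skips the filter parameter, repoKeysOnly=true sends
	// effective=true, writeKeysOnly=false leaves the permission parameter unset.
	if _, err := api.GetSSHRepoKeys("PROJ", "my-repo", "", true, false); err != nil {
		return err
	}
	// isWrite=false posts the key with permission REPO_READ.
	_, err := api.CreateRepoSSHKey("PROJ", "my-repo", pubKey, false)
	return err
}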
<p> -@return */ +@return +*/ func (a *DefaultApiService) GetUser(username string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -7989,17 +8497,20 @@ func (a *DefaultApiService) GetUser(username string) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a map of user setting key values for a specific user identified by the user slug. <p> -@return */ +@return +*/ func (a *DefaultApiService) GetUserSettings(userSlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8044,19 +8555,21 @@ func (a *DefaultApiService) GetUserSettings(userSlug string) (*APIResponse, erro } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of users. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. +/* + DefaultApiService + Retrieve a page of users. <p> The authenticated user must have the <strong>LICENSED_USER</strong> permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only users with usernames, display name or email addresses containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only users with usernames, display name or email addresses containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsers(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8119,19 +8632,21 @@ func (a *DefaultApiService) GetUsers(localVarOptionals map[string]interface{}) ( } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of users that have been granted at least one global permission. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of users that have been granted at least one global permission. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. 
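The user lookups take either a positional slug/username or, for the paged listing, the single documented filter option. A brief sketch on the same assumed client:

// findUsers returns users whose username, display name or email contains the text,
// then reads one user's settings map.
func findUsers(api *bbv1.DefaultApiService, text, userSlug string) error {
	if _, err := api.GetUsers(map[string]interface{}{"filter": text}); err != nil {
		return err
	}
	_, err := api.GetUserSettings(userSlug)
	return err
}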
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only user names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only user names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithAnyPermission(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8194,19 +8709,21 @@ func (a *DefaultApiService) GetUsersWithAnyPermission(localVarOptionals map[stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of users that have been granted at least one permission for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. +/* + DefaultApiService + Retrieve a page of users that have been granted at least one permission for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithAnyPermission_23(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8270,19 +8787,21 @@ func (a *DefaultApiService) GetUsersWithAnyPermission_23(projectKey string, loca } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of users that have been granted at least one permission for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. +/* + DefaultApiService + Retrieve a page of users that have been granted at least one permission for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithAnyPermission_24(projectKey string, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8347,19 +8866,21 @@ func (a *DefaultApiService) GetUsersWithAnyPermission_24(projectKey string, repo } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of users that have no granted global permissions. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. +/* + DefaultApiService + Retrieve a page of users that have no granted global permissions. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only user names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only user names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithoutAnyPermission(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8422,19 +8943,21 @@ func (a *DefaultApiService) GetUsersWithoutAnyPermission(localVarOptionals map[s } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of <i>licensed</i> users that have no granted permissions for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. +/* + DefaultApiService + Retrieve a page of <i>licensed</i> users that have no granted permissions for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithoutPermission(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8498,19 +9021,21 @@ func (a *DefaultApiService) GetUsersWithoutPermission(projectKey string, localVa } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of <i>licensed</i> users that have no granted permissions for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. +/* + DefaultApiService + Retrieve a page of <i>licensed</i> users that have no granted permissions for the specified repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "filter" (string) if specified only group names containing the supplied string will be returned - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "filter" (string) if specified only group names containing the supplied string will be returned + @return +*/ func (a *DefaultApiService) GetUsersWithoutPermission_25(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8575,17 +9100,20 @@ func (a *DefaultApiService) GetUsersWithoutPermission_25(projectKey, repositoryS } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a page of users, optionally run through provided filters. <p> Only authenticated users may call this resource. <h3>Supported Filters</h3> <p> Filters are provided in query parameters in a standard <code>name=value</code> fashion. The following filters are currently supported: <ul> <li> {@code filter} - return only users, whose username, name or email address <i>contain</i> the {@code filter} value </li> <li> {@code group} - return only users who are members of the given group </li> <li> {@code permission} - the \"root\" of a permission filter, whose value must be a valid global, project, or repository permission. 
Additional filter parameters referring to this filter that specify the resource (project or repository) to apply the filter to must be prefixed with <code>permission.</code>. See the section \"Permission Filters\" below for more details. </li> <li> {@code permission.N} - the \"root\" of a single permission filter, similar to the {@code permission} parameter, where \"N\" is a natural number starting from 1. This allows clients to specify multiple permission filters, by providing consecutive filters as {@code permission.1}, {@code permission.2} etc. Note that the filters numbering has to start with 1 and be continuous for all filters to be processed. The total allowed number of permission filters is 50 and all filters exceeding that limit will be dropped. See the section \"Permission Filters\" below for more details on how the permission filters are processed. </li> </ul> <h3>Permission Filters</h3> <p> The following three sub-sections list parameters supported for permission filters (where <code>[root]</code> is the root permission filter name, e.g. {@code permission}, {@code permission.1} etc.) depending on the permission resource. The system determines which filter to apply (Global, Project or Repository permission) based on the <code>[root]</code> permission value. E.g. {@code ADMIN} is a global permission, {@code PROJECT_ADMIN} is a project permission and {@code REPO_ADMIN} is a repository permission. Note that the parameters for a given resource will be looked up in the order as they are listed below, that is e.g. for a project resource, if both {@code projectId} and {@code projectKey} are provided, the system will use {@code projectId} for the lookup. <h4>Global permissions</h4> <p> The permission value under <code>[root]</code> is the only required and recognized parameter, as global permissions do not apply to a specific resource. <p> Example valid filter: <code>permission=ADMIN</code>. <h4>Project permissions</h4> <ul> <li><code>[root]</code>- specifies the project permission</li> <li><code>[root].projectId</code> - specifies the project ID to lookup the project by</li> <li><code>[root].projectKey</code> - specifies the project key to lookup the project by</li> </ul> <p> Example valid filter: <code>permission.1=PROJECT_ADMIN&permission.1.projectKey=TEST_PROJECT</code>. <h4>Repository permissions</h4> <ul> <li><code>[root]</code>- specifies the repository permission</li> <li><code>[root].projectId</code> - specifies the repository ID to lookup the repository by</li> <li><code>[root].projectKey</code> and <code>[root].repositorySlug</code>- specifies the project key and repository slug to lookup the repository by; both values <i>need to</i> be provided for this look up to be triggered</li> </ul> Example valid filter: <code>permission.2=REPO_ADMIN&permission.2.projectKey=TEST_PROJECT&permission.2.repositorySlug=test_repo</code>. 
-@return */ +@return +*/ func (a *DefaultApiService) GetUsers_26(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8643,20 +9171,22 @@ func (a *DefaultApiService) GetUsers_26(localVarOptionals map[string]interface{} } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Get a webhook by id. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Get a webhook by id. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. - @param webhookId the existing webhook id - @param optional (nil or map[string]interface{}) with one or more of: - @param "statistics" (bool) - @return */ + @param webhookId the existing webhook id + @param optional (nil or map[string]interface{}) with one or more of: + @param "statistics" (bool) + @return +*/ func (a *DefaultApiService) GetWebhook(projectKey, repositorySlug string, webhookId int32, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8710,19 +9240,22 @@ func (a *DefaultApiService) GetWebhook(projectKey, repositorySlug string, webhoo } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieve a pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) Get_27(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Get_27(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -8768,18 +9301,21 @@ func (a *DefaultApiService) Get_27(projectKey, repositorySlug string, pullReques } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Check whether the specified permission is the default permission (granted to all users) for a project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. 
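GetWebhook's only documented option is the statistics flag, and Get_27 (the generated name for the plain pull-request getter) now takes a plain int id like the other signatures in this change. A final sketch under the same assumptions, with placeholder coordinates:

// webhookWithStats fetches one webhook together with its statistics, then a pull request.
func webhookWithStats(api *bbv1.DefaultApiService, webhookID int32, prID int) error {
	if _, err := api.GetWebhook("PROJ", "my-repo", webhookID, map[string]interface{}{
		"statistics": true,
	}); err != nil {
		return err
	}
	_, err := api.Get_27("PROJ", "my-repo", prID)
	return err
}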
@param permission the permission to grant Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> -@return */ +@return +*/ func (a *DefaultApiService) HasAllUserPermission(projectKey, permission string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -8825,19 +9361,22 @@ func (a *DefaultApiService) HasAllUserPermission(projectKey, permission string) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Retrieves a page of the participants for a given pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. @param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) ListParticipants(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) ListParticipants(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -8883,20 +9422,22 @@ func (a *DefaultApiService) ListParticipants(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Merge the specified pull request. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Merge the specified pull request. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. - @param pullRequestId the ID of the pull request within the repository - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. - @return */ + @param pullRequestId the ID of the pull request within the repository + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. 
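A hedged sketch (not part of the diff) of calling Merge, whose signature appears in the next hunk; it assumes the optional version is accepted as an int32, that a nil post body is acceptable, and that application/json is a reasonable content type. All identifiers and numbers are placeholders.

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// mergePullRequest merges pull request 42 at version 3; both numbers are placeholders,
// and the version must match the server-side value or the operation fails.
func mergePullRequest(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.Merge(
		"PROJ", "my-repo", 42,
		map[string]interface{}{"version": int32(3)}, // current PR version, per the doc above
		nil,                          // no extra request body
		[]string{"application/json"}, // assumed content type
	)
}
```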
+ @return +*/ func (a *DefaultApiService) Merge(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}, localVarPostBody interface{}, localVarHTTPContentTypes []string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -8947,20 +9488,22 @@ func (a *DefaultApiService) Merge(projectKey, repositorySlug string, pullRequest } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Grant or revoke a project permission to all users, i.e. set the default permission. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. +/* + DefaultApiService + Grant or revoke a project permission to all users, i.e. set the default permission. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. - @param permission the permission to grant Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> - @param optional (nil or map[string]interface{}) with one or more of: - @param "allow" (bool) <em>true</em> to grant the specified permission to all users, or <em>false</em> to revoke it - @return */ + @param permission the permission to grant Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> + @param optional (nil or map[string]interface{}) with one or more of: + @param "allow" (bool) <em>true</em> to grant the specified permission to all users, or <em>false</em> to revoke it + @return +*/ func (a *DefaultApiService) ModifyAllUserPermission(projectKey, repositorySlug string, permission string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9014,21 +9557,23 @@ func (a *DefaultApiService) ModifyAllUserPermission(projectKey, repositorySlug s } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Preview the generated html for given markdown contents. <p> Only authenticated users may call this resource. +/* + DefaultApiService + Preview the generated html for given markdown contents. <p> Only authenticated users may call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "urlMode" (string) (Optional) The UrlMode used when building the url. One of: ABSOLUTE, RELATIVE and CONFIGURED By default this is RELATIVE. - @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. By default this is false which reflects the standard markdown specification. - @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. 
- @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "urlMode" (string) (Optional) The UrlMode used when building the url. One of: ABSOLUTE, RELATIVE and CONFIGURED By default this is RELATIVE. + @param "hardwrap" (bool) (Optional) Whether the markup implementation should convert newlines to breaks. By default this is false which reflects the standard markdown specification. + @param "htmlEscape" (bool) (Optional) true if HTML should be escaped in the input markup, false otherwise. + @return +*/ func (a *DefaultApiService) Preview(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9091,17 +9636,20 @@ func (a *DefaultApiService) Preview(localVarOptionals map[string]interface{}) (* } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Remove a user from a group. This is very similar to <code>groups/remove-user</code>, but with the <em>context</em> and <em>itemName</em> attributes of the supplied request entity reversed. On the face of it this may appear redundant, but it facilitates a specific UI component in Stash. <p> In the request entity, the <em>context</em> attribute is the user and the <em>itemName</em> is the group. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) RemoveGroupFromUser() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9145,17 +9693,20 @@ func (a *DefaultApiService) RemoveGroupFromUser() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + <strong>Deprecated since 2.10</strong>. Use /rest/users/remove-groups instead. <p> Remove a user from a group. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. <p> In the request entity, the <em>context</em> attribute is the group and the <em>itemName</em> is the user. -@return */ +@return +*/ func (a *DefaultApiService) RemoveUserFromGroup() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9199,17 +9750,20 @@ func (a *DefaultApiService) RemoveUserFromGroup() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Rename a user. 
<p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) RenameUser() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9253,21 +9807,23 @@ func (a *DefaultApiService) RenameUser() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Re-open a declined pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Re-open a declined pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param pullRequestId the id of the pull request within the repository - @param optional (nil or map[string]interface{}) with one or more of: - @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. - @return */ -func (a *DefaultApiService) Reopen(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param pullRequestId the id of the pull request within the repository + @param optional (nil or map[string]interface{}) with one or more of: + @param "version" (int32) the current version of the pull request. If the server's version isn't the same as the specified version the operation will fail. To determine the current version of the pull request it should be fetched from the server prior to this operation. Look for the 'version' attribute in the returned JSON structure. + @return +*/ +func (a *DefaultApiService) Reopen(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") localVarPostBody interface{} @@ -9320,18 +9876,21 @@ func (a *DefaultApiService) Reopen(projectKey, repositorySlug string, pullReques } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + If a create or fork operation fails, calling this method will clean up the broken repository and try again. The repository must be in an INITIALISATION_FAILED state. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. 
@param projectKey the parent project key -@return */ +@return +*/ func (a *DefaultApiService) RetryCreateRepository(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -9377,19 +9936,21 @@ func (a *DefaultApiService) RetryCreateRepository(projectKey, repositorySlug str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Revoke all global permissions for a group. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>greater or equal permissions than the current permission level of the group (a user may not demote the permission level of a group with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not revoke a group's permissions if their own permission level would be reduced as a result. +/* + DefaultApiService + Revoke all global permissions for a group. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>greater or equal permissions than the current permission level of the group (a user may not demote the permission level of a group with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not revoke a group's permissions if their own permission level would be reduced as a result. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the group - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the group + @return +*/ func (a *DefaultApiService) RevokePermissionsForGroup(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9440,19 +10001,21 @@ func (a *DefaultApiService) RevokePermissionsForGroup(localVarOptionals map[stri } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Revoke all permissions for the specified project for a group. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. <p> In addition, a user may not revoke a group's permissions if it will reduce their own permission level. +/* + DefaultApiService + Revoke all permissions for the specified project for a group. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. <p> In addition, a user may not revoke a group's permissions if it will reduce their own permission level. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the group - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the group + @return +*/ func (a *DefaultApiService) RevokePermissionsForGroup_28(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9504,19 +10067,21 @@ func (a *DefaultApiService) RevokePermissionsForGroup_28(projectKey string, loca } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Revoke all permissions for the specified repository for a group. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. <p> In addition, a user may not revoke a group's permissions if it will reduce their own permission level. +/* + DefaultApiService + Revoke all permissions for the specified repository for a group. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. <p> In addition, a user may not revoke a group's permissions if it will reduce their own permission level. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the group - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the group + @return +*/ func (a *DefaultApiService) RevokePermissionsForGroup_29(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9569,19 +10134,21 @@ func (a *DefaultApiService) RevokePermissionsForGroup_29(projectKey, repositoryS } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Revoke all global permissions for a user. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>greater or equal permissions than the current permission level of the user (a user may not demote the permission level of a user with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote their own permission level. +/* + DefaultApiService + Revoke all global permissions for a user. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>greater or equal permissions than the current permission level of the user (a user may not demote the permission level of a user with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote their own permission level. 
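A small sketch (not part of the diff) of revoking a group's repository permissions via RevokePermissionsForGroup_29, whose signature follows just below; the group name and repository coordinates are placeholders.

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// revokeGroupRepoAccess removes every repository permission the "developers"
// group holds on PROJ/my-repo; all three names are placeholders.
func revokeGroupRepoAccess(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.RevokePermissionsForGroup_29("PROJ", "my-repo", map[string]interface{}{
		"name": "developers",
	})
}
```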
- @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the user - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the user + @return +*/ func (a *DefaultApiService) RevokePermissionsForUser(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9632,19 +10199,21 @@ func (a *DefaultApiService) RevokePermissionsForUser(localVarOptionals map[strin } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Revoke all permissions for the specified project for a user. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. <p> In addition, a user may not revoke their own project permissions if they do not have a higher global permission. +/* + DefaultApiService + Revoke all permissions for the specified project for a user. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. <p> In addition, a user may not revoke their own project permissions if they do not have a higher global permission. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the user - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the user + @return +*/ func (a *DefaultApiService) RevokePermissionsForUser_30(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9696,19 +10265,21 @@ func (a *DefaultApiService) RevokePermissionsForUser_30(projectKey string, local } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Revoke all permissions for the specified repository for a user. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. <p> In addition, a user may not revoke their own repository permissions if they do not have a higher project or global permission. +/* + DefaultApiService + Revoke all permissions for the specified repository for a user. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. <p> In addition, a user may not revoke their own repository permissions if they do not have a higher project or global permission. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the name of the user - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the name of the user + @return +*/ func (a *DefaultApiService) RevokePermissionsForUser_31(projectKey string, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -9761,21 +10332,23 @@ func (a *DefaultApiService) RevokePermissionsForUser_31(projectKey string, repos } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Retrieve a page of participant users for all the pull requests to or from the specified repository. <p> <p> Optionally clients can specify following filters. +/* + DefaultApiService + Retrieve a page of participant users for all the pull requests to or from the specified repository. <p> <p> Optionally clients can specify following filters. - @param optional (nil or map[string]interface{}) with one or more of: - @param "direction" (string) (optional, defaults to <strong>INCOMING</strong>) the direction relative to the specified repository. Either <strong>INCOMING</strong> or <strong>OUTGOING</strong>. - @param "filter" (string) (optional) return only users, whose username, name or email address <i>contain</i> the {@code filter} value - @param "role" (string) (optional) The role associated with the pull request participant. This must be one of {@code AUTHOR}, {@code REVIEWER}, or{@code PARTICIPANT} - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "direction" (string) (optional, defaults to <strong>INCOMING</strong>) the direction relative to the specified repository. Either <strong>INCOMING</strong> or <strong>OUTGOING</strong>. + @param "filter" (string) (optional) return only users, whose username, name or email address <i>contain</i> the {@code filter} value + @param "role" (string) (optional) The role associated with the pull request participant. This must be one of {@code AUTHOR}, {@code REVIEWER}, or{@code PARTICIPANT} + @return +*/ func (a *DefaultApiService) Search(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -9852,18 +10425,21 @@ func (a *DefaultApiService) Search(projectKey, repositorySlug string, localVarOp } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the default branch of a repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. 
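The hunk just below extends SetDefaultBranch with a branchRef argument that becomes the {"id": ...} request body; a usage sketch (not part of the diff), with placeholder project, repository, and ref names:

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// setMainAsDefault points the repository's default branch at refs/heads/main,
// using the new branchRef parameter that is posted as the {"id": ...} payload.
func setMainAsDefault(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.SetDefaultBranch("PROJ", "my-repo", "refs/heads/main")
}
```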
-@return */ -func (a *DefaultApiService) SetDefaultBranch(projectKey, repositorySlug string) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) SetDefaultBranch(projectKey, repositorySlug, branchRef string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -9871,6 +10447,10 @@ func (a *DefaultApiService) SetDefaultBranch(projectKey, repositorySlug string) localVarFileBytes []byte ) + localVarPostBody = map[string]interface{}{ + "id": branchRef, + } + // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches/default" localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) @@ -9908,19 +10488,22 @@ func (a *DefaultApiService) SetDefaultBranch(projectKey, repositorySlug string) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Set the current log level for a given logger. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. @param levelName the level to set the logger to. Either TRACE, DEBUG, INFO, WARN or ERROR @param loggerName the name of the logger. -@return */ +@return +*/ func (a *DefaultApiService) SetLevel(levelName string, loggerName string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -9966,17 +10549,20 @@ func (a *DefaultApiService) SetLevel(levelName string, loggerName string) (*APIR } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Updates the mail configuration The authenticated user must have the <strong>SYS_ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) SetMailConfig() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10020,18 +10606,21 @@ func (a *DefaultApiService) SetMailConfig() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the pull request merge strategies for the context repository. <p> The authenticated user must have <strong>ADMIN</strong> permission for the context repository to call this resource. 
<p> Only the strategies provided will be enabled, only one may be set to default <p> An explicitly set pull request merge strategy configuration can be deleted by POSTing a document with an empty \"mergeConfig\" attribute. i.e: <pre> { \"mergeConfig\": { } } </pre> Upon completion of this request, the effective configuration will be the default configuration. @param scmId the id of the scm to get strategies for -@return */ +@return +*/ func (a *DefaultApiService) SetMergeConfig(scmId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -10076,20 +10665,22 @@ func (a *DefaultApiService) SetMergeConfig(scmId string) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Promote or demote a group's permission level for the specified repository. Available repository permissions are: <ul> <li>REPO_READ</li> <li>REPO_WRITE</li> <li>REPO_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Using+repository+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. +/* + DefaultApiService + Promote or demote a group's permission level for the specified repository. Available repository permissions are: <ul> <li>REPO_READ</li> <li>REPO_WRITE</li> <li>REPO_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Using+repository+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "permission" (string) the permission to grant - @param "name" (string) the names of the groups - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "permission" (string) the permission to grant + @param "name" (string) the names of the groups + @return +*/ func (a *DefaultApiService) SetPermissionForGroup(projectKey string, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10148,7 +10739,7 @@ func (a *DefaultApiService) SetPermissionForGroup(projectKey string, repositoryS } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -10156,13 +10747,15 @@ func (a *DefaultApiService) SetPermissionForGroup(projectKey string, repositoryS return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Promote or demote a user's global permission level. Available global permissions are: <ul> <li>LICENSED_USER</li> <li>PROJECT_CREATE</li> <li>ADMIN</li> <li>SYS_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Global+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>the permission they are attempting to grant or higher; and</li> <li>greater or equal permissions than the current permission level of the group (a user may not demote the permission level of a group with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. +/* + DefaultApiService + Promote or demote a user's global permission level. Available global permissions are: <ul> <li>LICENSED_USER</li> <li>PROJECT_CREATE</li> <li>ADMIN</li> <li>SYS_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Global+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>the permission they are attempting to grant or higher; and</li> <li>greater or equal permissions than the current permission level of the group (a user may not demote the permission level of a group with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "permission" (string) the permission to grant - @param "name" (string) the names of the groups - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "permission" (string) the permission to grant + @param "name" (string) the names of the groups + @return +*/ func (a *DefaultApiService) SetPermissionForGroups(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10219,20 +10812,22 @@ func (a *DefaultApiService) SetPermissionForGroups(localVarOptionals map[string] } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Promote or demote a group's permission level for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. +/* + DefaultApiService + Promote or demote a group's permission level for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. In addition, a user may not demote a group's permission level if their own permission level would be reduced as a result. - @param optional (nil or map[string]interface{}) with one or more of: - @param "permission" (string) The permission to grant. See the [permissions documentation](https://confluence.atlassian.com/display/BitbucketServer/Using+project+permissions) for a detailed explanation of what each permission entails. Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> - @param "name" (string) the names of the groups - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "permission" (string) The permission to grant. See the [permissions documentation](https://confluence.atlassian.com/display/BitbucketServer/Using+project+permissions) for a detailed explanation of what each permission entails. 
Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> + @param "name" (string) the names of the groups + @return +*/ func (a *DefaultApiService) SetPermissionForGroups_32(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10290,22 +10885,24 @@ func (a *DefaultApiService) SetPermissionForGroups_32(projectKey string, localVa } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Promote or demote a user's permission level for the specified repository. Available repository permissions are: <ul> <li>REPO_READ</li> <li>REPO_WRITE</li> <li>REPO_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Using+repository+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. In addition, a user may not reduce their own permission level unless they have a project or global permission that already implies that permission. +/* + DefaultApiService + Promote or demote a user's permission level for the specified repository. Available repository permissions are: <ul> <li>REPO_READ</li> <li>REPO_WRITE</li> <li>REPO_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Using+repository+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository or a higher project or global permission to call this resource. In addition, a user may not reduce their own permission level unless they have a project or global permission that already implies that permission. 
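A sketch (not part of the diff) of granting a repository permission through SetPermissionForUser, whose parameter list follows below; the user name, permission level, and repository coordinates are placeholders.

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// grantRepoWrite promotes the placeholder user "jdoe" to REPO_WRITE on PROJ/my-repo.
func grantRepoWrite(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.SetPermissionForUser("PROJ", "my-repo", map[string]interface{}{
		"name":       "jdoe",
		"permission": "REPO_WRITE",
	})
}
```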
- @param "projectKey" (string) name of the project to grant permission to - @param "repositorySlug" (string) repository slug - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the names of the users - @param "permission" (string) the permission to grant - @return */ + @param "projectKey" (string) name of the project to grant permission to + @param "repositorySlug" (string) repository slug + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the names of the users + @param "permission" (string) the permission to grant + @return +*/ func (a *DefaultApiService) SetPermissionForUser(projectKey string, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10364,20 +10961,22 @@ func (a *DefaultApiService) SetPermissionForUser(projectKey string, repositorySl } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewAPIResponse(localVarHTTPResponse), nil } -/* DefaultApiService - Promote or demote the global permission level of a user. Available global permissions are: <ul> <li>LICENSED_USER</li> <li>PROJECT_CREATE</li> <li>ADMIN</li> <li>SYS_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Global+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>the permission they are attempting to grant; and</li> <li>greater or equal permissions than the current permission level of the user (a user may not demote the permission level of a user with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote their own permission level. +/* + DefaultApiService + Promote or demote the global permission level of a user. Available global permissions are: <ul> <li>LICENSED_USER</li> <li>PROJECT_CREATE</li> <li>ADMIN</li> <li>SYS_ADMIN</li> </ul> See the <a href=\"https://confluence.atlassian.com/display/BitbucketServer/Global+permissions\">Bitbucket Server documentation</a> for a detailed explanation of what each permission entails. <p> The authenticated user must have: <ul> <li><strong>ADMIN</strong> permission or higher; and</li> <li>the permission they are attempting to grant; and</li> <li>greater or equal permissions than the current permission level of the user (a user may not demote the permission level of a user with higher permissions than them)</li> </ul> to call this resource. In addition, a user may not demote their own permission level. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the names of the users - @param "permission" (string) the permission to grant - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the names of the users + @param "permission" (string) the permission to grant + @return +*/ func (a *DefaultApiService) SetPermissionForUsers(localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10434,20 +11033,22 @@ func (a *DefaultApiService) SetPermissionForUsers(localVarOptionals map[string]i } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Promote or demote a user's permission level for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. In addition, a user may not reduce their own permission level unless they have a global permission that already implies that permission. +/* + DefaultApiService + Promote or demote a user's permission level for the specified project. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project or a higher global permission to call this resource. In addition, a user may not reduce their own permission level unless they have a global permission that already implies that permission. - @param optional (nil or map[string]interface{}) with one or more of: - @param "name" (string) the names of the users - @param "permission" (string) the permission to grant. See the [permissions documentation](https://confluence.atlassian.com/display/BitbucketServer/Using+project+permissions) for a detailed explanation of what each permission entails. Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the names of the users + @param "permission" (string) the permission to grant. See the [permissions documentation](https://confluence.atlassian.com/display/BitbucketServer/Using+project+permissions) for a detailed explanation of what each permission entails. 
Available project permissions are: <ul> <li>PROJECT_READ</li> <li>PROJECT_WRITE</li> <li>PROJECT_ADMIN</li> </ul> + @return +*/ func (a *DefaultApiService) SetPermissionForUsers_33(projectKey string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10501,22 +11102,97 @@ func (a *DefaultApiService) SetPermissionForUsers_33(projectKey string, localVar localVarHTTPResponse, err := a.client.callAPI(r) if err != nil || localVarHTTPResponse == nil { - return NewAPIResponseWithError(localVarHTTPResponse, nil, err) + return NewAPIResponseWithError(localVarHTTPResponse, nil, err) + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + + return NewAPIResponse(localVarHTTPResponse), nil +} + +/* + DefaultApiService + + @param optional (nil or map[string]interface{}) with one or more of: + @param "name" (string) the names of the users + @param "permission" (string) the permission to grant + @return +*/ +func (a *DefaultApiService) CreateAccessToken(userSlug, tokenName string, repoAdmin, projectRead bool) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Put") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + // /rest/access-tokens/1.0/users/{userSlug} + localVarPath := a.client.cfg.BasePath + "/access-tokens/1.0/users/" + userSlug + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + + permissions := make([]string, 0) + if repoAdmin { + permissions = append(permissions, "REPO_ADMIN") + } + if projectRead { + permissions = append(permissions, "PROJECT_READ") + } + localVarPostBody = map[string]interface{}{ + "name": tokenName, + "permissions": permissions, + } + + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } - return NewAPIResponse(localVarHTTPResponse), nil + return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Set the 
current log level for the root logger. <p> The authenticated user must have <strong>ADMIN</strong> permission or higher to call this resource. @param levelName the level to set the logger to. Either TRACE, DEBUG, INFO, WARN or ERROR -@return */ +@return +*/ func (a *DefaultApiService) SetRootLevel(levelName string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10561,17 +11237,20 @@ func (a *DefaultApiService) SetRootLevel(levelName string) (*APIResponse, error) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Updates the server email address The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) SetSenderAddress() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10615,18 +11294,21 @@ func (a *DefaultApiService) SetSenderAddress() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Modify the settings for a repository hook for this repository. <p> The service will reject any settings which are too large, the current limit is 32KB once serialized. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. <p> A JSON document can be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. @param hookKey -@return */ +@return +*/ func (a *DefaultApiService) SetSettings(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10673,18 +11355,21 @@ func (a *DefaultApiService) SetSettings(projectKey, repositorySlug string, hookK } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Modify the settings for a repository hook for this project. <p> The service will reject any settings which are too large, the current limit is 32KB once serialized. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. <p> A JSON document can be provided to use as the settings for the hook. These structure and validity of the document is decided by the plugin providing the hook. 
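A usage sketch (not part of the diff) for the CreateAccessToken helper added a little earlier in this hunk, which PUTs a token name plus a REPO_ADMIN/PROJECT_READ permission list to /access-tokens/1.0/users/{userSlug}; the user slug and token name are placeholders.

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// createPACToken requests a personal access token carrying REPO_ADMIN and
// PROJECT_READ for the placeholder user "jdoe".
func createPACToken(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.CreateAccessToken("jdoe", "pipelines-as-code", true, true)
}
```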
@param hookKey -@return */ +@return +*/ func (a *DefaultApiService) SetSettings_34(projectKey, repositorySlug string, hookKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -10731,19 +11416,21 @@ func (a *DefaultApiService) SetSettings_34(projectKey, repositorySlug string, ho } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams files in the requested <code>path</code> with the last commit to modify each file. Commit modifications are traversed starting from the <code>at</code> commit or, if not specified, from the tip of the default branch. <p> Unless the repository is public, the authenticated user must have <b>REPO_READ</b> access to call this resource. +/* + DefaultApiService + Streams files in the requested <code>path</code> with the last commit to modify each file. Commit modifications are traversed starting from the <code>at</code> commit or, if not specified, from the tip of the default branch. <p> Unless the repository is public, the authenticated user must have <b>REPO_READ</b> access to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit to use as the starting point when listing files and calculating modifications - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit to use as the starting point when listing files and calculating modifications + @return +*/ func (a *DefaultApiService) Stream(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -10796,21 +11483,23 @@ func (a *DefaultApiService) Stream(projectKey, repositorySlug string, localVarOp } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Gets the file changes available in the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. +/* + DefaultApiService + Gets the file changes available in the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. 
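A sketch (not part of the diff) of the commit-to-commit variant of StreamChanges documented below; it assumes the optional from/to values may be branch names, as the parameter descriptions state, and uses placeholder refs and repository coordinates.

```go
package main

import (
	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// diffBranches streams the file changes reachable from feature/x but not from main;
// both ref names are placeholders.
func diffBranches(api *bitbucketv1.DefaultApiService) (*bitbucketv1.APIResponse, error) {
	return api.StreamChanges("PROJ", "my-repo", map[string]interface{}{
		"from": "feature/x",
		"to":   "main",
	})
}
```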
- @param optional (nil or map[string]interface{}) with one or more of: - @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> + @return +*/ func (a *DefaultApiService) StreamChanges(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -10875,23 +11564,25 @@ func (a *DefaultApiService) StreamChanges(projectKey, repositorySlug string, loc } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Gets changes for the specified PullRequest. <p> If the {@code changeScope} query parameter is set to {@code unreviewed}, the application will attempt to stream unreviewed changes based on the {@code lastReviewedCommit} of the current user, which are the changes between the {@code lastReviewedCommit} and the latest commit of the source branch. The current user is considered to <i>not</i> have any unreviewed changes for the pull request when the {@code lastReviewedCommit} is either {@code null} (everything is unreviewed, so all changes are streamed), equal to the latest commit of the source branch (everything is reviewed), or no longer on the source branch (the source branch has been rebased). In these cases, the application will fall back to streaming all changes (the default), which is the effective diff for the pull request. The type of changes streamed can be determined by the {@code changeScope} parameter included in the properties map of the response. <p> Note: This resource is currently <i>not paged</i>. The server will return at most one page. The server will truncate the number of changes to either the request's page limit or an internal maximum, whichever is smaller. The start parameter of the page request is also ignored. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Gets changes for the specified PullRequest. 
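The optional-parameter convention used by StreamChanges above (and the other Stream* methods that follow) is a plain map keyed by the names from the doc comment; a hedged sketch with placeholder project key, repository slug and ref names:

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// compareRefs sketches StreamChanges as declared above. The "from"/"to" keys
// are taken from the doc comment; every concrete value here is a placeholder.
func compareRefs(api *bitbucketv1.DefaultApiService) error {
	opts := map[string]interface{}{
		"from": "refs/heads/feature", // source commit or ref
		"to":   "refs/heads/main",    // target commit or ref
	}
	_, err := api.StreamChanges("PROJ", "my-repo", opts)
	return err
}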
<p> If the {@code changeScope} query parameter is set to {@code unreviewed}, the application will attempt to stream unreviewed changes based on the {@code lastReviewedCommit} of the current user, which are the changes between the {@code lastReviewedCommit} and the latest commit of the source branch. The current user is considered to <i>not</i> have any unreviewed changes for the pull request when the {@code lastReviewedCommit} is either {@code null} (everything is unreviewed, so all changes are streamed), equal to the latest commit of the source branch (everything is reviewed), or no longer on the source branch (the source branch has been rebased). In these cases, the application will fall back to streaming all changes (the default), which is the effective diff for the pull request. The type of changes streamed can be determined by the {@code changeScope} parameter included in the properties map of the response. <p> Note: This resource is currently <i>not paged</i>. The server will return at most one page. The server will truncate the number of changes to either the request's page limit or an internal maximum, whichever is smaller. The start parameter of the page request is also ignored. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "changeScope" (string) {@code UNREVIEWED} to stream the unreviewed changes for the current user (if they exist); {@code RANGE} to stream changes between two arbitrary commits (requires {@code sinceId} and {@code untilId}); otherwise {@code ALL} to stream all changes (the default) - @param "sinceId" (string) the since commit hash to stream changes for a {@code RANGE} arbitrary change scope - @param "untilId" (string) the until commit hash to stream changes for a {@code RANGE} arbitrary change scope - @param "withComments" (bool) {@code true} to apply comment counts in the changes (the default); otherwise, {@code false} to stream changes without comment counts - @return */ -func (a *DefaultApiService) StreamChanges_35(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param optional (nil or map[string]interface{}) with one or more of: + @param "changeScope" (string) {@code UNREVIEWED} to stream the unreviewed changes for the current user (if they exist); {@code RANGE} to stream changes between two arbitrary commits (requires {@code sinceId} and {@code untilId}); otherwise {@code ALL} to stream all changes (the default) + @param "sinceId" (string) the since commit hash to stream changes for a {@code RANGE} arbitrary change scope + @param "untilId" (string) the until commit hash to stream changes for a {@code RANGE} arbitrary change scope + @param "withComments" (bool) {@code true} to apply comment counts in the changes (the default); otherwise, {@code false} to stream changes without comment counts + @return +*/ +func (a *DefaultApiService) StreamChanges_35(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -10962,21 +11653,23 @@ func (a *DefaultApiService) StreamChanges_35(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, 
_ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Gets the commits accessible from the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. +/* + DefaultApiService + Gets the commits accessible from the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. - @param optional (nil or map[string]interface{}) with one or more of: - @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> + @return +*/ func (a *DefaultApiService) StreamCommits(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11039,25 +11732,27 @@ func (a *DefaultApiService) StreamCommits(project, repository string, localVarOp } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the diff between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more {@code truncated} flags will be set to {@code true} on the {@code \"segments\"}, {@code \"hunks\"} and {@code \"diffs\"} properties, as well as the top-level object, in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the diff between two provided revisions. 
<p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more {@code truncated} flags will be set to {@code true} on the {@code \"segments\"}, {@code \"hunks\"} and {@code \"diffs\"} properties, as well as the top-level object, in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param commitId - @param optional (nil or map[string]interface{}) with one or more of: - @param "autoSrcPath" (bool) {@code true} to automatically try to find the source path when it's not provided, {@code false} otherwise. Requires the {@code path} to be provided. - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "since" (string) the base revision to diff from. If omitted the parent revision of the until revision is used - @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed - @param "whitespace" (string) optional whitespace flag which can be set to {@code ignore-all} - @param "withComments" (bool) {@code true} to embed comments in the diff (the default); otherwise {@code false} to stream the diff without comments - @return */ + @param commitId + @param optional (nil or map[string]interface{}) with one or more of: + @param "autoSrcPath" (bool) {@code true} to automatically try to find the source path when it's not provided, {@code false} otherwise. Requires the {@code path} to be provided. + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "since" (string) the base revision to diff from. If omitted the parent revision of the until revision is used + @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed + @param "whitespace" (string) optional whitespace flag which can be set to {@code ignore-all} + @param "withComments" (bool) {@code true} to embed comments in the diff (the default); otherwise {@code false} to stream the diff without comments + @return +*/ func (a *DefaultApiService) StreamDiff(projectKey, repositorySlug string, commitId string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11141,27 +11836,29 @@ func (a *DefaultApiService) StreamDiff(projectKey, repositorySlug string, commit } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the diff between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. 
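A sketch of StreamDiff for a single commit, using only option keys listed in the doc comment above; the commit ID and option values are illustrative, not taken from this change.

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// diffCommit sketches a StreamDiff call as declared above; "deadbeefcafe"
// stands in for a real commit ID.
func diffCommit(api *bitbucketv1.DefaultApiService) error {
	opts := map[string]interface{}{
		"whitespace":   "ignore-all", // documented whitespace flag
		"withComments": true,         // embed comments in the diff (the default)
	}
	_, err := api.StreamDiff("PROJ", "my-repo", "deadbeefcafe", opts)
	return err
}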
In the event that the cap is reached, the diff will be cut short and one or more {@code truncated} flags will be set to {@code true} on the {@code \"segments\"}, {@code \"hunks\"} and {@code \"diffs\"} properties, as well as the top-level object, in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the diff between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more {@code truncated} flags will be set to {@code true} on the {@code \"segments\"}, {@code \"hunks\"} and {@code \"diffs\"} properties, as well as the top-level object, in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param commitId - @param path the path to the file which should be diffed (optional) - @param commitId2 - @param optional (nil or map[string]interface{}) with one or more of: - @param "autoSrcPath" (bool) {@code true} to automatically try to find the source path when it's not provided, {@code false} otherwise. Requires the {@code path} to be provided. - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "since" (string) the base revision to diff from. If omitted the parent revision of the until revision is used - @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed - @param "whitespace" (string) optional whitespace flag which can be set to {@code ignore-all} - @param "withComments" (bool) {@code true} to embed comments in the diff (the default); otherwise {@code false} to stream the diff without comments - @return */ + @param commitId + @param path the path to the file which should be diffed (optional) + @param commitId2 + @param optional (nil or map[string]interface{}) with one or more of: + @param "autoSrcPath" (bool) {@code true} to automatically try to find the source path when it's not provided, {@code false} otherwise. Requires the {@code path} to be provided. + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "since" (string) the base revision to diff from. 
If omitted the parent revision of the until revision is used + @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed + @param "whitespace" (string) optional whitespace flag which can be set to {@code ignore-all} + @param "withComments" (bool) {@code true} to embed comments in the diff (the default); otherwise {@code false} to stream the diff without comments + @return +*/ func (a *DefaultApiService) StreamDiff_36(projectKey, repositorySlug string, commitId string, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11246,25 +11943,27 @@ func (a *DefaultApiService) StreamDiff_36(projectKey, repositorySlug string, com } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Gets a diff of the changes available in the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. +/* + DefaultApiService + Gets a diff of the changes available in the {@code from} commit but not in the {@code to} commit. <p> If either the {@code from} or {@code to} commit are not specified, they will be replaced by the default branch of their containing repository. - @param path the path to the file to diff (optional) - @param optional (nil or map[string]interface{}) with one or more of: - @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) - @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> - @param "srcPath" (string) - @param "contextLines" (int32) an optional number of context lines to include around each added or removed lines in the diff - @param "whitespace" (string) an optional whitespace flag which can be set to <code>ignore-all</code> - @return */ + @param path the path to the file to diff (optional) + @param optional (nil or map[string]interface{}) with one or more of: + @param "from" (string) the source commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) + @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> + @param "srcPath" (string) + @param "contextLines" (int32) an optional number of context lines to include around each added or removed lines in the diff + @param "whitespace" (string) an optional whitespace flag which can be set 
to <code>ignore-all</code> + @return +*/ func (a *DefaultApiService) StreamDiff_37(project, repository, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11345,23 +12044,25 @@ func (a *DefaultApiService) StreamDiff_37(project, repository, path string, loca } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the diff for a specified file path between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more <code>truncated</code> flags will be set to <code>true</code> on the segments, hunks and diffs substructures in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the diff for a specified file path between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more <code>truncated</code> flags will be set to <code>true</code> on the segments, hunks and diffs substructures in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "since" (string) the base revision to diff from. If omitted the parent revision of the until revision is used - @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed - @param "until" (string) the target revision to diff to (required) - @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "since" (string) the base revision to diff from. 
If omitted the parent revision of the until revision is used + @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed + @param "until" (string) the target revision to diff to (required) + @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> + @return +*/ func (a *DefaultApiService) StreamDiff_38(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11438,24 +12139,26 @@ func (a *DefaultApiService) StreamDiff_38(projectKey, repositorySlug string, loc } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve the diff for a specified file path between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more <code>truncated</code> flags will be set to <code>true</code> on the segments, hunks and diffs substructures in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve the diff for a specified file path between two provided revisions. <p> <strong>Note:</strong> This resource is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. In the event that the cap is reached, the diff will be cut short and one or more <code>truncated</code> flags will be set to <code>true</code> on the segments, hunks and diffs substructures in the returned JSON response. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param path the path to the file which should be diffed (required) - @param optional (nil or map[string]interface{}) with one or more of: - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "since" (string) the base revision to diff from. If omitted the parent revision of the until revision is used - @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed - @param "until" (string) the target revision to diff to (required) - @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> - @return */ + @param path the path to the file which should be diffed (required) + @param optional (nil or map[string]interface{}) with one or more of: + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "since" (string) the base revision to diff from. 
If omitted the parent revision of the until revision is used + @param "srcPath" (string) the source path for the file, if it was copied, moved or renamed + @param "until" (string) the target revision to diff to (required) + @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> + @return +*/ func (a *DefaultApiService) StreamDiff_39(projectKey, repositorySlug string, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11533,25 +12236,27 @@ func (a *DefaultApiService) StreamDiff_39(projectKey, repositorySlug string, pat } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "diffType" (string) the type of diff being requested. When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff - @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes - @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed - @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes - @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> - @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "diffType" (string) the type of diff being requested. 
When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff + @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes + @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed + @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes + @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> + @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments + @return +*/ func (a *DefaultApiService) GetPullRequestDiffRaw(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11612,25 +12317,27 @@ func (a *DefaultApiService) GetPullRequestDiffRaw(projectKey, repositorySlug str } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewRawAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "diffType" (string) the type of diff being requested. 
When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff - @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes - @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed - @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes - @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> - @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "diffType" (string) the type of diff being requested. When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff + @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes + @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed + @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes + @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> + @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments + @return +*/ func (a *DefaultApiService) GetPullRequestDiff(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11720,27 +12427,29 @@ func (a *DefaultApiService) GetPullRequestDiff(projectKey, repositorySlug string } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. +/* + DefaultApiService + Streams a diff within a pull request. <p> If the specified file has been copied, moved or renamed, the <code>srcPath</code> must also be specified to produce the correct diff. <p> Note: This RESTful endpoint is currently <i>not paged</i>. The server will internally apply a hard cap to the streamed lines, and it is not possible to request subsequent pages if that cap is exceeded. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
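GetPullRequestDiff above keeps its plain int pull request ID; a minimal call might look like the sketch below, with placeholder identifiers and only the "withComments" option from the doc comment.

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// pullRequestDiff sketches fetching a pull request diff with comments
// embedded, per the doc comment above. "PROJ", "my-repo" and 42 are
// placeholders.
func pullRequestDiff(api *bitbucketv1.DefaultApiService) error {
	opts := map[string]interface{}{
		"withComments": true, // embed comments in the diff (the default)
	}
	_, err := api.GetPullRequestDiff("PROJ", "my-repo", 42, opts)
	return err
}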
- @param path the path to the file which should be diffed (optional) - @param optional (nil or map[string]interface{}) with one or more of: - @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff - @param "diffType" (string) the type of diff being requested. When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff - @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes - @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed - @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes - @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> - @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments - @return */ -func (a *DefaultApiService) StreamDiff_41(projectKey, repositorySlug string, pullRequestID int64, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param path the path to the file which should be diffed (optional) + @param optional (nil or map[string]interface{}) with one or more of: + @param "contextLines" (int32) the number of context lines to include around added/removed lines in the diff + @param "diffType" (string) the type of diff being requested. When {@code withComments} is {@code true} this works as a hint to the system to attach the correct set of comments to the diff + @param "sinceId" (string) the since commit hash to stream a diff between two arbitrary hashes + @param "srcPath" (string) the previous path to the file, if the file has been copied, moved or renamed + @param "untilId" (string) the until commit hash to stream a diff between two arbitrary hashes + @param "whitespace" (string) optional whitespace flag which can be set to <code>ignore-all</code> + @param "withComments" (bool) <code>true</code> to embed comments in the diff (the default); otherwise, <code>false</code> to stream the diff without comments + @return +*/ +func (a *DefaultApiService) StreamDiff_41(projectKey, repositorySlug string, pullRequestID int, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -11830,19 +12539,21 @@ func (a *DefaultApiService) StreamDiff_41(projectKey, repositorySlug string, pul } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of files from particular directory of a repository. The search is done recursively, so all files from any sub-directory of the specified directory will be returned. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of files from particular directory of a repository. The search is done recursively, so all files from any sub-directory of the specified directory will be returned. 
<p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref (e.g. a branch or tag) to list the files at. If not specified the default branch will be used instead. - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref (e.g. a branch or tag) to list the files at. If not specified the default branch will be used instead. + @return +*/ func (a *DefaultApiService) StreamFiles(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11860,9 +12571,6 @@ func (a *DefaultApiService) StreamFiles(projectKey, repositorySlug string, local localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if err := typeCheckParameter(localVarOptionals["at"], "string", "at"); err != nil { - return nil, err - } if err := typeCheckParameter(localVarOptionals["limit"], "int", "limit"); err != nil { return nil, err } @@ -11907,20 +12615,22 @@ func (a *DefaultApiService) StreamFiles(projectKey, repositorySlug string, local } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Retrieve a page of files from particular directory of a repository. The search is done recursively, so all files from any sub-directory of the specified directory will be returned. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Retrieve a page of files from particular directory of a repository. The search is done recursively, so all files from any sub-directory of the specified directory will be returned. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the specified repository to call this resource. - @param path the directory to list files for. - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit ID or ref (e.g. a branch or tag) to list the files at. If not specified the default branch will be used instead. - @return */ + @param path the directory to list files for. + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit ID or ref (e.g. a branch or tag) to list the files at. If not specified the default branch will be used instead. 
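The StreamFiles hunk above drops the client-side type check for the "at" option while keeping the check that "limit" is an int; a hedged usage sketch with placeholder values:

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// listFiles sketches a recursive file listing at a given ref. The "at" key is
// documented above; "limit" is still type-checked as an int by the generated
// code. All concrete values are placeholders.
func listFiles(api *bitbucketv1.DefaultApiService) error {
	opts := map[string]interface{}{
		"at":    "refs/heads/main", // branch, tag or commit ID
		"limit": 100,               // page size; must be an int
	}
	_, err := api.StreamFiles("PROJ", "my-repo", opts)
	return err
}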
+ @return +*/ func (a *DefaultApiService) StreamFiles_42(projectKey, repositorySlug string, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -11986,20 +12696,22 @@ func (a *DefaultApiService) StreamFiles_42(projectKey, repositorySlug string, pa } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Streams files in the requested <code>path</code> with the last commit to modify each file. Commit modifications are traversed starting from the <code>at</code> commit or, if not specified, from the tip of the default branch. <p> Unless the repository is public, the authenticated user must have <b>REPO_READ</b> access to call this resource. +/* + DefaultApiService + Streams files in the requested <code>path</code> with the last commit to modify each file. Commit modifications are traversed starting from the <code>at</code> commit or, if not specified, from the tip of the default branch. <p> Unless the repository is public, the authenticated user must have <b>REPO_READ</b> access to call this resource. - @param path the path within the repository whose files should be streamed - @param optional (nil or map[string]interface{}) with one or more of: - @param "at" (string) the commit to use as the starting point when listing files and calculating modifications - @return */ + @param path the path within the repository whose files should be streamed + @param optional (nil or map[string]interface{}) with one or more of: + @param "at" (string) the commit to use as the starting point when listing files and calculating modifications + @return +*/ func (a *DefaultApiService) Stream_43(projectKey, repositorySlug string, path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") @@ -12053,19 +12765,21 @@ func (a *DefaultApiService) Stream_43(projectKey, repositorySlug string, path st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Test connectivity to a specific endpoint. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. +/* + DefaultApiService + Test connectivity to a specific endpoint. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. 
- @param optional (nil or map[string]interface{}) with one or more of: - @param "url" (string) the url in which to connect to - @return */ + @param optional (nil or map[string]interface{}) with one or more of: + @param "url" (string) the url in which to connect to + @return +*/ func (a *DefaultApiService) TestWebhook(projectKey, repositorySlug string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -12118,21 +12832,23 @@ func (a *DefaultApiService) TestWebhook(projectKey, repositorySlug string, local } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService - Unassigns a participant from the REVIEWER role they may have been given in a pull request. <p> If the participant has no explicit role this method has no effect. <p> Afterwards, the user will still remain a participant in the pull request but their role will be reduced to PARTICIPANT. This is because once made a participant of a pull request, a user will forever remain a participant. Only their role may be altered. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. <p> <strong>Deprecated since 4.2</strong>. Use /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/participants/{userSlug} instead. +/* + DefaultApiService + Unassigns a participant from the REVIEWER role they may have been given in a pull request. <p> If the participant has no explicit role this method has no effect. <p> Afterwards, the user will still remain a participant in the pull request but their role will be reduced to PARTICIPANT. This is because once made a participant of a pull request, a user will forever remain a participant. Only their role may be altered. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. <p> <strong>Deprecated since 4.2</strong>. Use /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/participants/{userSlug} instead. 
- @param pullRequestId the id of the pull request within the repository - @param optional (nil or map[string]interface{}) with one or more of: - @param "username" (string) the participant's user name - @return */ -func (a *DefaultApiService) UnassignParticipantRole(projectKey, repositorySlug string, pullRequestID int64, localVarOptionals map[string]interface{}) (*APIResponse, error) { + @param pullRequestId the id of the pull request within the repository + @param optional (nil or map[string]interface{}) with one or more of: + @param "username" (string) the participant's user name + @return +*/ +func (a *DefaultApiService) UnassignParticipantRole(projectKey, repositorySlug string, pullRequestID int, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -12185,21 +12901,24 @@ func (a *DefaultApiService) UnassignParticipantRole(projectKey, repositorySlug s } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* UnassignParticipantRole_44 +/* + UnassignParticipantRole_44 + Unassigns a participant from the REVIEWER role they may have been given in a pull request. <p> If the participant has no explicit role this method has no effect. <p> Afterwards, the user will still remain a participant in the pull request but their role will be reduced to PARTICIPANT. This is because once made a participant of a pull request, a user will forever remain a participant. Only their role may be altered. <p> The authenticated user must have <strong>REPO_WRITE</strong> permission for the repository that this pull request targets to call this resource. <p> <strong>Deprecated since 4.2</strong>. Use /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/participants/{userSlug} instead. 
- @param pullRequestId the id of the pull request within the repository - @param userSlug the slug for the user changing their status - @param pullRequestId2 the id of the pull request within the repository - @return */ -func (a *DefaultApiService) UnassignParticipantRole_44(projectKey, repositorySlug string, pullRequestID int64, userSlug string) (*APIResponse, error) { + @param pullRequestId the id of the pull request within the repository + @param userSlug the slug for the user changing their status + @param pullRequestId2 the id of the pull request within the repository + @return +*/ +func (a *DefaultApiService) UnassignParticipantRole_44(projectKey, repositorySlug string, pullRequestID int, userSlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -12246,18 +12965,21 @@ func (a *DefaultApiService) UnassignParticipantRole_44(projectKey, repositorySlu } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Removes the authenticated user as a watcher for the specified commit. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository containing the commit to call this resource. @param commitId the <i>full {@link Commit#getId() ID}</i> of the commit within the repository -@return */ +@return +*/ func (a *DefaultApiService) Unwatch(projectKey, repositorySlug string, commitId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") @@ -12304,19 +13026,22 @@ func (a *DefaultApiService) Unwatch(projectKey, repositorySlug string, commitId } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Make the authenticated user stop watching the specified pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
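UnassignParticipantRole_44 above now takes a plain int pull request ID instead of int64; a minimal sketch with placeholder project key, repository slug, ID and user slug:

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// removeReviewer sketches the updated UnassignParticipantRole_44 signature
// shown in the hunk above. Every concrete value is a placeholder.
func removeReviewer(api *bitbucketv1.DefaultApiService) error {
	_, err := api.UnassignParticipantRole_44("PROJ", "my-repo", 42, "jane.doe")
	return err
}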
@param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) Unwatch_45(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Unwatch_45(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -12362,17 +13087,20 @@ func (a *DefaultApiService) Unwatch_45(projectKey, repositorySlug string, pullRe } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Decodes the provided encoded license and sets it as the active license. If no license was provided, a 400 is returned. If the license cannot be decoded, or cannot be applied, a 409 is returned. Some possible reasons a license may not be applied include: <ul> <li>It is for a different product</li> <li>It is already expired</li> </ul> Otherwise, if the license is updated successfully, details for the new license are returned with a 200 response. <p> <b>Warning</b>: It is possible to downgrade the license during update, applying a license with a lower number of permitted users. If the number of currently-licensed users exceeds the limits of the new license, pushing will be disabled until the licensed user count is brought into compliance with the new license. <p> The authenticated user must have <b>SYS_ADMIN</b> permission. <b>ADMIN</b> users may <i>view</i> the current license details, but they may not <i>update</i> the license. -@return */ +@return +*/ func (a *DefaultApiService) Update() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -12416,21 +13144,24 @@ func (a *DefaultApiService) Update() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the text of a comment. Only the user who created a comment may update it. <p> <strong>Note:</strong> the supplied supplied JSON object must contain a <code>version</code> that must match the server's version of the comment or the update will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the update. Look for the 'version' attribute in the returned JSON structure. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that the commit is in to call this resource. 
@param commitId the commit to which the comments must be anchored @param commentId the ID of the comment to retrieve @param commitId2 the <i>full {@link Commit#getId() ID}</i> of the commit within the repository -@return */ -func (a *DefaultApiService) UpdateComment(projectKey, repositorySlug string, commitId string, commentId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) UpdateComment(projectKey, repositorySlug string, commitId string, commentId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -12477,19 +13208,22 @@ func (a *DefaultApiService) UpdateComment(projectKey, repositorySlug string, com } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the text of a comment. Only the user who created a comment may update it. <p> <strong>Note:</strong> the supplied supplied JSON object must contain a <code>version</code> that must match the server's version of the comment or the update will fail. To determine the current version of the comment, the comment should be fetched from the server prior to the update. Look for the 'version' attribute in the returned JSON structure. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. @param commentId the id of the comment to retrieve -@return */ -func (a *DefaultApiService) UpdateComment_46(projectKey, repositorySlug string, pullRequestID, commentId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) UpdateComment_46(projectKey, repositorySlug string, pullRequestID, commentId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -12536,17 +13270,20 @@ func (a *DefaultApiService) UpdateComment_46(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the project matching the <strong>projectKey</strong> supplied in the resource path. <p> To include a custom avatar for the updated project, the project definition should contain an additional attribute with the key <code>avatar</code> and the value a data URI containing Base64-encoded image data. The URI should be in the following format: <code> data:(content type, e.g. image/png);base64,(data) </code> If the data is not Base64-encoded, or if a character set is defined in the URI, or the URI is otherwise invalid, <em>project creation will fail</em>. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. 
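Callers that previously held int64 pull request or comment IDs typically need only an explicit int(...) conversion to follow the UpdateComment/UpdateComment_46 signature change above; a sketch under that assumption, with placeholder project and repository names:

package main

import bitbucketv1 "github.com/gfleury/go-bitbucket-v1"

// updateCommentByID sketches adapting int64 IDs to the int parameters
// introduced in these hunks (UpdateComment_46 above). The int(...)
// conversions are the caller-side change; "PROJ"/"my-repo" are placeholders.
func updateCommentByID(api *bitbucketv1.DefaultApiService, prID, commentID int64) error {
	_, err := api.UpdateComment_46("PROJ", "my-repo", int(prID), int(commentID))
	return err
}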
-@return */ +@return +*/ func (a *DefaultApiService) UpdateProject(projectKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -12591,17 +13328,20 @@ func (a *DefaultApiService) UpdateProject(projectKey string) (*APIResponse, erro } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the pull request settings for the context repository. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the context repository to call this resource. <p> This resource will call all RestFragments that are registered with the key <strong>bitbucket.repository.settings.pullRequests</strong>. If any fragment fails validations by returning a non-empty Map of errors, then no fragments will execute. <p> Only the settings that should be updated need to be included in the request. <p> The property keys for the settings that are bundled with the application are <ul> <li>mergeConfig - the merge strategy configuration for pull requests</li> <li>requiredApprovers - (Deprecated, please use com.atlassian.bitbucket.server.bundled-hooks.requiredApproversMergeHook instead) the number of approvals required on a pull request for it to be mergeable, or 0 to disable the merge check</li> <li>com.atlassian.bitbucket.server.bundled-hooks.requiredApproversMergeHook - a json map containing the keys 'enabled' (a boolean to enable or disable this merge check) and 'count' (an integer to set the number of required approvals)</li> <li>requiredAllApprovers - whether or not all approvers must approve a pull request for it to be mergeable</li> <li>requiredAllTasksComplete - whether or not all tasks on a pull request need to be completed for it to be mergeable</li> <li>requiredSuccessfulBuilds - (Deprecated, please use com.atlassian.bitbucket.server.bitbucket-build.requiredBuildsMergeCheck instead) the number of successful s on a pull request for it to be mergeable, or 0 to disable the merge check</li> <li>com.atlassian.bitbucket.server.bitbucket-build.requiredBuildsMergeCheck - a json map containing the keys 'enabled' (a boolean to enable or disable this merge check) and 'count' (an integer to set the number of required builds)</li> </ul> <strong>Merge strategy configuration deletion:</strong> <p> An explicitly set pull request merge strategy configuration can be deleted by POSTing a document with an empty \"mergeConfig\" attribute. 
i.e: <pre> { \"mergeConfig\": { } } </pre> Upon completion of this request, the effective configuration will be: <ul> <li>The configuration set for this repository's SCM type as set at the project level, if present, otherwise</li> <li>the configuration set for this repository's SCM type as set at the instance level, if present, otherwise</li> <li>the default configuration for this repository's SCM type</li> <ul> -@return */ +@return +*/ func (a *DefaultApiService) UpdatePullRequestSettings(projectKey, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -12647,18 +13387,21 @@ func (a *DefaultApiService) UpdatePullRequestSettings(projectKey, repositorySlug } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the pull request merge strategy configuration for this project and SCM. <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the context repository to call this resource. <p> Only the strategies provided will be enabled, the default must be set and included in the set of strategies. <p> An explicitly set pull request merge strategy configuration can be deleted by POSTing a document with an empty \"mergeConfig\" attribute. i.e: <pre> { \"mergeConfig\": { } } </pre> Upon completion of this request, the effective configuration will be the configuration explicitly set for the SCM, or if no such explicit configuration is set then the default configuration will be used. @param scmId the SCM to get strategies for -@return */ +@return +*/ func (a *DefaultApiService) UpdatePullRequestSettings_47(projectKey, repositorySlug string, scmId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -12705,20 +13448,23 @@ func (a *DefaultApiService) UpdatePullRequestSettings_47(projectKey, repositoryS } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the repository matching the <strong>repositorySlug</strong> supplied in the resource path. <p> The repository's slug is derived from its name. If the name changes the slug may also change. <p> This API can be used to move the repository to a different project by setting the new project in the request, example: {@code {\"project\":{\"key\":\"NEW_KEY\"}}} . <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. 
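The UpdateRepository wrapper and its UpdateRepositoryWithOptions variant follow just below; the plain wrapper delegates with a nil body and an empty content-type list. A hedged sketch of calling the *WithOptions variant directly to move a repository to another project, as the documentation above describes with {"project":{"key":"NEW_KEY"}}. The body/content-type parameters are inferred from that delegation, and the api value is assumed to be an already-constructed *DefaultApiService:

package bbexample

import bbv1 "github.com/gfleury/go-bitbucket-v1"

// moveRepository relocates a repository to another project by sending a
// body of {"project":{"key": newProjectKey}}. The trailing argument lists
// the accepted Content-Type values, mirroring the nil/[]string{} defaults
// used by the plain UpdateRepository wrapper below.
func moveRepository(api *bbv1.DefaultApiService, projectKey, repoSlug, newProjectKey string) error {
	body := map[string]interface{}{
		"project": map[string]string{"key": newProjectKey},
	}
	_, err := api.UpdateRepositoryWithOptions(projectKey, repoSlug, body, []string{"application/json"})
	return err
}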
@param projectKey the parent project key @param projectKey2 the parent project key @param repositorySlug the repository slug -@return */ +@return +*/ func (a *DefaultApiService) UpdateRepository(projectKey, repositorySlug string) (*APIResponse, error) { return a.UpdateRepositoryWithOptions(projectKey, repositorySlug, nil, []string{}) } @@ -12764,17 +13510,20 @@ func (a *DefaultApiService) UpdateRepositoryWithOptions(projectKey, repositorySl } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the entries of a map of user setting key/values for a specific user identified by the user slug. <p> -@return */ +@return +*/ func (a *DefaultApiService) UpdateSettings(userSlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -12819,21 +13568,24 @@ func (a *DefaultApiService) UpdateSettings(userSlug string) (*APIResponse, error } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Change the current user's status for a pull request. Implicitly adds the user as a participant if they are not already. If the current user is the author, this method will fail. <p> The possible values for {@code status} are <strong>UNAPPROVED</strong>, <strong>NEEDS_WORK</strong>, or <strong>APPROVED</strong>. <p> If the new {@code status} is <strong>NEEDS_WORK</strong> or <strong>APPROVED</strong> then the {@code lastReviewedCommit} for the participant will be updated to the latest commit of the source branch of the pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
@param pullRequestId the id of the pull request within the repository @param userSlug the slug for the user changing their status @param pullRequestId2 the id of the pull request within the repository -@return */ -func (a *DefaultApiService) UpdateStatus(projectKey, repositorySlug string, pullRequestID int64, userSlug string, participant UserWithMetadata) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) UpdateStatus(projectKey, repositorySlug string, pullRequestID int, userSlug string, participant UserWithMetadata) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -12885,19 +13637,22 @@ func (a *DefaultApiService) UpdateStatus(projectKey, repositorySlug string, pull } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update a existing task. <p> As of Stash 3.3, only the state and text of a task can be updated. <p> Updating the state of a task is allowed for any user having <em>READ</em> access to the repository. However only the task's creator, the context's author or an admin of the context's repository can update the task's text. (For a pull request task, those are the task's creator, the pull request's author or an admin on the repository containing the pull request). Additionally the task's text cannot be updated if it has been resolved. @param taskId the id identifying the task to delete -@return */ -func (a *DefaultApiService) UpdateTask(taskId int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) UpdateTask(taskId int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -12941,17 +13696,20 @@ func (a *DefaultApiService) UpdateTask(taskId int64) (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update a user's details. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) UpdateUserDetails() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -12995,17 +13753,20 @@ func (a *DefaultApiService) UpdateUserDetails() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the currently authenticated user's details. The update will always be applied to the currently authenticated user. 
-@return */ +@return +*/ func (a *DefaultApiService) UpdateUserDetails_48() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -13049,17 +13810,20 @@ func (a *DefaultApiService) UpdateUserDetails_48() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update a user's password. <p> The authenticated user must have the <strong>ADMIN</strong> permission to call this resource, and may not update the password of a user with greater permissions than themselves. -@return */ +@return +*/ func (a *DefaultApiService) UpdateUserPassword() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -13103,17 +13867,20 @@ func (a *DefaultApiService) UpdateUserPassword() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the currently authenticated user's password. -@return */ +@return +*/ func (a *DefaultApiService) UpdateUserPassword_49() (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -13157,18 +13924,21 @@ func (a *DefaultApiService) UpdateUserPassword_49() (*APIResponse, error) { } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update an existing webhook. <p> The authenticated user must have <strong>REPO_ADMIN</strong> permission for the specified repository to call this resource. @param webhookId the existing webhook id -@return */ +@return +*/ func (a *DefaultApiService) UpdateWebhook(projectKey, repositorySlug string, webhookId int32, localVarPostBody interface{}, localVarHTTPContentTypes []string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") @@ -13211,19 +13981,22 @@ func (a *DefaultApiService) UpdateWebhook(projectKey, repositorySlug string, web } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the title, description, reviewers or destination branch of an existing pull request. <p> <strong>Note:</strong> the <em>reviewers</em> list may be updated using this resource. 
However the <em>author</em> and <em>participants</em> list may not. <p> The authenticated user must either: <ul> <li>be the author of the pull request and have the <strong>REPO_READ</strong> permission for the repository that this pull request targets; or</li> <li>have the <strong>REPO_WRITE</strong> permission for the repository that this pull request targets</li> </ul> to call this resource. @param pullRequestId the ID of the pull request within the repository -@return */ -func (a *DefaultApiService) Update_50(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Update_50(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -13269,17 +14042,20 @@ func (a *DefaultApiService) Update_50(projectKey, repositorySlug string, pullReq } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the avatar for the project matching the supplied <strong>projectKey</strong>. <p> This resource accepts POST multipart form data, containing a single image in a form-field named 'avatar'. <p> There are configurable server limits on both the dimensions (1024x1024 pixels by default) and uploaded file size (1MB by default). Several different image formats are supported, but <strong>PNG</strong> and <strong>JPEG</strong> are preferred due to the file size limit. <p> This resource has Cross-Site Request Forgery (XSRF) protection. To allow the request to pass the XSRF check the caller needs to send an <code>X-Atlassian-Token</code> HTTP header with the value <code>no-check</code>. <p> An example <a href=\"http://curl.haxx.se/\">curl</a> request to upload an image name 'avatar.png' would be: <pre> curl -X POST -u username:password -H \"X-Atlassian-Token: no-check\" http://example.com/rest/api/1.0/projects/STASH/avatar.png -F avatar=@avatar.png </pre> <p> The authenticated user must have <strong>PROJECT_ADMIN</strong> permission for the specified project to call this resource. -@return */ +@return +*/ func (a *DefaultApiService) UploadAvatar(projectKey string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -13324,17 +14100,20 @@ func (a *DefaultApiService) UploadAvatar(projectKey string) (*APIResponse, error } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Update the avatar for the user with the supplied <strong>slug</strong>. <p> This resource accepts POST multipart form data, containing a single image in a form-field named 'avatar'. <p> There are configurable server limits on both the dimensions (1024x1024 pixels by default) and uploaded file size (1MB by default). 
Several different image formats are supported, but <strong>PNG</strong> and <strong>JPEG</strong> are preferred due to the file size limit. <p> This resource has Cross-Site Request Forgery (XSRF) protection. To allow the request to pass the XSRF check the caller needs to send an <code>X-Atlassian-Token</code> HTTP header with the value <code>no-check</code>. <p> An example <a href=\"http://curl.haxx.se/\">curl</a> request to upload an image name 'avatar.png' would be: <pre> curl -X POST -u username:password -H \"X-Atlassian-Token: no-check\" http://example.com/rest/api/latest/users/jdoe/avatar.png -F avatar=@avatar.png </pre> <p> Users are always allowed to update their own avatar. To update someone else's avatar the authenticated user must have global <strong>ADMIN</strong> permission, or global <strong>SYS_ADMIN</strong> permission to update a <strong>SYS_ADMIN</strong> user's avatar. -@return */ +@return +*/ func (a *DefaultApiService) UploadAvatar_51(userSlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -13379,18 +14158,21 @@ func (a *DefaultApiService) UploadAvatar_51(userSlug string) (*APIResponse, erro } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Adds the authenticated user as a watcher for the specified commit. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository containing the commit to call this resource. @param commitId the <i>full {@link Commit#getId() ID}</i> of the commit within the repository -@return */ +@return +*/ func (a *DefaultApiService) Watch(projectKey, repositorySlug string, commitId string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") @@ -13437,19 +14219,22 @@ func (a *DefaultApiService) Watch(projectKey, repositorySlug string, commitId st } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Make the authenticated user watch the specified pull request. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. 
@param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) Watch_52(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) Watch_52(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Post") localVarPostBody interface{} @@ -13495,19 +14280,22 @@ func (a *DefaultApiService) Watch_52(projectKey, repositorySlug string, pullRequ } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService +/* + DefaultApiService + Remove approval from a pull request as the current user. This does not remove the user as a participant. <p> The authenticated user must have <strong>REPO_READ</strong> permission for the repository that this pull request targets to call this resource. <p> <strong>Deprecated since 4.2</strong>. Use /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/participants/{userSlug} instead @param pullRequestId the id of the pull request within the repository -@return */ -func (a *DefaultApiService) WithdrawApproval(projectKey, repositorySlug string, pullRequestID int64) (*APIResponse, error) { +@return +*/ +func (a *DefaultApiService) WithdrawApproval(projectKey, repositorySlug string, pullRequestID int) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Delete") localVarPostBody interface{} @@ -13553,7 +14341,7 @@ func (a *DefaultApiService) WithdrawApproval(projectKey, repositorySlug string, } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -13563,10 +14351,8 @@ func (a *DefaultApiService) WithdrawApproval(projectKey, repositorySlug string, /* DefaultApiService Gets statistics regarding the builds associated with a commit. - * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param commitId the commit Id - - + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param commitId the commit Id */ func (a *DefaultApiService) GetCommitStats(commitId string) (*APIResponse, error) { var ( @@ -13612,7 +14398,7 @@ func (a *DefaultApiService) GetCommitStats(commitId string) (*APIResponse, error } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -13622,10 +14408,8 @@ func (a *DefaultApiService) GetCommitStats(commitId string) (*APIResponse, error /* DefaultApiService Gets the build statuses associated with a commit. 
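Pull-request, comment and task identifiers in this regenerated client are now plain int rather than int64 (Watch_52, WithdrawApproval and the other helpers above). A hedged caller sketch; the client construction follows the library's usual swagger-generated pattern, and the host, credentials, project key and ID are placeholders:

package main

import (
	"context"
	"fmt"
	"time"

	bbv1 "github.com/gfleury/go-bitbucket-v1"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	// Credentials travel in the request context, per the generated client
	// convention; user, token and host are placeholders.
	ctx = context.WithValue(ctx, bbv1.ContextBasicAuth, bbv1.BasicAuth{
		UserName: "bot-user",
		Password: "app-token",
	})
	client := bbv1.NewAPIClient(ctx, bbv1.NewConfiguration("https://bitbucket.example.com/rest"))

	// pullRequestID is a plain int after this regeneration, not an int64.
	resp, err := client.DefaultApi.Watch_52("PROJ", "my-repo", 1234)
	if err != nil {
		fmt.Println("watch failed:", err)
		return
	}
	fmt.Println("watch status:", resp.StatusCode)
}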
- * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param commitId the commit Id - - + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param commitId the commit Id */ func (a *DefaultApiService) GetCommitStatus(commitId string) (*APIResponse, error) { var ( @@ -13671,7 +14455,7 @@ func (a *DefaultApiService) GetCommitStatus(commitId string) (*APIResponse, erro } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -13681,10 +14465,8 @@ func (a *DefaultApiService) GetCommitStatus(commitId string) (*APIResponse, erro /* DefaultApiService Produces a list of the build statistics for multiple commits. Commits without any builds associated with them will not be returned. For example if the commit e00cf62997a027bbf785614a93e2e55bb331d268 does not have any build statuses associated with it, it will not be present in the response. - * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param commits Array of commits. - - + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param commits Array of commits. */ func (a *DefaultApiService) GetCommitsStats(commits []string) (*APIResponse, error) { var ( @@ -13731,7 +14513,7 @@ func (a *DefaultApiService) GetCommitsStats(commits []string) (*APIResponse, err } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -13741,11 +14523,9 @@ func (a *DefaultApiService) GetCommitsStats(commits []string) (*APIResponse, err /* DefaultApiService Associates a build status with a commit. The state, the key and the url are mandatory. The name and description fields are optional. All fields (mandatory or optional) are limited to 255 characters, except for the url, which is limited to 450 characters. Supported values for the state are SUCCESSFUL, FAILED and INPROGRESS. The authenticated user must have LICENSED permission or higher to call this resource. - * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param commitId the commit Id - * @param buildStatus Array of commits. - - + - @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + - @param commitId the commit Id + - @param buildStatus Array of commits. 
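The SetCommitStatus helper, whose signature follows just below, reports a build result against a commit; per the description above, state, key and url are mandatory and state must be SUCCESSFUL, FAILED or INPROGRESS. A hedged sketch, assuming the BuildStatus struct exposes State/Key/Url/Description fields mirroring that REST payload; the key and URL are placeholders:

package bbexample

import bbv1 "github.com/gfleury/go-bitbucket-v1"

// reportBuild publishes a commit build status. The BuildStatus field names
// are assumed to mirror the REST payload documented above.
func reportBuild(api *bbv1.DefaultApiService, commitID string, passed bool) error {
	state := "SUCCESSFUL"
	if !passed {
		state = "FAILED"
	}
	_, err := api.SetCommitStatus(commitID, bbv1.BuildStatus{
		State:       state,
		Key:         "ci/unit-tests",
		Url:         "https://ci.example.com/builds/42",
		Description: "unit test run",
	})
	return err
}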
*/ func (a *DefaultApiService) SetCommitStatus(commitId string, buildStatus BuildStatus) (*APIResponse, error) { var ( @@ -13793,7 +14573,7 @@ func (a *DefaultApiService) SetCommitStatus(commitId string, buildStatus BuildSt } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } @@ -13803,8 +14583,7 @@ func (a *DefaultApiService) SetCommitStatus(commitId string, buildStatus BuildSt /* SearchCode - * @param searchString Search string - + - @param searchString Search string */ func (a *DefaultApiService) SearchCode(query SearchQuery) (*APIResponse, error) { var ( @@ -13852,7 +14631,68 @@ func (a *DefaultApiService) SearchCode(query SearchQuery) (*APIResponse, error) } defer localVarHTTPResponse.Body.Close() if localVarHTTPResponse.StatusCode >= 300 { - bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + + return NewBitbucketAPIResponse(localVarHTTPResponse) +} + +func (a *DefaultApiService) GetHeadCommit(project, repository, branch string, localVarOptionals map[string]interface{}) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + if err := typeCheckParameter(localVarOptionals["apibasepath"], "string", "apibasepath"); err != nil { + return nil, err + } + apibasepath, _ := localVarOptionals["apibasepath"].(string) + if apibasepath == "" { + apibasepath = "/api/1.0" // or "apibasepath": "/api/latest" + } + + localVarPath := a.client.cfg.BasePath + apibasepath + "/projects/{projectKey}/repos/{repositorySlug}/commits?until=refs/heads/{branch}&start=0&limit=1" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repository), -1) + localVarPath = strings.Replace(localVarPath, "{"+"branch"+"}", fmt.Sprintf("%v", branch), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) + } + 
defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := io.ReadAll(localVarHTTPResponse.Body) return NewAPIResponseWithError(localVarHTTPResponse, bodyBytes, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) } diff --git a/vendor/github.com/gfleury/go-bitbucket-v1/git_push.sh b/vendor/github.com/gfleury/go-bitbucket-v1/git_push.sh deleted file mode 100644 index ae01b182a..000000000 --- a/vendor/github.com/gfleury/go-bitbucket-v1/git_push.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/sh -# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ -# -# Usage example: /bin/sh ./git_push.sh wing328 swagger-petstore-perl "minor update" - -git_user_id=$1 -git_repo_id=$2 -release_note=$3 - -if [ "$git_user_id" = "" ]; then - git_user_id="GIT_USER_ID" - echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" -fi - -if [ "$git_repo_id" = "" ]; then - git_repo_id="GIT_REPO_ID" - echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" -fi - -if [ "$release_note" = "" ]; then - release_note="Minor update" - echo "[INFO] No command line input provided. Set \$release_note to $release_note" -fi - -# Initialize the local directory as a Git repository -git init - -# Adds the files in the local repository and stages them for commit. -git add . - -# Commits the tracked changes and prepares them to be pushed to a remote repository. -git commit -m "$release_note" - -# Sets the new remote -git_remote=`git remote` -if [ "$git_remote" = "" ]; then # git remote not defined - - if [ "$GIT_TOKEN" = "" ]; then - echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." - git remote add origin https://github.com/${git_user_id}/${git_repo_id}.git - else - git remote add origin https://${git_user_id}:${GIT_TOKEN}@github.com/${git_user_id}/${git_repo_id}.git - fi - -fi - -git pull origin master - -# Pushes (Forces) the changes in the local repository up to the remote repository -echo "Git pushing to https://github.com/${git_user_id}/${git_repo_id}.git" -git push origin master 2>&1 | grep -v 'To https' - diff --git a/vendor/github.com/google/cel-go/cel/BUILD.bazel b/vendor/github.com/google/cel-go/cel/BUILD.bazel index 0905f6353..62b903c81 100644 --- a/vendor/github.com/google/cel-go/cel/BUILD.bazel +++ b/vendor/github.com/google/cel-go/cel/BUILD.bazel @@ -10,9 +10,11 @@ go_library( "cel.go", "decls.go", "env.go", + "folding.go", "io.go", "library.go", "macro.go", + "optimizer.go", "options.go", "program.go", "validator.go", @@ -56,7 +58,9 @@ go_test( "cel_test.go", "decls_test.go", "env_test.go", + "folding_test.go", "io_test.go", + "validator_test.go", ], data = [ "//cel/testdata:gen_test_fds", diff --git a/vendor/github.com/google/cel-go/cel/decls.go b/vendor/github.com/google/cel-go/cel/decls.go index 0f9501341..b59e3708d 100644 --- a/vendor/github.com/google/cel-go/cel/decls.go +++ b/vendor/github.com/google/cel-go/cel/decls.go @@ -353,43 +353,3 @@ func ExprDeclToDeclaration(d *exprpb.Decl) (EnvOption, error) { return nil, fmt.Errorf("unsupported decl: %v", d) } } - -func typeValueToKind(tv ref.Type) (Kind, error) { - switch tv { - case types.BoolType: - return BoolKind, nil - case types.DoubleType: - return DoubleKind, nil - case types.IntType: - return IntKind, nil - case types.UintType: - return UintKind, nil - case types.ListType: - return ListKind, nil - case types.MapType: - return MapKind, nil - case 
types.StringType: - return StringKind, nil - case types.BytesType: - return BytesKind, nil - case types.DurationType: - return DurationKind, nil - case types.TimestampType: - return TimestampKind, nil - case types.NullType: - return NullTypeKind, nil - case types.TypeType: - return TypeKind, nil - default: - switch tv.TypeName() { - case "dyn": - return DynKind, nil - case "google.protobuf.Any": - return AnyKind, nil - case "optional": - return OpaqueKind, nil - default: - return 0, fmt.Errorf("no known conversion for type of %s", tv.TypeName()) - } - } -} diff --git a/vendor/github.com/google/cel-go/cel/env.go b/vendor/github.com/google/cel-go/cel/env.go index 5cbb86a75..786a13c4d 100644 --- a/vendor/github.com/google/cel-go/cel/env.go +++ b/vendor/github.com/google/cel-go/cel/env.go @@ -38,26 +38,37 @@ type Source = common.Source // Ast representing the checked or unchecked expression, its source, and related metadata such as // source position information. type Ast struct { - expr *exprpb.Expr - info *exprpb.SourceInfo - source Source - refMap map[int64]*celast.ReferenceInfo - typeMap map[int64]*types.Type + source Source + impl *celast.AST } // Expr returns the proto serializable instance of the parsed/checked expression. +// +// Deprecated: prefer cel.AstToCheckedExpr() or cel.AstToParsedExpr() and call GetExpr() +// the result instead. func (ast *Ast) Expr() *exprpb.Expr { - return ast.expr + if ast == nil { + return nil + } + pbExpr, _ := celast.ExprToProto(ast.impl.Expr()) + return pbExpr } // IsChecked returns whether the Ast value has been successfully type-checked. func (ast *Ast) IsChecked() bool { - return ast.typeMap != nil && len(ast.typeMap) > 0 + if ast == nil { + return false + } + return ast.impl.IsChecked() } // SourceInfo returns character offset and newline position information about expression elements. func (ast *Ast) SourceInfo() *exprpb.SourceInfo { - return ast.info + if ast == nil { + return nil + } + pbInfo, _ := celast.SourceInfoToProto(ast.impl.SourceInfo()) + return pbInfo } // ResultType returns the output type of the expression if the Ast has been type-checked, else @@ -65,9 +76,6 @@ func (ast *Ast) SourceInfo() *exprpb.SourceInfo { // // Deprecated: use OutputType func (ast *Ast) ResultType() *exprpb.Type { - if !ast.IsChecked() { - return chkdecls.Dyn - } out := ast.OutputType() t, err := TypeToExprType(out) if err != nil { @@ -79,16 +87,18 @@ func (ast *Ast) ResultType() *exprpb.Type { // OutputType returns the output type of the expression if the Ast has been type-checked, else // returns cel.DynType as the parse step cannot infer types. func (ast *Ast) OutputType() *Type { - t, found := ast.typeMap[ast.expr.GetId()] - if !found { - return DynType + if ast == nil { + return types.ErrorType } - return t + return ast.impl.GetType(ast.impl.Expr().ID()) } // Source returns a view of the input used to create the Ast. This source may be complete or // constructed from the SourceInfo. func (ast *Ast) Source() Source { + if ast == nil { + return nil + } return ast.source } @@ -196,29 +206,28 @@ func NewCustomEnv(opts ...EnvOption) (*Env, error) { // It is possible to have both non-nil Ast and Issues values returned from this call: however, // the mere presence of an Ast does not imply that it is valid for use. func (e *Env) Check(ast *Ast) (*Ast, *Issues) { - // Note, errors aren't currently possible on the Ast to ParsedExpr conversion. 
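With the Ast now backed by celast.AST, Ast.Expr() is deprecated in favour of the explicit proto conversions named in the deprecation notice above. A minimal sketch of the replacement pattern using the public cel package API:

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.Variable("name", cel.StringType))
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`"hello " + name`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	// Instead of the deprecated ast.Expr(), convert the Ast explicitly;
	// cel.AstToParsedExpr works the same way for unchecked ASTs.
	checked, err := cel.AstToCheckedExpr(ast)
	if err != nil {
		panic(err)
	}
	fmt.Println(checked.GetExpr().GetCallExpr().GetFunction()) // prints "_+_"
}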
- pe, _ := AstToParsedExpr(ast) - // Construct the internal checker env, erroring if there is an issue adding the declarations. chk, err := e.initChecker() if err != nil { errs := common.NewErrors(ast.Source()) errs.ReportError(common.NoLocation, err.Error()) - return nil, NewIssuesWithSourceInfo(errs, ast.SourceInfo()) + return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) } - res, errs := checker.Check(pe, ast.Source(), chk) + checked, errs := checker.Check(ast.impl, ast.Source(), chk) if len(errs.GetErrors()) > 0 { - return nil, NewIssuesWithSourceInfo(errs, ast.SourceInfo()) + return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) } // Manually create the Ast to ensure that the Ast source information (which may be more // detailed than the information provided by Check), is returned to the caller. ast = &Ast{ - source: ast.Source(), - expr: res.Expr, - info: res.SourceInfo, - refMap: res.ReferenceMap, - typeMap: res.TypeMap} + source: ast.Source(), + impl: checked} + + // Avoid creating a validator config if it's not needed. + if len(e.validators) == 0 { + return ast, nil + } // Generate a validator configuration from the set of configured validators. vConfig := newValidatorConfig() @@ -228,9 +237,9 @@ func (e *Env) Check(ast *Ast) (*Ast, *Issues) { } } // Apply additional validators on the type-checked result. - iss := NewIssuesWithSourceInfo(errs, ast.SourceInfo()) + iss := NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) for _, v := range e.validators { - v.Validate(e, vConfig, res, iss) + v.Validate(e, vConfig, checked, iss) } if iss.Err() != nil { return nil, iss @@ -388,6 +397,15 @@ func (e *Env) HasLibrary(libName string) bool { return exists && configured } +// Libraries returns a list of SingletonLibrary that have been configured in the environment. +func (e *Env) Libraries() []string { + libraries := make([]string, 0, len(e.libraries)) + for libName := range e.libraries { + libraries = append(libraries, libName) + } + return libraries +} + // HasValidator returns whether a specific ASTValidator has been configured in the environment. func (e *Env) HasValidator(name string) bool { for _, v := range e.validators { @@ -415,16 +433,11 @@ func (e *Env) Parse(txt string) (*Ast, *Issues) { // It is possible to have both non-nil Ast and Issues values returned from this call; however, // the mere presence of an Ast does not imply that it is valid for use. func (e *Env) ParseSource(src Source) (*Ast, *Issues) { - res, errs := e.prsr.Parse(src) + parsed, errs := e.prsr.Parse(src) if len(errs.GetErrors()) > 0 { return nil, &Issues{errs: errs} } - // Manually create the Ast to ensure that the text source information is propagated on - // subsequent calls to Check. - return &Ast{ - source: src, - expr: res.GetExpr(), - info: res.GetSourceInfo()}, nil + return &Ast{source: src, impl: parsed}, nil } // Program generates an evaluable instance of the Ast within the environment (Env). @@ -520,8 +533,9 @@ func (e *Env) PartialVars(vars any) (interpreter.PartialActivation, error) { // TODO: Consider adding an option to generate a Program.Residual to avoid round-tripping to an // Ast format and then Program again. 
func (e *Env) ResidualAst(a *Ast, details *EvalDetails) (*Ast, error) { - pruned := interpreter.PruneAst(a.Expr(), a.SourceInfo().GetMacroCalls(), details.State()) - expr, err := AstToString(ParsedExprToAst(pruned)) + pruned := interpreter.PruneAst(a.impl.Expr(), a.impl.SourceInfo().MacroCalls(), details.State()) + newAST := &Ast{source: a.Source(), impl: pruned} + expr, err := AstToString(newAST) if err != nil { return nil, err } @@ -542,13 +556,7 @@ func (e *Env) ResidualAst(a *Ast, details *EvalDetails) (*Ast, error) { // EstimateCost estimates the cost of a type checked CEL expression using the length estimates of input data and // extension functions provided by estimator. func (e *Env) EstimateCost(ast *Ast, estimator checker.CostEstimator, opts ...checker.CostOption) (checker.CostEstimate, error) { - checked := &celast.CheckedAST{ - Expr: ast.Expr(), - SourceInfo: ast.SourceInfo(), - TypeMap: ast.typeMap, - ReferenceMap: ast.refMap, - } - return checker.Cost(checked, estimator, opts...) + return checker.Cost(ast.impl, estimator, opts...) } // configure applies a series of EnvOptions to the current environment. @@ -690,7 +698,7 @@ type Error = common.Error // Note: in the future, non-fatal warnings and notices may be inspectable via the Issues struct. type Issues struct { errs *common.Errors - info *exprpb.SourceInfo + info *celast.SourceInfo } // NewIssues returns an Issues struct from a common.Errors object. @@ -701,7 +709,7 @@ func NewIssues(errs *common.Errors) *Issues { // NewIssuesWithSourceInfo returns an Issues struct from a common.Errors object with SourceInfo metatata // which can be used with the `ReportErrorAtID` method for additional error reports within the context // information that's inferred from an expression id. -func NewIssuesWithSourceInfo(errs *common.Errors, info *exprpb.SourceInfo) *Issues { +func NewIssuesWithSourceInfo(errs *common.Errors, info *celast.SourceInfo) *Issues { return &Issues{ errs: errs, info: info, @@ -751,30 +759,7 @@ func (i *Issues) String() string { // The source metadata for the expression at `id`, if present, is attached to the error report. // To ensure that source metadata is attached to error reports, use NewIssuesWithSourceInfo. func (i *Issues) ReportErrorAtID(id int64, message string, args ...any) { - i.errs.ReportErrorAtID(id, locationByID(id, i.info), message, args...) -} - -// locationByID returns a common.Location given an expression id. -// -// TODO: move this functionality into the native SourceInfo and an overhaul of the common.Source -// as this implementation relies on the abstractions present in the protobuf SourceInfo object, -// and is replicated in the checker. -func locationByID(id int64, sourceInfo *exprpb.SourceInfo) common.Location { - positions := sourceInfo.GetPositions() - var line = 1 - if offset, found := positions[id]; found { - col := int(offset) - for _, lineOffset := range sourceInfo.GetLineOffsets() { - if lineOffset < offset { - line++ - col = int(offset - lineOffset) - } else { - break - } - } - return common.NewLocation(line, col) - } - return common.NoLocation + i.errs.ReportErrorAtID(id, i.info.GetStartLocation(id), message, args...) } // getStdEnv lazy initializes the CEL standard environment. @@ -805,6 +790,13 @@ func (p *interopCELTypeProvider) FindStructType(typeName string) (*types.Type, b return nil, false } +// FindStructFieldNames returns an empty set of field for the interop provider. +// +// To inspect the field names, migrate to a `types.Provider` implementation. 
+func (p *interopCELTypeProvider) FindStructFieldNames(typeName string) ([]string, bool) { + return []string{}, false +} + // FindStructFieldType returns a types.FieldType instance for the given fully-qualified typeName and field // name, if one exists. // diff --git a/vendor/github.com/google/cel-go/cel/folding.go b/vendor/github.com/google/cel-go/cel/folding.go new file mode 100644 index 000000000..1ab0023aa --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/folding.go @@ -0,0 +1,561 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cel + +import ( + "fmt" + + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" +) + +// ConstantFoldingOption defines a functional option for configuring constant folding. +type ConstantFoldingOption func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error) + +// MaxConstantFoldIterations limits the number of times literals may be folding during optimization. +// +// Defaults to 100 if not set. +func MaxConstantFoldIterations(limit int) ConstantFoldingOption { + return func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error) { + opt.maxFoldIterations = limit + return opt, nil + } +} + +// NewConstantFoldingOptimizer creates an optimizer which inlines constant scalar an aggregate +// literal values within function calls and select statements with their evaluated result. +func NewConstantFoldingOptimizer(opts ...ConstantFoldingOption) (ASTOptimizer, error) { + folder := &constantFoldingOptimizer{ + maxFoldIterations: defaultMaxConstantFoldIterations, + } + var err error + for _, o := range opts { + folder, err = o(folder) + if err != nil { + return nil, err + } + } + return folder, nil +} + +type constantFoldingOptimizer struct { + maxFoldIterations int +} + +// Optimize queries the expression graph for scalar and aggregate literal expressions within call and +// select statements and then evaluates them and replaces the call site with the literal result. +// +// Note: only values which can be represented as literals in CEL syntax are supported. +func (opt *constantFoldingOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST { + root := ast.NavigateAST(a) + + // Walk the list of foldable expression and continue to fold until there are no more folds left. + // All of the fold candidates returned by the constantExprMatcher should succeed unless there's + // a logic bug with the selection of expressions. + foldableExprs := ast.MatchDescendants(root, constantExprMatcher) + foldCount := 0 + for len(foldableExprs) != 0 && foldCount < opt.maxFoldIterations { + for _, fold := range foldableExprs { + // If the expression could be folded because it's a non-strict call, and the + // branches are pruned, continue to the next fold. 
+ if fold.Kind() == ast.CallKind && maybePruneBranches(ctx, fold) { + continue + } + // Otherwise, assume all context is needed to evaluate the expression. + err := tryFold(ctx, a, fold) + if err != nil { + ctx.ReportErrorAtID(fold.ID(), "constant-folding evaluation failed: %v", err.Error()) + return a + } + } + foldCount++ + foldableExprs = ast.MatchDescendants(root, constantExprMatcher) + } + // Once all of the constants have been folded, try to run through the remaining comprehensions + // one last time. In this case, there's no guarantee they'll run, so we only update the + // target comprehension node with the literal value if the evaluation succeeds. + for _, compre := range ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind)) { + tryFold(ctx, a, compre) + } + + // If the output is a list, map, or struct which contains optional entries, then prune it + // to make sure that the optionals, if resolved, do not surface in the output literal. + pruneOptionalElements(ctx, root) + + // Ensure that all intermediate values in the folded expression can be represented as valid + // CEL literals within the AST structure. Use `PostOrderVisit` rather than `MatchDescendents` + // to avoid extra allocations during this final pass through the AST. + ast.PostOrderVisit(root, ast.NewExprVisitor(func(e ast.Expr) { + if e.Kind() != ast.LiteralKind { + return + } + val := e.AsLiteral() + adapted, err := adaptLiteral(ctx, val) + if err != nil { + ctx.ReportErrorAtID(root.ID(), "constant-folding evaluation failed: %v", err.Error()) + return + } + e.SetKindCase(adapted) + })) + + return a +} + +// tryFold attempts to evaluate a sub-expression to a literal. +// +// If the evaluation succeeds, the input expr value will be modified to become a literal, otherwise +// the method will return an error. +func tryFold(ctx *OptimizerContext, a *ast.AST, expr ast.Expr) error { + // Assume all context is needed to evaluate the expression. + subAST := &Ast{ + impl: ast.NewCheckedAST(ast.NewAST(expr, a.SourceInfo()), a.TypeMap(), a.ReferenceMap()), + } + prg, err := ctx.Program(subAST) + if err != nil { + return err + } + out, _, err := prg.Eval(NoVars()) + if err != nil { + return err + } + // Clear any macro metadata associated with the fold. + a.SourceInfo().ClearMacroCall(expr.ID()) + // Update the fold expression to be a literal. + expr.SetKindCase(ctx.NewLiteral(out)) + return nil +} + +// maybePruneBranches inspects the non-strict call expression to determine whether +// a branch can be removed. Evaluation will naturally prune logical and / or calls, +// but conditional will not be pruned cleanly, so this is one small area where the +// constant folding step reimplements a portion of the evaluator. 
+func maybePruneBranches(ctx *OptimizerContext, expr ast.NavigableExpr) bool { + call := expr.AsCall() + args := call.Args() + switch call.FunctionName() { + case operators.LogicalAnd, operators.LogicalOr: + return maybeShortcircuitLogic(ctx, call.FunctionName(), args, expr) + case operators.Conditional: + cond := args[0] + truthy := args[1] + falsy := args[2] + if cond.Kind() != ast.LiteralKind { + return false + } + if cond.AsLiteral() == types.True { + expr.SetKindCase(truthy) + } else { + expr.SetKindCase(falsy) + } + return true + case operators.In: + haystack := args[1] + if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 { + expr.SetKindCase(ctx.NewLiteral(types.False)) + return true + } + needle := args[0] + if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind { + needleValue := needle.AsLiteral() + list := haystack.AsList() + for _, e := range list.Elements() { + if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True { + expr.SetKindCase(ctx.NewLiteral(types.True)) + return true + } + } + } + } + return false +} + +func maybeShortcircuitLogic(ctx *OptimizerContext, function string, args []ast.Expr, expr ast.NavigableExpr) bool { + shortcircuit := types.False + skip := types.True + if function == operators.LogicalOr { + shortcircuit = types.True + skip = types.False + } + newArgs := []ast.Expr{} + for _, arg := range args { + if arg.Kind() != ast.LiteralKind { + newArgs = append(newArgs, arg) + continue + } + if arg.AsLiteral() == skip { + continue + } + if arg.AsLiteral() == shortcircuit { + expr.SetKindCase(arg) + return true + } + } + if len(newArgs) == 0 { + newArgs = append(newArgs, args[0]) + expr.SetKindCase(newArgs[0]) + return true + } + if len(newArgs) == 1 { + expr.SetKindCase(newArgs[0]) + return true + } + expr.SetKindCase(ctx.NewCall(function, newArgs...)) + return true +} + +// pruneOptionalElements works from the bottom up to resolve optional elements within +// aggregate literals. +// +// Note, many aggregate literals will be resolved as arguments to functions or select +// statements, so this method exists to handle the case where the literal could not be +// fully resolved or exists outside of a call, select, or comprehension context. +func pruneOptionalElements(ctx *OptimizerContext, root ast.NavigableExpr) { + aggregateLiterals := ast.MatchDescendants(root, aggregateLiteralMatcher) + for _, lit := range aggregateLiterals { + switch lit.Kind() { + case ast.ListKind: + pruneOptionalListElements(ctx, lit) + case ast.MapKind: + pruneOptionalMapEntries(ctx, lit) + case ast.StructKind: + pruneOptionalStructFields(ctx, lit) + } + } +} + +func pruneOptionalListElements(ctx *OptimizerContext, e ast.Expr) { + l := e.AsList() + elems := l.Elements() + optIndices := l.OptionalIndices() + if len(optIndices) == 0 { + return + } + updatedElems := []ast.Expr{} + updatedIndices := []int32{} + newOptIndex := -1 + for _, e := range elems { + newOptIndex++ + if !l.IsOptional(int32(newOptIndex)) { + updatedElems = append(updatedElems, e) + continue + } + if e.Kind() != ast.LiteralKind { + updatedElems = append(updatedElems, e) + updatedIndices = append(updatedIndices, int32(newOptIndex)) + continue + } + optElemVal, ok := e.AsLiteral().(*types.Optional) + if !ok { + updatedElems = append(updatedElems, e) + updatedIndices = append(updatedIndices, int32(newOptIndex)) + continue + } + if !optElemVal.HasValue() { + newOptIndex-- // Skipping causes the list to get smaller. 
+ continue + } + e.SetKindCase(ctx.NewLiteral(optElemVal.GetValue())) + updatedElems = append(updatedElems, e) + } + e.SetKindCase(ctx.NewList(updatedElems, updatedIndices)) +} + +func pruneOptionalMapEntries(ctx *OptimizerContext, e ast.Expr) { + m := e.AsMap() + entries := m.Entries() + updatedEntries := []ast.EntryExpr{} + modified := false + for _, e := range entries { + entry := e.AsMapEntry() + key := entry.Key() + val := entry.Value() + // If the entry is not optional, or the value-side of the optional hasn't + // been resolved to a literal, then preserve the entry as-is. + if !entry.IsOptional() || val.Kind() != ast.LiteralKind { + updatedEntries = append(updatedEntries, e) + continue + } + optElemVal, ok := val.AsLiteral().(*types.Optional) + if !ok { + updatedEntries = append(updatedEntries, e) + continue + } + // When the key is not a literal, but the value is, then it needs to be + // restored to an optional value. + if key.Kind() != ast.LiteralKind { + undoOptVal, err := adaptLiteral(ctx, optElemVal) + if err != nil { + ctx.ReportErrorAtID(val.ID(), "invalid map value literal %v: %v", optElemVal, err) + } + val.SetKindCase(undoOptVal) + updatedEntries = append(updatedEntries, e) + continue + } + modified = true + if !optElemVal.HasValue() { + continue + } + val.SetKindCase(ctx.NewLiteral(optElemVal.GetValue())) + updatedEntry := ctx.NewMapEntry(key, val, false) + updatedEntries = append(updatedEntries, updatedEntry) + } + if modified { + e.SetKindCase(ctx.NewMap(updatedEntries)) + } +} + +func pruneOptionalStructFields(ctx *OptimizerContext, e ast.Expr) { + s := e.AsStruct() + fields := s.Fields() + updatedFields := []ast.EntryExpr{} + modified := false + for _, f := range fields { + field := f.AsStructField() + val := field.Value() + if !field.IsOptional() || val.Kind() != ast.LiteralKind { + updatedFields = append(updatedFields, f) + continue + } + optElemVal, ok := val.AsLiteral().(*types.Optional) + if !ok { + updatedFields = append(updatedFields, f) + continue + } + modified = true + if !optElemVal.HasValue() { + continue + } + val.SetKindCase(ctx.NewLiteral(optElemVal.GetValue())) + updatedField := ctx.NewStructField(field.Name(), val, false) + updatedFields = append(updatedFields, updatedField) + } + if modified { + e.SetKindCase(ctx.NewStruct(s.TypeName(), updatedFields)) + } +} + +// adaptLiteral converts a runtime CEL value to its equivalent literal expression. +// +// For strongly typed values, the type-provider will be used to reconstruct the fields +// which are present in the literal and their equivalent initialization values. 
+func adaptLiteral(ctx *OptimizerContext, val ref.Val) (ast.Expr, error) { + switch t := val.Type().(type) { + case *types.Type: + switch t { + case types.BoolType, types.BytesType, types.DoubleType, types.IntType, + types.NullType, types.StringType, types.UintType: + return ctx.NewLiteral(val), nil + case types.DurationType: + return ctx.NewCall( + overloads.TypeConvertDuration, + ctx.NewLiteral(val.ConvertToType(types.StringType)), + ), nil + case types.TimestampType: + return ctx.NewCall( + overloads.TypeConvertTimestamp, + ctx.NewLiteral(val.ConvertToType(types.StringType)), + ), nil + case types.OptionalType: + opt := val.(*types.Optional) + if !opt.HasValue() { + return ctx.NewCall("optional.none"), nil + } + target, err := adaptLiteral(ctx, opt.GetValue()) + if err != nil { + return nil, err + } + return ctx.NewCall("optional.of", target), nil + case types.TypeType: + return ctx.NewIdent(val.(*types.Type).TypeName()), nil + case types.ListType: + l, ok := val.(traits.Lister) + if !ok { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + elems := make([]ast.Expr, l.Size().(types.Int)) + idx := 0 + it := l.Iterator() + for it.HasNext() == types.True { + elemVal := it.Next() + elemExpr, err := adaptLiteral(ctx, elemVal) + if err != nil { + return nil, err + } + elems[idx] = elemExpr + idx++ + } + return ctx.NewList(elems, []int32{}), nil + case types.MapType: + m, ok := val.(traits.Mapper) + if !ok { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + entries := make([]ast.EntryExpr, m.Size().(types.Int)) + idx := 0 + it := m.Iterator() + for it.HasNext() == types.True { + keyVal := it.Next() + keyExpr, err := adaptLiteral(ctx, keyVal) + if err != nil { + return nil, err + } + valVal := m.Get(keyVal) + valExpr, err := adaptLiteral(ctx, valVal) + if err != nil { + return nil, err + } + entries[idx] = ctx.NewMapEntry(keyExpr, valExpr, false) + idx++ + } + return ctx.NewMap(entries), nil + default: + provider := ctx.CELTypeProvider() + fields, found := provider.FindStructFieldNames(t.TypeName()) + if !found { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + tester := val.(traits.FieldTester) + indexer := val.(traits.Indexer) + fieldInits := []ast.EntryExpr{} + for _, f := range fields { + field := types.String(f) + if tester.IsSet(field) != types.True { + continue + } + fieldVal := indexer.Get(field) + fieldExpr, err := adaptLiteral(ctx, fieldVal) + if err != nil { + return nil, err + } + fieldInits = append(fieldInits, ctx.NewStructField(f, fieldExpr, false)) + } + return ctx.NewStruct(t.TypeName(), fieldInits), nil + } + } + return nil, fmt.Errorf("failed to adapt %v to literal", val) +} + +// constantExprMatcher matches calls, select statements, and comprehensions whose arguments +// are all constant scalar or aggregate literal values. +// +// Only comprehensions which are not nested are included as possible constant folds, and only +// if all variables referenced in the comprehension stack exist are only iteration or +// accumulation variables. 
+func constantExprMatcher(e ast.NavigableExpr) bool { + switch e.Kind() { + case ast.CallKind: + return constantCallMatcher(e) + case ast.SelectKind: + sel := e.AsSelect() // guaranteed to be a navigable value + return constantMatcher(sel.Operand().(ast.NavigableExpr)) + case ast.ComprehensionKind: + if isNestedComprehension(e) { + return false + } + vars := map[string]bool{} + constantExprs := true + visitor := ast.NewExprVisitor(func(e ast.Expr) { + if e.Kind() == ast.ComprehensionKind { + nested := e.AsComprehension() + vars[nested.AccuVar()] = true + vars[nested.IterVar()] = true + } + if e.Kind() == ast.IdentKind && !vars[e.AsIdent()] { + constantExprs = false + } + }) + ast.PreOrderVisit(e, visitor) + return constantExprs + default: + return false + } +} + +// constantCallMatcher identifies strict and non-strict calls which can be folded. +func constantCallMatcher(e ast.NavigableExpr) bool { + call := e.AsCall() + children := e.Children() + fnName := call.FunctionName() + if fnName == operators.LogicalAnd { + for _, child := range children { + if child.Kind() == ast.LiteralKind { + return true + } + } + } + if fnName == operators.LogicalOr { + for _, child := range children { + if child.Kind() == ast.LiteralKind { + return true + } + } + } + if fnName == operators.Conditional { + cond := children[0] + if cond.Kind() == ast.LiteralKind && cond.AsLiteral().Type() == types.BoolType { + return true + } + } + if fnName == operators.In { + haystack := children[1] + if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 { + return true + } + needle := children[0] + if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind { + needleValue := needle.AsLiteral() + list := haystack.AsList() + for _, e := range list.Elements() { + if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True { + return true + } + } + } + } + // convert all other calls with constant arguments + for _, child := range children { + if !constantMatcher(child) { + return false + } + } + return true +} + +func isNestedComprehension(e ast.NavigableExpr) bool { + parent, found := e.Parent() + for found { + if parent.Kind() == ast.ComprehensionKind { + return true + } + parent, found = parent.Parent() + } + return false +} + +func aggregateLiteralMatcher(e ast.NavigableExpr) bool { + return e.Kind() == ast.ListKind || e.Kind() == ast.MapKind || e.Kind() == ast.StructKind +} + +var ( + constantMatcher = ast.ConstantValueMatcher() +) + +const ( + defaultMaxConstantFoldIterations = 100 +) diff --git a/vendor/github.com/google/cel-go/cel/inlining.go b/vendor/github.com/google/cel-go/cel/inlining.go new file mode 100644 index 000000000..9fc3be278 --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/inlining.go @@ -0,0 +1,220 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
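// Illustrative usage sketch (not part of the vendored patch): how the constant-folding pass
// in folding.go above is typically applied through the StaticOptimizer added later in this
// patch. Only StaticOptimizer.Optimize and AstToString appear in this diff; the
// cel.NewConstantFoldingOptimizer constructor is assumed to come from the same cel-go
// release (folding is capped at the defaultMaxConstantFoldIterations of 100 shown above).
package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.Variable("x", cel.IntType))
	if err != nil {
		panic(err)
	}
	checked, iss := env.Compile(`x in [1 + 1, 2 + 2] && true`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	// Assumed constructor from this release; it returns an ASTOptimizer compatible with
	// cel.NewStaticOptimizer.
	folder, err := cel.NewConstantFoldingOptimizer()
	if err != nil {
		panic(err)
	}
	optimized, iss := cel.NewStaticOptimizer(folder).Optimize(env, checked)
	if iss != nil && iss.Err() != nil {
		panic(iss.Err())
	}
	// The literal arithmetic and the trailing `&& true` should fold away, leaving
	// something like: x in [2, 4]
	out, _ := cel.AstToString(optimized)
	fmt.Println(out)
}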
+ +package cel + +import ( + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/traits" +) + +// InlineVariable holds a variable name to be matched and an AST representing +// the expression graph which should be used to replace it. +type InlineVariable struct { + name string + alias string + def *ast.AST +} + +// Name returns the qualified variable or field selection to replace. +func (v *InlineVariable) Name() string { + return v.name +} + +// Alias returns the alias to use when performing cel.bind() calls during inlining. +func (v *InlineVariable) Alias() string { + return v.alias +} + +// Expr returns the inlined expression value. +func (v *InlineVariable) Expr() ast.Expr { + return v.def.Expr() +} + +// Type indicates the inlined expression type. +func (v *InlineVariable) Type() *Type { + return v.def.GetType(v.def.Expr().ID()) +} + +// NewInlineVariable declares a variable name to be replaced by a checked expression. +func NewInlineVariable(name string, definition *Ast) *InlineVariable { + return NewInlineVariableWithAlias(name, name, definition) +} + +// NewInlineVariableWithAlias declares a variable name to be replaced by a checked expression. +// If the variable occurs more than once, the provided alias will be used to replace the expressions +// where the variable name occurs. +func NewInlineVariableWithAlias(name, alias string, definition *Ast) *InlineVariable { + return &InlineVariable{name: name, alias: alias, def: definition.impl} +} + +// NewInliningOptimizer creates and optimizer which replaces variables with expression definitions. +// +// If a variable occurs one time, the variable is replaced by the inline definition. If the +// variable occurs more than once, the variable occurences are replaced by a cel.bind() call. +func NewInliningOptimizer(inlineVars ...*InlineVariable) ASTOptimizer { + return &inliningOptimizer{variables: inlineVars} +} + +type inliningOptimizer struct { + variables []*InlineVariable +} + +func (opt *inliningOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST { + root := ast.NavigateAST(a) + for _, inlineVar := range opt.variables { + matches := ast.MatchDescendants(root, opt.matchVariable(inlineVar.Name())) + // Skip cases where the variable isn't in the expression graph + if len(matches) == 0 { + continue + } + + // For a single match, do a direct replacement of the expression sub-graph. + if len(matches) == 1 { + opt.inlineExpr(ctx, matches[0], ctx.CopyExpr(inlineVar.Expr()), inlineVar.Type()) + continue + } + + if !isBindable(matches, inlineVar.Expr(), inlineVar.Type()) { + for _, match := range matches { + opt.inlineExpr(ctx, match, ctx.CopyExpr(inlineVar.Expr()), inlineVar.Type()) + } + continue + } + // For multiple matches, find the least common ancestor (lca) and insert the + // variable as a cel.bind() macro. + var lca ast.NavigableExpr = nil + ancestors := map[int64]bool{} + for _, match := range matches { + // Update the identifier matches with the provided alias. 
+ aliasExpr := ctx.NewIdent(inlineVar.Alias()) + opt.inlineExpr(ctx, match, aliasExpr, inlineVar.Type()) + parent, found := match, true + for found { + _, hasAncestor := ancestors[parent.ID()] + if hasAncestor && (lca == nil || lca.Depth() < parent.Depth()) { + lca = parent + } + ancestors[parent.ID()] = true + parent, found = parent.Parent() + } + } + + // Update the least common ancestor by inserting a cel.bind() call to the alias. + inlined := ctx.NewBindMacro(lca.ID(), inlineVar.Alias(), inlineVar.Expr(), lca) + opt.inlineExpr(ctx, lca, inlined, inlineVar.Type()) + } + return a +} + +// inlineExpr replaces the current expression with the inlined one, unless the location of the inlining +// happens within a presence test, e.g. has(a.b.c) -> inline alpha for a.b.c in which case an attempt is +// made to determine whether the inlined value can be presence or existence tested. +func (opt *inliningOptimizer) inlineExpr(ctx *OptimizerContext, prev, inlined ast.Expr, inlinedType *Type) { + switch prev.Kind() { + case ast.SelectKind: + sel := prev.AsSelect() + if !sel.IsTestOnly() { + prev.SetKindCase(inlined) + return + } + opt.rewritePresenceExpr(ctx, prev, inlined, inlinedType) + default: + prev.SetKindCase(inlined) + } +} + +// rewritePresenceExpr converts the inlined expression, when it occurs within a has() macro, to type-safe +// expression appropriate for the inlined type, if possible. +// +// If the rewrite is not possible an error is reported at the inline expression site. +func (opt *inliningOptimizer) rewritePresenceExpr(ctx *OptimizerContext, prev, inlined ast.Expr, inlinedType *Type) { + // If the input inlined expression is not a select expression it won't work with the has() + // macro. Attempt to rewrite the presence test in terms of the typed input, otherwise error. + ctx.sourceInfo.ClearMacroCall(prev.ID()) + if inlined.Kind() == ast.SelectKind { + inlinedSel := inlined.AsSelect() + prev.SetKindCase( + ctx.NewPresenceTest(prev.ID(), inlinedSel.Operand(), inlinedSel.FieldName())) + return + } + if inlinedType.IsAssignableType(NullType) { + prev.SetKindCase( + ctx.NewCall(operators.NotEquals, + inlined, + ctx.NewLiteral(types.NullValue), + )) + return + } + if inlinedType.HasTrait(traits.SizerType) { + prev.SetKindCase( + ctx.NewCall(operators.NotEquals, + ctx.NewMemberCall(overloads.Size, inlined), + ctx.NewLiteral(types.IntZero), + )) + return + } + ctx.ReportErrorAtID(prev.ID(), "unable to inline expression type %v into presence test", inlinedType) +} + +// isBindable indicates whether the inlined type can be used within a cel.bind() if the expression +// being replaced occurs within a presence test. Value types with a size() method or field selection +// support can be bound. +// +// In future iterations, support may also be added for indexer types which can be rewritten as an `in` +// expression; however, this would imply a rewrite of the inlined expression that may not be necessary +// in most cases. 
+func isBindable(matches []ast.NavigableExpr, inlined ast.Expr, inlinedType *Type) bool { + if inlinedType.IsAssignableType(NullType) || + inlinedType.HasTrait(traits.SizerType) || + inlinedType.HasTrait(traits.FieldTesterType) { + return true + } + for _, m := range matches { + if m.Kind() != ast.SelectKind { + continue + } + sel := m.AsSelect() + if sel.IsTestOnly() { + return false + } + } + return true +} + +// matchVariable matches simple identifiers, select expressions, and presence test expressions +// which match the (potentially) qualified variable name provided as input. +// +// Note, this function does not support inlining against select expressions which includes optional +// field selection. This may be a future refinement. +func (opt *inliningOptimizer) matchVariable(varName string) ast.ExprMatcher { + return func(e ast.NavigableExpr) bool { + if e.Kind() == ast.IdentKind && e.AsIdent() == varName { + return true + } + if e.Kind() == ast.SelectKind { + sel := e.AsSelect() + // While the `ToQualifiedName` call could take the select directly, this + // would skip presence tests from possible matches, which we would like + // to include. + qualName, found := containers.ToQualifiedName(sel.Operand()) + return found && qualName+"."+sel.FieldName() == varName + } + return false + } +} diff --git a/vendor/github.com/google/cel-go/cel/io.go b/vendor/github.com/google/cel-go/cel/io.go index 80f63140e..3133fb9d7 100644 --- a/vendor/github.com/google/cel-go/cel/io.go +++ b/vendor/github.com/google/cel-go/cel/io.go @@ -47,17 +47,11 @@ func CheckedExprToAst(checkedExpr *exprpb.CheckedExpr) *Ast { // // Prefer CheckedExprToAst if loading expressions from storage. func CheckedExprToAstWithSource(checkedExpr *exprpb.CheckedExpr, src Source) (*Ast, error) { - checkedAST, err := ast.CheckedExprToCheckedAST(checkedExpr) + checked, err := ast.ToAST(checkedExpr) if err != nil { return nil, err } - return &Ast{ - expr: checkedAST.Expr, - info: checkedAST.SourceInfo, - source: src, - refMap: checkedAST.ReferenceMap, - typeMap: checkedAST.TypeMap, - }, nil + return &Ast{source: src, impl: checked}, nil } // AstToCheckedExpr converts an Ast to an protobuf CheckedExpr value. @@ -67,13 +61,7 @@ func AstToCheckedExpr(a *Ast) (*exprpb.CheckedExpr, error) { if !a.IsChecked() { return nil, fmt.Errorf("cannot convert unchecked ast") } - cAst := &ast.CheckedAST{ - Expr: a.expr, - SourceInfo: a.info, - ReferenceMap: a.refMap, - TypeMap: a.typeMap, - } - return ast.CheckedASTToCheckedExpr(cAst) + return ast.ToProto(a.impl) } // ParsedExprToAst converts a parsed expression proto message to an Ast. @@ -89,18 +77,12 @@ func ParsedExprToAst(parsedExpr *exprpb.ParsedExpr) *Ast { // // Prefer ParsedExprToAst if loading expressions from storage. func ParsedExprToAstWithSource(parsedExpr *exprpb.ParsedExpr, src Source) *Ast { - si := parsedExpr.GetSourceInfo() - if si == nil { - si = &exprpb.SourceInfo{} - } + info, _ := ast.ProtoToSourceInfo(parsedExpr.GetSourceInfo()) if src == nil { - src = common.NewInfoSource(si) - } - return &Ast{ - expr: parsedExpr.GetExpr(), - info: si, - source: src, + src = common.NewInfoSource(parsedExpr.GetSourceInfo()) } + e, _ := ast.ProtoToExpr(parsedExpr.GetExpr()) + return &Ast{source: src, impl: ast.NewAST(e, info)} } // AstToParsedExpr converts an Ast to an protobuf ParsedExpr value. 
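// Illustrative usage sketch (not part of the vendored patch): inlining a variable definition
// into a checked expression with the NewInlineVariable / NewInliningOptimizer API added in
// inlining.go above. The variable names and expressions are invented for the example; since
// "alpha" occurs twice, the optimizer is expected to emit a cel.bind() call via NewBindMacro
// rather than duplicating the definition at each site.
package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(
		cel.Variable("x", cel.IntType),
		cel.Variable("y", cel.IntType),
		cel.Variable("alpha", cel.IntType),
	)
	if err != nil {
		panic(err)
	}
	// The definition that should replace references to "alpha".
	def, iss := env.Compile(`x + y`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	target, iss := env.Compile(`alpha * alpha`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	inliner := cel.NewInliningOptimizer(cel.NewInlineVariable("alpha", def))
	optimized, iss := cel.NewStaticOptimizer(inliner).Optimize(env, target)
	if iss != nil && iss.Err() != nil {
		panic(iss.Err())
	}
	// Expected to unparse roughly as: cel.bind(alpha, x + y, alpha * alpha)
	out, _ := cel.AstToString(optimized)
	fmt.Println(out)
}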
@@ -116,9 +98,7 @@ func AstToParsedExpr(a *Ast) (*exprpb.ParsedExpr, error) { // Note, the conversion may not be an exact replica of the original expression, but will produce // a string that is semantically equivalent and whose textual representation is stable. func AstToString(a *Ast) (string, error) { - expr := a.Expr() - info := a.SourceInfo() - return parser.Unparse(expr, info) + return parser.Unparse(a.impl.Expr(), a.impl.SourceInfo()) } // RefValueToValue converts between ref.Val and api.expr.Value. diff --git a/vendor/github.com/google/cel-go/cel/library.go b/vendor/github.com/google/cel-go/cel/library.go index 4d232085c..deddc14e5 100644 --- a/vendor/github.com/google/cel-go/cel/library.go +++ b/vendor/github.com/google/cel-go/cel/library.go @@ -20,6 +20,7 @@ import ( "strings" "time" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/stdlib" @@ -28,8 +29,6 @@ import ( "github.com/google/cel-go/common/types/traits" "github.com/google/cel-go/interpreter" "github.com/google/cel-go/parser" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) const ( @@ -313,7 +312,7 @@ func (lib *optionalLib) CompileOptions() []EnvOption { Types(types.OptionalType), // Configure the optMap and optFlatMap macros. - Macros(NewReceiverMacro(optMapMacro, 2, optMap)), + Macros(ReceiverMacro(optMapMacro, 2, optMap)), // Global and member functions for working with optional values. Function(optionalOfFunc, @@ -374,7 +373,7 @@ func (lib *optionalLib) CompileOptions() []EnvOption { Overload("optional_map_index_value", []*Type{OptionalType(mapTypeKV), paramTypeK}, optionalTypeV)), } if lib.version >= 1 { - opts = append(opts, Macros(NewReceiverMacro(optFlatMapMacro, 2, optFlatMap))) + opts = append(opts, Macros(ReceiverMacro(optFlatMapMacro, 2, optFlatMap))) } return opts } @@ -386,57 +385,57 @@ func (lib *optionalLib) ProgramOptions() []ProgramOption { } } -func optMap(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { +func optMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) { varIdent := args[0] varName := "" - switch varIdent.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - varName = varIdent.GetIdentExpr().GetName() + switch varIdent.Kind() { + case ast.IdentKind: + varName = varIdent.AsIdent() default: - return nil, meh.NewError(varIdent.GetId(), "optMap() variable name must be a simple identifier") + return nil, meh.NewError(varIdent.ID(), "optMap() variable name must be a simple identifier") } mapExpr := args[1] - return meh.GlobalCall( + return meh.NewCall( operators.Conditional, - meh.ReceiverCall(hasValueFunc, target), - meh.GlobalCall(optionalOfFunc, - meh.Fold( - unusedIterVar, + meh.NewMemberCall(hasValueFunc, target), + meh.NewCall(optionalOfFunc, + meh.NewComprehension( meh.NewList(), + unusedIterVar, varName, - meh.ReceiverCall(valueFunc, target), - meh.LiteralBool(false), - meh.Ident(varName), + meh.NewMemberCall(valueFunc, target), + meh.NewLiteral(types.False), + meh.NewIdent(varName), mapExpr, ), ), - meh.GlobalCall(optionalNoneFunc), + meh.NewCall(optionalNoneFunc), ), nil } -func optFlatMap(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { +func optFlatMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) { varIdent := args[0] varName := "" - switch varIdent.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - varName = 
varIdent.GetIdentExpr().GetName() + switch varIdent.Kind() { + case ast.IdentKind: + varName = varIdent.AsIdent() default: - return nil, meh.NewError(varIdent.GetId(), "optFlatMap() variable name must be a simple identifier") + return nil, meh.NewError(varIdent.ID(), "optFlatMap() variable name must be a simple identifier") } mapExpr := args[1] - return meh.GlobalCall( + return meh.NewCall( operators.Conditional, - meh.ReceiverCall(hasValueFunc, target), - meh.Fold( - unusedIterVar, + meh.NewMemberCall(hasValueFunc, target), + meh.NewComprehension( meh.NewList(), + unusedIterVar, varName, - meh.ReceiverCall(valueFunc, target), - meh.LiteralBool(false), - meh.Ident(varName), + meh.NewMemberCall(valueFunc, target), + meh.NewLiteral(types.False), + meh.NewIdent(varName), mapExpr, ), - meh.GlobalCall(optionalNoneFunc), + meh.NewCall(optionalNoneFunc), ), nil } diff --git a/vendor/github.com/google/cel-go/cel/macro.go b/vendor/github.com/google/cel-go/cel/macro.go index 1eb414c8b..4db1fd57a 100644 --- a/vendor/github.com/google/cel-go/cel/macro.go +++ b/vendor/github.com/google/cel-go/cel/macro.go @@ -15,6 +15,11 @@ package cel import ( + "fmt" + + "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" @@ -26,7 +31,14 @@ import ( // a Macro should be created per arg-count or as a var arg macro. type Macro = parser.Macro -// MacroExpander converts a call and its associated arguments into a new CEL abstract syntax tree. +// MacroFactory defines an expansion function which converts a call and its arguments to a cel.Expr value. +type MacroFactory = parser.MacroExpander + +// MacroExprFactory assists with the creation of Expr values in a manner which is consistent +// the internal semantics and id generation behaviors of the parser and checker libraries. +type MacroExprFactory = parser.ExprHelper + +// MacroExpander converts a call and its associated arguments into a protobuf Expr representation. // // If the MacroExpander determines within the implementation that an expansion is not needed it may return // a nil Expr value to indicate a non-match. However, if an expansion is to be performed, but the arguments @@ -36,48 +48,197 @@ type Macro = parser.Macro // and produces as output an Expr ast node. // // Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil. -type MacroExpander = parser.MacroExpander +type MacroExpander func(eh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) // MacroExprHelper exposes helper methods for creating new expressions within a CEL abstract syntax tree. -type MacroExprHelper = parser.ExprHelper +// ExprHelper assists with the manipulation of proto-based Expr values in a manner which is +// consistent with the source position and expression id generation code leveraged by both +// the parser and type-checker. +type MacroExprHelper interface { + // Copy the input expression with a brand new set of identifiers. + Copy(*exprpb.Expr) *exprpb.Expr + + // LiteralBool creates an Expr value for a bool literal. + LiteralBool(value bool) *exprpb.Expr + + // LiteralBytes creates an Expr value for a byte literal. + LiteralBytes(value []byte) *exprpb.Expr + + // LiteralDouble creates an Expr value for double literal. + LiteralDouble(value float64) *exprpb.Expr + + // LiteralInt creates an Expr value for an int literal. 
+ LiteralInt(value int64) *exprpb.Expr + + // LiteralString creates am Expr value for a string literal. + LiteralString(value string) *exprpb.Expr + + // LiteralUint creates an Expr value for a uint literal. + LiteralUint(value uint64) *exprpb.Expr + + // NewList creates a CreateList instruction where the list is comprised of the optional set + // of elements provided as arguments. + NewList(elems ...*exprpb.Expr) *exprpb.Expr + + // NewMap creates a CreateStruct instruction for a map where the map is comprised of the + // optional set of key, value entries. + NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + + // NewMapEntry creates a Map Entry for the key, value pair. + NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + + // NewObject creates a CreateStruct instruction for an object with a given type name and + // optional set of field initializers. + NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + + // NewObjectFieldInit creates a new Object field initializer from the field name and value. + NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + + // Fold creates a fold comprehension instruction. + // + // - iterVar is the iteration variable name. + // - iterRange represents the expression that resolves to a list or map where the elements or + // keys (respectively) will be iterated over. + // - accuVar is the accumulation variable name, typically parser.AccumulatorName. + // - accuInit is the initial expression whose value will be set for the accuVar prior to + // folding. + // - condition is the expression to test to determine whether to continue folding. + // - step is the expression to evaluation at the conclusion of a single fold iteration. + // - result is the computation to evaluate at the conclusion of the fold. + // + // The accuVar should not shadow variable names that you would like to reference within the + // environment in the step and condition expressions. Presently, the name __result__ is commonly + // used by built-in macros but this may change in the future. + Fold(iterVar string, + iterRange *exprpb.Expr, + accuVar string, + accuInit *exprpb.Expr, + condition *exprpb.Expr, + step *exprpb.Expr, + result *exprpb.Expr) *exprpb.Expr + + // Ident creates an identifier Expr value. + Ident(name string) *exprpb.Expr + + // AccuIdent returns an accumulator identifier for use with comprehension results. + AccuIdent() *exprpb.Expr + + // GlobalCall creates a function call Expr value for a global (free) function. + GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr + + // ReceiverCall creates a function call Expr value for a receiver-style function. + ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr + + // PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. + PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr + + // Select create a field traversal Expr value. + Select(operand *exprpb.Expr, field string) *exprpb.Expr + + // OffsetLocation returns the Location of the expression identifier. + OffsetLocation(exprID int64) common.Location + + // NewError associates an error message with a given expression id. + NewError(exprID int64, message string) *Error +} + +// GlobalMacro creates a Macro for a global function with the specified arg count. 
+func GlobalMacro(function string, argCount int, factory MacroFactory) Macro { + return parser.NewGlobalMacro(function, argCount, factory) +} + +// ReceiverMacro creates a Macro for a receiver function matching the specified arg count. +func ReceiverMacro(function string, argCount int, factory MacroFactory) Macro { + return parser.NewReceiverMacro(function, argCount, factory) +} + +// GlobalVarArgMacro creates a Macro for a global function with a variable arg count. +func GlobalVarArgMacro(function string, factory MacroFactory) Macro { + return parser.NewGlobalVarArgMacro(function, factory) +} + +// ReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count. +func ReceiverVarArgMacro(function string, factory MacroFactory) Macro { + return parser.NewReceiverVarArgMacro(function, factory) +} // NewGlobalMacro creates a Macro for a global function with the specified arg count. +// +// Deprecated: use GlobalMacro func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro { - return parser.NewGlobalMacro(function, argCount, expander) + expand := adaptingExpander{expander} + return parser.NewGlobalMacro(function, argCount, expand.Expander) } // NewReceiverMacro creates a Macro for a receiver function matching the specified arg count. +// +// Deprecated: use ReceiverMacro func NewReceiverMacro(function string, argCount int, expander MacroExpander) Macro { - return parser.NewReceiverMacro(function, argCount, expander) + expand := adaptingExpander{expander} + return parser.NewReceiverMacro(function, argCount, expand.Expander) } // NewGlobalVarArgMacro creates a Macro for a global function with a variable arg count. +// +// Deprecated: use GlobalVarArgMacro func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro { - return parser.NewGlobalVarArgMacro(function, expander) + expand := adaptingExpander{expander} + return parser.NewGlobalVarArgMacro(function, expand.Expander) } // NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count. +// +// Deprecated: use ReceiverVarArgMacro func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro { - return parser.NewReceiverVarArgMacro(function, expander) + expand := adaptingExpander{expander} + return parser.NewReceiverVarArgMacro(function, expand.Expander) } // HasMacroExpander expands the input call arguments into a presence test, e.g. 
has(.field) func HasMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { - return parser.MakeHas(meh, target, args) + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + arg, err := adaptToExpr(args[0]) + if err != nil { + return nil, err + } + if arg.Kind() == ast.SelectKind { + s := arg.AsSelect() + return adaptToProto(ph.NewPresenceTest(s.Operand(), s.FieldName())) + } + return nil, ph.NewError(arg.ID(), "invalid argument to has() macro") } // ExistsMacroExpander expands the input call arguments into a comprehension that returns true if any of the // elements in the range match the predicate expressions: // .exists(, ) func ExistsMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { - return parser.MakeExists(meh, target, args) + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeExists(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // ExistsOneMacroExpander expands the input call arguments into a comprehension that returns true if exactly // one of the elements in the range match the predicate expressions: // .exists_one(, ) func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { - return parser.MakeExistsOne(meh, target, args) + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeExistsOne(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // MapMacroExpander expands the input call arguments into a comprehension that transforms each element in the @@ -91,14 +252,30 @@ func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*ex // In the second form only iterVar values which return true when provided to the predicate expression // are transformed. 
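// Illustrative usage sketch (not part of the vendored patch): defining a custom macro against
// the new factory-style API (cel.GlobalMacro + cel.MacroExprFactory) instead of the now
// deprecated NewGlobalMacro / proto-based MacroExpander shim above. The macro name
// "nonEmpty" and its expansion are invented for the example; the helper methods used
// (NewCall, NewMemberCall, NewLiteral) are the ones exercised by optMap/optFlatMap in this
// patch.
package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/operators"
	"github.com/google/cel-go/common/overloads"
	"github.com/google/cel-go/common/types"
)

// expandNonEmpty rewrites nonEmpty(arg) into arg.size() != 0 directly on ast.Expr values,
// with no round-trip through the protobuf Expr representation.
func expandNonEmpty(mef cel.MacroExprFactory, _ ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
	return mef.NewCall(operators.NotEquals,
		mef.NewMemberCall(overloads.Size, args[0]),
		mef.NewLiteral(types.IntZero),
	), nil
}

func main() {
	env, err := cel.NewEnv(
		cel.Variable("name", cel.StringType),
		cel.Macros(cel.GlobalMacro("nonEmpty", 1, expandNonEmpty)),
	)
	if err != nil {
		panic(err)
	}
	checked, iss := env.Compile(`nonEmpty(name)`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	fmt.Println(checked.OutputType()) // bool
}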
func MapMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { - return parser.MakeMap(meh, target, args) + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeMap(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // FilterMacroExpander expands the input call arguments into a comprehension which produces a list which contains // only elements which match the provided predicate expression: // .filter(, ) func FilterMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { - return parser.MakeFilter(meh, target, args) + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeFilter(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } var ( @@ -142,3 +319,258 @@ var ( // NoMacros provides an alias to an empty list of macros NoMacros = []Macro{} ) + +type adaptingExpander struct { + legacyExpander MacroExpander +} + +func (adapt *adaptingExpander) Expander(eh parser.ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { + var legacyTarget *exprpb.Expr = nil + var err *Error = nil + if target != nil { + legacyTarget, err = adaptToProto(target) + if err != nil { + return nil, err + } + } + legacyArgs := make([]*exprpb.Expr, len(args)) + for i, arg := range args { + legacyArgs[i], err = adaptToProto(arg) + if err != nil { + return nil, err + } + } + ah := &adaptingHelper{modernHelper: eh} + legacyExpr, err := adapt.legacyExpander(ah, legacyTarget, legacyArgs) + if err != nil { + return nil, err + } + ex, err := adaptToExpr(legacyExpr) + if err != nil { + return nil, err + } + return ex, nil +} + +func wrapErr(id int64, message string, err error) *common.Error { + return &common.Error{ + Location: common.NoLocation, + Message: fmt.Sprintf("%s: %v", message, err), + ExprID: id, + } +} + +type adaptingHelper struct { + modernHelper parser.ExprHelper +} + +// Copy the input expression with a brand new set of identifiers. +func (ah *adaptingHelper) Copy(e *exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.Copy(mustAdaptToExpr(e))) +} + +// LiteralBool creates an Expr value for a bool literal. +func (ah *adaptingHelper) LiteralBool(value bool) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bool(value))) +} + +// LiteralBytes creates an Expr value for a byte literal. +func (ah *adaptingHelper) LiteralBytes(value []byte) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bytes(value))) +} + +// LiteralDouble creates an Expr value for double literal. +func (ah *adaptingHelper) LiteralDouble(value float64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Double(value))) +} + +// LiteralInt creates an Expr value for an int literal. +func (ah *adaptingHelper) LiteralInt(value int64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Int(value))) +} + +// LiteralString creates am Expr value for a string literal. +func (ah *adaptingHelper) LiteralString(value string) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.String(value))) +} + +// LiteralUint creates an Expr value for a uint literal. 
+func (ah *adaptingHelper) LiteralUint(value uint64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Uint(value))) +} + +// NewList creates a CreateList instruction where the list is comprised of the optional set +// of elements provided as arguments. +func (ah *adaptingHelper) NewList(elems ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewList(mustAdaptToExprs(elems)...)) +} + +// NewMap creates a CreateStruct instruction for a map where the map is comprised of the +// optional set of key, value entries. +func (ah *adaptingHelper) NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { + adaptedEntries := make([]ast.EntryExpr, len(entries)) + for i, e := range entries { + adaptedEntries[i] = mustAdaptToEntryExpr(e) + } + return mustAdaptToProto(ah.modernHelper.NewMap(adaptedEntries...)) +} + +// NewMapEntry creates a Map Entry for the key, value pair. +func (ah *adaptingHelper) NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { + return mustAdaptToProtoEntry( + ah.modernHelper.NewMapEntry(mustAdaptToExpr(key), mustAdaptToExpr(val), optional)) +} + +// NewObject creates a CreateStruct instruction for an object with a given type name and +// optional set of field initializers. +func (ah *adaptingHelper) NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { + adaptedEntries := make([]ast.EntryExpr, len(fieldInits)) + for i, e := range fieldInits { + adaptedEntries[i] = mustAdaptToEntryExpr(e) + } + return mustAdaptToProto(ah.modernHelper.NewStruct(typeName, adaptedEntries...)) +} + +// NewObjectFieldInit creates a new Object field initializer from the field name and value. +func (ah *adaptingHelper) NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { + return mustAdaptToProtoEntry( + ah.modernHelper.NewStructField(field, mustAdaptToExpr(init), optional)) +} + +// Fold creates a fold comprehension instruction. +// +// - iterVar is the iteration variable name. +// - iterRange represents the expression that resolves to a list or map where the elements or +// keys (respectively) will be iterated over. +// - accuVar is the accumulation variable name, typically parser.AccumulatorName. +// - accuInit is the initial expression whose value will be set for the accuVar prior to +// folding. +// - condition is the expression to test to determine whether to continue folding. +// - step is the expression to evaluation at the conclusion of a single fold iteration. +// - result is the computation to evaluate at the conclusion of the fold. +// +// The accuVar should not shadow variable names that you would like to reference within the +// environment in the step and condition expressions. Presently, the name __result__ is commonly +// used by built-in macros but this may change in the future. +func (ah *adaptingHelper) Fold(iterVar string, + iterRange *exprpb.Expr, + accuVar string, + accuInit *exprpb.Expr, + condition *exprpb.Expr, + step *exprpb.Expr, + result *exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto( + ah.modernHelper.NewComprehension( + mustAdaptToExpr(iterRange), + iterVar, + accuVar, + mustAdaptToExpr(accuInit), + mustAdaptToExpr(condition), + mustAdaptToExpr(step), + mustAdaptToExpr(result), + ), + ) +} + +// Ident creates an identifier Expr value. 
+func (ah *adaptingHelper) Ident(name string) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewIdent(name)) +} + +// AccuIdent returns an accumulator identifier for use with comprehension results. +func (ah *adaptingHelper) AccuIdent() *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewAccuIdent()) +} + +// GlobalCall creates a function call Expr value for a global (free) function. +func (ah *adaptingHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewCall(function, mustAdaptToExprs(args)...)) +} + +// ReceiverCall creates a function call Expr value for a receiver-style function. +func (ah *adaptingHelper) ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto( + ah.modernHelper.NewMemberCall(function, mustAdaptToExpr(target), mustAdaptToExprs(args)...)) +} + +// PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. +func (ah *adaptingHelper) PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr { + op := mustAdaptToExpr(operand) + return mustAdaptToProto(ah.modernHelper.NewPresenceTest(op, field)) +} + +// Select create a field traversal Expr value. +func (ah *adaptingHelper) Select(operand *exprpb.Expr, field string) *exprpb.Expr { + op := mustAdaptToExpr(operand) + return mustAdaptToProto(ah.modernHelper.NewSelect(op, field)) +} + +// OffsetLocation returns the Location of the expression identifier. +func (ah *adaptingHelper) OffsetLocation(exprID int64) common.Location { + return ah.modernHelper.OffsetLocation(exprID) +} + +// NewError associates an error message with a given expression id. +func (ah *adaptingHelper) NewError(exprID int64, message string) *Error { + return ah.modernHelper.NewError(exprID, message) +} + +func mustAdaptToExprs(exprs []*exprpb.Expr) []ast.Expr { + adapted := make([]ast.Expr, len(exprs)) + for i, e := range exprs { + adapted[i] = mustAdaptToExpr(e) + } + return adapted +} + +func mustAdaptToExpr(e *exprpb.Expr) ast.Expr { + out, _ := adaptToExpr(e) + return out +} + +func adaptToExpr(e *exprpb.Expr) (ast.Expr, *Error) { + if e == nil { + return nil, nil + } + out, err := ast.ProtoToExpr(e) + if err != nil { + return nil, wrapErr(e.GetId(), "proto conversion failure", err) + } + return out, nil +} + +func mustAdaptToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) ast.EntryExpr { + out, _ := ast.ProtoToEntryExpr(e) + return out +} + +func mustAdaptToProto(e ast.Expr) *exprpb.Expr { + out, _ := adaptToProto(e) + return out +} + +func adaptToProto(e ast.Expr) (*exprpb.Expr, *Error) { + if e == nil { + return nil, nil + } + out, err := ast.ExprToProto(e) + if err != nil { + return nil, wrapErr(e.ID(), "expr conversion failure", err) + } + return out, nil +} + +func mustAdaptToProtoEntry(e ast.EntryExpr) *exprpb.Expr_CreateStruct_Entry { + out, _ := ast.EntryExprToProto(e) + return out +} + +func toParserHelper(meh MacroExprHelper) (parser.ExprHelper, *Error) { + ah, ok := meh.(*adaptingHelper) + if !ok { + return nil, common.NewError(0, + fmt.Sprintf("unsupported macro helper: %v (%T)", meh, meh), + common.NoLocation) + } + return ah.modernHelper, nil +} diff --git a/vendor/github.com/google/cel-go/cel/optimizer.go b/vendor/github.com/google/cel-go/cel/optimizer.go new file mode 100644 index 000000000..9422a7eb6 --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/optimizer.go @@ -0,0 +1,390 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the 
"License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cel + +import ( + "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" +) + +// StaticOptimizer contains a sequence of ASTOptimizer instances which will be applied in order. +// +// The static optimizer normalizes expression ids and type-checking run between optimization +// passes to ensure that the final optimized output is a valid expression with metadata consistent +// with what would have been generated from a parsed and checked expression. +// +// Note: source position information is best-effort and likely wrong, but optimized expressions +// should be suitable for calls to parser.Unparse. +type StaticOptimizer struct { + optimizers []ASTOptimizer +} + +// NewStaticOptimizer creates a StaticOptimizer with a sequence of ASTOptimizer's to be applied +// to a checked expression. +func NewStaticOptimizer(optimizers ...ASTOptimizer) *StaticOptimizer { + return &StaticOptimizer{ + optimizers: optimizers, + } +} + +// Optimize applies a sequence of optimizations to an Ast within a given environment. +// +// If issues are encountered, the Issues.Err() return value will be non-nil. +func (opt *StaticOptimizer) Optimize(env *Env, a *Ast) (*Ast, *Issues) { + // Make a copy of the AST to be optimized. + optimized := ast.Copy(a.impl) + + // Create the optimizer context, could be pooled in the future. + issues := NewIssues(common.NewErrors(a.Source())) + ids := newMonotonicIDGen(ast.MaxID(a.impl)) + fac := &optimizerExprFactory{ + nextID: ids.nextID, + renumberID: ids.renumberID, + fac: ast.NewExprFactory(), + sourceInfo: optimized.SourceInfo(), + } + ctx := &OptimizerContext{ + optimizerExprFactory: fac, + Env: env, + Issues: issues, + } + + // Apply the optimizations sequentially. + for _, o := range opt.optimizers { + optimized = o.Optimize(ctx, optimized) + if issues.Err() != nil { + return nil, issues + } + // Normalize expression id metadata including coordination with macro call metadata. + normalizeIDs(env, optimized) + + // Recheck the updated expression for any possible type-agreement or validation errors. + parsed := &Ast{ + source: a.Source(), + impl: ast.NewAST(optimized.Expr(), optimized.SourceInfo())} + checked, iss := ctx.Check(parsed) + if iss.Err() != nil { + return nil, iss + } + optimized = checked.impl + } + + // Return the optimized result. + return &Ast{ + source: a.Source(), + impl: optimized, + }, nil +} + +// normalizeIDs ensures that the metadata present with an AST is reset in a manner such +// that the ids within the expression correspond to the ids within macros. +func normalizeIDs(e *Env, optimized *ast.AST) { + ids := newStableIDGen() + optimized.Expr().RenumberIDs(ids.renumberID) + allExprMap := make(map[int64]ast.Expr) + ast.PostOrderVisit(optimized.Expr(), ast.NewExprVisitor(func(e ast.Expr) { + allExprMap[e.ID()] = e + })) + info := optimized.SourceInfo() + + // First, update the macro call ids themselves. 
+ for id, call := range info.MacroCalls() { + info.ClearMacroCall(id) + callID := ids.renumberID(id) + if e, found := allExprMap[callID]; found && e.Kind() == ast.LiteralKind { + continue + } + info.SetMacroCall(callID, call) + } + + // Second, update the macro call id references to ensure that macro pointers are' + // updated consistently across macros. + for id, call := range info.MacroCalls() { + call.RenumberIDs(ids.renumberID) + resetMacroCall(optimized, call, allExprMap) + info.SetMacroCall(id, call) + } +} + +func resetMacroCall(optimized *ast.AST, call ast.Expr, allExprMap map[int64]ast.Expr) { + modified := []ast.Expr{} + ast.PostOrderVisit(call, ast.NewExprVisitor(func(e ast.Expr) { + if _, found := allExprMap[e.ID()]; found { + modified = append(modified, e) + } + })) + for _, m := range modified { + updated := allExprMap[m.ID()] + m.SetKindCase(updated) + } +} + +// newMonotonicIDGen increments numbers from an initial seed value. +func newMonotonicIDGen(seed int64) *monotonicIDGenerator { + return &monotonicIDGenerator{seed: seed} +} + +type monotonicIDGenerator struct { + seed int64 +} + +func (gen *monotonicIDGenerator) nextID() int64 { + gen.seed++ + return gen.seed +} + +func (gen *monotonicIDGenerator) renumberID(int64) int64 { + return gen.nextID() +} + +// newStableIDGen ensures that new ids are only created the first time they are encountered. +func newStableIDGen() *stableIDGenerator { + return &stableIDGenerator{ + idMap: make(map[int64]int64), + } +} + +type stableIDGenerator struct { + idMap map[int64]int64 + nextID int64 +} + +func (gen *stableIDGenerator) renumberID(id int64) int64 { + if id == 0 { + return 0 + } + if newID, found := gen.idMap[id]; found { + return newID + } + gen.nextID++ + gen.idMap[id] = gen.nextID + return gen.nextID +} + +// OptimizerContext embeds Env and Issues instances to make it easy to type-check and evaluate +// subexpressions and report any errors encountered along the way. The context also embeds the +// optimizerExprFactory which can be used to generate new sub-expressions with expression ids +// consistent with the expectations of a parsed expression. +type OptimizerContext struct { + *Env + *optimizerExprFactory + *Issues +} + +// ASTOptimizer applies an optimization over an AST and returns the optimized result. +type ASTOptimizer interface { + // Optimize optimizes a type-checked AST within an Environment and accumulates any issues. + Optimize(*OptimizerContext, *ast.AST) *ast.AST +} + +type optimizerExprFactory struct { + nextID func() int64 + renumberID ast.IDGenerator + fac ast.ExprFactory + sourceInfo *ast.SourceInfo +} + +// CopyExpr copies the structure of the input ast.Expr and renumbers the identifiers in a manner +// consistent with the CEL parser / checker. +func (opt *optimizerExprFactory) CopyExpr(e ast.Expr) ast.Expr { + copy := opt.fac.CopyExpr(e) + copy.RenumberIDs(opt.renumberID) + return copy +} + +// NewBindMacro creates a cel.bind() call with a variable name, initialization expression, and remaining expression. +// +// Note: the macroID indicates the insertion point, the call id that matched the macro signature, which will be used +// for coordinating macro metadata with the bind call. This piece of data is what makes it possible to unparse +// optimized expressions which use the bind() call. 
+// +// Example: +// +// cel.bind(myVar, a && b || c, !myVar || (myVar && d)) +// - varName: myVar +// - varInit: a && b || c +// - remaining: !myVar || (myVar && d) +func (opt *optimizerExprFactory) NewBindMacro(macroID int64, varName string, varInit, remaining ast.Expr) ast.Expr { + bindID := opt.nextID() + varID := opt.nextID() + + varInit = opt.CopyExpr(varInit) + varInit.RenumberIDs(opt.renumberID) + + remaining = opt.fac.CopyExpr(remaining) + remaining.RenumberIDs(opt.renumberID) + + // Place the expanded macro form in the macro calls list so that the inlined + // call can be unparsed. + opt.sourceInfo.SetMacroCall(macroID, + opt.fac.NewMemberCall(0, "bind", + opt.fac.NewIdent(opt.nextID(), "cel"), + opt.fac.NewIdent(varID, varName), + varInit, + remaining)) + + // Replace the parent node with the intercepted inlining using cel.bind()-like + // generated comprehension AST. + return opt.fac.NewComprehension(bindID, + opt.fac.NewList(opt.nextID(), []ast.Expr{}, []int32{}), + "#unused", + varName, + opt.fac.CopyExpr(varInit), + opt.fac.NewLiteral(opt.nextID(), types.False), + opt.fac.NewIdent(varID, varName), + opt.fac.CopyExpr(remaining)) +} + +// NewCall creates a global function call invocation expression. +// +// Example: +// +// countByField(list, fieldName) +// - function: countByField +// - args: [list, fieldName] +func (opt *optimizerExprFactory) NewCall(function string, args ...ast.Expr) ast.Expr { + return opt.fac.NewCall(opt.nextID(), function, args...) +} + +// NewMemberCall creates a member function call invocation expression where 'target' is the receiver of the call. +// +// Example: +// +// list.countByField(fieldName) +// - function: countByField +// - target: list +// - args: [fieldName] +func (opt *optimizerExprFactory) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return opt.fac.NewMemberCall(opt.nextID(), function, target, args...) +} + +// NewIdent creates a new identifier expression. +// +// Examples: +// +// - simple_var_name +// - qualified.subpackage.var_name +func (opt *optimizerExprFactory) NewIdent(name string) ast.Expr { + return opt.fac.NewIdent(opt.nextID(), name) +} + +// NewLiteral creates a new literal expression value. +// +// The range of valid values for a literal generated during optimization is different than for expressions +// generated via parsing / type-checking, as the ref.Val may be _any_ CEL value so long as the value can +// be converted back to a literal-like form. +func (opt *optimizerExprFactory) NewLiteral(value ref.Val) ast.Expr { + return opt.fac.NewLiteral(opt.nextID(), value) +} + +// NewList creates a list expression with a set of optional indices. +// +// Examples: +// +// [a, b] +// - elems: [a, b] +// - optIndices: [] +// +// [a, ?b, ?c] +// - elems: [a, b, c] +// - optIndices: [1, 2] +func (opt *optimizerExprFactory) NewList(elems []ast.Expr, optIndices []int32) ast.Expr { + return opt.fac.NewList(opt.nextID(), elems, optIndices) +} + +// NewMap creates a map from a set of entry expressions which contain a key and value expression. +func (opt *optimizerExprFactory) NewMap(entries []ast.EntryExpr) ast.Expr { + return opt.fac.NewMap(opt.nextID(), entries) +} + +// NewMapEntry creates a map entry with a key and value expression and a flag to indicate whether the +// entry is optional. 
+// +// Examples: +// +// {a: b} +// - key: a +// - value: b +// - optional: false +// +// {?a: ?b} +// - key: a +// - value: b +// - optional: true +func (opt *optimizerExprFactory) NewMapEntry(key, value ast.Expr, isOptional bool) ast.EntryExpr { + return opt.fac.NewMapEntry(opt.nextID(), key, value, isOptional) +} + +// NewPresenceTest creates a new presence test macro call. +// +// Example: +// +// has(msg.field_name) +// - operand: msg +// - field: field_name +func (opt *optimizerExprFactory) NewPresenceTest(macroID int64, operand ast.Expr, field string) ast.Expr { + // Copy the input operand and renumber it. + operand = opt.CopyExpr(operand) + operand.RenumberIDs(opt.renumberID) + + // Place the expanded macro form in the macro calls list so that the inlined call can be unparsed. + opt.sourceInfo.SetMacroCall(macroID, + opt.fac.NewCall(0, "has", + opt.fac.NewSelect(opt.nextID(), operand, field))) + + // Generate a new presence test macro. + return opt.fac.NewPresenceTest(opt.nextID(), opt.CopyExpr(operand), field) +} + +// NewSelect creates a select expression where a field value is selected from an operand. +// +// Example: +// +// msg.field_name +// - operand: msg +// - field: field_name +func (opt *optimizerExprFactory) NewSelect(operand ast.Expr, field string) ast.Expr { + return opt.fac.NewSelect(opt.nextID(), operand, field) +} + +// NewStruct creates a new typed struct value with an set of field initializations. +// +// Example: +// +// pkg.TypeName{field: value} +// - typeName: pkg.TypeName +// - fields: [{field: value}] +func (opt *optimizerExprFactory) NewStruct(typeName string, fields []ast.EntryExpr) ast.Expr { + return opt.fac.NewStruct(opt.nextID(), typeName, fields) +} + +// NewStructField creates a struct field initialization. +// +// Examples: +// +// {count: 3u} +// - field: count +// - value: 3u +// - optional: false +// +// {?count: x} +// - field: count +// - value: x +// - optional: true +func (opt *optimizerExprFactory) NewStructField(field string, value ast.Expr, isOptional bool) ast.EntryExpr { + return opt.fac.NewStructField(opt.nextID(), field, value, isOptional) +} diff --git a/vendor/github.com/google/cel-go/cel/options.go b/vendor/github.com/google/cel-go/cel/options.go index d47f55d8e..74e3b0bac 100644 --- a/vendor/github.com/google/cel-go/cel/options.go +++ b/vendor/github.com/google/cel-go/cel/options.go @@ -447,6 +447,8 @@ const ( OptTrackCost EvalOption = 1 << iota // OptCheckStringFormat enables compile-time checking of string.format calls for syntax/cardinality. + // + // Deprecated: use ext.StringsValidateFormatCalls() as this option is now a no-op. OptCheckStringFormat EvalOption = 1 << iota ) diff --git a/vendor/github.com/google/cel-go/cel/program.go b/vendor/github.com/google/cel-go/cel/program.go index 11c5c447e..cec4839df 100644 --- a/vendor/github.com/google/cel-go/cel/program.go +++ b/vendor/github.com/google/cel-go/cel/program.go @@ -19,7 +19,6 @@ import ( "fmt" "sync" - celast "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/interpreter" @@ -148,7 +147,7 @@ func (p *prog) clone() *prog { // ProgramOption values. // // If the program cannot be configured the prog will be nil, with a non-nil error response. -func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { +func newProgram(e *Env, a *Ast, opts []ProgramOption) (Program, error) { // Build the dispatcher, interpreter, and default program value. 
disp := interpreter.NewDispatcher() @@ -208,34 +207,6 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { if len(p.regexOptimizations) > 0 { decorators = append(decorators, interpreter.CompileRegexConstants(p.regexOptimizations...)) } - // Enable compile-time checking of syntax/cardinality for string.format calls. - if p.evalOpts&OptCheckStringFormat == OptCheckStringFormat { - var isValidType func(id int64, validTypes ...ref.Type) (bool, error) - if ast.IsChecked() { - isValidType = func(id int64, validTypes ...ref.Type) (bool, error) { - t := ast.typeMap[id] - if t.Kind() == DynKind { - return true, nil - } - for _, vt := range validTypes { - k, err := typeValueToKind(vt) - if err != nil { - return false, err - } - if t.Kind() == k { - return true, nil - } - } - return false, nil - } - } else { - // if the AST isn't type-checked, short-circuit validation - isValidType = func(id int64, validTypes ...ref.Type) (bool, error) { - return true, nil - } - } - decorators = append(decorators, interpreter.InterpolateFormattedString(isValidType)) - } // Enable exhaustive eval, state tracking and cost tracking last since they require a factory. if p.evalOpts&(OptExhaustiveEval|OptTrackState|OptTrackCost) != 0 { @@ -263,33 +234,16 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { decs = append(decs, interpreter.Observe(observers...)) } - return p.clone().initInterpretable(ast, decs) + return p.clone().initInterpretable(a, decs) } return newProgGen(factory) } - return p.initInterpretable(ast, decorators) + return p.initInterpretable(a, decorators) } -func (p *prog) initInterpretable(ast *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) { - // Unchecked programs do not contain type and reference information and may be slower to execute. - if !ast.IsChecked() { - interpretable, err := - p.interpreter.NewUncheckedInterpretable(ast.Expr(), decs...) - if err != nil { - return nil, err - } - p.interpretable = interpretable - return p, nil - } - - // When the AST has been checked it contains metadata that can be used to speed up program execution. - checked := &celast.CheckedAST{ - Expr: ast.Expr(), - SourceInfo: ast.SourceInfo(), - TypeMap: ast.typeMap, - ReferenceMap: ast.refMap, - } - interpretable, err := p.interpreter.NewInterpretable(checked, decs...) +func (p *prog) initInterpretable(a *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) { + // When the AST has been exprAST it contains metadata that can be used to speed up program execution. + interpretable, err := p.interpreter.NewInterpretable(a.impl, decs...) if err != nil { return nil, err } @@ -559,8 +513,6 @@ func (p *evalActivationPool) Put(value any) { } var ( - emptyEvalState = interpreter.NewEvalState() - // activationPool is an internally managed pool of Activation values that wrap map[string]any inputs activationPool = newEvalActivationPool() diff --git a/vendor/github.com/google/cel-go/cel/validator.go b/vendor/github.com/google/cel-go/cel/validator.go index 78b311381..b50c67452 100644 --- a/vendor/github.com/google/cel-go/cel/validator.go +++ b/vendor/github.com/google/cel-go/cel/validator.go @@ -21,8 +21,6 @@ import ( "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/overloads" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) const ( @@ -69,7 +67,7 @@ type ASTValidator interface { // // See individual validators for more information on their configuration keys and configuration // properties. 
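// Illustrative usage sketch (not part of the vendored patch): with OptCheckStringFormat now a
// no-op and the interpreter-side format check removed above, compile-time validation of
// string.format() calls is requested through the strings extension, per the deprecation note
// in options.go. The boolean parameter to ext.StringsValidateFormatCalls is an assumption
// about that option's signature; only the function name appears in this diff.
package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/ext"
)

func main() {
	env, err := cel.NewEnv(
		cel.Variable("name", cel.StringType),
		ext.Strings(ext.StringsValidateFormatCalls(true)),
	)
	if err != nil {
		panic(err)
	}
	// A mismatched format clause (%d applied to a string argument) is expected to be
	// reported at Compile time rather than at evaluation time.
	_, iss := env.Compile(`"hello %d".format([name])`)
	if iss.Err() != nil {
		fmt.Println("rejected at compile time:", iss.Err())
	}
}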
- Validate(*Env, ValidatorConfig, *ast.CheckedAST, *Issues) + Validate(*Env, ValidatorConfig, *ast.AST, *Issues) } // ValidatorConfig provides an accessor method for querying validator configuration state. @@ -180,7 +178,7 @@ func ValidateComprehensionNestingLimit(limit int) ASTValidator { return nestingLimitValidator{limit: limit} } -type argChecker func(env *Env, call, arg ast.NavigableExpr) error +type argChecker func(env *Env, call, arg ast.Expr) error func newFormatValidator(funcName string, argNum int, check argChecker) formatValidator { return formatValidator{ @@ -203,8 +201,8 @@ func (v formatValidator) Name() string { // Validate searches the AST for uses of a given function name with a constant argument and performs a check // on whether the argument is a valid literal value. -func (v formatValidator) Validate(e *Env, _ ValidatorConfig, a *ast.CheckedAST, iss *Issues) { - root := ast.NavigateCheckedAST(a) +func (v formatValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) { + root := ast.NavigateAST(a) funcCalls := ast.MatchDescendants(root, ast.FunctionMatcher(v.funcName)) for _, call := range funcCalls { callArgs := call.AsCall().Args() @@ -221,8 +219,8 @@ func (v formatValidator) Validate(e *Env, _ ValidatorConfig, a *ast.CheckedAST, } } -func evalCall(env *Env, call, arg ast.NavigableExpr) error { - ast := ParsedExprToAst(&exprpb.ParsedExpr{Expr: call.ToExpr()}) +func evalCall(env *Env, call, arg ast.Expr) error { + ast := &Ast{impl: ast.NewAST(call, ast.NewSourceInfo(nil))} prg, err := env.Program(ast) if err != nil { return err @@ -231,7 +229,7 @@ func evalCall(env *Env, call, arg ast.NavigableExpr) error { return err } -func compileRegex(_ *Env, _, arg ast.NavigableExpr) error { +func compileRegex(_ *Env, _, arg ast.Expr) error { pattern := arg.AsLiteral().Value().(string) _, err := regexp.Compile(pattern) return err @@ -244,25 +242,14 @@ func (homogeneousAggregateLiteralValidator) Name() string { return homogeneousValidatorName } -// Configure implements the ASTValidatorConfigurer interface and currently sets the list of standard -// and exempt functions from homogeneous aggregate literal checks. -// -// TODO: Move this call into the string.format() ASTValidator once ported. -func (homogeneousAggregateLiteralValidator) Configure(c MutableValidatorConfig) error { - emptyList := []string{} - exemptFunctions := c.GetOrDefault(HomogeneousAggregateLiteralExemptFunctions, emptyList).([]string) - exemptFunctions = append(exemptFunctions, "format") - return c.Set(HomogeneousAggregateLiteralExemptFunctions, exemptFunctions) -} - // Validate validates that all lists and map literals have homogeneous types, i.e. don't contain dyn types. // // This validator makes an exception for list and map literals which occur at any level of nesting within // string format calls. 
-func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig, a *ast.CheckedAST, iss *Issues) { +func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig, a *ast.AST, iss *Issues) { var exemptedFunctions []string exemptedFunctions = c.GetOrDefault(HomogeneousAggregateLiteralExemptFunctions, exemptedFunctions).([]string) - root := ast.NavigateCheckedAST(a) + root := ast.NavigateAST(a) listExprs := ast.MatchDescendants(root, ast.KindMatcher(ast.ListKind)) for _, listExpr := range listExprs { if inExemptFunction(listExpr, exemptedFunctions) { @@ -273,7 +260,7 @@ func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig optIndices := l.OptionalIndices() var elemType *Type for i, e := range elements { - et := e.Type() + et := a.GetType(e.ID()) if isOptionalIndex(i, optIndices) { et = et.Parameters()[0] } @@ -296,9 +283,10 @@ func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig entries := m.Entries() var keyType, valType *Type for _, e := range entries { - key, val := e.Key(), e.Value() - kt, vt := key.Type(), val.Type() - if e.IsOptional() { + mapEntry := e.AsMapEntry() + key, val := mapEntry.Key(), mapEntry.Value() + kt, vt := a.GetType(key.ID()), a.GetType(val.ID()) + if mapEntry.IsOptional() { vt = vt.Parameters()[0] } if keyType == nil && valType == nil { @@ -316,7 +304,8 @@ func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig } func inExemptFunction(e ast.NavigableExpr, exemptFunctions []string) bool { - if parent, found := e.Parent(); found { + parent, found := e.Parent() + for found { if parent.Kind() == ast.CallKind { fnName := parent.AsCall().FunctionName() for _, exempt := range exemptFunctions { @@ -325,9 +314,7 @@ func inExemptFunction(e ast.NavigableExpr, exemptFunctions []string) bool { } } } - if parent.Kind() == ast.ListKind || parent.Kind() == ast.MapKind { - return inExemptFunction(parent, exemptFunctions) - } + parent, found = parent.Parent() } return false } @@ -353,8 +340,8 @@ func (v nestingLimitValidator) Name() string { return "cel.lib.std.validate.comprehension_nesting_limit" } -func (v nestingLimitValidator) Validate(e *Env, _ ValidatorConfig, a *ast.CheckedAST, iss *Issues) { - root := ast.NavigateCheckedAST(a) +func (v nestingLimitValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) { + root := ast.NavigateAST(a) comprehensions := ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind)) if len(comprehensions) <= v.limit { return diff --git a/vendor/github.com/google/cel-go/checker/checker.go b/vendor/github.com/google/cel-go/checker/checker.go index 720e4fa96..57fb3ce5e 100644 --- a/vendor/github.com/google/cel-go/checker/checker.go +++ b/vendor/github.com/google/cel-go/checker/checker.go @@ -18,6 +18,7 @@ package checker import ( "fmt" + "reflect" "github.com/google/cel-go/common" "github.com/google/cel-go/common/ast" @@ -25,139 +26,98 @@ import ( "github.com/google/cel-go/common/decls" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/types" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types/ref" ) type checker struct { + *ast.AST + ast.ExprFactory env *Env errors *typeErrors mappings *mapping freeTypeVarCounter int - sourceInfo *exprpb.SourceInfo - types map[int64]*types.Type - references map[int64]*ast.ReferenceInfo } // Check performs type checking, giving a typed AST. 
-// The input is a ParsedExpr proto and an env which encapsulates -// type binding of variables, declarations of built-in functions, -// descriptions of protocol buffers, and a registry for errors. -// Returns a CheckedExpr proto, which might not be usable if -// there are errors in the error registry. -func Check(parsedExpr *exprpb.ParsedExpr, source common.Source, env *Env) (*ast.CheckedAST, *common.Errors) { +// +// The input is a parsed AST and an env which encapsulates type binding of variables, +// declarations of built-in functions, descriptions of protocol buffers, and a registry for +// errors. +// +// Returns a type-checked AST, which might not be usable if there are errors in the error +// registry. +func Check(parsed *ast.AST, source common.Source, env *Env) (*ast.AST, *common.Errors) { errs := common.NewErrors(source) + typeMap := make(map[int64]*types.Type) + refMap := make(map[int64]*ast.ReferenceInfo) c := checker{ + AST: ast.NewCheckedAST(parsed, typeMap, refMap), + ExprFactory: ast.NewExprFactory(), env: env, errors: &typeErrors{errs: errs}, mappings: newMapping(), freeTypeVarCounter: 0, - sourceInfo: parsedExpr.GetSourceInfo(), - types: make(map[int64]*types.Type), - references: make(map[int64]*ast.ReferenceInfo), } - c.check(parsedExpr.GetExpr()) + c.check(c.Expr()) - // Walk over the final type map substituting any type parameters either by their bound value or - // by DYN. - m := make(map[int64]*types.Type) - for id, t := range c.types { - m[id] = substitute(c.mappings, t, true) + // Walk over the final type map substituting any type parameters either by their bound value + // or by DYN. + for id, t := range c.TypeMap() { + c.SetType(id, substitute(c.mappings, t, true)) } - - return &ast.CheckedAST{ - Expr: parsedExpr.GetExpr(), - SourceInfo: parsedExpr.GetSourceInfo(), - TypeMap: m, - ReferenceMap: c.references, - }, errs + return c.AST, errs } -func (c *checker) check(e *exprpb.Expr) { +func (c *checker) check(e ast.Expr) { if e == nil { return } - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - literal := e.GetConstExpr() - switch literal.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - c.checkBoolLiteral(e) - case *exprpb.Constant_BytesValue: - c.checkBytesLiteral(e) - case *exprpb.Constant_DoubleValue: - c.checkDoubleLiteral(e) - case *exprpb.Constant_Int64Value: - c.checkInt64Literal(e) - case *exprpb.Constant_NullValue: - c.checkNullLiteral(e) - case *exprpb.Constant_StringValue: - c.checkStringLiteral(e) - case *exprpb.Constant_Uint64Value: - c.checkUint64Literal(e) + switch e.Kind() { + case ast.LiteralKind: + literal := ref.Val(e.AsLiteral()) + switch literal.Type() { + case types.BoolType, types.BytesType, types.DoubleType, types.IntType, + types.NullType, types.StringType, types.UintType: + c.setType(e, literal.Type().(*types.Type)) + default: + c.errors.unexpectedASTType(e.ID(), c.location(e), "literal", literal.Type().TypeName()) } - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: c.checkIdent(e) - case *exprpb.Expr_SelectExpr: + case ast.SelectKind: c.checkSelect(e) - case *exprpb.Expr_CallExpr: + case ast.CallKind: c.checkCall(e) - case *exprpb.Expr_ListExpr: + case ast.ListKind: c.checkCreateList(e) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + c.checkCreateMap(e) + case ast.StructKind: c.checkCreateStruct(e) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: c.checkComprehension(e) default: - c.errors.unexpectedASTType(e.GetId(), c.location(e), e) + c.errors.unexpectedASTType(e.ID(), 
c.location(e), "unspecified", reflect.TypeOf(e).Name()) } } -func (c *checker) checkInt64Literal(e *exprpb.Expr) { - c.setType(e, types.IntType) -} - -func (c *checker) checkUint64Literal(e *exprpb.Expr) { - c.setType(e, types.UintType) -} - -func (c *checker) checkStringLiteral(e *exprpb.Expr) { - c.setType(e, types.StringType) -} - -func (c *checker) checkBytesLiteral(e *exprpb.Expr) { - c.setType(e, types.BytesType) -} - -func (c *checker) checkDoubleLiteral(e *exprpb.Expr) { - c.setType(e, types.DoubleType) -} - -func (c *checker) checkBoolLiteral(e *exprpb.Expr) { - c.setType(e, types.BoolType) -} - -func (c *checker) checkNullLiteral(e *exprpb.Expr) { - c.setType(e, types.NullType) -} - -func (c *checker) checkIdent(e *exprpb.Expr) { - identExpr := e.GetIdentExpr() +func (c *checker) checkIdent(e ast.Expr) { + identName := e.AsIdent() // Check to see if the identifier is declared. - if ident := c.env.LookupIdent(identExpr.GetName()); ident != nil { + if ident := c.env.LookupIdent(identName); ident != nil { c.setType(e, ident.Type()) c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value())) // Overwrite the identifier with its fully qualified name. - identExpr.Name = ident.Name() + e.SetKindCase(c.NewIdent(e.ID(), ident.Name())) return } c.setType(e, types.ErrorType) - c.errors.undeclaredReference(e.GetId(), c.location(e), c.env.container.Name(), identExpr.GetName()) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), identName) } -func (c *checker) checkSelect(e *exprpb.Expr) { - sel := e.GetSelectExpr() +func (c *checker) checkSelect(e ast.Expr) { + sel := e.AsSelect() // Before traversing down the tree, try to interpret as qualified name. qname, found := containers.ToQualifiedName(e) if found { @@ -170,31 +130,26 @@ func (c *checker) checkSelect(e *exprpb.Expr) { // variable name. c.setType(e, ident.Type()) c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value())) - identName := ident.Name() - e.ExprKind = &exprpb.Expr_IdentExpr{ - IdentExpr: &exprpb.Expr_Ident{ - Name: identName, - }, - } + e.SetKindCase(c.NewIdent(e.ID(), ident.Name())) return } } - resultType := c.checkSelectField(e, sel.GetOperand(), sel.GetField(), false) - if sel.TestOnly { + resultType := c.checkSelectField(e, sel.Operand(), sel.FieldName(), false) + if sel.IsTestOnly() { resultType = types.BoolType } c.setType(e, substitute(c.mappings, resultType, false)) } -func (c *checker) checkOptSelect(e *exprpb.Expr) { +func (c *checker) checkOptSelect(e ast.Expr) { // Collect metadata related to the opt select call packaged by the parser. - call := e.GetCallExpr() - operand := call.GetArgs()[0] - field := call.GetArgs()[1] + call := e.AsCall() + operand := call.Args()[0] + field := call.Args()[1] fieldName, isString := maybeUnwrapString(field) if !isString { - c.errors.notAnOptionalFieldSelection(field.GetId(), c.location(field), field) + c.errors.notAnOptionalFieldSelection(field.ID(), c.location(field), field) return } @@ -204,7 +159,7 @@ func (c *checker) checkOptSelect(e *exprpb.Expr) { c.setReference(e, ast.NewFunctionReference("select_optional_field")) } -func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, optional bool) *types.Type { +func (c *checker) checkSelectField(e, operand ast.Expr, field string, optional bool) *types.Type { // Interpret as field selection, first traversing down the operand. 
c.check(operand) operandType := substitute(c.mappings, c.getType(operand), false) @@ -222,7 +177,7 @@ func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, option // Objects yield their field type declaration as the selection result type, but only if // the field is defined. messageType := targetType - if fieldType, found := c.lookupFieldType(e.GetId(), messageType.TypeName(), field); found { + if fieldType, found := c.lookupFieldType(e.ID(), messageType.TypeName(), field); found { resultType = fieldType } case types.TypeParamKind: @@ -236,7 +191,7 @@ func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, option // Dynamic / error values are treated as DYN type. Errors are handled this way as well // in order to allow forward progress on the check. if !isDynOrError(targetType) { - c.errors.typeDoesNotSupportFieldSelection(e.GetId(), c.location(e), targetType) + c.errors.typeDoesNotSupportFieldSelection(e.ID(), c.location(e), targetType) } resultType = types.DynType } @@ -248,35 +203,34 @@ func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, option return resultType } -func (c *checker) checkCall(e *exprpb.Expr) { +func (c *checker) checkCall(e ast.Expr) { // Note: similar logic exists within the `interpreter/planner.go`. If making changes here // please consider the impact on planner.go and consolidate implementations or mirror code // as appropriate. - call := e.GetCallExpr() - fnName := call.GetFunction() + call := e.AsCall() + fnName := call.FunctionName() if fnName == operators.OptSelect { c.checkOptSelect(e) return } - args := call.GetArgs() + args := call.Args() // Traverse arguments. for _, arg := range args { c.check(arg) } - target := call.GetTarget() // Regular static call with simple name. - if target == nil { + if !call.IsMemberFunction() { // Check for the existence of the function. fn := c.env.LookupFunction(fnName) if fn == nil { - c.errors.undeclaredReference(e.GetId(), c.location(e), c.env.container.Name(), fnName) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName) c.setType(e, types.ErrorType) return } // Overwrite the function name with its fully qualified resolved name. - call.Function = fn.Name() + e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...)) // Check to see whether the overload resolves. c.resolveOverloadOrError(e, fn, nil, args) return @@ -287,6 +241,7 @@ func (c *checker) checkCall(e *exprpb.Expr) { // target a.b. // // Check whether the target is a namespaced function name. + target := call.Target() qualifiedPrefix, maybeQualified := containers.ToQualifiedName(target) if maybeQualified { maybeQualifiedName := qualifiedPrefix + "." + fnName @@ -295,15 +250,14 @@ func (c *checker) checkCall(e *exprpb.Expr) { // The function name is namespaced and so preserving the target operand would // be an inaccurate representation of the desired evaluation behavior. // Overwrite with fully-qualified resolved function name sans receiver target. - call.Target = nil - call.Function = fn.Name() + e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...)) c.resolveOverloadOrError(e, fn, nil, args) return } } // Regular instance call. - c.check(call.Target) + c.check(target) fn := c.env.LookupFunction(fnName) // Function found, attempt overload resolution. if fn != nil { @@ -312,11 +266,11 @@ func (c *checker) checkCall(e *exprpb.Expr) { } // Function name not declared, record error. 
c.setType(e, types.ErrorType) - c.errors.undeclaredReference(e.GetId(), c.location(e), c.env.container.Name(), fnName) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName) } func (c *checker) resolveOverloadOrError( - e *exprpb.Expr, fn *decls.FunctionDecl, target *exprpb.Expr, args []*exprpb.Expr) { + e ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) { // Attempt to resolve the overload. resolution := c.resolveOverload(e, fn, target, args) // No such overload, error noted in the resolveOverload call, type recorded here. @@ -330,7 +284,7 @@ func (c *checker) resolveOverloadOrError( } func (c *checker) resolveOverload( - call *exprpb.Expr, fn *decls.FunctionDecl, target *exprpb.Expr, args []*exprpb.Expr) *overloadResolution { + call ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) *overloadResolution { var argTypes []*types.Type if target != nil { @@ -362,8 +316,8 @@ func (c *checker) resolveOverload( for i, argType := range argTypes { if !c.isAssignable(argType, types.BoolType) { c.errors.typeMismatch( - args[i].GetId(), - c.locationByID(args[i].GetId()), + args[i].ID(), + c.locationByID(args[i].ID()), types.BoolType, argType) resultType = types.ErrorType @@ -408,29 +362,29 @@ func (c *checker) resolveOverload( for i, argType := range argTypes { argTypes[i] = substitute(c.mappings, argType, true) } - c.errors.noMatchingOverload(call.GetId(), c.location(call), fn.Name(), argTypes, target != nil) + c.errors.noMatchingOverload(call.ID(), c.location(call), fn.Name(), argTypes, target != nil) return nil } return newResolution(checkedRef, resultType) } -func (c *checker) checkCreateList(e *exprpb.Expr) { - create := e.GetListExpr() +func (c *checker) checkCreateList(e ast.Expr) { + create := e.AsList() var elemsType *types.Type - optionalIndices := create.GetOptionalIndices() + optionalIndices := create.OptionalIndices() optionals := make(map[int32]bool, len(optionalIndices)) for _, optInd := range optionalIndices { optionals[optInd] = true } - for i, e := range create.GetElements() { + for i, e := range create.Elements() { c.check(e) elemType := c.getType(e) if optionals[int32(i)] { var isOptional bool elemType, isOptional = maybeUnwrapOptional(elemType) if !isOptional && !isDyn(elemType) { - c.errors.typeMismatch(e.GetId(), c.location(e), types.NewOptionalType(elemType), elemType) + c.errors.typeMismatch(e.ID(), c.location(e), types.NewOptionalType(elemType), elemType) } } elemsType = c.joinTypes(e, elemsType, elemType) @@ -442,32 +396,24 @@ func (c *checker) checkCreateList(e *exprpb.Expr) { c.setType(e, types.NewListType(elemsType)) } -func (c *checker) checkCreateStruct(e *exprpb.Expr) { - str := e.GetStructExpr() - if str.GetMessageName() != "" { - c.checkCreateMessage(e) - } else { - c.checkCreateMap(e) - } -} - -func (c *checker) checkCreateMap(e *exprpb.Expr) { - mapVal := e.GetStructExpr() +func (c *checker) checkCreateMap(e ast.Expr) { + mapVal := e.AsMap() var mapKeyType *types.Type var mapValueType *types.Type - for _, ent := range mapVal.GetEntries() { - key := ent.GetMapKey() + for _, e := range mapVal.Entries() { + entry := e.AsMapEntry() + key := entry.Key() c.check(key) mapKeyType = c.joinTypes(key, mapKeyType, c.getType(key)) - val := ent.GetValue() + val := entry.Value() c.check(val) valType := c.getType(val) - if ent.GetOptionalEntry() { + if entry.IsOptional() { var isOptional bool valType, isOptional = maybeUnwrapOptional(valType) if !isOptional && !isDyn(valType) { - c.errors.typeMismatch(val.GetId(), 
c.location(val), types.NewOptionalType(valType), valType) + c.errors.typeMismatch(val.ID(), c.location(val), types.NewOptionalType(valType), valType) } } mapValueType = c.joinTypes(val, mapValueType, valType) @@ -480,25 +426,28 @@ func (c *checker) checkCreateMap(e *exprpb.Expr) { c.setType(e, types.NewMapType(mapKeyType, mapValueType)) } -func (c *checker) checkCreateMessage(e *exprpb.Expr) { - msgVal := e.GetStructExpr() +func (c *checker) checkCreateStruct(e ast.Expr) { + msgVal := e.AsStruct() // Determine the type of the message. resultType := types.ErrorType - ident := c.env.LookupIdent(msgVal.GetMessageName()) + ident := c.env.LookupIdent(msgVal.TypeName()) if ident == nil { c.errors.undeclaredReference( - e.GetId(), c.location(e), c.env.container.Name(), msgVal.GetMessageName()) + e.ID(), c.location(e), c.env.container.Name(), msgVal.TypeName()) c.setType(e, types.ErrorType) return } // Ensure the type name is fully qualified in the AST. typeName := ident.Name() - msgVal.MessageName = typeName - c.setReference(e, ast.NewIdentReference(ident.Name(), nil)) + if msgVal.TypeName() != typeName { + e.SetKindCase(c.NewStruct(e.ID(), typeName, msgVal.Fields())) + msgVal = e.AsStruct() + } + c.setReference(e, ast.NewIdentReference(typeName, nil)) identKind := ident.Type().Kind() if identKind != types.ErrorKind { if identKind != types.TypeKind { - c.errors.notAType(e.GetId(), c.location(e), ident.Type().DeclaredTypeName()) + c.errors.notAType(e.ID(), c.location(e), ident.Type().DeclaredTypeName()) } else { resultType = ident.Type().Parameters()[0] // Backwards compatibility test between well-known types and message types @@ -509,7 +458,7 @@ func (c *checker) checkCreateMessage(e *exprpb.Expr) { } else if resultType.Kind() == types.StructKind { typeName = resultType.DeclaredTypeName() } else { - c.errors.notAMessageType(e.GetId(), c.location(e), resultType.DeclaredTypeName()) + c.errors.notAMessageType(e.ID(), c.location(e), resultType.DeclaredTypeName()) resultType = types.ErrorType } } @@ -517,37 +466,38 @@ func (c *checker) checkCreateMessage(e *exprpb.Expr) { c.setType(e, resultType) // Check the field initializers. 
- for _, ent := range msgVal.GetEntries() { - field := ent.GetFieldKey() - value := ent.GetValue() + for _, f := range msgVal.Fields() { + field := f.AsStructField() + fieldName := field.Name() + value := field.Value() c.check(value) fieldType := types.ErrorType - ft, found := c.lookupFieldType(ent.GetId(), typeName, field) + ft, found := c.lookupFieldType(f.ID(), typeName, fieldName) if found { fieldType = ft } valType := c.getType(value) - if ent.GetOptionalEntry() { + if field.IsOptional() { var isOptional bool valType, isOptional = maybeUnwrapOptional(valType) if !isOptional && !isDyn(valType) { - c.errors.typeMismatch(value.GetId(), c.location(value), types.NewOptionalType(valType), valType) + c.errors.typeMismatch(value.ID(), c.location(value), types.NewOptionalType(valType), valType) } } if !c.isAssignable(fieldType, valType) { - c.errors.fieldTypeMismatch(ent.GetId(), c.locationByID(ent.GetId()), field, fieldType, valType) + c.errors.fieldTypeMismatch(f.ID(), c.locationByID(f.ID()), fieldName, fieldType, valType) } } } -func (c *checker) checkComprehension(e *exprpb.Expr) { - comp := e.GetComprehensionExpr() - c.check(comp.GetIterRange()) - c.check(comp.GetAccuInit()) - accuType := c.getType(comp.GetAccuInit()) - rangeType := substitute(c.mappings, c.getType(comp.GetIterRange()), false) +func (c *checker) checkComprehension(e ast.Expr) { + comp := e.AsComprehension() + c.check(comp.IterRange()) + c.check(comp.AccuInit()) + accuType := c.getType(comp.AccuInit()) + rangeType := substitute(c.mappings, c.getType(comp.IterRange()), false) var varType *types.Type switch rangeType.Kind() { @@ -564,32 +514,32 @@ func (c *checker) checkComprehension(e *exprpb.Expr) { // Set the range iteration variable to type DYN as well. varType = types.DynType default: - c.errors.notAComprehensionRange(comp.GetIterRange().GetId(), c.location(comp.GetIterRange()), rangeType) + c.errors.notAComprehensionRange(comp.IterRange().ID(), c.location(comp.IterRange()), rangeType) varType = types.ErrorType } // Create a scope for the comprehension since it has a local accumulation variable. // This scope will contain the accumulation variable used to compute the result. c.env = c.env.enterScope() - c.env.AddIdents(decls.NewVariable(comp.GetAccuVar(), accuType)) + c.env.AddIdents(decls.NewVariable(comp.AccuVar(), accuType)) // Create a block scope for the loop. c.env = c.env.enterScope() - c.env.AddIdents(decls.NewVariable(comp.GetIterVar(), varType)) + c.env.AddIdents(decls.NewVariable(comp.IterVar(), varType)) // Check the variable references in the condition and step. - c.check(comp.GetLoopCondition()) - c.assertType(comp.GetLoopCondition(), types.BoolType) - c.check(comp.GetLoopStep()) - c.assertType(comp.GetLoopStep(), accuType) + c.check(comp.LoopCondition()) + c.assertType(comp.LoopCondition(), types.BoolType) + c.check(comp.LoopStep()) + c.assertType(comp.LoopStep(), accuType) // Exit the loop's block scope before checking the result. c.env = c.env.exitScope() - c.check(comp.GetResult()) + c.check(comp.Result()) // Exit the comprehension scope. c.env = c.env.exitScope() - c.setType(e, substitute(c.mappings, c.getType(comp.GetResult()), false)) + c.setType(e, substitute(c.mappings, c.getType(comp.Result()), false)) } // Checks compatibility of joined types, and returns the most general common type. 
-func (c *checker) joinTypes(e *exprpb.Expr, previous, current *types.Type) *types.Type { +func (c *checker) joinTypes(e ast.Expr, previous, current *types.Type) *types.Type { if previous == nil { return current } @@ -599,7 +549,7 @@ func (c *checker) joinTypes(e *exprpb.Expr, previous, current *types.Type) *type if c.dynAggregateLiteralElementTypesEnabled() { return types.DynType } - c.errors.typeMismatch(e.GetId(), c.location(e), previous, current) + c.errors.typeMismatch(e.ID(), c.location(e), previous, current) return types.ErrorType } @@ -633,41 +583,41 @@ func (c *checker) isAssignableList(l1, l2 []*types.Type) bool { return false } -func maybeUnwrapString(e *exprpb.Expr) (string, bool) { - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - literal := e.GetConstExpr() - switch literal.GetConstantKind().(type) { - case *exprpb.Constant_StringValue: - return literal.GetStringValue(), true +func maybeUnwrapString(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.LiteralKind: + literal := e.AsLiteral() + switch v := literal.(type) { + case types.String: + return string(v), true } } return "", false } -func (c *checker) setType(e *exprpb.Expr, t *types.Type) { - if old, found := c.types[e.GetId()]; found && !old.IsExactType(t) { - c.errors.incompatibleType(e.GetId(), c.location(e), e, old, t) +func (c *checker) setType(e ast.Expr, t *types.Type) { + if old, found := c.TypeMap()[e.ID()]; found && !old.IsExactType(t) { + c.errors.incompatibleType(e.ID(), c.location(e), e, old, t) return } - c.types[e.GetId()] = t + c.SetType(e.ID(), t) } -func (c *checker) getType(e *exprpb.Expr) *types.Type { - return c.types[e.GetId()] +func (c *checker) getType(e ast.Expr) *types.Type { + return c.TypeMap()[e.ID()] } -func (c *checker) setReference(e *exprpb.Expr, r *ast.ReferenceInfo) { - if old, found := c.references[e.GetId()]; found && !old.Equals(r) { - c.errors.referenceRedefinition(e.GetId(), c.location(e), e, old, r) +func (c *checker) setReference(e ast.Expr, r *ast.ReferenceInfo) { + if old, found := c.ReferenceMap()[e.ID()]; found && !old.Equals(r) { + c.errors.referenceRedefinition(e.ID(), c.location(e), e, old, r) return } - c.references[e.GetId()] = r + c.SetReference(e.ID(), r) } -func (c *checker) assertType(e *exprpb.Expr, t *types.Type) { +func (c *checker) assertType(e ast.Expr, t *types.Type) { if !c.isAssignable(t, c.getType(e)) { - c.errors.typeMismatch(e.GetId(), c.location(e), t, c.getType(e)) + c.errors.typeMismatch(e.ID(), c.location(e), t, c.getType(e)) } } @@ -683,26 +633,12 @@ func newResolution(r *ast.ReferenceInfo, t *types.Type) *overloadResolution { } } -func (c *checker) location(e *exprpb.Expr) common.Location { - return c.locationByID(e.GetId()) +func (c *checker) location(e ast.Expr) common.Location { + return c.locationByID(e.ID()) } func (c *checker) locationByID(id int64) common.Location { - positions := c.sourceInfo.GetPositions() - var line = 1 - if offset, found := positions[id]; found { - col := int(offset) - for _, lineOffset := range c.sourceInfo.GetLineOffsets() { - if lineOffset < offset { - line++ - col = int(offset - lineOffset) - } else { - break - } - } - return common.NewLocation(line, col) - } - return common.NoLocation + return c.SourceInfo().GetStartLocation(id) } func (c *checker) lookupFieldType(exprID int64, structType, fieldName string) (*types.Type, bool) { diff --git a/vendor/github.com/google/cel-go/checker/cost.go b/vendor/github.com/google/cel-go/checker/cost.go index d6a799a8d..b6109d918 100644 --- 
a/vendor/github.com/google/cel-go/checker/cost.go +++ b/vendor/github.com/google/cel-go/checker/cost.go @@ -22,8 +22,6 @@ import ( "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // WARNING: Any changes to cost calculations in this file require a corresponding change in interpreter/runtimecost.go @@ -58,7 +56,7 @@ type AstNode interface { // Type returns the deduced type of the AstNode. Type() *types.Type // Expr returns the expression of the AstNode. - Expr() *exprpb.Expr + Expr() ast.Expr // ComputedSize returns a size estimate of the AstNode derived from information available in the CEL expression. // For constants and inline list and map declarations, the exact size is returned. For concatenated list, strings // and bytes, the size is derived from the size estimates of the operands. nil is returned if there is no @@ -69,7 +67,7 @@ type AstNode interface { type astNode struct { path []string t *types.Type - expr *exprpb.Expr + expr ast.Expr derivedSize *SizeEstimate } @@ -81,7 +79,7 @@ func (e astNode) Type() *types.Type { return e.t } -func (e astNode) Expr() *exprpb.Expr { +func (e astNode) Expr() ast.Expr { return e.expr } @@ -90,29 +88,27 @@ func (e astNode) ComputedSize() *SizeEstimate { return e.derivedSize } var v uint64 - switch ek := e.expr.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - switch ck := ek.ConstExpr.GetConstantKind().(type) { - case *exprpb.Constant_StringValue: + switch e.expr.Kind() { + case ast.LiteralKind: + switch ck := e.expr.AsLiteral().(type) { + case types.String: // converting to runes here is an O(n) operation, but // this is consistent with how size is computed at runtime, // and how the language definition defines string size - v = uint64(len([]rune(ck.StringValue))) - case *exprpb.Constant_BytesValue: - v = uint64(len(ck.BytesValue)) - case *exprpb.Constant_BoolValue, *exprpb.Constant_DoubleValue, *exprpb.Constant_DurationValue, - *exprpb.Constant_Int64Value, *exprpb.Constant_TimestampValue, *exprpb.Constant_Uint64Value, - *exprpb.Constant_NullValue: + v = uint64(len([]rune(ck))) + case types.Bytes: + v = uint64(len(ck)) + case types.Bool, types.Double, types.Duration, + types.Int, types.Timestamp, types.Uint, + types.Null: v = uint64(1) default: return nil } - case *exprpb.Expr_ListExpr: - v = uint64(len(ek.ListExpr.GetElements())) - case *exprpb.Expr_StructExpr: - if ek.StructExpr.GetMessageName() == "" { - v = uint64(len(ek.StructExpr.GetEntries())) - } + case ast.ListKind: + v = uint64(e.expr.AsList().Size()) + case ast.MapKind: + v = uint64(e.expr.AsMap().Size()) default: return nil } @@ -261,7 +257,7 @@ type coster struct { iterRanges iterRangeScopes // computedSizes tracks the computed sizes of call results. computedSizes map[int64]SizeEstimate - checkedAST *ast.CheckedAST + checkedAST *ast.AST estimator CostEstimator // presenceTestCost will either be a zero or one based on whether has() macros count against cost computations. presenceTestCost CostEstimate @@ -270,8 +266,8 @@ type coster struct { // Use a stack of iterVar -> iterRange Expr Ids to handle shadowed variable names. 
type iterRangeScopes map[string][]int64 -func (vs iterRangeScopes) push(varName string, expr *exprpb.Expr) { - vs[varName] = append(vs[varName], expr.GetId()) +func (vs iterRangeScopes) push(varName string, expr ast.Expr) { + vs[varName] = append(vs[varName], expr.ID()) } func (vs iterRangeScopes) pop(varName string) { @@ -304,9 +300,9 @@ func PresenceTestHasCost(hasCost bool) CostOption { } // Cost estimates the cost of the parsed and type checked CEL expression. -func Cost(checker *ast.CheckedAST, estimator CostEstimator, opts ...CostOption) (CostEstimate, error) { +func Cost(checked *ast.AST, estimator CostEstimator, opts ...CostOption) (CostEstimate, error) { c := &coster{ - checkedAST: checker, + checkedAST: checked, estimator: estimator, exprPath: map[int64][]string{}, iterRanges: map[string][]int64{}, @@ -319,28 +315,30 @@ func Cost(checker *ast.CheckedAST, estimator CostEstimator, opts ...CostOption) return CostEstimate{}, err } } - return c.cost(checker.Expr), nil + return c.cost(checked.Expr()), nil } -func (c *coster) cost(e *exprpb.Expr) CostEstimate { +func (c *coster) cost(e ast.Expr) CostEstimate { if e == nil { return CostEstimate{} } var cost CostEstimate - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: + switch e.Kind() { + case ast.LiteralKind: cost = constCost - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: cost = c.costIdent(e) - case *exprpb.Expr_SelectExpr: + case ast.SelectKind: cost = c.costSelect(e) - case *exprpb.Expr_CallExpr: + case ast.CallKind: cost = c.costCall(e) - case *exprpb.Expr_ListExpr: + case ast.ListKind: cost = c.costCreateList(e) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + cost = c.costCreateMap(e) + case ast.StructKind: cost = c.costCreateStruct(e) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: cost = c.costComprehension(e) default: return CostEstimate{} @@ -348,53 +346,51 @@ func (c *coster) cost(e *exprpb.Expr) CostEstimate { return cost } -func (c *coster) costIdent(e *exprpb.Expr) CostEstimate { - identExpr := e.GetIdentExpr() - +func (c *coster) costIdent(e ast.Expr) CostEstimate { + identName := e.AsIdent() // build and track the field path - if iterRange, ok := c.iterRanges.peek(identExpr.GetName()); ok { - switch c.checkedAST.TypeMap[iterRange].Kind() { + if iterRange, ok := c.iterRanges.peek(identName); ok { + switch c.checkedAST.GetType(iterRange).Kind() { case types.ListKind: c.addPath(e, append(c.exprPath[iterRange], "@items")) case types.MapKind: c.addPath(e, append(c.exprPath[iterRange], "@keys")) } } else { - c.addPath(e, []string{identExpr.GetName()}) + c.addPath(e, []string{identName}) } return selectAndIdentCost } -func (c *coster) costSelect(e *exprpb.Expr) CostEstimate { - sel := e.GetSelectExpr() +func (c *coster) costSelect(e ast.Expr) CostEstimate { + sel := e.AsSelect() var sum CostEstimate - if sel.GetTestOnly() { + if sel.IsTestOnly() { // recurse, but do not add any cost // this is equivalent to how evalTestOnly increments the runtime cost counter // but does not add any additional cost for the qualifier, except here we do // the reverse (ident adds cost) sum = sum.Add(c.presenceTestCost) - sum = sum.Add(c.cost(sel.GetOperand())) + sum = sum.Add(c.cost(sel.Operand())) return sum } - sum = sum.Add(c.cost(sel.GetOperand())) - targetType := c.getType(sel.GetOperand()) + sum = sum.Add(c.cost(sel.Operand())) + targetType := c.getType(sel.Operand()) switch targetType.Kind() { case types.MapKind, types.StructKind, types.TypeParamKind: sum = sum.Add(selectAndIdentCost) } // 
build and track the field path - c.addPath(e, append(c.getPath(sel.GetOperand()), sel.GetField())) + c.addPath(e, append(c.getPath(sel.Operand()), sel.FieldName())) return sum } -func (c *coster) costCall(e *exprpb.Expr) CostEstimate { - call := e.GetCallExpr() - target := call.GetTarget() - args := call.GetArgs() +func (c *coster) costCall(e ast.Expr) CostEstimate { + call := e.AsCall() + args := call.Args() var sum CostEstimate @@ -405,22 +401,20 @@ func (c *coster) costCall(e *exprpb.Expr) CostEstimate { argTypes[i] = c.newAstNode(arg) } - ref := c.checkedAST.ReferenceMap[e.GetId()] - if ref == nil || len(ref.OverloadIDs) == 0 { + overloadIDs := c.checkedAST.GetOverloadIDs(e.ID()) + if len(overloadIDs) == 0 { return CostEstimate{} } var targetType AstNode - if target != nil { - if call.Target != nil { - sum = sum.Add(c.cost(call.GetTarget())) - targetType = c.newAstNode(call.GetTarget()) - } + if call.IsMemberFunction() { + sum = sum.Add(c.cost(call.Target())) + targetType = c.newAstNode(call.Target()) } // Pick a cost estimate range that covers all the overload cost estimation ranges fnCost := CostEstimate{Min: uint64(math.MaxUint64), Max: 0} var resultSize *SizeEstimate - for _, overload := range ref.OverloadIDs { - overloadCost := c.functionCost(call.GetFunction(), overload, &targetType, argTypes, argCosts) + for _, overload := range overloadIDs { + overloadCost := c.functionCost(call.FunctionName(), overload, &targetType, argTypes, argCosts) fnCost = fnCost.Union(overloadCost.CostEstimate) if overloadCost.ResultSize != nil { if resultSize == nil { @@ -443,62 +437,54 @@ func (c *coster) costCall(e *exprpb.Expr) CostEstimate { } } if resultSize != nil { - c.computedSizes[e.GetId()] = *resultSize + c.computedSizes[e.ID()] = *resultSize } return sum.Add(fnCost) } -func (c *coster) costCreateList(e *exprpb.Expr) CostEstimate { - create := e.GetListExpr() +func (c *coster) costCreateList(e ast.Expr) CostEstimate { + create := e.AsList() var sum CostEstimate - for _, e := range create.GetElements() { + for _, e := range create.Elements() { sum = sum.Add(c.cost(e)) } return sum.Add(createListBaseCost) } -func (c *coster) costCreateStruct(e *exprpb.Expr) CostEstimate { - str := e.GetStructExpr() - if str.MessageName != "" { - return c.costCreateMessage(e) - } - return c.costCreateMap(e) -} - -func (c *coster) costCreateMap(e *exprpb.Expr) CostEstimate { - mapVal := e.GetStructExpr() +func (c *coster) costCreateMap(e ast.Expr) CostEstimate { + mapVal := e.AsMap() var sum CostEstimate - for _, ent := range mapVal.GetEntries() { - key := ent.GetMapKey() - sum = sum.Add(c.cost(key)) - - sum = sum.Add(c.cost(ent.GetValue())) + for _, ent := range mapVal.Entries() { + entry := ent.AsMapEntry() + sum = sum.Add(c.cost(entry.Key())) + sum = sum.Add(c.cost(entry.Value())) } return sum.Add(createMapBaseCost) } -func (c *coster) costCreateMessage(e *exprpb.Expr) CostEstimate { - msgVal := e.GetStructExpr() +func (c *coster) costCreateStruct(e ast.Expr) CostEstimate { + msgVal := e.AsStruct() var sum CostEstimate - for _, ent := range msgVal.GetEntries() { - sum = sum.Add(c.cost(ent.GetValue())) + for _, ent := range msgVal.Fields() { + field := ent.AsStructField() + sum = sum.Add(c.cost(field.Value())) } return sum.Add(createMessageBaseCost) } -func (c *coster) costComprehension(e *exprpb.Expr) CostEstimate { - comp := e.GetComprehensionExpr() +func (c *coster) costComprehension(e ast.Expr) CostEstimate { + comp := e.AsComprehension() var sum CostEstimate - sum = sum.Add(c.cost(comp.GetIterRange())) - 
sum = sum.Add(c.cost(comp.GetAccuInit())) + sum = sum.Add(c.cost(comp.IterRange())) + sum = sum.Add(c.cost(comp.AccuInit())) // Track the iterRange of each IterVar for field path construction - c.iterRanges.push(comp.GetIterVar(), comp.GetIterRange()) - loopCost := c.cost(comp.GetLoopCondition()) - stepCost := c.cost(comp.GetLoopStep()) - c.iterRanges.pop(comp.GetIterVar()) - sum = sum.Add(c.cost(comp.Result)) - rangeCnt := c.sizeEstimate(c.newAstNode(comp.GetIterRange())) + c.iterRanges.push(comp.IterVar(), comp.IterRange()) + loopCost := c.cost(comp.LoopCondition()) + stepCost := c.cost(comp.LoopStep()) + c.iterRanges.pop(comp.IterVar()) + sum = sum.Add(c.cost(comp.Result())) + rangeCnt := c.sizeEstimate(c.newAstNode(comp.IterRange())) rangeCost := rangeCnt.MultiplyByCost(stepCost.Add(loopCost)) sum = sum.Add(rangeCost) @@ -535,14 +521,34 @@ func (c *coster) functionCost(function, overloadID string, target *AstNode, args if est := c.estimator.EstimateCallCost(function, overloadID, target, args); est != nil { callEst := *est - return CallEstimate{CostEstimate: callEst.Add(argCostSum())} + return CallEstimate{CostEstimate: callEst.Add(argCostSum()), ResultSize: est.ResultSize} } switch overloadID { // O(n) functions - case overloads.StartsWithString, overloads.EndsWithString, overloads.StringToBytes, overloads.BytesToString, overloads.ExtQuoteString, overloads.ExtFormatString: - if overloadID == overloads.ExtFormatString { + case overloads.ExtFormatString: + if target != nil { + // ResultSize not calculated because we can't bound the max size. return CallEstimate{CostEstimate: c.sizeEstimate(*target).MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum())} } + case overloads.StringToBytes: + if len(args) == 1 { + sz := c.sizeEstimate(args[0]) + // ResultSize max is when each char converts to 4 bytes. + return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min, Max: sz.Max * 4}} + } + case overloads.BytesToString: + if len(args) == 1 { + sz := c.sizeEstimate(args[0]) + // ResultSize min is when 4 bytes convert to 1 char. + return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min / 4, Max: sz.Max}} + } + case overloads.ExtQuoteString: + if len(args) == 1 { + sz := c.sizeEstimate(args[0]) + // ResultSize max is when each char is escaped. 2 quote chars always added. 
+ return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min + 2, Max: sz.Max*2 + 2}} + } + case overloads.StartsWithString, overloads.EndsWithString: if len(args) == 1 { return CallEstimate{CostEstimate: c.sizeEstimate(args[0]).MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum())} } @@ -623,26 +629,26 @@ func (c *coster) functionCost(function, overloadID string, target *AstNode, args return CallEstimate{CostEstimate: CostEstimate{Min: 1, Max: 1}.Add(argCostSum())} } -func (c *coster) getType(e *exprpb.Expr) *types.Type { - return c.checkedAST.TypeMap[e.GetId()] +func (c *coster) getType(e ast.Expr) *types.Type { + return c.checkedAST.GetType(e.ID()) } -func (c *coster) getPath(e *exprpb.Expr) []string { - return c.exprPath[e.GetId()] +func (c *coster) getPath(e ast.Expr) []string { + return c.exprPath[e.ID()] } -func (c *coster) addPath(e *exprpb.Expr, path []string) { - c.exprPath[e.GetId()] = path +func (c *coster) addPath(e ast.Expr, path []string) { + c.exprPath[e.ID()] = path } -func (c *coster) newAstNode(e *exprpb.Expr) *astNode { +func (c *coster) newAstNode(e ast.Expr) *astNode { path := c.getPath(e) if len(path) > 0 && path[0] == parser.AccumulatorName { // only provide paths to root vars; omit accumulator vars path = nil } var derivedSize *SizeEstimate - if size, ok := c.computedSizes[e.GetId()]; ok { + if size, ok := c.computedSizes[e.ID()]; ok { derivedSize = &size } return &astNode{ diff --git a/vendor/github.com/google/cel-go/checker/errors.go b/vendor/github.com/google/cel-go/checker/errors.go index c2b96498d..8b3bf0b8b 100644 --- a/vendor/github.com/google/cel-go/checker/errors.go +++ b/vendor/github.com/google/cel-go/checker/errors.go @@ -15,13 +15,9 @@ package checker import ( - "reflect" - "github.com/google/cel-go/common" "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/types" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // typeErrors is a specialization of Errors. 
@@ -34,9 +30,9 @@ func (e *typeErrors) fieldTypeMismatch(id int64, l common.Location, name string, name, FormatCELType(field), FormatCELType(value)) } -func (e *typeErrors) incompatibleType(id int64, l common.Location, ex *exprpb.Expr, prev, next *types.Type) { +func (e *typeErrors) incompatibleType(id int64, l common.Location, ex ast.Expr, prev, next *types.Type) { e.errs.ReportErrorAtID(id, l, - "incompatible type already exists for expression: %v(%d) old:%v, new:%v", ex, ex.GetId(), prev, next) + "incompatible type already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next) } func (e *typeErrors) noMatchingOverload(id int64, l common.Location, name string, args []*types.Type, isInstance bool) { @@ -49,7 +45,7 @@ func (e *typeErrors) notAComprehensionRange(id int64, l common.Location, t *type FormatCELType(t)) } -func (e *typeErrors) notAnOptionalFieldSelection(id int64, l common.Location, field *exprpb.Expr) { +func (e *typeErrors) notAnOptionalFieldSelection(id int64, l common.Location, field ast.Expr) { e.errs.ReportErrorAtID(id, l, "unsupported optional field selection: %v", field) } @@ -61,9 +57,9 @@ func (e *typeErrors) notAMessageType(id int64, l common.Location, typeName strin e.errs.ReportErrorAtID(id, l, "'%s' is not a message type", typeName) } -func (e *typeErrors) referenceRedefinition(id int64, l common.Location, ex *exprpb.Expr, prev, next *ast.ReferenceInfo) { +func (e *typeErrors) referenceRedefinition(id int64, l common.Location, ex ast.Expr, prev, next *ast.ReferenceInfo) { e.errs.ReportErrorAtID(id, l, - "reference already exists for expression: %v(%d) old:%v, new:%v", ex, ex.GetId(), prev, next) + "reference already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next) } func (e *typeErrors) typeDoesNotSupportFieldSelection(id int64, l common.Location, t *types.Type) { @@ -87,6 +83,6 @@ func (e *typeErrors) unexpectedFailedResolution(id int64, l common.Location, typ e.errs.ReportErrorAtID(id, l, "unexpected failed resolution of '%s'", typeName) } -func (e *typeErrors) unexpectedASTType(id int64, l common.Location, ex *exprpb.Expr) { - e.errs.ReportErrorAtID(id, l, "unrecognized ast type: %v", reflect.TypeOf(ex)) +func (e *typeErrors) unexpectedASTType(id int64, l common.Location, kind, typeName string) { + e.errs.ReportErrorAtID(id, l, "unexpected %s type: %v", kind, typeName) } diff --git a/vendor/github.com/google/cel-go/checker/printer.go b/vendor/github.com/google/cel-go/checker/printer.go index 15cba06ee..7a3984f02 100644 --- a/vendor/github.com/google/cel-go/checker/printer.go +++ b/vendor/github.com/google/cel-go/checker/printer.go @@ -17,40 +17,40 @@ package checker import ( "sort" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/debug" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) type semanticAdorner struct { - checks *exprpb.CheckedExpr + checked *ast.AST } var _ debug.Adorner = &semanticAdorner{} func (a *semanticAdorner) GetMetadata(elem any) string { result := "" - e, isExpr := elem.(*exprpb.Expr) + e, isExpr := elem.(ast.Expr) if !isExpr { return result } - t := a.checks.TypeMap[e.GetId()] + t := a.checked.TypeMap()[e.ID()] if t != nil { result += "~" - result += FormatCheckedType(t) + result += FormatCELType(t) } - switch e.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr, - *exprpb.Expr_CallExpr, - *exprpb.Expr_StructExpr, - *exprpb.Expr_SelectExpr: - if ref, found := a.checks.ReferenceMap[e.GetId()]; found { - if len(ref.GetOverloadId()) == 0 { + switch 
e.Kind() { + case ast.IdentKind, + ast.CallKind, + ast.ListKind, + ast.StructKind, + ast.SelectKind: + if ref, found := a.checked.ReferenceMap()[e.ID()]; found { + if len(ref.OverloadIDs) == 0 { result += "^" + ref.Name } else { - sort.Strings(ref.GetOverloadId()) - for i, overload := range ref.GetOverloadId() { + sort.Strings(ref.OverloadIDs) + for i, overload := range ref.OverloadIDs { if i == 0 { result += "^" } else { @@ -68,7 +68,7 @@ func (a *semanticAdorner) GetMetadata(elem any) string { // Print returns a string representation of the Expr message, // annotated with types from the CheckedExpr. The Expr must // be a sub-expression embedded in the CheckedExpr. -func Print(e *exprpb.Expr, checks *exprpb.CheckedExpr) string { - a := &semanticAdorner{checks: checks} +func Print(e ast.Expr, checked *ast.AST) string { + a := &semanticAdorner{checked: checked} return debug.ToAdornedDebugString(e, a) } diff --git a/vendor/github.com/google/cel-go/common/ast/BUILD.bazel b/vendor/github.com/google/cel-go/common/ast/BUILD.bazel index 7269cdff5..c92a0f179 100644 --- a/vendor/github.com/google/cel-go/common/ast/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/ast/BUILD.bazel @@ -5,7 +5,9 @@ package( "//cel:__subpackages__", "//checker:__subpackages__", "//common:__subpackages__", + "//ext:__subpackages__", "//interpreter:__subpackages__", + "//parser:__subpackages__", ], licenses = ["notice"], # Apache 2.0 ) @@ -14,10 +16,14 @@ go_library( name = "go_default_library", srcs = [ "ast.go", + "conversion.go", "expr.go", + "factory.go", + "navigable.go", ], importpath = "github.com/google/cel-go/common/ast", deps = [ + "//common:go_default_library", "//common/types:go_default_library", "//common/types/ref:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", @@ -29,7 +35,9 @@ go_test( name = "go_default_test", srcs = [ "ast_test.go", + "conversion_test.go", "expr_test.go", + "navigable_test.go", ], embed = [ ":go_default_library", @@ -48,5 +56,6 @@ go_test( "//test/proto3pb:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", + "@org_golang_google_protobuf//encoding/prototext:go_default_library", ], ) \ No newline at end of file diff --git a/vendor/github.com/google/cel-go/common/ast/ast.go b/vendor/github.com/google/cel-go/common/ast/ast.go index b3c150793..386514dd4 100644 --- a/vendor/github.com/google/cel-go/common/ast/ast.go +++ b/vendor/github.com/google/cel-go/common/ast/ast.go @@ -16,74 +16,353 @@ package ast import ( - "fmt" - + "github.com/google/cel-go/common" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" +) - structpb "google.golang.org/protobuf/types/known/structpb" +// AST contains a protobuf expression and source info along with CEL-native type and reference information. +type AST struct { + expr Expr + sourceInfo *SourceInfo + typeMap map[int64]*types.Type + refMap map[int64]*ReferenceInfo +} - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" -) +// Expr returns the root ast.Expr value in the AST. +func (a *AST) Expr() Expr { + if a == nil { + return nilExpr + } + return a.expr +} -// CheckedAST contains a protobuf expression and source info along with CEL-native type and reference information. 
-type CheckedAST struct { - Expr *exprpb.Expr - SourceInfo *exprpb.SourceInfo - TypeMap map[int64]*types.Type - ReferenceMap map[int64]*ReferenceInfo -} - -// CheckedASTToCheckedExpr converts a CheckedAST to a CheckedExpr protobouf. -func CheckedASTToCheckedExpr(ast *CheckedAST) (*exprpb.CheckedExpr, error) { - refMap := make(map[int64]*exprpb.Reference, len(ast.ReferenceMap)) - for id, ref := range ast.ReferenceMap { - r, err := ReferenceInfoToReferenceExpr(ref) - if err != nil { - return nil, err - } - refMap[id] = r +// SourceInfo returns the source metadata associated with the parse / type-check passes. +func (a *AST) SourceInfo() *SourceInfo { + if a == nil { + return nil + } + return a.sourceInfo +} + +// GetType returns the type for the expression at the given id, if one exists, else types.DynType. +func (a *AST) GetType(id int64) *types.Type { + if t, found := a.TypeMap()[id]; found { + return t + } + return types.DynType +} + +// SetType sets the type of the expression node at the given id. +func (a *AST) SetType(id int64, t *types.Type) { + if a == nil { + return + } + a.typeMap[id] = t +} + +// TypeMap returns the map of expression ids to type-checked types. +// +// If the AST is not type-checked, the map will be empty. +func (a *AST) TypeMap() map[int64]*types.Type { + if a == nil { + return map[int64]*types.Type{} + } + return a.typeMap +} + +// GetOverloadIDs returns the set of overload function names for a given expression id. +// +// If the expression id is not a function call, or the AST is not type-checked, the result will be empty. +func (a *AST) GetOverloadIDs(id int64) []string { + if ref, found := a.ReferenceMap()[id]; found { + return ref.OverloadIDs + } + return []string{} +} + +// ReferenceMap returns the map of expression id to identifier, constant, and function references. +func (a *AST) ReferenceMap() map[int64]*ReferenceInfo { + if a == nil { + return map[int64]*ReferenceInfo{} + } + return a.refMap +} + +// SetReference adds a reference to the checked AST type map. +func (a *AST) SetReference(id int64, r *ReferenceInfo) { + if a == nil { + return } - typeMap := make(map[int64]*exprpb.Type, len(ast.TypeMap)) - for id, typ := range ast.TypeMap { - t, err := types.TypeToExprType(typ) - if err != nil { - return nil, err + a.refMap[id] = r +} + +// IsChecked returns whether the AST is type-checked. +func (a *AST) IsChecked() bool { + return a != nil && len(a.TypeMap()) > 0 +} + +// NewAST creates a base AST instance with an ast.Expr and ast.SourceInfo value. +func NewAST(e Expr, sourceInfo *SourceInfo) *AST { + if e == nil { + e = nilExpr + } + return &AST{ + expr: e, + sourceInfo: sourceInfo, + typeMap: make(map[int64]*types.Type), + refMap: make(map[int64]*ReferenceInfo), + } +} + +// NewCheckedAST wraps an parsed AST and augments it with type and reference metadata. +func NewCheckedAST(parsed *AST, typeMap map[int64]*types.Type, refMap map[int64]*ReferenceInfo) *AST { + return &AST{ + expr: parsed.Expr(), + sourceInfo: parsed.SourceInfo(), + typeMap: typeMap, + refMap: refMap, + } +} + +// Copy creates a deep copy of the Expr and SourceInfo values in the input AST. +// +// Copies of the Expr value are generated using an internal default ExprFactory. 
+func Copy(a *AST) *AST { + if a == nil { + return nil + } + e := defaultFactory.CopyExpr(a.expr) + if !a.IsChecked() { + return NewAST(e, CopySourceInfo(a.SourceInfo())) + } + typesCopy := make(map[int64]*types.Type, len(a.typeMap)) + for id, t := range a.typeMap { + typesCopy[id] = t + } + refsCopy := make(map[int64]*ReferenceInfo, len(a.refMap)) + for id, r := range a.refMap { + refsCopy[id] = r + } + return NewCheckedAST(NewAST(e, CopySourceInfo(a.SourceInfo())), typesCopy, refsCopy) +} + +// MaxID returns the upper-bound, non-inclusive, of ids present within the AST's Expr value. +func MaxID(a *AST) int64 { + visitor := &maxIDVisitor{maxID: 1} + PostOrderVisit(a.Expr(), visitor) + return visitor.maxID + 1 +} + +// NewSourceInfo creates a simple SourceInfo object from an input common.Source value. +func NewSourceInfo(src common.Source) *SourceInfo { + var lineOffsets []int32 + var desc string + baseLine := int32(0) + baseCol := int32(0) + if src != nil { + desc = src.Description() + lineOffsets = src.LineOffsets() + // Determine whether the source metadata should be computed relative + // to a base line and column value. This can be determined by requesting + // the location for offset 0 from the source object. + if loc, found := src.OffsetLocation(0); found { + baseLine = int32(loc.Line()) - 1 + baseCol = int32(loc.Column()) } - typeMap[id] = t - } - return &exprpb.CheckedExpr{ - Expr: ast.Expr, - SourceInfo: ast.SourceInfo, - ReferenceMap: refMap, - TypeMap: typeMap, - }, nil -} - -// CheckedExprToCheckedAST converts a CheckedExpr protobuf to a CheckedAST instance. -func CheckedExprToCheckedAST(checked *exprpb.CheckedExpr) (*CheckedAST, error) { - refMap := make(map[int64]*ReferenceInfo, len(checked.GetReferenceMap())) - for id, ref := range checked.GetReferenceMap() { - r, err := ReferenceExprToReferenceInfo(ref) - if err != nil { - return nil, err + } + return &SourceInfo{ + desc: desc, + lines: lineOffsets, + baseLine: baseLine, + baseCol: baseCol, + offsetRanges: make(map[int64]OffsetRange), + macroCalls: make(map[int64]Expr), + } +} + +// CopySourceInfo creates a deep copy of the MacroCalls within the input SourceInfo. +// +// Copies of macro Expr values are generated using an internal default ExprFactory. +func CopySourceInfo(info *SourceInfo) *SourceInfo { + if info == nil { + return nil + } + rangesCopy := make(map[int64]OffsetRange, len(info.offsetRanges)) + for id, off := range info.offsetRanges { + rangesCopy[id] = off + } + callsCopy := make(map[int64]Expr, len(info.macroCalls)) + for id, call := range info.macroCalls { + callsCopy[id] = defaultFactory.CopyExpr(call) + } + return &SourceInfo{ + syntax: info.syntax, + desc: info.desc, + lines: info.lines, + offsetRanges: rangesCopy, + macroCalls: callsCopy, + } +} + +// SourceInfo records basic information about the expression as a textual input and +// as a parsed expression value. +type SourceInfo struct { + syntax string + desc string + lines []int32 + baseLine int32 + baseCol int32 + offsetRanges map[int64]OffsetRange + macroCalls map[int64]Expr +} + +// SyntaxVersion returns the syntax version associated with the text expression. +func (s *SourceInfo) SyntaxVersion() string { + if s == nil { + return "" + } + return s.syntax +} + +// Description provides information about where the expression came from. +func (s *SourceInfo) Description() string { + if s == nil { + return "" + } + return s.desc +} + +// LineOffsets returns a list of the 0-based character offsets in the input text where newlines appear. 
+func (s *SourceInfo) LineOffsets() []int32 { + if s == nil { + return []int32{} + } + return s.lines +} + +// MacroCalls returns a map of expression id to ast.Expr value where the id represents the expression +// node where the macro was inserted into the AST, and the ast.Expr value represents the original call +// signature which was replaced. +func (s *SourceInfo) MacroCalls() map[int64]Expr { + if s == nil { + return map[int64]Expr{} + } + return s.macroCalls +} + +// GetMacroCall returns the original ast.Expr value for the given expression if it was generated via +// a macro replacement. +// +// Note, parsing options must be enabled to track macro calls before this method will return a value. +func (s *SourceInfo) GetMacroCall(id int64) (Expr, bool) { + e, found := s.MacroCalls()[id] + return e, found +} + +// SetMacroCall records a macro call at a specific location. +func (s *SourceInfo) SetMacroCall(id int64, e Expr) { + if s != nil { + s.macroCalls[id] = e + } +} + +// ClearMacroCall removes the macro call at the given expression id. +func (s *SourceInfo) ClearMacroCall(id int64) { + if s != nil { + delete(s.macroCalls, id) + } +} + +// OffsetRanges returns a map of expression id to OffsetRange values where the range indicates either: +// the start and end position in the input stream where the expression occurs, or the start position +// only. If the range only captures start position, the stop position of the range will be equal to +// the start. +func (s *SourceInfo) OffsetRanges() map[int64]OffsetRange { + if s == nil { + return map[int64]OffsetRange{} + } + return s.offsetRanges +} + +// GetOffsetRange retrieves an OffsetRange for the given expression id if one exists. +func (s *SourceInfo) GetOffsetRange(id int64) (OffsetRange, bool) { + if s == nil { + return OffsetRange{}, false + } + o, found := s.offsetRanges[id] + return o, found +} + +// SetOffsetRange sets the OffsetRange for the given expression id. +func (s *SourceInfo) SetOffsetRange(id int64, o OffsetRange) { + if s == nil { + return + } + s.offsetRanges[id] = o +} + +// GetStartLocation calculates the human-readable 1-based line and 0-based column of the first character +// of the expression node at the id. +func (s *SourceInfo) GetStartLocation(id int64) common.Location { + if o, found := s.GetOffsetRange(id); found { + line := 1 + col := int(o.Start) + for _, lineOffset := range s.LineOffsets() { + if lineOffset < o.Start { + line++ + col = int(o.Start - lineOffset) + } else { + break + } } - refMap[id] = r + return common.NewLocation(line, col) } - typeMap := make(map[int64]*types.Type, len(checked.GetTypeMap())) - for id, typ := range checked.GetTypeMap() { - t, err := types.ExprTypeToType(typ) - if err != nil { - return nil, err + return common.NoLocation +} + +// GetStopLocation calculates the human-readable 1-based line and 0-based column of the last character for +// the expression node at the given id. +// +// If the SourceInfo was generated from a serialized protobuf representation, the stop location will +// be identical to the start location for the expression. 
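To make the offset bookkeeping concrete, a small hypothetical example that populates a `SourceInfo` by hand and maps an offset range to a start location and back via `ComputeOffset` (defined just below). It assumes `common.NewTextSource` from the existing `common` package; the id and offsets are illustrative.

```go
package main

import (
	"fmt"

	"github.com/google/cel-go/common"
	"github.com/google/cel-go/common/ast"
)

func main() {
	info := ast.NewSourceInfo(common.NewTextSource("a.b\n && c"))

	// Pretend expression id 1 covers the identifier `c` on the second line.
	info.SetOffsetRange(1, ast.OffsetRange{Start: 8, Stop: 9})

	loc := info.GetStartLocation(1)
	fmt.Println(loc.Line(), loc.Column()) // 2 4 (1-based line, 0-based column)

	// ComputeOffset is the inverse mapping back to a character offset.
	fmt.Println(info.ComputeOffset(int32(loc.Line()), int32(loc.Column()))) // 8
}
```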
+func (s *SourceInfo) GetStopLocation(id int64) common.Location { + if o, found := s.GetOffsetRange(id); found { + line := 1 + col := int(o.Stop) + for _, lineOffset := range s.LineOffsets() { + if lineOffset < o.Stop { + line++ + col = int(o.Stop - lineOffset) + } else { + break + } } - typeMap[id] = t + return common.NewLocation(line, col) } - return &CheckedAST{ - Expr: checked.GetExpr(), - SourceInfo: checked.GetSourceInfo(), - ReferenceMap: refMap, - TypeMap: typeMap, - }, nil + return common.NoLocation +} + +// ComputeOffset calculates the 0-based character offset from a 1-based line and 0-based column. +func (s *SourceInfo) ComputeOffset(line, col int32) int32 { + if s != nil { + line = s.baseLine + line + col = s.baseCol + col + } + if line == 1 { + return col + } + if line < 1 || line > int32(len(s.LineOffsets())) { + return -1 + } + offset := s.LineOffsets()[line-2] + return offset + col +} + +// OffsetRange captures the start and stop positions of a section of text in the input expression. +type OffsetRange struct { + Start int32 + Stop int32 } // ReferenceInfo contains a CEL native representation of an identifier reference which may refer to @@ -149,78 +428,21 @@ func (r *ReferenceInfo) Equals(other *ReferenceInfo) bool { return true } -// ReferenceInfoToReferenceExpr converts a ReferenceInfo instance to a protobuf Reference suitable for serialization. -func ReferenceInfoToReferenceExpr(info *ReferenceInfo) (*exprpb.Reference, error) { - c, err := ValToConstant(info.Value) - if err != nil { - return nil, err - } - return &exprpb.Reference{ - Name: info.Name, - OverloadId: info.OverloadIDs, - Value: c, - }, nil +type maxIDVisitor struct { + maxID int64 + *baseVisitor } -// ReferenceExprToReferenceInfo converts a protobuf Reference into a CEL-native ReferenceInfo instance. -func ReferenceExprToReferenceInfo(ref *exprpb.Reference) (*ReferenceInfo, error) { - v, err := ConstantToVal(ref.GetValue()) - if err != nil { - return nil, err +// VisitExpr updates the max identifier if the incoming expression id is greater than previously observed. +func (v *maxIDVisitor) VisitExpr(e Expr) { + if v.maxID < e.ID() { + v.maxID = e.ID() } - return &ReferenceInfo{ - Name: ref.GetName(), - OverloadIDs: ref.GetOverloadId(), - Value: v, - }, nil } -// ValToConstant converts a CEL-native ref.Val to a protobuf Constant. -// -// Only simple scalar types are supported by this method. 
-func ValToConstant(v ref.Val) (*exprpb.Constant, error) { - if v == nil { - return nil, nil - } - switch v.Type() { - case types.BoolType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: v.Value().(bool)}}, nil - case types.BytesType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: v.Value().([]byte)}}, nil - case types.DoubleType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: v.Value().(float64)}}, nil - case types.IntType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: v.Value().(int64)}}, nil - case types.NullType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: structpb.NullValue_NULL_VALUE}}, nil - case types.StringType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: v.Value().(string)}}, nil - case types.UintType: - return &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: v.Value().(uint64)}}, nil - } - return nil, fmt.Errorf("unsupported constant kind: %v", v.Type()) -} - -// ConstantToVal converts a protobuf Constant to a CEL-native ref.Val. -func ConstantToVal(c *exprpb.Constant) (ref.Val, error) { - if c == nil { - return nil, nil - } - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - return types.Bool(c.GetBoolValue()), nil - case *exprpb.Constant_BytesValue: - return types.Bytes(c.GetBytesValue()), nil - case *exprpb.Constant_DoubleValue: - return types.Double(c.GetDoubleValue()), nil - case *exprpb.Constant_Int64Value: - return types.Int(c.GetInt64Value()), nil - case *exprpb.Constant_NullValue: - return types.NullValue, nil - case *exprpb.Constant_StringValue: - return types.String(c.GetStringValue()), nil - case *exprpb.Constant_Uint64Value: - return types.Uint(c.GetUint64Value()), nil - } - return nil, fmt.Errorf("unsupported constant kind: %v", c.GetConstantKind()) +// VisitEntryExpr updates the max identifier if the incoming entry id is greater than previously observed. +func (v *maxIDVisitor) VisitEntryExpr(e EntryExpr) { + if v.maxID < e.ID() { + v.maxID = e.ID() + } } diff --git a/vendor/github.com/google/cel-go/common/ast/conversion.go b/vendor/github.com/google/cel-go/common/ast/conversion.go new file mode 100644 index 000000000..8f2c4bd1e --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/conversion.go @@ -0,0 +1,632 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + + structpb "google.golang.org/protobuf/types/known/structpb" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +// ToProto converts an AST to a CheckedExpr protobouf. 
+func ToProto(ast *AST) (*exprpb.CheckedExpr, error) { + refMap := make(map[int64]*exprpb.Reference, len(ast.ReferenceMap())) + for id, ref := range ast.ReferenceMap() { + r, err := ReferenceInfoToProto(ref) + if err != nil { + return nil, err + } + refMap[id] = r + } + typeMap := make(map[int64]*exprpb.Type, len(ast.TypeMap())) + for id, typ := range ast.TypeMap() { + t, err := types.TypeToExprType(typ) + if err != nil { + return nil, err + } + typeMap[id] = t + } + e, err := ExprToProto(ast.Expr()) + if err != nil { + return nil, err + } + info, err := SourceInfoToProto(ast.SourceInfo()) + if err != nil { + return nil, err + } + return &exprpb.CheckedExpr{ + Expr: e, + SourceInfo: info, + ReferenceMap: refMap, + TypeMap: typeMap, + }, nil +} + +// ToAST converts a CheckedExpr protobuf to an AST instance. +func ToAST(checked *exprpb.CheckedExpr) (*AST, error) { + refMap := make(map[int64]*ReferenceInfo, len(checked.GetReferenceMap())) + for id, ref := range checked.GetReferenceMap() { + r, err := ProtoToReferenceInfo(ref) + if err != nil { + return nil, err + } + refMap[id] = r + } + typeMap := make(map[int64]*types.Type, len(checked.GetTypeMap())) + for id, typ := range checked.GetTypeMap() { + t, err := types.ExprTypeToType(typ) + if err != nil { + return nil, err + } + typeMap[id] = t + } + info, err := ProtoToSourceInfo(checked.GetSourceInfo()) + if err != nil { + return nil, err + } + root, err := ProtoToExpr(checked.GetExpr()) + if err != nil { + return nil, err + } + ast := NewCheckedAST(NewAST(root, info), typeMap, refMap) + return ast, nil +} + +// ProtoToExpr converts a protobuf Expr value to an ast.Expr value. +func ProtoToExpr(e *exprpb.Expr) (Expr, error) { + factory := NewExprFactory() + return exprInternal(factory, e) +} + +// ProtoToEntryExpr converts a protobuf struct/map entry to an ast.EntryExpr +func ProtoToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + factory := NewExprFactory() + switch e.GetKeyKind().(type) { + case *exprpb.Expr_CreateStruct_Entry_FieldKey: + return exprStructField(factory, e.GetId(), e) + case *exprpb.Expr_CreateStruct_Entry_MapKey: + return exprMapEntry(factory, e.GetId(), e) + } + return nil, fmt.Errorf("unsupported expr entry kind: %v", e) +} + +func exprInternal(factory ExprFactory, e *exprpb.Expr) (Expr, error) { + id := e.GetId() + switch e.GetExprKind().(type) { + case *exprpb.Expr_CallExpr: + return exprCall(factory, id, e.GetCallExpr()) + case *exprpb.Expr_ComprehensionExpr: + return exprComprehension(factory, id, e.GetComprehensionExpr()) + case *exprpb.Expr_ConstExpr: + return exprLiteral(factory, id, e.GetConstExpr()) + case *exprpb.Expr_IdentExpr: + return exprIdent(factory, id, e.GetIdentExpr()) + case *exprpb.Expr_ListExpr: + return exprList(factory, id, e.GetListExpr()) + case *exprpb.Expr_SelectExpr: + return exprSelect(factory, id, e.GetSelectExpr()) + case *exprpb.Expr_StructExpr: + s := e.GetStructExpr() + if s.GetMessageName() != "" { + return exprStruct(factory, id, s) + } + return exprMap(factory, id, s) + } + return factory.NewUnspecifiedExpr(id), nil +} + +func exprCall(factory ExprFactory, id int64, call *exprpb.Expr_Call) (Expr, error) { + var err error + args := make([]Expr, len(call.GetArgs())) + for i, a := range call.GetArgs() { + args[i], err = exprInternal(factory, a) + if err != nil { + return nil, err + } + } + if call.GetTarget() == nil { + return factory.NewCall(id, call.GetFunction(), args...), nil + } + + target, err := exprInternal(factory, call.GetTarget()) + if err != nil { + return 
nil, err + } + return factory.NewMemberCall(id, call.GetFunction(), target, args...), nil +} + +func exprComprehension(factory ExprFactory, id int64, comp *exprpb.Expr_Comprehension) (Expr, error) { + iterRange, err := exprInternal(factory, comp.GetIterRange()) + if err != nil { + return nil, err + } + accuInit, err := exprInternal(factory, comp.GetAccuInit()) + if err != nil { + return nil, err + } + loopCond, err := exprInternal(factory, comp.GetLoopCondition()) + if err != nil { + return nil, err + } + loopStep, err := exprInternal(factory, comp.GetLoopStep()) + if err != nil { + return nil, err + } + result, err := exprInternal(factory, comp.GetResult()) + if err != nil { + return nil, err + } + return factory.NewComprehension(id, + iterRange, + comp.GetIterVar(), + comp.GetAccuVar(), + accuInit, + loopCond, + loopStep, + result), nil +} + +func exprLiteral(factory ExprFactory, id int64, c *exprpb.Constant) (Expr, error) { + val, err := ConstantToVal(c) + if err != nil { + return nil, err + } + return factory.NewLiteral(id, val), nil +} + +func exprIdent(factory ExprFactory, id int64, i *exprpb.Expr_Ident) (Expr, error) { + return factory.NewIdent(id, i.GetName()), nil +} + +func exprList(factory ExprFactory, id int64, l *exprpb.Expr_CreateList) (Expr, error) { + elems := make([]Expr, len(l.GetElements())) + for i, e := range l.GetElements() { + elem, err := exprInternal(factory, e) + if err != nil { + return nil, err + } + elems[i] = elem + } + return factory.NewList(id, elems, l.GetOptionalIndices()), nil +} + +func exprMap(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) { + entries := make([]EntryExpr, len(s.GetEntries())) + var err error + for i, entry := range s.GetEntries() { + entries[i], err = exprMapEntry(factory, entry.GetId(), entry) + if err != nil { + return nil, err + } + } + return factory.NewMap(id, entries), nil +} + +func exprMapEntry(factory ExprFactory, id int64, e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + k, err := exprInternal(factory, e.GetMapKey()) + if err != nil { + return nil, err + } + v, err := exprInternal(factory, e.GetValue()) + if err != nil { + return nil, err + } + return factory.NewMapEntry(id, k, v, e.GetOptionalEntry()), nil +} + +func exprSelect(factory ExprFactory, id int64, s *exprpb.Expr_Select) (Expr, error) { + op, err := exprInternal(factory, s.GetOperand()) + if err != nil { + return nil, err + } + if s.GetTestOnly() { + return factory.NewPresenceTest(id, op, s.GetField()), nil + } + return factory.NewSelect(id, op, s.GetField()), nil +} + +func exprStruct(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) { + fields := make([]EntryExpr, len(s.GetEntries())) + var err error + for i, field := range s.GetEntries() { + fields[i], err = exprStructField(factory, field.GetId(), field) + if err != nil { + return nil, err + } + } + return factory.NewStruct(id, s.GetMessageName(), fields), nil +} + +func exprStructField(factory ExprFactory, id int64, f *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + v, err := exprInternal(factory, f.GetValue()) + if err != nil { + return nil, err + } + return factory.NewStructField(id, f.GetFieldKey(), v, f.GetOptionalEntry()), nil +} + +// ExprToProto serializes an ast.Expr value to a protobuf Expr representation. 
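`ToProto` and `ToAST` above give a round trip between the native AST and the `exprpb.CheckedExpr` wire form (with the caveat about stop positions noted further below). A hedged sketch of a caller-side round trip using only APIs from this diff:

```go
package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
)

// roundTrip serializes a (possibly checked) AST to a CheckedExpr proto and
// converts it back into a native AST value.
func roundTrip(a *ast.AST) (*ast.AST, error) {
	pb, err := ast.ToProto(a)
	if err != nil {
		return nil, err
	}
	return ast.ToAST(pb)
}

func main() {
	fac := ast.NewExprFactory()
	parsed := ast.NewAST(fac.NewCall(1, "size", fac.NewIdent(2, "name")), nil)
	restored, err := roundTrip(parsed)
	if err != nil {
		panic(err)
	}
	fmt.Println(restored.Expr().Kind() == ast.CallKind) // true
}
```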
+func ExprToProto(e Expr) (*exprpb.Expr, error) { + if e == nil { + return &exprpb.Expr{}, nil + } + switch e.Kind() { + case CallKind: + return protoCall(e.ID(), e.AsCall()) + case ComprehensionKind: + return protoComprehension(e.ID(), e.AsComprehension()) + case IdentKind: + return protoIdent(e.ID(), e.AsIdent()) + case ListKind: + return protoList(e.ID(), e.AsList()) + case LiteralKind: + return protoLiteral(e.ID(), e.AsLiteral()) + case MapKind: + return protoMap(e.ID(), e.AsMap()) + case SelectKind: + return protoSelect(e.ID(), e.AsSelect()) + case StructKind: + return protoStruct(e.ID(), e.AsStruct()) + case UnspecifiedExprKind: + // Handle the case where a macro reference may be getting translated. + // A nested macro 'pointer' is a non-zero expression id with no kind set. + if e.ID() != 0 { + return &exprpb.Expr{Id: e.ID()}, nil + } + return &exprpb.Expr{}, nil + } + return nil, fmt.Errorf("unsupported expr kind: %v", e) +} + +// EntryExprToProto converts an ast.EntryExpr to a protobuf CreateStruct entry +func EntryExprToProto(e EntryExpr) (*exprpb.Expr_CreateStruct_Entry, error) { + switch e.Kind() { + case MapEntryKind: + return protoMapEntry(e.ID(), e.AsMapEntry()) + case StructFieldKind: + return protoStructField(e.ID(), e.AsStructField()) + case UnspecifiedEntryExprKind: + return &exprpb.Expr_CreateStruct_Entry{}, nil + } + return nil, fmt.Errorf("unsupported expr entry kind: %v", e) +} + +func protoCall(id int64, call CallExpr) (*exprpb.Expr, error) { + var err error + var target *exprpb.Expr + if call.IsMemberFunction() { + target, err = ExprToProto(call.Target()) + if err != nil { + return nil, err + } + } + callArgs := call.Args() + args := make([]*exprpb.Expr, len(callArgs)) + for i, a := range callArgs { + args[i], err = ExprToProto(a) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_CallExpr{ + CallExpr: &exprpb.Expr_Call{ + Function: call.FunctionName(), + Target: target, + Args: args, + }, + }, + }, nil +} + +func protoComprehension(id int64, comp ComprehensionExpr) (*exprpb.Expr, error) { + iterRange, err := ExprToProto(comp.IterRange()) + if err != nil { + return nil, err + } + accuInit, err := ExprToProto(comp.AccuInit()) + if err != nil { + return nil, err + } + loopCond, err := ExprToProto(comp.LoopCondition()) + if err != nil { + return nil, err + } + loopStep, err := ExprToProto(comp.LoopStep()) + if err != nil { + return nil, err + } + result, err := ExprToProto(comp.Result()) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ComprehensionExpr{ + ComprehensionExpr: &exprpb.Expr_Comprehension{ + IterVar: comp.IterVar(), + IterRange: iterRange, + AccuVar: comp.AccuVar(), + AccuInit: accuInit, + LoopCondition: loopCond, + LoopStep: loopStep, + Result: result, + }, + }, + }, nil +} + +func protoIdent(id int64, name string) (*exprpb.Expr, error) { + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_IdentExpr{ + IdentExpr: &exprpb.Expr_Ident{ + Name: name, + }, + }, + }, nil +} + +func protoList(id int64, list ListExpr) (*exprpb.Expr, error) { + var err error + elems := make([]*exprpb.Expr, list.Size()) + for i, e := range list.Elements() { + elems[i], err = ExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ListExpr{ + ListExpr: &exprpb.Expr_CreateList{ + Elements: elems, + OptionalIndices: list.OptionalIndices(), + }, + }, + }, nil +} + +func protoLiteral(id int64, val ref.Val) (*exprpb.Expr, 
error) { + c, err := ValToConstant(val) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ConstExpr{ + ConstExpr: c, + }, + }, nil +} + +func protoMap(id int64, m MapExpr) (*exprpb.Expr, error) { + entries := make([]*exprpb.Expr_CreateStruct_Entry, len(m.Entries())) + var err error + for i, e := range m.Entries() { + entries[i], err = EntryExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_StructExpr{ + StructExpr: &exprpb.Expr_CreateStruct{ + Entries: entries, + }, + }, + }, nil +} + +func protoMapEntry(id int64, e MapEntry) (*exprpb.Expr_CreateStruct_Entry, error) { + k, err := ExprToProto(e.Key()) + if err != nil { + return nil, err + } + v, err := ExprToProto(e.Value()) + if err != nil { + return nil, err + } + return &exprpb.Expr_CreateStruct_Entry{ + Id: id, + KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{ + MapKey: k, + }, + Value: v, + OptionalEntry: e.IsOptional(), + }, nil +} + +func protoSelect(id int64, s SelectExpr) (*exprpb.Expr, error) { + op, err := ExprToProto(s.Operand()) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_SelectExpr{ + SelectExpr: &exprpb.Expr_Select{ + Operand: op, + Field: s.FieldName(), + TestOnly: s.IsTestOnly(), + }, + }, + }, nil +} + +func protoStruct(id int64, s StructExpr) (*exprpb.Expr, error) { + entries := make([]*exprpb.Expr_CreateStruct_Entry, len(s.Fields())) + var err error + for i, e := range s.Fields() { + entries[i], err = EntryExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_StructExpr{ + StructExpr: &exprpb.Expr_CreateStruct{ + MessageName: s.TypeName(), + Entries: entries, + }, + }, + }, nil +} + +func protoStructField(id int64, f StructField) (*exprpb.Expr_CreateStruct_Entry, error) { + v, err := ExprToProto(f.Value()) + if err != nil { + return nil, err + } + return &exprpb.Expr_CreateStruct_Entry{ + Id: id, + KeyKind: &exprpb.Expr_CreateStruct_Entry_FieldKey{ + FieldKey: f.Name(), + }, + Value: v, + OptionalEntry: f.IsOptional(), + }, nil +} + +// SourceInfoToProto serializes an ast.SourceInfo value to a protobuf SourceInfo object. +func SourceInfoToProto(info *SourceInfo) (*exprpb.SourceInfo, error) { + if info == nil { + return &exprpb.SourceInfo{}, nil + } + sourceInfo := &exprpb.SourceInfo{ + SyntaxVersion: info.SyntaxVersion(), + Location: info.Description(), + LineOffsets: info.LineOffsets(), + Positions: make(map[int64]int32, len(info.OffsetRanges())), + MacroCalls: make(map[int64]*exprpb.Expr, len(info.MacroCalls())), + } + for id, offset := range info.OffsetRanges() { + sourceInfo.Positions[id] = offset.Start + } + for id, e := range info.MacroCalls() { + call, err := ExprToProto(e) + if err != nil { + return nil, err + } + sourceInfo.MacroCalls[id] = call + } + return sourceInfo, nil +} + +// ProtoToSourceInfo deserializes the protobuf into a native SourceInfo value. 
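`SourceInfoToProto` above flattens each `OffsetRange` into the single start offset that the proto's `Positions` map can hold, which is why `ProtoToSourceInfo` below restores ranges with `Stop` equal to `Start`, and why `GetStopLocation` carries its caveat. A small sketch of that effect (id and offsets invented, `common.NewTextSource` assumed):

```go
package main

import (
	"fmt"

	"github.com/google/cel-go/common"
	"github.com/google/cel-go/common/ast"
)

func main() {
	info := ast.NewSourceInfo(common.NewTextSource("a.b && c"))
	info.SetOffsetRange(1, ast.OffsetRange{Start: 0, Stop: 3}) // spans `a.b`

	pb, err := ast.SourceInfoToProto(info)
	if err != nil {
		panic(err)
	}
	restored, err := ast.ProtoToSourceInfo(pb)
	if err != nil {
		panic(err)
	}
	if r, found := restored.GetOffsetRange(1); found {
		fmt.Println(r.Start, r.Stop) // 0 0: the stop offset is not round-tripped
	}
}
```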
+func ProtoToSourceInfo(info *exprpb.SourceInfo) (*SourceInfo, error) { + sourceInfo := &SourceInfo{ + syntax: info.GetSyntaxVersion(), + desc: info.GetLocation(), + lines: info.GetLineOffsets(), + offsetRanges: make(map[int64]OffsetRange, len(info.GetPositions())), + macroCalls: make(map[int64]Expr, len(info.GetMacroCalls())), + } + for id, offset := range info.GetPositions() { + sourceInfo.SetOffsetRange(id, OffsetRange{Start: offset, Stop: offset}) + } + for id, e := range info.GetMacroCalls() { + call, err := ProtoToExpr(e) + if err != nil { + return nil, err + } + sourceInfo.SetMacroCall(id, call) + } + return sourceInfo, nil +} + +// ReferenceInfoToProto converts a ReferenceInfo instance to a protobuf Reference suitable for serialization. +func ReferenceInfoToProto(info *ReferenceInfo) (*exprpb.Reference, error) { + c, err := ValToConstant(info.Value) + if err != nil { + return nil, err + } + return &exprpb.Reference{ + Name: info.Name, + OverloadId: info.OverloadIDs, + Value: c, + }, nil +} + +// ProtoToReferenceInfo converts a protobuf Reference into a CEL-native ReferenceInfo instance. +func ProtoToReferenceInfo(ref *exprpb.Reference) (*ReferenceInfo, error) { + v, err := ConstantToVal(ref.GetValue()) + if err != nil { + return nil, err + } + return &ReferenceInfo{ + Name: ref.GetName(), + OverloadIDs: ref.GetOverloadId(), + Value: v, + }, nil +} + +// ValToConstant converts a CEL-native ref.Val to a protobuf Constant. +// +// Only simple scalar types are supported by this method. +func ValToConstant(v ref.Val) (*exprpb.Constant, error) { + if v == nil { + return nil, nil + } + switch v.Type() { + case types.BoolType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: v.Value().(bool)}}, nil + case types.BytesType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: v.Value().([]byte)}}, nil + case types.DoubleType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: v.Value().(float64)}}, nil + case types.IntType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: v.Value().(int64)}}, nil + case types.NullType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: structpb.NullValue_NULL_VALUE}}, nil + case types.StringType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: v.Value().(string)}}, nil + case types.UintType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: v.Value().(uint64)}}, nil + } + return nil, fmt.Errorf("unsupported constant kind: %v", v.Type()) +} + +// ConstantToVal converts a protobuf Constant to a CEL-native ref.Val. 
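Since `ValToConstant` and `ConstantToVal` only handle scalar kinds, a quick sanity-check sketch of the constant round trip; non-scalar values such as lists or timestamps would instead produce the "unsupported constant kind" error.

```go
package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

func main() {
	c, err := ast.ValToConstant(types.String("hello"))
	if err != nil {
		panic(err)
	}
	v, err := ast.ConstantToVal(c)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.Equal(types.String("hello"))) // true
}
```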
+func ConstantToVal(c *exprpb.Constant) (ref.Val, error) { + if c == nil { + return nil, nil + } + switch c.GetConstantKind().(type) { + case *exprpb.Constant_BoolValue: + return types.Bool(c.GetBoolValue()), nil + case *exprpb.Constant_BytesValue: + return types.Bytes(c.GetBytesValue()), nil + case *exprpb.Constant_DoubleValue: + return types.Double(c.GetDoubleValue()), nil + case *exprpb.Constant_Int64Value: + return types.Int(c.GetInt64Value()), nil + case *exprpb.Constant_NullValue: + return types.NullValue, nil + case *exprpb.Constant_StringValue: + return types.String(c.GetStringValue()), nil + case *exprpb.Constant_Uint64Value: + return types.Uint(c.GetUint64Value()), nil + } + return nil, fmt.Errorf("unsupported constant kind: %v", c.GetConstantKind()) +} diff --git a/vendor/github.com/google/cel-go/common/ast/expr.go b/vendor/github.com/google/cel-go/common/ast/expr.go index b63884a60..c9d88bbaa 100644 --- a/vendor/github.com/google/cel-go/common/ast/expr.go +++ b/vendor/github.com/google/cel-go/common/ast/expr.go @@ -15,168 +15,61 @@ package ast import ( - "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // ExprKind represents the expression node kind. type ExprKind int const ( - // UnspecifiedKind represents an unset expression with no specified properties. - UnspecifiedKind ExprKind = iota + // UnspecifiedExprKind represents an unset expression with no specified properties. + UnspecifiedExprKind ExprKind = iota - // LiteralKind represents a primitive scalar literal. - LiteralKind + // CallKind represents a function call. + CallKind + + // ComprehensionKind represents a comprehension expression generated by a macro. + ComprehensionKind // IdentKind represents a simple variable, constant, or type identifier. IdentKind - // SelectKind represents a field selection expression. - SelectKind - - // CallKind represents a function call. - CallKind - // ListKind represents a list literal expression. ListKind + // LiteralKind represents a primitive scalar literal. + LiteralKind + // MapKind represents a map literal expression. MapKind + // SelectKind represents a field selection expression. + SelectKind + // StructKind represents a struct literal expression. StructKind - - // ComprehensionKind represents a comprehension expression generated by a macro. - ComprehensionKind ) -// NavigateCheckedAST converts a CheckedAST to a NavigableExpr -func NavigateCheckedAST(ast *CheckedAST) NavigableExpr { - return newNavigableExpr(nil, ast.Expr, ast.TypeMap) -} - -// ExprMatcher takes a NavigableExpr in and indicates whether the value is a match. +// Expr represents the base expression node in a CEL abstract syntax tree. // -// This function type should be use with the `Match` and `MatchList` calls. -type ExprMatcher func(NavigableExpr) bool - -// ConstantValueMatcher returns an ExprMatcher which will return true if the input NavigableExpr -// is comprised of all constant values, such as a simple literal or even list and map literal. -func ConstantValueMatcher() ExprMatcher { - return matchIsConstantValue -} - -// KindMatcher returns an ExprMatcher which will return true if the input NavigableExpr.Kind() matches -// the specified `kind`. 
-func KindMatcher(kind ExprKind) ExprMatcher { - return func(e NavigableExpr) bool { - return e.Kind() == kind - } -} - -// FunctionMatcher returns an ExprMatcher which will match NavigableExpr nodes of CallKind type whose -// function name is equal to `funcName`. -func FunctionMatcher(funcName string) ExprMatcher { - return func(e NavigableExpr) bool { - if e.Kind() != CallKind { - return false - } - return e.AsCall().FunctionName() == funcName - } -} - -// AllMatcher returns true for all descendants of a NavigableExpr, effectively flattening them into a list. -// -// Such a result would work well with subsequent MatchList calls. -func AllMatcher() ExprMatcher { - return func(NavigableExpr) bool { - return true - } -} - -// MatchDescendants takes a NavigableExpr and ExprMatcher and produces a list of NavigableExpr values of the -// descendants which match. -func MatchDescendants(expr NavigableExpr, matcher ExprMatcher) []NavigableExpr { - return matchListInternal([]NavigableExpr{expr}, matcher, true) -} - -// MatchSubset applies an ExprMatcher to a list of NavigableExpr values and their descendants, producing a -// subset of NavigableExpr values which match. -func MatchSubset(exprs []NavigableExpr, matcher ExprMatcher) []NavigableExpr { - visit := make([]NavigableExpr, len(exprs)) - copy(visit, exprs) - return matchListInternal(visit, matcher, false) -} - -func matchListInternal(visit []NavigableExpr, matcher ExprMatcher, visitDescendants bool) []NavigableExpr { - var matched []NavigableExpr - for len(visit) != 0 { - e := visit[0] - if matcher(e) { - matched = append(matched, e) - } - if visitDescendants { - visit = append(visit[1:], e.Children()...) - } else { - visit = visit[1:] - } - } - return matched -} - -func matchIsConstantValue(e NavigableExpr) bool { - if e.Kind() == LiteralKind { - return true - } - if e.Kind() == StructKind || e.Kind() == MapKind || e.Kind() == ListKind { - for _, child := range e.Children() { - if !matchIsConstantValue(child) { - return false - } - } - return true - } - return false -} - -// NavigableExpr represents the base navigable expression value. -// -// Depending on the `Kind()` value, the NavigableExpr may be converted to a concrete expression types +// Depending on the `Kind()` value, the Expr may be converted to a concrete expression types // as indicated by the `As` methods. -// -// NavigableExpr values and their concrete expression types should be nil-safe. Conversion of an expr -// to the wrong kind should produce a nil value. -type NavigableExpr interface { +type Expr interface { // ID of the expression as it appears in the AST ID() int64 // Kind of the expression node. See ExprKind for the valid enum values. Kind() ExprKind - // Type of the expression node. - Type() *types.Type - - // Parent returns the parent expression node, if one exists. - Parent() (NavigableExpr, bool) - - // Children returns a list of child expression nodes. - Children() []NavigableExpr - - // ToExpr adapts this NavigableExpr to a protobuf representation. - ToExpr() *exprpb.Expr - - // AsCall adapts the expr into a NavigableCallExpr + // AsCall adapts the expr into a CallExpr // // The Kind() must be equal to a CallKind for the conversion to be well-defined. - AsCall() NavigableCallExpr + AsCall() CallExpr - // AsComprehension adapts the expr into a NavigableComprehensionExpr. + // AsComprehension adapts the expr into a ComprehensionExpr. // // The Kind() must be equal to a ComprehensionKind for the conversion to be well-defined. 
- AsComprehension() NavigableComprehensionExpr + AsComprehension() ComprehensionExpr // AsIdent adapts the expr into an identifier string. // @@ -188,67 +81,123 @@ type NavigableExpr interface { // The Kind() must be equal to a LiteralKind for the conversion to be well-defined. AsLiteral() ref.Val - // AsList adapts the expr into a NavigableListExpr. + // AsList adapts the expr into a ListExpr. // // The Kind() must be equal to a ListKind for the conversion to be well-defined. - AsList() NavigableListExpr + AsList() ListExpr - // AsMap adapts the expr into a NavigableMapExpr. + // AsMap adapts the expr into a MapExpr. // // The Kind() must be equal to a MapKind for the conversion to be well-defined. - AsMap() NavigableMapExpr + AsMap() MapExpr - // AsSelect adapts the expr into a NavigableSelectExpr. + // AsSelect adapts the expr into a SelectExpr. // // The Kind() must be equal to a SelectKind for the conversion to be well-defined. - AsSelect() NavigableSelectExpr + AsSelect() SelectExpr - // AsStruct adapts the expr into a NavigableStructExpr. + // AsStruct adapts the expr into a StructExpr. // // The Kind() must be equal to a StructKind for the conversion to be well-defined. - AsStruct() NavigableStructExpr + AsStruct() StructExpr - // marker interface method - isNavigable() + // RenumberIDs performs an in-place update of the expression and all of its descendents numeric ids. + RenumberIDs(IDGenerator) + + // SetKindCase replaces the contents of the current expression with the contents of the other. + // + // The SetKindCase takes ownership of any expression instances references within the input Expr. + // A shallow copy is made of the Expr value itself, but not a deep one. + // + // This method should only be used during AST rewrites using temporary Expr values. + SetKindCase(Expr) + + // isExpr is a marker interface. + isExpr() } -// NavigableCallExpr defines an interface for inspecting a function call and its arugments. -type NavigableCallExpr interface { +// EntryExprKind represents the possible EntryExpr kinds. +type EntryExprKind int + +const ( + // UnspecifiedEntryExprKind indicates that the entry expr is not set. + UnspecifiedEntryExprKind EntryExprKind = iota + + // MapEntryKind indicates that the entry is a MapEntry type with key and value expressions. + MapEntryKind + + // StructFieldKind indicates that the entry is a StructField with a field name and initializer + // expression. + StructFieldKind +) + +// EntryExpr represents the base entry expression in a CEL map or struct literal. +type EntryExpr interface { + // ID of the entry as it appears in the AST. + ID() int64 + + // Kind of the entry expression node. See EntryExprKind for valid enum values. + Kind() EntryExprKind + + // AsMapEntry casts the EntryExpr to a MapEntry. + // + // The Kind() must be equal to MapEntryKind for the conversion to be well-defined. + AsMapEntry() MapEntry + + // AsStructField casts the EntryExpr to a StructField + // + // The Kind() must be equal to StructFieldKind for the conversion to be well-defined. + AsStructField() StructField + + // RenumberIDs performs an in-place update of the expression and all of its descendents numeric ids. + RenumberIDs(IDGenerator) + + isEntryExpr() +} + +// IDGenerator produces unique ids suitable for tagging expression nodes +type IDGenerator func(originalID int64) int64 + +// CallExpr defines an interface for inspecting a function call and its arugments. +type CallExpr interface { // FunctionName returns the name of the function. 
FunctionName() string + // IsMemberFunction returns whether the call has a non-nil target indicating it is a member function + IsMemberFunction() bool + // Target returns the target of the expression if one is present. - Target() NavigableExpr + Target() Expr // Args returns the list of call arguments, excluding the target. - Args() []NavigableExpr - - // ReturnType returns the result type of the call. - ReturnType() *types.Type + Args() []Expr // marker interface method - isNavigable() + isExpr() } -// NavigableListExpr defines an interface for inspecting a list literal expression. -type NavigableListExpr interface { +// ListExpr defines an interface for inspecting a list literal expression. +type ListExpr interface { // Elements returns the list elements as navigable expressions. - Elements() []NavigableExpr + Elements() []Expr // OptionalIndicies returns the list of optional indices in the list literal. OptionalIndices() []int32 + // IsOptional indicates whether the given element index is optional. + IsOptional(int32) bool + // Size returns the number of elements in the list. Size() int // marker interface method - isNavigable() + isExpr() } -// NavigableSelectExpr defines an interface for inspecting a select expression. -type NavigableSelectExpr interface { +// SelectExpr defines an interface for inspecting a select expression. +type SelectExpr interface { // Operand returns the selection operand expression. - Operand() NavigableExpr + Operand() Expr // FieldName returns the field name being selected from the operand. FieldName() string @@ -257,67 +206,67 @@ type NavigableSelectExpr interface { IsTestOnly() bool // marker interface method - isNavigable() + isExpr() } -// NavigableMapExpr defines an interface for inspecting a map expression. -type NavigableMapExpr interface { - // Entries returns the map key value pairs as NavigableEntry values. - Entries() []NavigableEntry +// MapExpr defines an interface for inspecting a map expression. +type MapExpr interface { + // Entries returns the map key value pairs as EntryExpr values. + Entries() []EntryExpr // Size returns the number of entries in the map. Size() int // marker interface method - isNavigable() + isExpr() } -// NavigableEntry defines an interface for inspecting a map entry. -type NavigableEntry interface { +// MapEntry defines an interface for inspecting a map entry. +type MapEntry interface { // Key returns the map entry key expression. - Key() NavigableExpr + Key() Expr // Value returns the map entry value expression. - Value() NavigableExpr + Value() Expr // IsOptional returns whether the entry is optional. IsOptional() bool // marker interface method - isNavigable() + isEntryExpr() } -// NavigableStructExpr defines an interfaces for inspecting a struct and its field initializers. -type NavigableStructExpr interface { +// StructExpr defines an interfaces for inspecting a struct and its field initializers. +type StructExpr interface { // TypeName returns the struct type name. TypeName() string - // Fields returns the set of field initializers in the struct expression as NavigableField values. - Fields() []NavigableField + // Fields returns the set of field initializers in the struct expression as EntryExpr values. + Fields() []EntryExpr // marker interface method - isNavigable() + isExpr() } -// NavigableField defines an interface for inspecting a struct field initialization. -type NavigableField interface { - // FieldName returns the name of the field. 
- FieldName() string +// StructField defines an interface for inspecting a struct field initialization. +type StructField interface { + // Name returns the name of the field. + Name() string // Value returns the field initialization expression. - Value() NavigableExpr + Value() Expr // IsOptional returns whether the field is optional. IsOptional() bool // marker interface method - isNavigable() + isEntryExpr() } -// NavigableComprehensionExpr defines an interface for inspecting a comprehension expression. -type NavigableComprehensionExpr interface { +// ComprehensionExpr defines an interface for inspecting a comprehension expression. +type ComprehensionExpr interface { // IterRange returns the iteration range expression. - IterRange() NavigableExpr + IterRange() Expr // IterVar returns the iteration variable name. IterVar() string @@ -326,384 +275,586 @@ type NavigableComprehensionExpr interface { AccuVar() string // AccuInit returns the accumulation variable initialization expression. - AccuInit() NavigableExpr + AccuInit() Expr // LoopCondition returns the loop condition expression. - LoopCondition() NavigableExpr + LoopCondition() Expr // LoopStep returns the loop step expression. - LoopStep() NavigableExpr + LoopStep() Expr // Result returns the comprehension result expression. - Result() NavigableExpr + Result() Expr // marker interface method - isNavigable() + isExpr() } -func newNavigableExpr(parent NavigableExpr, expr *exprpb.Expr, typeMap map[int64]*types.Type) NavigableExpr { - kind, factory := kindOf(expr) - nav := &navigableExprImpl{ - parent: parent, - kind: kind, - expr: expr, - typeMap: typeMap, - createChildren: factory, +var _ Expr = &expr{} + +type expr struct { + id int64 + exprKindCase +} + +type exprKindCase interface { + Kind() ExprKind + + renumberIDs(IDGenerator) + + isExpr() +} + +func (e *expr) ID() int64 { + if e == nil { + return 0 } - return nav + return e.id } -type navigableExprImpl struct { - parent NavigableExpr - kind ExprKind - expr *exprpb.Expr - typeMap map[int64]*types.Type - createChildren childFactory +func (e *expr) Kind() ExprKind { + if e == nil || e.exprKindCase == nil { + return UnspecifiedExprKind + } + return e.exprKindCase.Kind() } -func (nav *navigableExprImpl) ID() int64 { - return nav.ToExpr().GetId() +func (e *expr) AsCall() CallExpr { + if e.Kind() != CallKind { + return nilCall + } + return e.exprKindCase.(CallExpr) } -func (nav *navigableExprImpl) Kind() ExprKind { - return nav.kind +func (e *expr) AsComprehension() ComprehensionExpr { + if e.Kind() != ComprehensionKind { + return nilCompre + } + return e.exprKindCase.(ComprehensionExpr) } -func (nav *navigableExprImpl) Type() *types.Type { - if t, found := nav.typeMap[nav.ID()]; found { - return t +func (e *expr) AsIdent() string { + if e.Kind() != IdentKind { + return "" } - return types.DynType + return string(e.exprKindCase.(baseIdentExpr)) } -func (nav *navigableExprImpl) Parent() (NavigableExpr, bool) { - if nav.parent != nil { - return nav.parent, true +func (e *expr) AsLiteral() ref.Val { + if e.Kind() != LiteralKind { + return nil } - return nil, false + return e.exprKindCase.(*baseLiteral).Val } -func (nav *navigableExprImpl) Children() []NavigableExpr { - return nav.createChildren(nav) +func (e *expr) AsList() ListExpr { + if e.Kind() != ListKind { + return nilList + } + return e.exprKindCase.(ListExpr) } -func (nav *navigableExprImpl) ToExpr() *exprpb.Expr { - return nav.expr +func (e *expr) AsMap() MapExpr { + if e.Kind() != MapKind { + return nilMap + } + return 
e.exprKindCase.(MapExpr) } -func (nav *navigableExprImpl) AsCall() NavigableCallExpr { - return navigableCallImpl{navigableExprImpl: nav} +func (e *expr) AsSelect() SelectExpr { + if e.Kind() != SelectKind { + return nilSel + } + return e.exprKindCase.(SelectExpr) } -func (nav *navigableExprImpl) AsComprehension() NavigableComprehensionExpr { - return navigableComprehensionImpl{navigableExprImpl: nav} +func (e *expr) AsStruct() StructExpr { + if e.Kind() != StructKind { + return nilStruct + } + return e.exprKindCase.(StructExpr) } -func (nav *navigableExprImpl) AsIdent() string { - return nav.ToExpr().GetIdentExpr().GetName() +func (e *expr) SetKindCase(other Expr) { + if e == nil { + return + } + if other == nil { + e.exprKindCase = nil + return + } + switch other.Kind() { + case CallKind: + c := other.AsCall() + e.exprKindCase = &baseCallExpr{ + function: c.FunctionName(), + target: c.Target(), + args: c.Args(), + isMember: c.IsMemberFunction(), + } + case ComprehensionKind: + c := other.AsComprehension() + e.exprKindCase = &baseComprehensionExpr{ + iterRange: c.IterRange(), + iterVar: c.IterVar(), + accuVar: c.AccuVar(), + accuInit: c.AccuInit(), + loopCond: c.LoopCondition(), + loopStep: c.LoopStep(), + result: c.Result(), + } + case IdentKind: + e.exprKindCase = baseIdentExpr(other.AsIdent()) + case ListKind: + l := other.AsList() + optIndexMap := make(map[int32]struct{}, len(l.OptionalIndices())) + for _, idx := range l.OptionalIndices() { + optIndexMap[idx] = struct{}{} + } + e.exprKindCase = &baseListExpr{ + elements: l.Elements(), + optIndices: l.OptionalIndices(), + optIndexMap: optIndexMap, + } + case LiteralKind: + e.exprKindCase = &baseLiteral{Val: other.AsLiteral()} + case MapKind: + e.exprKindCase = &baseMapExpr{ + entries: other.AsMap().Entries(), + } + case SelectKind: + s := other.AsSelect() + e.exprKindCase = &baseSelectExpr{ + operand: s.Operand(), + field: s.FieldName(), + testOnly: s.IsTestOnly(), + } + case StructKind: + s := other.AsStruct() + e.exprKindCase = &baseStructExpr{ + typeName: s.TypeName(), + fields: s.Fields(), + } + case UnspecifiedExprKind: + e.exprKindCase = nil + } } -func (nav *navigableExprImpl) AsLiteral() ref.Val { - if nav.Kind() != LiteralKind { - return nil +func (e *expr) RenumberIDs(idGen IDGenerator) { + if e == nil { + return } - val, err := ConstantToVal(nav.ToExpr().GetConstExpr()) - if err != nil { - panic(err) + e.id = idGen(e.id) + if e.exprKindCase != nil { + e.exprKindCase.renumberIDs(idGen) } - return val } -func (nav *navigableExprImpl) AsList() NavigableListExpr { - return navigableListImpl{navigableExprImpl: nav} +type baseCallExpr struct { + function string + target Expr + args []Expr + isMember bool } -func (nav *navigableExprImpl) AsMap() NavigableMapExpr { - return navigableMapImpl{navigableExprImpl: nav} +func (*baseCallExpr) Kind() ExprKind { + return CallKind } -func (nav *navigableExprImpl) AsSelect() NavigableSelectExpr { - return navigableSelectImpl{navigableExprImpl: nav} +func (e *baseCallExpr) FunctionName() string { + if e == nil { + return "" + } + return e.function } -func (nav *navigableExprImpl) AsStruct() NavigableStructExpr { - return navigableStructImpl{navigableExprImpl: nav} +func (e *baseCallExpr) IsMemberFunction() bool { + if e == nil { + return false + } + return e.isMember } -func (nav *navigableExprImpl) createChild(e *exprpb.Expr) NavigableExpr { - return newNavigableExpr(nav, e, nav.typeMap) +func (e *baseCallExpr) Target() Expr { + if e == nil || !e.IsMemberFunction() { + return nilExpr + } + 
return e.target } -func (nav *navigableExprImpl) isNavigable() {} +func (e *baseCallExpr) Args() []Expr { + if e == nil { + return []Expr{} + } + return e.args +} -type navigableCallImpl struct { - *navigableExprImpl +func (e *baseCallExpr) renumberIDs(idGen IDGenerator) { + if e.IsMemberFunction() { + e.Target().RenumberIDs(idGen) + } + for _, arg := range e.Args() { + arg.RenumberIDs(idGen) + } } -func (call navigableCallImpl) FunctionName() string { - return call.ToExpr().GetCallExpr().GetFunction() +func (*baseCallExpr) isExpr() {} + +var _ ComprehensionExpr = &baseComprehensionExpr{} + +type baseComprehensionExpr struct { + iterRange Expr + iterVar string + accuVar string + accuInit Expr + loopCond Expr + loopStep Expr + result Expr } -func (call navigableCallImpl) Target() NavigableExpr { - t := call.ToExpr().GetCallExpr().GetTarget() - if t != nil { - return call.createChild(t) - } - return nil +func (*baseComprehensionExpr) Kind() ExprKind { + return ComprehensionKind } -func (call navigableCallImpl) Args() []NavigableExpr { - args := call.ToExpr().GetCallExpr().GetArgs() - navArgs := make([]NavigableExpr, len(args)) - for i, a := range args { - navArgs[i] = call.createChild(a) +func (e *baseComprehensionExpr) IterRange() Expr { + if e == nil { + return nilExpr } - return navArgs + return e.iterRange } -func (call navigableCallImpl) ReturnType() *types.Type { - return call.Type() +func (e *baseComprehensionExpr) IterVar() string { + return e.iterVar } -type navigableComprehensionImpl struct { - *navigableExprImpl +func (e *baseComprehensionExpr) AccuVar() string { + return e.accuVar } -func (comp navigableComprehensionImpl) IterRange() NavigableExpr { - return comp.createChild(comp.ToExpr().GetComprehensionExpr().GetIterRange()) +func (e *baseComprehensionExpr) AccuInit() Expr { + if e == nil { + return nilExpr + } + return e.accuInit } -func (comp navigableComprehensionImpl) IterVar() string { - return comp.ToExpr().GetComprehensionExpr().GetIterVar() +func (e *baseComprehensionExpr) LoopCondition() Expr { + if e == nil { + return nilExpr + } + return e.loopCond } -func (comp navigableComprehensionImpl) AccuVar() string { - return comp.ToExpr().GetComprehensionExpr().GetAccuVar() +func (e *baseComprehensionExpr) LoopStep() Expr { + if e == nil { + return nilExpr + } + return e.loopStep } -func (comp navigableComprehensionImpl) AccuInit() NavigableExpr { - return comp.createChild(comp.ToExpr().GetComprehensionExpr().GetAccuInit()) +func (e *baseComprehensionExpr) Result() Expr { + if e == nil { + return nilExpr + } + return e.result } -func (comp navigableComprehensionImpl) LoopCondition() NavigableExpr { - return comp.createChild(comp.ToExpr().GetComprehensionExpr().GetLoopCondition()) +func (e *baseComprehensionExpr) renumberIDs(idGen IDGenerator) { + e.IterRange().RenumberIDs(idGen) + e.AccuInit().RenumberIDs(idGen) + e.LoopCondition().RenumberIDs(idGen) + e.LoopStep().RenumberIDs(idGen) + e.Result().RenumberIDs(idGen) } -func (comp navigableComprehensionImpl) LoopStep() NavigableExpr { - return comp.createChild(comp.ToExpr().GetComprehensionExpr().GetLoopStep()) +func (*baseComprehensionExpr) isExpr() {} + +var _ exprKindCase = baseIdentExpr("") + +type baseIdentExpr string + +func (baseIdentExpr) Kind() ExprKind { + return IdentKind } -func (comp navigableComprehensionImpl) Result() NavigableExpr { - return comp.createChild(comp.ToExpr().GetComprehensionExpr().GetResult()) +func (e baseIdentExpr) renumberIDs(IDGenerator) {} + +func (baseIdentExpr) isExpr() {} + +var _ 
exprKindCase = &baseLiteral{} +var _ ref.Val = &baseLiteral{} + +type baseLiteral struct { + ref.Val } -type navigableListImpl struct { - *navigableExprImpl +func (*baseLiteral) Kind() ExprKind { + return LiteralKind } -func (l navigableListImpl) Elements() []NavigableExpr { - return l.Children() +func (l *baseLiteral) renumberIDs(IDGenerator) {} + +func (*baseLiteral) isExpr() {} + +var _ ListExpr = &baseListExpr{} + +type baseListExpr struct { + elements []Expr + optIndices []int32 + optIndexMap map[int32]struct{} } -func (l navigableListImpl) OptionalIndices() []int32 { - return l.ToExpr().GetListExpr().GetOptionalIndices() +func (*baseListExpr) Kind() ExprKind { + return ListKind } -func (l navigableListImpl) Size() int { - return len(l.ToExpr().GetListExpr().GetElements()) +func (e *baseListExpr) Elements() []Expr { + if e == nil { + return []Expr{} + } + return e.elements } -type navigableMapImpl struct { - *navigableExprImpl +func (e *baseListExpr) IsOptional(index int32) bool { + _, found := e.optIndexMap[index] + return found } -func (m navigableMapImpl) Entries() []NavigableEntry { - mapExpr := m.ToExpr().GetStructExpr() - entries := make([]NavigableEntry, len(mapExpr.GetEntries())) - for i, e := range mapExpr.GetEntries() { - entries[i] = navigableEntryImpl{ - key: m.createChild(e.GetMapKey()), - val: m.createChild(e.GetValue()), - isOpt: e.GetOptionalEntry(), - } +func (e *baseListExpr) OptionalIndices() []int32 { + if e == nil { + return []int32{} } - return entries + return e.optIndices } -func (m navigableMapImpl) Size() int { - return len(m.ToExpr().GetStructExpr().GetEntries()) +func (e *baseListExpr) Size() int { + return len(e.Elements()) } -type navigableEntryImpl struct { - key NavigableExpr - val NavigableExpr - isOpt bool +func (e *baseListExpr) renumberIDs(idGen IDGenerator) { + for _, elem := range e.Elements() { + elem.RenumberIDs(idGen) + } } -func (e navigableEntryImpl) Key() NavigableExpr { - return e.key -} +func (*baseListExpr) isExpr() {} -func (e navigableEntryImpl) Value() NavigableExpr { - return e.val +type baseMapExpr struct { + entries []EntryExpr } -func (e navigableEntryImpl) IsOptional() bool { - return e.isOpt +func (*baseMapExpr) Kind() ExprKind { + return MapKind } -func (e navigableEntryImpl) isNavigable() {} +func (e *baseMapExpr) Entries() []EntryExpr { + if e == nil { + return []EntryExpr{} + } + return e.entries +} -type navigableSelectImpl struct { - *navigableExprImpl +func (e *baseMapExpr) Size() int { + return len(e.Entries()) } -func (sel navigableSelectImpl) FieldName() string { - return sel.ToExpr().GetSelectExpr().GetField() +func (e *baseMapExpr) renumberIDs(idGen IDGenerator) { + for _, entry := range e.Entries() { + entry.RenumberIDs(idGen) + } } -func (sel navigableSelectImpl) IsTestOnly() bool { - return sel.ToExpr().GetSelectExpr().GetTestOnly() +func (*baseMapExpr) isExpr() {} + +type baseSelectExpr struct { + operand Expr + field string + testOnly bool } -func (sel navigableSelectImpl) Operand() NavigableExpr { - return sel.createChild(sel.ToExpr().GetSelectExpr().GetOperand()) +func (*baseSelectExpr) Kind() ExprKind { + return SelectKind } -type navigableStructImpl struct { - *navigableExprImpl +func (e *baseSelectExpr) Operand() Expr { + if e == nil || e.operand == nil { + return nilExpr + } + return e.operand } -func (s navigableStructImpl) TypeName() string { - return s.ToExpr().GetStructExpr().GetMessageName() +func (e *baseSelectExpr) FieldName() string { + if e == nil { + return "" + } + return e.field } -func (s 
navigableStructImpl) Fields() []NavigableField { - fieldInits := s.ToExpr().GetStructExpr().GetEntries() - fields := make([]NavigableField, len(fieldInits)) - for i, f := range fieldInits { - fields[i] = navigableFieldImpl{ - name: f.GetFieldKey(), - val: s.createChild(f.GetValue()), - isOpt: f.GetOptionalEntry(), - } +func (e *baseSelectExpr) IsTestOnly() bool { + if e == nil { + return false } - return fields + return e.testOnly } -type navigableFieldImpl struct { - name string - val NavigableExpr - isOpt bool +func (e *baseSelectExpr) renumberIDs(idGen IDGenerator) { + e.Operand().RenumberIDs(idGen) } -func (f navigableFieldImpl) FieldName() string { - return f.name +func (*baseSelectExpr) isExpr() {} + +type baseStructExpr struct { + typeName string + fields []EntryExpr } -func (f navigableFieldImpl) Value() NavigableExpr { - return f.val +func (*baseStructExpr) Kind() ExprKind { + return StructKind } -func (f navigableFieldImpl) IsOptional() bool { - return f.isOpt +func (e *baseStructExpr) TypeName() string { + if e == nil { + return "" + } + return e.typeName } -func (f navigableFieldImpl) isNavigable() {} +func (e *baseStructExpr) Fields() []EntryExpr { + if e == nil { + return []EntryExpr{} + } + return e.fields +} -func kindOf(expr *exprpb.Expr) (ExprKind, childFactory) { - switch expr.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - return LiteralKind, noopFactory - case *exprpb.Expr_IdentExpr: - return IdentKind, noopFactory - case *exprpb.Expr_SelectExpr: - return SelectKind, selectFactory - case *exprpb.Expr_CallExpr: - return CallKind, callArgFactory - case *exprpb.Expr_ListExpr: - return ListKind, listElemFactory - case *exprpb.Expr_StructExpr: - if expr.GetStructExpr().GetMessageName() != "" { - return StructKind, structEntryFactory - } - return MapKind, mapEntryFactory - case *exprpb.Expr_ComprehensionExpr: - return ComprehensionKind, comprehensionFactory - default: - return UnspecifiedKind, noopFactory +func (e *baseStructExpr) renumberIDs(idGen IDGenerator) { + for _, f := range e.Fields() { + f.RenumberIDs(idGen) } } -type childFactory func(*navigableExprImpl) []NavigableExpr +func (*baseStructExpr) isExpr() {} + +type entryExprKindCase interface { + Kind() EntryExprKind + + renumberIDs(IDGenerator) + + isEntryExpr() +} + +var _ EntryExpr = &entryExpr{} + +type entryExpr struct { + id int64 + entryExprKindCase +} -func noopFactory(*navigableExprImpl) []NavigableExpr { - return nil +func (e *entryExpr) ID() int64 { + return e.id } -func selectFactory(nav *navigableExprImpl) []NavigableExpr { - return []NavigableExpr{ - nav.createChild(nav.ToExpr().GetSelectExpr().GetOperand()), +func (e *entryExpr) AsMapEntry() MapEntry { + if e.Kind() != MapEntryKind { + return nilMapEntry } + return e.entryExprKindCase.(MapEntry) } -func callArgFactory(nav *navigableExprImpl) []NavigableExpr { - call := nav.ToExpr().GetCallExpr() - argCount := len(call.GetArgs()) - if call.GetTarget() != nil { - argCount++ +func (e *entryExpr) AsStructField() StructField { + if e.Kind() != StructFieldKind { + return nilStructField } - navExprs := make([]NavigableExpr, argCount) - i := 0 - if call.GetTarget() != nil { - navExprs[i] = nav.createChild(call.GetTarget()) - i++ + return e.entryExprKindCase.(StructField) +} + +func (e *entryExpr) RenumberIDs(idGen IDGenerator) { + e.id = idGen(e.id) + e.entryExprKindCase.renumberIDs(idGen) +} + +type baseMapEntry struct { + key Expr + value Expr + isOptional bool +} + +func (e *baseMapEntry) Kind() EntryExprKind { + return MapEntryKind +} + +func (e 
*baseMapEntry) Key() Expr { + if e == nil { + return nilExpr } - for _, arg := range call.GetArgs() { - navExprs[i] = nav.createChild(arg) - i++ + return e.key +} + +func (e *baseMapEntry) Value() Expr { + if e == nil { + return nilExpr } - return navExprs + return e.value } -func listElemFactory(nav *navigableExprImpl) []NavigableExpr { - l := nav.ToExpr().GetListExpr() - navExprs := make([]NavigableExpr, len(l.GetElements())) - for i, e := range l.GetElements() { - navExprs[i] = nav.createChild(e) +func (e *baseMapEntry) IsOptional() bool { + if e == nil { + return false } - return navExprs + return e.isOptional } -func structEntryFactory(nav *navigableExprImpl) []NavigableExpr { - s := nav.ToExpr().GetStructExpr() - entries := make([]NavigableExpr, len(s.GetEntries())) - for i, e := range s.GetEntries() { +func (e *baseMapEntry) renumberIDs(idGen IDGenerator) { + e.Key().RenumberIDs(idGen) + e.Value().RenumberIDs(idGen) +} + +func (*baseMapEntry) isEntryExpr() {} + +type baseStructField struct { + field string + value Expr + isOptional bool +} + +func (f *baseStructField) Kind() EntryExprKind { + return StructFieldKind +} - entries[i] = nav.createChild(e.GetValue()) +func (f *baseStructField) Name() string { + if f == nil { + return "" } - return entries + return f.field } -func mapEntryFactory(nav *navigableExprImpl) []NavigableExpr { - s := nav.ToExpr().GetStructExpr() - entries := make([]NavigableExpr, len(s.GetEntries())*2) - j := 0 - for _, e := range s.GetEntries() { - entries[j] = nav.createChild(e.GetMapKey()) - entries[j+1] = nav.createChild(e.GetValue()) - j += 2 +func (f *baseStructField) Value() Expr { + if f == nil { + return nilExpr } - return entries + return f.value } -func comprehensionFactory(nav *navigableExprImpl) []NavigableExpr { - compre := nav.ToExpr().GetComprehensionExpr() - return []NavigableExpr{ - nav.createChild(compre.GetIterRange()), - nav.createChild(compre.GetAccuInit()), - nav.createChild(compre.GetLoopCondition()), - nav.createChild(compre.GetLoopStep()), - nav.createChild(compre.GetResult()), +func (f *baseStructField) IsOptional() bool { + if f == nil { + return false } + return f.isOptional } + +func (f *baseStructField) renumberIDs(idGen IDGenerator) { + f.Value().RenumberIDs(idGen) +} + +func (*baseStructField) isEntryExpr() {} + +var ( + nilExpr *expr = nil + nilCall *baseCallExpr = nil + nilCompre *baseComprehensionExpr = nil + nilList *baseListExpr = nil + nilMap *baseMapExpr = nil + nilMapEntry *baseMapEntry = nil + nilSel *baseSelectExpr = nil + nilStruct *baseStructExpr = nil + nilStructField *baseStructField = nil +) diff --git a/vendor/github.com/google/cel-go/common/ast/factory.go b/vendor/github.com/google/cel-go/common/ast/factory.go new file mode 100644 index 000000000..b7f36e72a --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/factory.go @@ -0,0 +1,303 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
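Stepping back to the expr.go changes above: the typed-nil sentinels (`nilCall`, `nilSel`, and friends) make the `As*` conversions safe to call on the wrong kind, returning zero values rather than panicking. A short illustrative sketch, with an invented literal:

```go
package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

func main() {
	fac := ast.NewExprFactory()
	lit := fac.NewLiteral(1, types.Int(42))

	// Converting to the wrong kind yields a typed-nil implementation whose
	// accessors return zero values instead of panicking.
	call := lit.AsCall()
	fmt.Println(call.FunctionName() == "") // true
	fmt.Println(len(call.Args()))          // 0
	fmt.Println(lit.AsLiteral())           // 42
}
```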
+ +package ast + +import "github.com/google/cel-go/common/types/ref" + +// ExprFactory interfaces defines a set of methods necessary for building native expression values. +type ExprFactory interface { + // CopyExpr creates a deep copy of the input Expr value. + CopyExpr(Expr) Expr + + // CopyEntryExpr creates a deep copy of the input EntryExpr value. + CopyEntryExpr(EntryExpr) EntryExpr + + // NewCall creates an Expr value representing a global function call. + NewCall(id int64, function string, args ...Expr) Expr + + // NewComprehension creates an Expr value representing a comprehension over a value range. + NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCondition, loopStep, result Expr) Expr + + // NewMemberCall creates an Expr value representing a member function call. + NewMemberCall(id int64, function string, receiver Expr, args ...Expr) Expr + + // NewIdent creates an Expr value representing an identifier. + NewIdent(id int64, name string) Expr + + // NewAccuIdent creates an Expr value representing an accumulator identifier within a + //comprehension. + NewAccuIdent(id int64) Expr + + // NewLiteral creates an Expr value representing a literal value, such as a string or integer. + NewLiteral(id int64, value ref.Val) Expr + + // NewList creates an Expr value representing a list literal expression with optional indices. + // + // Optional indicies will typically be empty unless the CEL optional types are enabled. + NewList(id int64, elems []Expr, optIndices []int32) Expr + + // NewMap creates an Expr value representing a map literal expression + NewMap(id int64, entries []EntryExpr) Expr + + // NewMapEntry creates a MapEntry with a given key, value, and a flag indicating whether + // the key is optionally set. + NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr + + // NewPresenceTest creates an Expr representing a field presence test on an operand expression. + NewPresenceTest(id int64, operand Expr, field string) Expr + + // NewSelect creates an Expr representing a field selection on an operand expression. + NewSelect(id int64, operand Expr, field string) Expr + + // NewStruct creates an Expr value representing a struct literal with a given type name and a + // set of field initializers. + NewStruct(id int64, typeName string, fields []EntryExpr) Expr + + // NewStructField creates a StructField with a given field name, value, and a flag indicating + // whether the field is optionally set. + NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr + + // NewUnspecifiedExpr creates an empty expression node. + NewUnspecifiedExpr(id int64) Expr + + isExprFactory() +} + +type baseExprFactory struct{} + +// NewExprFactory creates an ExprFactory instance. 
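// ----------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the vendored diff): building
// an expression tree by hand with the ExprFactory interface defined above. The
// node IDs and the "_+_" function name (CEL's internal symbol for the binary +
// operator) are assumptions made for this example only.
// ----------------------------------------------------------------------------
package example

import (
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

// buildAddExpr constructs the equivalent of the CEL expression `x + 1`.
func buildAddExpr() ast.Expr {
	fac := ast.NewExprFactory()
	x := fac.NewIdent(1, "x")              // identifier operand
	one := fac.NewLiteral(2, types.Int(1)) // int64 literal operand
	return fac.NewCall(3, "_+_", x, one)   // global call node: x + 1
}
// ----------------------------------------------------------------------------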
+func NewExprFactory() ExprFactory { + return &baseExprFactory{} +} + +func (fac *baseExprFactory) NewCall(id int64, function string, args ...Expr) Expr { + if len(args) == 0 { + args = []Expr{} + } + return fac.newExpr( + id, + &baseCallExpr{ + function: function, + target: nilExpr, + args: args, + isMember: false, + }) +} + +func (fac *baseExprFactory) NewMemberCall(id int64, function string, target Expr, args ...Expr) Expr { + if len(args) == 0 { + args = []Expr{} + } + return fac.newExpr( + id, + &baseCallExpr{ + function: function, + target: target, + args: args, + isMember: true, + }) +} + +func (fac *baseExprFactory) NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCond, loopStep, result Expr) Expr { + return fac.newExpr( + id, + &baseComprehensionExpr{ + iterRange: iterRange, + iterVar: iterVar, + accuVar: accuVar, + accuInit: accuInit, + loopCond: loopCond, + loopStep: loopStep, + result: result, + }) +} + +func (fac *baseExprFactory) NewIdent(id int64, name string) Expr { + return fac.newExpr(id, baseIdentExpr(name)) +} + +func (fac *baseExprFactory) NewAccuIdent(id int64) Expr { + return fac.NewIdent(id, "__result__") +} + +func (fac *baseExprFactory) NewLiteral(id int64, value ref.Val) Expr { + return fac.newExpr(id, &baseLiteral{Val: value}) +} + +func (fac *baseExprFactory) NewList(id int64, elems []Expr, optIndices []int32) Expr { + optIndexMap := make(map[int32]struct{}, len(optIndices)) + for _, idx := range optIndices { + optIndexMap[idx] = struct{}{} + } + return fac.newExpr(id, + &baseListExpr{ + elements: elems, + optIndices: optIndices, + optIndexMap: optIndexMap, + }) +} + +func (fac *baseExprFactory) NewMap(id int64, entries []EntryExpr) Expr { + return fac.newExpr(id, &baseMapExpr{entries: entries}) +} + +func (fac *baseExprFactory) NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr { + return fac.newEntryExpr( + id, + &baseMapEntry{ + key: key, + value: value, + isOptional: isOptional, + }) +} + +func (fac *baseExprFactory) NewPresenceTest(id int64, operand Expr, field string) Expr { + return fac.newExpr( + id, + &baseSelectExpr{ + operand: operand, + field: field, + testOnly: true, + }) +} + +func (fac *baseExprFactory) NewSelect(id int64, operand Expr, field string) Expr { + return fac.newExpr( + id, + &baseSelectExpr{ + operand: operand, + field: field, + }) +} + +func (fac *baseExprFactory) NewStruct(id int64, typeName string, fields []EntryExpr) Expr { + return fac.newExpr( + id, + &baseStructExpr{ + typeName: typeName, + fields: fields, + }) +} + +func (fac *baseExprFactory) NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr { + return fac.newEntryExpr( + id, + &baseStructField{ + field: field, + value: value, + isOptional: isOptional, + }) +} + +func (fac *baseExprFactory) NewUnspecifiedExpr(id int64) Expr { + return fac.newExpr(id, nil) +} + +func (fac *baseExprFactory) CopyExpr(e Expr) Expr { + // unwrap navigable expressions to avoid unnecessary allocations during copying. + if nav, ok := e.(*navigableExprImpl); ok { + e = nav.Expr + } + switch e.Kind() { + case CallKind: + c := e.AsCall() + argsCopy := make([]Expr, len(c.Args())) + for i, arg := range c.Args() { + argsCopy[i] = fac.CopyExpr(arg) + } + if !c.IsMemberFunction() { + return fac.NewCall(e.ID(), c.FunctionName(), argsCopy...) + } + return fac.NewMemberCall(e.ID(), c.FunctionName(), fac.CopyExpr(c.Target()), argsCopy...) 
+ case ComprehensionKind: + compre := e.AsComprehension() + return fac.NewComprehension(e.ID(), + fac.CopyExpr(compre.IterRange()), + compre.IterVar(), + compre.AccuVar(), + fac.CopyExpr(compre.AccuInit()), + fac.CopyExpr(compre.LoopCondition()), + fac.CopyExpr(compre.LoopStep()), + fac.CopyExpr(compre.Result())) + case IdentKind: + return fac.NewIdent(e.ID(), e.AsIdent()) + case ListKind: + l := e.AsList() + elemsCopy := make([]Expr, l.Size()) + for i, elem := range l.Elements() { + elemsCopy[i] = fac.CopyExpr(elem) + } + return fac.NewList(e.ID(), elemsCopy, l.OptionalIndices()) + case LiteralKind: + return fac.NewLiteral(e.ID(), e.AsLiteral()) + case MapKind: + m := e.AsMap() + entriesCopy := make([]EntryExpr, m.Size()) + for i, entry := range m.Entries() { + entriesCopy[i] = fac.CopyEntryExpr(entry) + } + return fac.NewMap(e.ID(), entriesCopy) + case SelectKind: + s := e.AsSelect() + if s.IsTestOnly() { + return fac.NewPresenceTest(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName()) + } + return fac.NewSelect(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName()) + case StructKind: + s := e.AsStruct() + fieldsCopy := make([]EntryExpr, len(s.Fields())) + for i, field := range s.Fields() { + fieldsCopy[i] = fac.CopyEntryExpr(field) + } + return fac.NewStruct(e.ID(), s.TypeName(), fieldsCopy) + default: + return fac.NewUnspecifiedExpr(e.ID()) + } +} + +func (fac *baseExprFactory) CopyEntryExpr(e EntryExpr) EntryExpr { + switch e.Kind() { + case MapEntryKind: + entry := e.AsMapEntry() + return fac.NewMapEntry(e.ID(), + fac.CopyExpr(entry.Key()), fac.CopyExpr(entry.Value()), entry.IsOptional()) + case StructFieldKind: + field := e.AsStructField() + return fac.NewStructField(e.ID(), + field.Name(), fac.CopyExpr(field.Value()), field.IsOptional()) + default: + return fac.newEntryExpr(e.ID(), nil) + } +} + +func (*baseExprFactory) isExprFactory() {} + +func (fac *baseExprFactory) newExpr(id int64, e exprKindCase) Expr { + return &expr{ + id: id, + exprKindCase: e, + } +} + +func (fac *baseExprFactory) newEntryExpr(id int64, e entryExprKindCase) EntryExpr { + return &entryExpr{ + id: id, + entryExprKindCase: e, + } +} + +var ( + defaultFactory = &baseExprFactory{} +) diff --git a/vendor/github.com/google/cel-go/common/ast/navigable.go b/vendor/github.com/google/cel-go/common/ast/navigable.go new file mode 100644 index 000000000..f5ddf6aac --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/navigable.go @@ -0,0 +1,652 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" +) + +// NavigableExpr represents the base navigable expression value with methods to inspect the +// parent and child expressions. +type NavigableExpr interface { + Expr + + // Type of the expression. + // + // If the expression is type-checked, the type check metadata is returned. If the expression + // has not been type-checked, the types.DynType value is returned. 
+ Type() *types.Type + + // Parent returns the parent expression node, if one exists. + Parent() (NavigableExpr, bool) + + // Children returns a list of child expression nodes. + Children() []NavigableExpr + + // Depth indicates the depth in the expression tree. + // + // The root expression has depth 0. + Depth() int +} + +// NavigateAST converts an AST to a NavigableExpr +func NavigateAST(ast *AST) NavigableExpr { + return NavigateExpr(ast, ast.Expr()) +} + +// NavigateExpr creates a NavigableExpr whose type information is backed by the input AST. +// +// If the expression is already a NavigableExpr, the parent and depth information will be +// propagated on the new NavigableExpr value; otherwise, the expr value will be treated +// as though it is the root of the expression graph with a depth of 0. +func NavigateExpr(ast *AST, expr Expr) NavigableExpr { + depth := 0 + var parent NavigableExpr = nil + if nav, ok := expr.(NavigableExpr); ok { + depth = nav.Depth() + parent, _ = nav.Parent() + } + return newNavigableExpr(ast, parent, expr, depth) +} + +// ExprMatcher takes a NavigableExpr in and indicates whether the value is a match. +// +// This function type should be use with the `Match` and `MatchList` calls. +type ExprMatcher func(NavigableExpr) bool + +// ConstantValueMatcher returns an ExprMatcher which will return true if the input NavigableExpr +// is comprised of all constant values, such as a simple literal or even list and map literal. +func ConstantValueMatcher() ExprMatcher { + return matchIsConstantValue +} + +// KindMatcher returns an ExprMatcher which will return true if the input NavigableExpr.Kind() matches +// the specified `kind`. +func KindMatcher(kind ExprKind) ExprMatcher { + return func(e NavigableExpr) bool { + return e.Kind() == kind + } +} + +// FunctionMatcher returns an ExprMatcher which will match NavigableExpr nodes of CallKind type whose +// function name is equal to `funcName`. +func FunctionMatcher(funcName string) ExprMatcher { + return func(e NavigableExpr) bool { + if e.Kind() != CallKind { + return false + } + return e.AsCall().FunctionName() == funcName + } +} + +// AllMatcher returns true for all descendants of a NavigableExpr, effectively flattening them into a list. +// +// Such a result would work well with subsequent MatchList calls. +func AllMatcher() ExprMatcher { + return func(NavigableExpr) bool { + return true + } +} + +// MatchDescendants takes a NavigableExpr and ExprMatcher and produces a list of NavigableExpr values +// matching the input criteria in post-order (bottom up). +func MatchDescendants(expr NavigableExpr, matcher ExprMatcher) []NavigableExpr { + matches := []NavigableExpr{} + navVisitor := &baseVisitor{ + visitExpr: func(e Expr) { + nav := e.(NavigableExpr) + if matcher(nav) { + matches = append(matches, nav) + } + }, + } + visit(expr, navVisitor, postOrder, 0, 0) + return matches +} + +// MatchSubset applies an ExprMatcher to a list of NavigableExpr values and their descendants, producing a +// subset of NavigableExpr values which match. +func MatchSubset(exprs []NavigableExpr, matcher ExprMatcher) []NavigableExpr { + matches := []NavigableExpr{} + navVisitor := &baseVisitor{ + visitExpr: func(e Expr) { + nav := e.(NavigableExpr) + if matcher(nav) { + matches = append(matches, nav) + } + }, + } + for _, expr := range exprs { + visit(expr, navVisitor, postOrder, 0, 1) + } + return matches +} + +// Visitor defines an object for visiting Expr and EntryExpr nodes within an expression graph. 
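// ----------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the vendored diff): how the
// matcher helpers above can be combined with NavigateAST from a client
// package. A type-checked *ast.AST is assumed to have been produced elsewhere
// (for example by the CEL checker); only the calls shown in this file are used.
// ----------------------------------------------------------------------------
package example

import "github.com/google/cel-go/common/ast"

// literalsIn returns every literal node in the expression tree, bottom-up.
func literalsIn(checked *ast.AST) []ast.NavigableExpr {
	root := ast.NavigateAST(checked)
	return ast.MatchDescendants(root, ast.KindMatcher(ast.LiteralKind))
}

// callsTo returns every call expression whose function name equals fn.
func callsTo(checked *ast.AST, fn string) []ast.NavigableExpr {
	root := ast.NavigateAST(checked)
	return ast.MatchDescendants(root, ast.FunctionMatcher(fn))
}
// ----------------------------------------------------------------------------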
+type Visitor interface { + // VisitExpr visits the input expression. + VisitExpr(Expr) + + // VisitEntryExpr visits the input entry expression, i.e. a struct field or map entry. + VisitEntryExpr(EntryExpr) +} + +type baseVisitor struct { + visitExpr func(Expr) + visitEntryExpr func(EntryExpr) +} + +// VisitExpr visits the Expr if the internal expr visitor has been configured. +func (v *baseVisitor) VisitExpr(e Expr) { + if v.visitExpr != nil { + v.visitExpr(e) + } +} + +// VisitEntryExpr visits the entry if the internal expr entry visitor has been configured. +func (v *baseVisitor) VisitEntryExpr(e EntryExpr) { + if v.visitEntryExpr != nil { + v.visitEntryExpr(e) + } +} + +// NewExprVisitor creates a visitor which only visits expression nodes. +func NewExprVisitor(v func(Expr)) Visitor { + return &baseVisitor{ + visitExpr: v, + visitEntryExpr: nil, + } +} + +// PostOrderVisit walks the expression graph and calls the visitor in post-order (bottom-up). +func PostOrderVisit(expr Expr, visitor Visitor) { + visit(expr, visitor, postOrder, 0, 0) +} + +// PreOrderVisit walks the expression graph and calls the visitor in pre-order (top-down). +func PreOrderVisit(expr Expr, visitor Visitor) { + visit(expr, visitor, preOrder, 0, 0) +} + +type visitOrder int + +const ( + preOrder = iota + 1 + postOrder +) + +// TODO: consider exposing a way to configure a limit for the max visit depth. +// It's possible that we could want to configure this on the NewExprVisitor() +// and through MatchDescendents() / MaxID(). +func visit(expr Expr, visitor Visitor, order visitOrder, depth, maxDepth int) { + if maxDepth > 0 && depth == maxDepth { + return + } + if order == preOrder { + visitor.VisitExpr(expr) + } + switch expr.Kind() { + case CallKind: + c := expr.AsCall() + if c.IsMemberFunction() { + visit(c.Target(), visitor, order, depth+1, maxDepth) + } + for _, arg := range c.Args() { + visit(arg, visitor, order, depth+1, maxDepth) + } + case ComprehensionKind: + c := expr.AsComprehension() + visit(c.IterRange(), visitor, order, depth+1, maxDepth) + visit(c.AccuInit(), visitor, order, depth+1, maxDepth) + visit(c.LoopCondition(), visitor, order, depth+1, maxDepth) + visit(c.LoopStep(), visitor, order, depth+1, maxDepth) + visit(c.Result(), visitor, order, depth+1, maxDepth) + case ListKind: + l := expr.AsList() + for _, elem := range l.Elements() { + visit(elem, visitor, order, depth+1, maxDepth) + } + case MapKind: + m := expr.AsMap() + for _, e := range m.Entries() { + if order == preOrder { + visitor.VisitEntryExpr(e) + } + entry := e.AsMapEntry() + visit(entry.Key(), visitor, order, depth+1, maxDepth) + visit(entry.Value(), visitor, order, depth+1, maxDepth) + if order == postOrder { + visitor.VisitEntryExpr(e) + } + } + case SelectKind: + visit(expr.AsSelect().Operand(), visitor, order, depth+1, maxDepth) + case StructKind: + s := expr.AsStruct() + for _, f := range s.Fields() { + visitor.VisitEntryExpr(f) + visit(f.AsStructField().Value(), visitor, order, depth+1, maxDepth) + } + } + if order == postOrder { + visitor.VisitExpr(expr) + } +} + +func matchIsConstantValue(e NavigableExpr) bool { + if e.Kind() == LiteralKind { + return true + } + if e.Kind() == StructKind || e.Kind() == MapKind || e.Kind() == ListKind { + for _, child := range e.Children() { + if !matchIsConstantValue(child) { + return false + } + } + return true + } + return false +} + +func newNavigableExpr(ast *AST, parent NavigableExpr, expr Expr, depth int) NavigableExpr { + // Reduce navigable expression nesting by unwrapping the embedded 
Expr value. + if nav, ok := expr.(*navigableExprImpl); ok { + expr = nav.Expr + } + nav := &navigableExprImpl{ + Expr: expr, + depth: depth, + ast: ast, + parent: parent, + createChildren: getChildFactory(expr), + } + return nav +} + +type navigableExprImpl struct { + Expr + depth int + ast *AST + parent NavigableExpr + createChildren childFactory +} + +func (nav *navigableExprImpl) Parent() (NavigableExpr, bool) { + if nav.parent != nil { + return nav.parent, true + } + return nil, false +} + +func (nav *navigableExprImpl) ID() int64 { + return nav.Expr.ID() +} + +func (nav *navigableExprImpl) Kind() ExprKind { + return nav.Expr.Kind() +} + +func (nav *navigableExprImpl) Type() *types.Type { + return nav.ast.GetType(nav.ID()) +} + +func (nav *navigableExprImpl) Children() []NavigableExpr { + return nav.createChildren(nav) +} + +func (nav *navigableExprImpl) Depth() int { + return nav.depth +} + +func (nav *navigableExprImpl) AsCall() CallExpr { + return navigableCallImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsComprehension() ComprehensionExpr { + return navigableComprehensionImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsIdent() string { + return nav.Expr.AsIdent() +} + +func (nav *navigableExprImpl) AsList() ListExpr { + return navigableListImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsLiteral() ref.Val { + return nav.Expr.AsLiteral() +} + +func (nav *navigableExprImpl) AsMap() MapExpr { + return navigableMapImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsSelect() SelectExpr { + return navigableSelectImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsStruct() StructExpr { + return navigableStructImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) createChild(e Expr) NavigableExpr { + return newNavigableExpr(nav.ast, nav, e, nav.depth+1) +} + +func (nav *navigableExprImpl) isExpr() {} + +type navigableCallImpl struct { + *navigableExprImpl +} + +func (call navigableCallImpl) FunctionName() string { + return call.Expr.AsCall().FunctionName() +} + +func (call navigableCallImpl) IsMemberFunction() bool { + return call.Expr.AsCall().IsMemberFunction() +} + +func (call navigableCallImpl) Target() Expr { + t := call.Expr.AsCall().Target() + if t != nil { + return call.createChild(t) + } + return nil +} + +func (call navigableCallImpl) Args() []Expr { + args := call.Expr.AsCall().Args() + navArgs := make([]Expr, len(args)) + for i, a := range args { + navArgs[i] = call.createChild(a) + } + return navArgs +} + +type navigableComprehensionImpl struct { + *navigableExprImpl +} + +func (comp navigableComprehensionImpl) IterRange() Expr { + return comp.createChild(comp.Expr.AsComprehension().IterRange()) +} + +func (comp navigableComprehensionImpl) IterVar() string { + return comp.Expr.AsComprehension().IterVar() +} + +func (comp navigableComprehensionImpl) AccuVar() string { + return comp.Expr.AsComprehension().AccuVar() +} + +func (comp navigableComprehensionImpl) AccuInit() Expr { + return comp.createChild(comp.Expr.AsComprehension().AccuInit()) +} + +func (comp navigableComprehensionImpl) LoopCondition() Expr { + return comp.createChild(comp.Expr.AsComprehension().LoopCondition()) +} + +func (comp navigableComprehensionImpl) LoopStep() Expr { + return comp.createChild(comp.Expr.AsComprehension().LoopStep()) +} + +func (comp navigableComprehensionImpl) Result() Expr { + return comp.createChild(comp.Expr.AsComprehension().Result()) +} + +type navigableListImpl struct { + 
*navigableExprImpl +} + +func (l navigableListImpl) Elements() []Expr { + pbElems := l.Expr.AsList().Elements() + elems := make([]Expr, len(pbElems)) + for i := 0; i < len(pbElems); i++ { + elems[i] = l.createChild(pbElems[i]) + } + return elems +} + +func (l navigableListImpl) IsOptional(index int32) bool { + return l.Expr.AsList().IsOptional(index) +} + +func (l navigableListImpl) OptionalIndices() []int32 { + return l.Expr.AsList().OptionalIndices() +} + +func (l navigableListImpl) Size() int { + return l.Expr.AsList().Size() +} + +type navigableMapImpl struct { + *navigableExprImpl +} + +func (m navigableMapImpl) Entries() []EntryExpr { + mapExpr := m.Expr.AsMap() + entries := make([]EntryExpr, len(mapExpr.Entries())) + for i, e := range mapExpr.Entries() { + entry := e.AsMapEntry() + entries[i] = &entryExpr{ + id: e.ID(), + entryExprKindCase: navigableEntryImpl{ + key: m.createChild(entry.Key()), + val: m.createChild(entry.Value()), + isOpt: entry.IsOptional(), + }, + } + } + return entries +} + +func (m navigableMapImpl) Size() int { + return m.Expr.AsMap().Size() +} + +type navigableEntryImpl struct { + key NavigableExpr + val NavigableExpr + isOpt bool +} + +func (e navigableEntryImpl) Kind() EntryExprKind { + return MapEntryKind +} + +func (e navigableEntryImpl) Key() Expr { + return e.key +} + +func (e navigableEntryImpl) Value() Expr { + return e.val +} + +func (e navigableEntryImpl) IsOptional() bool { + return e.isOpt +} + +func (e navigableEntryImpl) renumberIDs(IDGenerator) {} + +func (e navigableEntryImpl) isEntryExpr() {} + +type navigableSelectImpl struct { + *navigableExprImpl +} + +func (sel navigableSelectImpl) FieldName() string { + return sel.Expr.AsSelect().FieldName() +} + +func (sel navigableSelectImpl) IsTestOnly() bool { + return sel.Expr.AsSelect().IsTestOnly() +} + +func (sel navigableSelectImpl) Operand() Expr { + return sel.createChild(sel.Expr.AsSelect().Operand()) +} + +type navigableStructImpl struct { + *navigableExprImpl +} + +func (s navigableStructImpl) TypeName() string { + return s.Expr.AsStruct().TypeName() +} + +func (s navigableStructImpl) Fields() []EntryExpr { + fieldInits := s.Expr.AsStruct().Fields() + fields := make([]EntryExpr, len(fieldInits)) + for i, f := range fieldInits { + field := f.AsStructField() + fields[i] = &entryExpr{ + id: f.ID(), + entryExprKindCase: navigableFieldImpl{ + name: field.Name(), + val: s.createChild(field.Value()), + isOpt: field.IsOptional(), + }, + } + } + return fields +} + +type navigableFieldImpl struct { + name string + val NavigableExpr + isOpt bool +} + +func (f navigableFieldImpl) Kind() EntryExprKind { + return StructFieldKind +} + +func (f navigableFieldImpl) Name() string { + return f.name +} + +func (f navigableFieldImpl) Value() Expr { + return f.val +} + +func (f navigableFieldImpl) IsOptional() bool { + return f.isOpt +} + +func (f navigableFieldImpl) renumberIDs(IDGenerator) {} + +func (f navigableFieldImpl) isEntryExpr() {} + +func getChildFactory(expr Expr) childFactory { + if expr == nil { + return noopFactory + } + switch expr.Kind() { + case LiteralKind: + return noopFactory + case IdentKind: + return noopFactory + case SelectKind: + return selectFactory + case CallKind: + return callArgFactory + case ListKind: + return listElemFactory + case MapKind: + return mapEntryFactory + case StructKind: + return structEntryFactory + case ComprehensionKind: + return comprehensionFactory + default: + return noopFactory + } +} + +type childFactory func(*navigableExprImpl) []NavigableExpr + +func 
noopFactory(*navigableExprImpl) []NavigableExpr { + return nil +} + +func selectFactory(nav *navigableExprImpl) []NavigableExpr { + return []NavigableExpr{nav.createChild(nav.AsSelect().Operand())} +} + +func callArgFactory(nav *navigableExprImpl) []NavigableExpr { + call := nav.Expr.AsCall() + argCount := len(call.Args()) + if call.IsMemberFunction() { + argCount++ + } + navExprs := make([]NavigableExpr, argCount) + i := 0 + if call.IsMemberFunction() { + navExprs[i] = nav.createChild(call.Target()) + i++ + } + for _, arg := range call.Args() { + navExprs[i] = nav.createChild(arg) + i++ + } + return navExprs +} + +func listElemFactory(nav *navigableExprImpl) []NavigableExpr { + l := nav.Expr.AsList() + navExprs := make([]NavigableExpr, len(l.Elements())) + for i, e := range l.Elements() { + navExprs[i] = nav.createChild(e) + } + return navExprs +} + +func structEntryFactory(nav *navigableExprImpl) []NavigableExpr { + s := nav.Expr.AsStruct() + entries := make([]NavigableExpr, len(s.Fields())) + for i, e := range s.Fields() { + f := e.AsStructField() + entries[i] = nav.createChild(f.Value()) + } + return entries +} + +func mapEntryFactory(nav *navigableExprImpl) []NavigableExpr { + m := nav.Expr.AsMap() + entries := make([]NavigableExpr, len(m.Entries())*2) + j := 0 + for _, e := range m.Entries() { + mapEntry := e.AsMapEntry() + entries[j] = nav.createChild(mapEntry.Key()) + entries[j+1] = nav.createChild(mapEntry.Value()) + j += 2 + } + return entries +} + +func comprehensionFactory(nav *navigableExprImpl) []NavigableExpr { + compre := nav.Expr.AsComprehension() + return []NavigableExpr{ + nav.createChild(compre.IterRange()), + nav.createChild(compre.AccuInit()), + nav.createChild(compre.LoopCondition()), + nav.createChild(compre.LoopStep()), + nav.createChild(compre.Result()), + } +} diff --git a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel b/vendor/github.com/google/cel-go/common/containers/BUILD.bazel index 3f3f07887..81197f064 100644 --- a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/containers/BUILD.bazel @@ -12,7 +12,7 @@ go_library( ], importpath = "github.com/google/cel-go/common/containers", deps = [ - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", ], ) @@ -26,6 +26,6 @@ go_test( ":go_default_library", ], deps = [ - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/common/containers/container.go b/vendor/github.com/google/cel-go/common/containers/container.go index d46698d3c..52153d4cd 100644 --- a/vendor/github.com/google/cel-go/common/containers/container.go +++ b/vendor/github.com/google/cel-go/common/containers/container.go @@ -20,7 +20,7 @@ import ( "fmt" "strings" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" ) var ( @@ -297,19 +297,19 @@ func Name(name string) ContainerOption { // ToQualifiedName converts an expression AST into a qualified name if possible, with a boolean // 'found' value that indicates if the conversion is successful. 
-func ToQualifiedName(e *exprpb.Expr) (string, bool) { - switch e.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - id := e.GetIdentExpr() - return id.GetName(), true - case *exprpb.Expr_SelectExpr: - sel := e.GetSelectExpr() +func ToQualifiedName(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.IdentKind: + id := e.AsIdent() + return id, true + case ast.SelectKind: + sel := e.AsSelect() // Test only expressions are not valid as qualified names. - if sel.GetTestOnly() { + if sel.IsTestOnly() { return "", false } - if qual, found := ToQualifiedName(sel.GetOperand()); found { - return qual + "." + sel.GetField(), true + if qual, found := ToQualifiedName(sel.Operand()); found { + return qual + "." + sel.FieldName(), true } } return "", false diff --git a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel b/vendor/github.com/google/cel-go/common/debug/BUILD.bazel index 1f029839c..724ed3404 100644 --- a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/debug/BUILD.bazel @@ -13,6 +13,8 @@ go_library( importpath = "github.com/google/cel-go/common/debug", deps = [ "//common:go_default_library", - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/common/debug/debug.go b/vendor/github.com/google/cel-go/common/debug/debug.go index 5dab156ef..e4c01ac6e 100644 --- a/vendor/github.com/google/cel-go/common/debug/debug.go +++ b/vendor/github.com/google/cel-go/common/debug/debug.go @@ -22,7 +22,9 @@ import ( "strconv" "strings" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) // Adorner returns debug metadata that will be tacked on to the string @@ -38,7 +40,7 @@ type Writer interface { // Buffer pushes an expression into an internal queue of expressions to // write to a string. - Buffer(e *exprpb.Expr) + Buffer(e ast.Expr) } type emptyDebugAdorner struct { @@ -51,12 +53,12 @@ func (a *emptyDebugAdorner) GetMetadata(e any) string { } // ToDebugString gives the unadorned string representation of the Expr. -func ToDebugString(e *exprpb.Expr) string { +func ToDebugString(e ast.Expr) string { return ToAdornedDebugString(e, emptyAdorner) } // ToAdornedDebugString gives the adorned string representation of the Expr. 
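// ----------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the vendored diff): the
// updated ToQualifiedName above now operates on the native ast.Expr type. The
// node IDs below are arbitrary example values.
// ----------------------------------------------------------------------------
package example

import (
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/containers"
)

// qualifiedName builds the select chain `a.b` with the expression factory and
// converts it back to its dotted-name form.
func qualifiedName() (string, bool) {
	fac := ast.NewExprFactory()
	a := fac.NewIdent(1, "a")
	ab := fac.NewSelect(2, a, "b")
	return containers.ToQualifiedName(ab) // "a.b", true
}
// ----------------------------------------------------------------------------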
-func ToAdornedDebugString(e *exprpb.Expr, adorner Adorner) string { +func ToAdornedDebugString(e ast.Expr, adorner Adorner) string { w := newDebugWriter(adorner) w.Buffer(e) return w.String() @@ -78,49 +80,51 @@ func newDebugWriter(a Adorner) *debugWriter { } } -func (w *debugWriter) Buffer(e *exprpb.Expr) { +func (w *debugWriter) Buffer(e ast.Expr) { if e == nil { return } - switch e.ExprKind.(type) { - case *exprpb.Expr_ConstExpr: - w.append(formatLiteral(e.GetConstExpr())) - case *exprpb.Expr_IdentExpr: - w.append(e.GetIdentExpr().Name) - case *exprpb.Expr_SelectExpr: - w.appendSelect(e.GetSelectExpr()) - case *exprpb.Expr_CallExpr: - w.appendCall(e.GetCallExpr()) - case *exprpb.Expr_ListExpr: - w.appendList(e.GetListExpr()) - case *exprpb.Expr_StructExpr: - w.appendStruct(e.GetStructExpr()) - case *exprpb.Expr_ComprehensionExpr: - w.appendComprehension(e.GetComprehensionExpr()) + switch e.Kind() { + case ast.LiteralKind: + w.append(formatLiteral(e.AsLiteral())) + case ast.IdentKind: + w.append(e.AsIdent()) + case ast.SelectKind: + w.appendSelect(e.AsSelect()) + case ast.CallKind: + w.appendCall(e.AsCall()) + case ast.ListKind: + w.appendList(e.AsList()) + case ast.MapKind: + w.appendMap(e.AsMap()) + case ast.StructKind: + w.appendStruct(e.AsStruct()) + case ast.ComprehensionKind: + w.appendComprehension(e.AsComprehension()) } w.adorn(e) } -func (w *debugWriter) appendSelect(sel *exprpb.Expr_Select) { - w.Buffer(sel.GetOperand()) +func (w *debugWriter) appendSelect(sel ast.SelectExpr) { + w.Buffer(sel.Operand()) w.append(".") - w.append(sel.GetField()) - if sel.TestOnly { + w.append(sel.FieldName()) + if sel.IsTestOnly() { w.append("~test-only~") } } -func (w *debugWriter) appendCall(call *exprpb.Expr_Call) { - if call.Target != nil { - w.Buffer(call.GetTarget()) +func (w *debugWriter) appendCall(call ast.CallExpr) { + if call.IsMemberFunction() { + w.Buffer(call.Target()) w.append(".") } - w.append(call.GetFunction()) + w.append(call.FunctionName()) w.append("(") - if len(call.GetArgs()) > 0 { + if len(call.Args()) > 0 { w.addIndent() w.appendLine() - for i, arg := range call.GetArgs() { + for i, arg := range call.Args() { if i > 0 { w.append(",") w.appendLine() @@ -133,12 +137,12 @@ func (w *debugWriter) appendCall(call *exprpb.Expr_Call) { w.append(")") } -func (w *debugWriter) appendList(list *exprpb.Expr_CreateList) { +func (w *debugWriter) appendList(list ast.ListExpr) { w.append("[") - if len(list.GetElements()) > 0 { + if len(list.Elements()) > 0 { w.appendLine() w.addIndent() - for i, elem := range list.GetElements() { + for i, elem := range list.Elements() { if i > 0 { w.append(",") w.appendLine() @@ -151,32 +155,25 @@ func (w *debugWriter) appendList(list *exprpb.Expr_CreateList) { w.append("]") } -func (w *debugWriter) appendStruct(obj *exprpb.Expr_CreateStruct) { - if obj.MessageName != "" { - w.appendObject(obj) - } else { - w.appendMap(obj) - } -} - -func (w *debugWriter) appendObject(obj *exprpb.Expr_CreateStruct) { - w.append(obj.GetMessageName()) +func (w *debugWriter) appendStruct(obj ast.StructExpr) { + w.append(obj.TypeName()) w.append("{") - if len(obj.GetEntries()) > 0 { + if len(obj.Fields()) > 0 { w.appendLine() w.addIndent() - for i, entry := range obj.GetEntries() { + for i, f := range obj.Fields() { + field := f.AsStructField() if i > 0 { w.append(",") w.appendLine() } - if entry.GetOptionalEntry() { + if field.IsOptional() { w.append("?") } - w.append(entry.GetFieldKey()) + w.append(field.Name()) w.append(":") - w.Buffer(entry.GetValue()) - w.adorn(entry) + 
w.Buffer(field.Value()) + w.adorn(f) } w.removeIndent() w.appendLine() @@ -184,23 +181,24 @@ func (w *debugWriter) appendObject(obj *exprpb.Expr_CreateStruct) { w.append("}") } -func (w *debugWriter) appendMap(obj *exprpb.Expr_CreateStruct) { +func (w *debugWriter) appendMap(m ast.MapExpr) { w.append("{") - if len(obj.GetEntries()) > 0 { + if m.Size() > 0 { w.appendLine() w.addIndent() - for i, entry := range obj.GetEntries() { + for i, e := range m.Entries() { + entry := e.AsMapEntry() if i > 0 { w.append(",") w.appendLine() } - if entry.GetOptionalEntry() { + if entry.IsOptional() { w.append("?") } - w.Buffer(entry.GetMapKey()) + w.Buffer(entry.Key()) w.append(":") - w.Buffer(entry.GetValue()) - w.adorn(entry) + w.Buffer(entry.Value()) + w.adorn(e) } w.removeIndent() w.appendLine() @@ -208,62 +206,62 @@ func (w *debugWriter) appendMap(obj *exprpb.Expr_CreateStruct) { w.append("}") } -func (w *debugWriter) appendComprehension(comprehension *exprpb.Expr_Comprehension) { +func (w *debugWriter) appendComprehension(comprehension ast.ComprehensionExpr) { w.append("__comprehension__(") w.addIndent() w.appendLine() w.append("// Variable") w.appendLine() - w.append(comprehension.GetIterVar()) + w.append(comprehension.IterVar()) w.append(",") w.appendLine() w.append("// Target") w.appendLine() - w.Buffer(comprehension.GetIterRange()) + w.Buffer(comprehension.IterRange()) w.append(",") w.appendLine() w.append("// Accumulator") w.appendLine() - w.append(comprehension.GetAccuVar()) + w.append(comprehension.AccuVar()) w.append(",") w.appendLine() w.append("// Init") w.appendLine() - w.Buffer(comprehension.GetAccuInit()) + w.Buffer(comprehension.AccuInit()) w.append(",") w.appendLine() w.append("// LoopCondition") w.appendLine() - w.Buffer(comprehension.GetLoopCondition()) + w.Buffer(comprehension.LoopCondition()) w.append(",") w.appendLine() w.append("// LoopStep") w.appendLine() - w.Buffer(comprehension.GetLoopStep()) + w.Buffer(comprehension.LoopStep()) w.append(",") w.appendLine() w.append("// Result") w.appendLine() - w.Buffer(comprehension.GetResult()) + w.Buffer(comprehension.Result()) w.append(")") w.removeIndent() } -func formatLiteral(c *exprpb.Constant) string { - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - return fmt.Sprintf("%t", c.GetBoolValue()) - case *exprpb.Constant_BytesValue: - return fmt.Sprintf("b\"%s\"", string(c.GetBytesValue())) - case *exprpb.Constant_DoubleValue: - return fmt.Sprintf("%v", c.GetDoubleValue()) - case *exprpb.Constant_Int64Value: - return fmt.Sprintf("%d", c.GetInt64Value()) - case *exprpb.Constant_StringValue: - return strconv.Quote(c.GetStringValue()) - case *exprpb.Constant_Uint64Value: - return fmt.Sprintf("%du", c.GetUint64Value()) - case *exprpb.Constant_NullValue: +func formatLiteral(c ref.Val) string { + switch v := c.(type) { + case types.Bool: + return fmt.Sprintf("%t", v) + case types.Bytes: + return fmt.Sprintf("b\"%s\"", string(v)) + case types.Double: + return fmt.Sprintf("%v", float64(v)) + case types.Int: + return fmt.Sprintf("%d", int64(v)) + case types.String: + return strconv.Quote(string(v)) + case types.Uint: + return fmt.Sprintf("%du", uint64(v)) + case types.Null: return "null" default: panic("Unknown constant type") diff --git a/vendor/github.com/google/cel-go/common/decls/decls.go b/vendor/github.com/google/cel-go/common/decls/decls.go index 0284f8dbc..734ebe57e 100644 --- a/vendor/github.com/google/cel-go/common/decls/decls.go +++ b/vendor/github.com/google/cel-go/common/decls/decls.go @@ -243,7 +243,8 @@ 
func (f *FunctionDecl) Bindings() ([]*functions.Overload, error) { // performs dynamic dispatch to the proper overload based on the argument types. bindings := append([]*functions.Overload{}, overloads...) funcDispatch := func(args ...ref.Val) ref.Val { - for _, o := range f.overloads { + for _, oID := range f.overloadOrdinals { + o := f.overloads[oID] // During dynamic dispatch over multiple functions, signature agreement checks // are preserved in order to assist with the function resolution step. switch len(args) { diff --git a/vendor/github.com/google/cel-go/common/errors.go b/vendor/github.com/google/cel-go/common/errors.go index 63919714e..25adc73d8 100644 --- a/vendor/github.com/google/cel-go/common/errors.go +++ b/vendor/github.com/google/cel-go/common/errors.go @@ -64,7 +64,7 @@ func (e *Errors) GetErrors() []*Error { // Append creates a new Errors object with the current and input errors. func (e *Errors) Append(errs []*Error) *Errors { return &Errors{ - errors: append(e.errors, errs...), + errors: append(e.errors[:], errs...), source: e.source, numErrors: e.numErrors + len(errs), maxErrorsToReport: e.maxErrorsToReport, diff --git a/vendor/github.com/google/cel-go/common/types/provider.go b/vendor/github.com/google/cel-go/common/types/provider.go index e80b4622e..d301aa38a 100644 --- a/vendor/github.com/google/cel-go/common/types/provider.go +++ b/vendor/github.com/google/cel-go/common/types/provider.go @@ -54,6 +54,10 @@ type Provider interface { // Returns false if not found. FindStructType(structType string) (*Type, bool) + // FindStructFieldNames returns thet field names associated with the type, if the type + // is found. + FindStructFieldNames(structType string) ([]string, bool) + // FieldStructFieldType returns the field type for a checked type value. Returns // false if the field could not be found. FindStructFieldType(structType, fieldName string) (*FieldType, bool) @@ -154,7 +158,7 @@ func (p *Registry) EnumValue(enumName string) ref.Val { return Int(enumVal.Value()) } -// FieldFieldType returns the field type for a checked type value. Returns false if +// FindFieldType returns the field type for a checked type value. Returns false if // the field could not be found. // // Deprecated: use FindStructFieldType @@ -173,7 +177,24 @@ func (p *Registry) FindFieldType(structType, fieldName string) (*ref.FieldType, GetFrom: field.GetFrom}, true } -// FieldStructFieldType returns the field type for a checked type value. Returns +// FindStructFieldNames returns the set of field names for the given struct type, +// if the type exists in the registry. +func (p *Registry) FindStructFieldNames(structType string) ([]string, bool) { + msgType, found := p.pbdb.DescribeType(structType) + if !found { + return []string{}, false + } + fieldMap := msgType.FieldMap() + fields := make([]string, len(fieldMap)) + idx := 0 + for f := range fieldMap { + fields[idx] = f + idx++ + } + return fields, true +} + +// FindStructFieldType returns the field type for a checked type value. Returns // false if the field could not be found. 
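// ----------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the vendored diff): the new
// Registry.FindStructFieldNames method above in use. The message type name is
// whatever the caller supplies; any type known to the registry works the same
// way.
// ----------------------------------------------------------------------------
package example

import "github.com/google/cel-go/common/types"

// fieldNames lists the field names the registry knows for a message type,
// or nil when the type is not registered.
func fieldNames(reg *types.Registry, typeName string) []string {
	names, found := reg.FindStructFieldNames(typeName)
	if !found {
		return nil
	}
	return names
}
// ----------------------------------------------------------------------------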
func (p *Registry) FindStructFieldType(structType, fieldName string) (*FieldType, bool) { msgType, found := p.pbdb.DescribeType(structType) diff --git a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel b/vendor/github.com/google/cel-go/interpreter/BUILD.bazel index 3a5219eb5..220e23d47 100644 --- a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel +++ b/vendor/github.com/google/cel-go/interpreter/BUILD.bazel @@ -14,7 +14,6 @@ go_library( "decorators.go", "dispatcher.go", "evalstate.go", - "formatting.go", "interpretable.go", "interpreter.go", "optimizations.go", diff --git a/vendor/github.com/google/cel-go/interpreter/formatting.go b/vendor/github.com/google/cel-go/interpreter/formatting.go deleted file mode 100644 index e3f753374..000000000 --- a/vendor/github.com/google/cel-go/interpreter/formatting.go +++ /dev/null @@ -1,383 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package interpreter - -import ( - "errors" - "fmt" - "strconv" - "strings" - "unicode" - - "github.com/google/cel-go/common/types" - "github.com/google/cel-go/common/types/ref" -) - -type typeVerifier func(int64, ...ref.Type) (bool, error) - -// InterpolateFormattedString checks the syntax and cardinality of any string.format calls present in the expression and reports -// any errors at compile time. 
-func InterpolateFormattedString(verifier typeVerifier) InterpretableDecorator { - return func(inter Interpretable) (Interpretable, error) { - call, ok := inter.(InterpretableCall) - if !ok { - return inter, nil - } - if call.OverloadID() != "string_format" { - return inter, nil - } - args := call.Args() - if len(args) != 2 { - return nil, fmt.Errorf("wrong number of arguments to string.format (expected 2, got %d)", len(args)) - } - fmtStrInter, ok := args[0].(InterpretableConst) - if !ok { - return inter, nil - } - var fmtArgsInter InterpretableConstructor - fmtArgsInter, ok = args[1].(InterpretableConstructor) - if !ok { - return inter, nil - } - if fmtArgsInter.Type() != types.ListType { - // don't necessarily return an error since the list may be DynType - return inter, nil - } - formatStr := fmtStrInter.Value().Value().(string) - initVals := fmtArgsInter.InitVals() - - formatCheck := &formatCheck{ - args: initVals, - verifier: verifier, - } - // use a placeholder locale, since locale doesn't affect syntax - _, err := ParseFormatString(formatStr, formatCheck, formatCheck, "en_US") - if err != nil { - return nil, err - } - seenArgs := formatCheck.argsRequested - if len(initVals) > seenArgs { - return nil, fmt.Errorf("too many arguments supplied to string.format (expected %d, got %d)", seenArgs, len(initVals)) - } - return inter, nil - } -} - -type formatCheck struct { - args []Interpretable - argsRequested int - curArgIndex int64 - enableCheckArgTypes bool - verifier typeVerifier -} - -func (c *formatCheck) String(arg ref.Val, locale string) (string, error) { - valid, err := verifyString(c.args[c.curArgIndex], c.verifier) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("string clause can only be used on strings, bools, bytes, ints, doubles, maps, lists, types, durations, and timestamps") - } - return "", nil -} - -func (c *formatCheck) Decimal(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("integer clause can only be used on integers") - } - return "", nil -} - -func (c *formatCheck) Fixed(precision *int) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - // we allow StringType since "NaN", "Infinity", and "-Infinity" are also valid values - valid, err := c.verifier(id, types.DoubleType, types.StringType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("fixed-point clause can only be used on doubles") - } - return "", nil - } -} - -func (c *formatCheck) Scientific(precision *int) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.DoubleType, types.StringType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("scientific clause can only be used on doubles") - } - return "", nil - } -} - -func (c *formatCheck) Binary(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType, types.BoolType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("only integers and bools can be formatted as binary") - } - return "", nil -} - -func (c *formatCheck) Hex(useUpper bool) func(ref.Val, string) (string, error) { - return func(arg 
ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType, types.StringType, types.BytesType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("only integers, byte buffers, and strings can be formatted as hex") - } - return "", nil - } -} - -func (c *formatCheck) Octal(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("octal clause can only be used on integers") - } - return "", nil -} - -func (c *formatCheck) Arg(index int64) (ref.Val, error) { - c.argsRequested++ - c.curArgIndex = index - // return a dummy value - this is immediately passed to back to us - // through one of the FormatCallback functions, so anything will do - return types.Int(0), nil -} - -func (c *formatCheck) ArgSize() int64 { - return int64(len(c.args)) -} - -func verifyString(sub Interpretable, verifier typeVerifier) (bool, error) { - subVerified, err := verifier(sub.ID(), - types.ListType, types.MapType, types.IntType, types.UintType, types.DoubleType, - types.BoolType, types.StringType, types.TimestampType, types.BytesType, types.DurationType, types.TypeType, types.NullType) - if err != nil { - return false, err - } - if !subVerified { - return false, nil - } - con, ok := sub.(InterpretableConstructor) - if ok { - members := con.InitVals() - for _, m := range members { - // recursively verify if we're dealing with a list/map - verified, err := verifyString(m, verifier) - if err != nil { - return false, err - } - if !verified { - return false, nil - } - } - } - return true, nil - -} - -// FormatStringInterpolator is an interface that allows user-defined behavior -// for formatting clause implementations, as well as argument retrieval. -// Each function is expected to support the appropriate types as laid out in -// the string.format documentation, and to return an error if given an inappropriate type. -type FormatStringInterpolator interface { - // String takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a string, or an error if one occurred. - String(ref.Val, string) (string, error) - - // Decimal takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a decimal integer, or an error if one occurred. - Decimal(ref.Val, string) (string, error) - - // Fixed takes an int pointer representing precision (or nil if none was given) and - // returns a function operating in a similar manner to String and Decimal, taking a - // ref.Val and locale and returning the appropriate string. A closure is returned - // so precision can be set without needing an additional function call/configuration. - Fixed(*int) func(ref.Val, string) (string, error) - - // Scientific functions identically to Fixed, except the string returned from the closure - // is expected to be in scientific notation. - Scientific(*int) func(ref.Val, string) (string, error) - - // Binary takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a binary integer, or an error if one occurred. - Binary(ref.Val, string) (string, error) - - // Hex takes a boolean that, if true, indicates the hex string output by the returned - // closure should use uppercase letters for A-F. 
- Hex(bool) func(ref.Val, string) (string, error) - - // Octal takes a ref.Val and a string representing the current locale identifier and - // returns the Val formatted in octal, or an error if one occurred. - Octal(ref.Val, string) (string, error) -} - -// FormatList is an interface that allows user-defined list-like datatypes to be used -// for formatting clause implementations. -type FormatList interface { - // Arg returns the ref.Val at the given index, or an error if one occurred. - Arg(int64) (ref.Val, error) - // ArgSize returns the length of the argument list. - ArgSize() int64 -} - -type clauseImpl func(ref.Val, string) (string, error) - -// ParseFormatString formats a string according to the string.format syntax, taking the clause implementations -// from the provided FormatCallback and the args from the given FormatList. -func ParseFormatString(formatStr string, callback FormatStringInterpolator, list FormatList, locale string) (string, error) { - i := 0 - argIndex := 0 - var builtStr strings.Builder - for i < len(formatStr) { - if formatStr[i] == '%' { - if i+1 < len(formatStr) && formatStr[i+1] == '%' { - err := builtStr.WriteByte('%') - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i += 2 - continue - } else { - argAny, err := list.Arg(int64(argIndex)) - if err != nil { - return "", err - } - if i+1 >= len(formatStr) { - return "", errors.New("unexpected end of string") - } - if int64(argIndex) >= list.ArgSize() { - return "", fmt.Errorf("index %d out of range", argIndex) - } - numRead, val, refErr := parseAndFormatClause(formatStr[i:], argAny, callback, list, locale) - if refErr != nil { - return "", refErr - } - _, err = builtStr.WriteString(val) - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i += numRead - argIndex++ - } - } else { - err := builtStr.WriteByte(formatStr[i]) - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i++ - } - } - return builtStr.String(), nil -} - -// parseAndFormatClause parses the format clause at the start of the given string with val, and returns -// how many characters were consumed and the substituted string form of val, or an error if one occurred. 
-func parseAndFormatClause(formatStr string, val ref.Val, callback FormatStringInterpolator, list FormatList, locale string) (int, string, error) { - i := 1 - read, formatter, err := parseFormattingClause(formatStr[i:], callback) - i += read - if err != nil { - return -1, "", fmt.Errorf("could not parse formatting clause: %s", err) - } - - valStr, err := formatter(val, locale) - if err != nil { - return -1, "", fmt.Errorf("error during formatting: %s", err) - } - return i, valStr, nil -} - -func parseFormattingClause(formatStr string, callback FormatStringInterpolator) (int, clauseImpl, error) { - i := 0 - read, precision, err := parsePrecision(formatStr[i:]) - i += read - if err != nil { - return -1, nil, fmt.Errorf("error while parsing precision: %w", err) - } - r := rune(formatStr[i]) - i++ - switch r { - case 's': - return i, callback.String, nil - case 'd': - return i, callback.Decimal, nil - case 'f': - return i, callback.Fixed(precision), nil - case 'e': - return i, callback.Scientific(precision), nil - case 'b': - return i, callback.Binary, nil - case 'x', 'X': - return i, callback.Hex(unicode.IsUpper(r)), nil - case 'o': - return i, callback.Octal, nil - default: - return -1, nil, fmt.Errorf("unrecognized formatting clause \"%c\"", r) - } -} - -func parsePrecision(formatStr string) (int, *int, error) { - i := 0 - if formatStr[i] != '.' { - return i, nil, nil - } - i++ - var buffer strings.Builder - for { - if i >= len(formatStr) { - return -1, nil, errors.New("could not find end of precision specifier") - } - if !isASCIIDigit(rune(formatStr[i])) { - break - } - buffer.WriteByte(formatStr[i]) - i++ - } - precision, err := strconv.Atoi(buffer.String()) - if err != nil { - return -1, nil, fmt.Errorf("error while converting precision to integer: %w", err) - } - return i, &precision, nil -} - -func isASCIIDigit(r rune) bool { - return r <= unicode.MaxASCII && unicode.IsDigit(r) -} diff --git a/vendor/github.com/google/cel-go/interpreter/interpreter.go b/vendor/github.com/google/cel-go/interpreter/interpreter.go index 00fc74732..0aca74d88 100644 --- a/vendor/github.com/google/cel-go/interpreter/interpreter.go +++ b/vendor/github.com/google/cel-go/interpreter/interpreter.go @@ -22,19 +22,13 @@ import ( "github.com/google/cel-go/common/containers" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // Interpreter generates a new Interpretable from a checked or unchecked expression. type Interpreter interface { // NewInterpretable creates an Interpretable from a checked expression and an // optional list of InterpretableDecorator values. - NewInterpretable(checked *ast.CheckedAST, decorators ...InterpretableDecorator) (Interpretable, error) - - // NewUncheckedInterpretable returns an Interpretable from a parsed expression - // and an optional list of InterpretableDecorator values. - NewUncheckedInterpretable(expr *exprpb.Expr, decorators ...InterpretableDecorator) (Interpretable, error) + NewInterpretable(exprAST *ast.AST, decorators ...InterpretableDecorator) (Interpretable, error) } // EvalObserver is a functional interface that accepts an expression id and an observed value. @@ -177,7 +171,7 @@ func NewInterpreter(dispatcher Dispatcher, // NewIntepretable implements the Interpreter interface method. 
func (i *exprInterpreter) NewInterpretable( - checked *ast.CheckedAST, + checked *ast.AST, decorators ...InterpretableDecorator) (Interpretable, error) { p := newPlanner( i.dispatcher, @@ -187,19 +181,5 @@ func (i *exprInterpreter) NewInterpretable( i.container, checked, decorators...) - return p.Plan(checked.Expr) -} - -// NewUncheckedIntepretable implements the Interpreter interface method. -func (i *exprInterpreter) NewUncheckedInterpretable( - expr *exprpb.Expr, - decorators ...InterpretableDecorator) (Interpretable, error) { - p := newUncheckedPlanner( - i.dispatcher, - i.provider, - i.adapter, - i.attrFactory, - i.container, - decorators...) - return p.Plan(expr) + return p.Plan(checked.Expr()) } diff --git a/vendor/github.com/google/cel-go/interpreter/planner.go b/vendor/github.com/google/cel-go/interpreter/planner.go index 757cd080e..cf371f95d 100644 --- a/vendor/github.com/google/cel-go/interpreter/planner.go +++ b/vendor/github.com/google/cel-go/interpreter/planner.go @@ -23,15 +23,12 @@ import ( "github.com/google/cel-go/common/functions" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/types" - "github.com/google/cel-go/common/types/ref" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // interpretablePlanner creates an Interpretable evaluation plan from a proto Expr value. type interpretablePlanner interface { // Plan generates an Interpretable value (or error) from the input proto Expr. - Plan(expr *exprpb.Expr) (Interpretable, error) + Plan(expr ast.Expr) (Interpretable, error) } // newPlanner creates an interpretablePlanner which references a Dispatcher, TypeProvider, @@ -43,28 +40,7 @@ func newPlanner(disp Dispatcher, adapter types.Adapter, attrFactory AttributeFactory, cont *containers.Container, - checked *ast.CheckedAST, - decorators ...InterpretableDecorator) interpretablePlanner { - return &planner{ - disp: disp, - provider: provider, - adapter: adapter, - attrFactory: attrFactory, - container: cont, - refMap: checked.ReferenceMap, - typeMap: checked.TypeMap, - decorators: decorators, - } -} - -// newUncheckedPlanner creates an interpretablePlanner which references a Dispatcher, TypeProvider, -// TypeAdapter, and Container to resolve functions and types at plan time. Namespaces present in -// Select expressions are resolved lazily at evaluation time. -func newUncheckedPlanner(disp Dispatcher, - provider types.Provider, - adapter types.Adapter, - attrFactory AttributeFactory, - cont *containers.Container, + exprAST *ast.AST, decorators ...InterpretableDecorator) interpretablePlanner { return &planner{ disp: disp, @@ -72,8 +48,8 @@ func newUncheckedPlanner(disp Dispatcher, adapter: adapter, attrFactory: attrFactory, container: cont, - refMap: make(map[int64]*ast.ReferenceInfo), - typeMap: make(map[int64]*types.Type), + refMap: exprAST.ReferenceMap(), + typeMap: exprAST.TypeMap(), decorators: decorators, } } @@ -95,22 +71,24 @@ type planner struct { // useful for layering functionality into the evaluation that is not natively understood by CEL, // such as state-tracking, expression re-write, and possibly efficient thread-safe memoization of // repeated expressions. 
-func (p *planner) Plan(expr *exprpb.Expr) (Interpretable, error) { - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: +func (p *planner) Plan(expr ast.Expr) (Interpretable, error) { + switch expr.Kind() { + case ast.CallKind: return p.decorate(p.planCall(expr)) - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: return p.decorate(p.planIdent(expr)) - case *exprpb.Expr_SelectExpr: + case ast.LiteralKind: + return p.decorate(p.planConst(expr)) + case ast.SelectKind: return p.decorate(p.planSelect(expr)) - case *exprpb.Expr_ListExpr: + case ast.ListKind: return p.decorate(p.planCreateList(expr)) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + return p.decorate(p.planCreateMap(expr)) + case ast.StructKind: return p.decorate(p.planCreateStruct(expr)) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: return p.decorate(p.planComprehension(expr)) - case *exprpb.Expr_ConstExpr: - return p.decorate(p.planConst(expr)) } return nil, fmt.Errorf("unsupported expr: %v", expr) } @@ -132,16 +110,16 @@ func (p *planner) decorate(i Interpretable, err error) (Interpretable, error) { } // planIdent creates an Interpretable that resolves an identifier from an Activation. -func (p *planner) planIdent(expr *exprpb.Expr) (Interpretable, error) { +func (p *planner) planIdent(expr ast.Expr) (Interpretable, error) { // Establish whether the identifier is in the reference map. - if identRef, found := p.refMap[expr.GetId()]; found { - return p.planCheckedIdent(expr.GetId(), identRef) + if identRef, found := p.refMap[expr.ID()]; found { + return p.planCheckedIdent(expr.ID(), identRef) } // Create the possible attribute list for the unresolved reference. - ident := expr.GetIdentExpr() + ident := expr.AsIdent() return &evalAttr{ adapter: p.adapter, - attr: p.attrFactory.MaybeAttribute(expr.GetId(), ident.Name), + attr: p.attrFactory.MaybeAttribute(expr.ID(), ident), }, nil } @@ -174,20 +152,20 @@ func (p *planner) planCheckedIdent(id int64, identRef *ast.ReferenceInfo) (Inter // a) selects a field from a map or proto. // b) creates a field presence test for a select within a has() macro. // c) resolves the select expression to a namespaced identifier. -func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { +func (p *planner) planSelect(expr ast.Expr) (Interpretable, error) { // If the Select id appears in the reference map from the CheckedExpr proto then it is either // a namespaced identifier or enum value. - if identRef, found := p.refMap[expr.GetId()]; found { - return p.planCheckedIdent(expr.GetId(), identRef) + if identRef, found := p.refMap[expr.ID()]; found { + return p.planCheckedIdent(expr.ID(), identRef) } - sel := expr.GetSelectExpr() + sel := expr.AsSelect() // Plan the operand evaluation. - op, err := p.Plan(sel.GetOperand()) + op, err := p.Plan(sel.Operand()) if err != nil { return nil, err } - opType := p.typeMap[sel.GetOperand().GetId()] + opType := p.typeMap[sel.Operand().ID()] // If the Select was marked TestOnly, this is a presence test. // @@ -211,14 +189,14 @@ func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { } // Build a qualifier for the attribute. - qual, err := p.attrFactory.NewQualifier(opType, expr.GetId(), sel.GetField(), false) + qual, err := p.attrFactory.NewQualifier(opType, expr.ID(), sel.FieldName(), false) if err != nil { return nil, err } // Modify the attribute to be test-only. 
- if sel.GetTestOnly() { + if sel.IsTestOnly() { attr = &evalTestOnly{ - id: expr.GetId(), + id: expr.ID(), InterpretableAttribute: attr, } } @@ -230,10 +208,10 @@ func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { // planCall creates a callable Interpretable while specializing for common functions and invocation // patterns. Specifically, conditional operators &&, ||, ?:, and (in)equality functions result in // optimized Interpretable values. -func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { - call := expr.GetCallExpr() +func (p *planner) planCall(expr ast.Expr) (Interpretable, error) { + call := expr.AsCall() target, fnName, oName := p.resolveFunction(expr) - argCount := len(call.GetArgs()) + argCount := len(call.Args()) var offset int if target != nil { argCount++ @@ -248,7 +226,7 @@ func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { } args[0] = arg } - for i, argExpr := range call.GetArgs() { + for i, argExpr := range call.Args() { arg, err := p.Plan(argExpr) if err != nil { return nil, err @@ -307,7 +285,7 @@ func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { } // planCallZero generates a zero-arity callable Interpretable. -func (p *planner) planCallZero(expr *exprpb.Expr, +func (p *planner) planCallZero(expr ast.Expr, function string, overload string, impl *functions.Overload) (Interpretable, error) { @@ -315,7 +293,7 @@ func (p *planner) planCallZero(expr *exprpb.Expr, return nil, fmt.Errorf("no such overload: %s()", function) } return &evalZeroArity{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, impl: impl.Function, @@ -323,7 +301,7 @@ func (p *planner) planCallZero(expr *exprpb.Expr, } // planCallUnary generates a unary callable Interpretable. -func (p *planner) planCallUnary(expr *exprpb.Expr, +func (p *planner) planCallUnary(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -340,7 +318,7 @@ func (p *planner) planCallUnary(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalUnary{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, arg: args[0], @@ -351,7 +329,7 @@ func (p *planner) planCallUnary(expr *exprpb.Expr, } // planCallBinary generates a binary callable Interpretable. -func (p *planner) planCallBinary(expr *exprpb.Expr, +func (p *planner) planCallBinary(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -368,7 +346,7 @@ func (p *planner) planCallBinary(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalBinary{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, lhs: args[0], @@ -380,7 +358,7 @@ func (p *planner) planCallBinary(expr *exprpb.Expr, } // planCallVarArgs generates a variable argument callable Interpretable. -func (p *planner) planCallVarArgs(expr *exprpb.Expr, +func (p *planner) planCallVarArgs(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -397,7 +375,7 @@ func (p *planner) planCallVarArgs(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalVarArgs{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, args: args, @@ -408,41 +386,41 @@ func (p *planner) planCallVarArgs(expr *exprpb.Expr, } // planCallEqual generates an equals (==) Interpretable. 
-func (p *planner) planCallEqual(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalEq{ - id: expr.GetId(), + id: expr.ID(), lhs: args[0], rhs: args[1], }, nil } // planCallNotEqual generates a not equals (!=) Interpretable. -func (p *planner) planCallNotEqual(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallNotEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalNe{ - id: expr.GetId(), + id: expr.ID(), lhs: args[0], rhs: args[1], }, nil } // planCallLogicalAnd generates a logical and (&&) Interpretable. -func (p *planner) planCallLogicalAnd(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallLogicalAnd(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalAnd{ - id: expr.GetId(), + id: expr.ID(), terms: args, }, nil } // planCallLogicalOr generates a logical or (||) Interpretable. -func (p *planner) planCallLogicalOr(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallLogicalOr(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalOr{ - id: expr.GetId(), + id: expr.ID(), terms: args, }, nil } // planCallConditional generates a conditional / ternary (c ? t : f) Interpretable. -func (p *planner) planCallConditional(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallConditional(expr ast.Expr, args []Interpretable) (Interpretable, error) { cond := args[0] t := args[1] var tAttr Attribute @@ -464,13 +442,13 @@ func (p *planner) planCallConditional(expr *exprpb.Expr, args []Interpretable) ( return &evalAttr{ adapter: p.adapter, - attr: p.attrFactory.ConditionalAttribute(expr.GetId(), cond, tAttr, fAttr), + attr: p.attrFactory.ConditionalAttribute(expr.ID(), cond, tAttr, fAttr), }, nil } // planCallIndex either extends an attribute with the argument to the index operation, or creates // a relative attribute based on the return of a function call or operation. -func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optional bool) (Interpretable, error) { +func (p *planner) planCallIndex(expr ast.Expr, args []Interpretable, optional bool) (Interpretable, error) { op := args[0] ind := args[1] opType := p.typeMap[op.ID()] @@ -489,11 +467,11 @@ func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optiona var qual Qualifier switch ind := ind.(type) { case InterpretableConst: - qual, err = p.attrFactory.NewQualifier(opType, expr.GetId(), ind.Value(), optional) + qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind.Value(), optional) case InterpretableAttribute: - qual, err = p.attrFactory.NewQualifier(opType, expr.GetId(), ind, optional) + qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind, optional) default: - qual, err = p.relativeAttr(expr.GetId(), ind, optional) + qual, err = p.relativeAttr(expr.ID(), ind, optional) } if err != nil { return nil, err @@ -505,10 +483,10 @@ func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optiona } // planCreateList generates a list construction Interpretable. 
-func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { - list := expr.GetListExpr() - optionalIndices := list.GetOptionalIndices() - elements := list.GetElements() +func (p *planner) planCreateList(expr ast.Expr) (Interpretable, error) { + list := expr.AsList() + optionalIndices := list.OptionalIndices() + elements := list.Elements() optionals := make([]bool, len(elements)) for _, index := range optionalIndices { if index < 0 || index >= int32(len(elements)) { @@ -525,7 +503,7 @@ func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { elems[i] = elemVal } return &evalList{ - id: expr.GetId(), + id: expr.ID(), elems: elems, optionals: optionals, hasOptionals: len(optionals) != 0, @@ -534,31 +512,29 @@ func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { } // planCreateStruct generates a map or object construction Interpretable. -func (p *planner) planCreateStruct(expr *exprpb.Expr) (Interpretable, error) { - str := expr.GetStructExpr() - if len(str.MessageName) != 0 { - return p.planCreateObj(expr) - } - entries := str.GetEntries() +func (p *planner) planCreateMap(expr ast.Expr) (Interpretable, error) { + m := expr.AsMap() + entries := m.Entries() optionals := make([]bool, len(entries)) keys := make([]Interpretable, len(entries)) vals := make([]Interpretable, len(entries)) - for i, entry := range entries { - keyVal, err := p.Plan(entry.GetMapKey()) + for i, e := range entries { + entry := e.AsMapEntry() + keyVal, err := p.Plan(entry.Key()) if err != nil { return nil, err } keys[i] = keyVal - valVal, err := p.Plan(entry.GetValue()) + valVal, err := p.Plan(entry.Value()) if err != nil { return nil, err } vals[i] = valVal - optionals[i] = entry.GetOptionalEntry() + optionals[i] = entry.IsOptional() } return &evalMap{ - id: expr.GetId(), + id: expr.ID(), keys: keys, vals: vals, optionals: optionals, @@ -568,27 +544,28 @@ func (p *planner) planCreateStruct(expr *exprpb.Expr) (Interpretable, error) { } // planCreateObj generates an object construction Interpretable. -func (p *planner) planCreateObj(expr *exprpb.Expr) (Interpretable, error) { - obj := expr.GetStructExpr() - typeName, defined := p.resolveTypeName(obj.GetMessageName()) +func (p *planner) planCreateStruct(expr ast.Expr) (Interpretable, error) { + obj := expr.AsStruct() + typeName, defined := p.resolveTypeName(obj.TypeName()) if !defined { - return nil, fmt.Errorf("unknown type: %s", obj.GetMessageName()) - } - entries := obj.GetEntries() - optionals := make([]bool, len(entries)) - fields := make([]string, len(entries)) - vals := make([]Interpretable, len(entries)) - for i, entry := range entries { - fields[i] = entry.GetFieldKey() - val, err := p.Plan(entry.GetValue()) + return nil, fmt.Errorf("unknown type: %s", obj.TypeName()) + } + objFields := obj.Fields() + optionals := make([]bool, len(objFields)) + fields := make([]string, len(objFields)) + vals := make([]Interpretable, len(objFields)) + for i, f := range objFields { + field := f.AsStructField() + fields[i] = field.Name() + val, err := p.Plan(field.Value()) if err != nil { return nil, err } vals[i] = val - optionals[i] = entry.GetOptionalEntry() + optionals[i] = field.IsOptional() } return &evalObj{ - id: expr.GetId(), + id: expr.ID(), typeName: typeName, fields: fields, vals: vals, @@ -599,33 +576,33 @@ func (p *planner) planCreateObj(expr *exprpb.Expr) (Interpretable, error) { } // planComprehension generates an Interpretable fold operation. 
-func (p *planner) planComprehension(expr *exprpb.Expr) (Interpretable, error) { - fold := expr.GetComprehensionExpr() - accu, err := p.Plan(fold.GetAccuInit()) +func (p *planner) planComprehension(expr ast.Expr) (Interpretable, error) { + fold := expr.AsComprehension() + accu, err := p.Plan(fold.AccuInit()) if err != nil { return nil, err } - iterRange, err := p.Plan(fold.GetIterRange()) + iterRange, err := p.Plan(fold.IterRange()) if err != nil { return nil, err } - cond, err := p.Plan(fold.GetLoopCondition()) + cond, err := p.Plan(fold.LoopCondition()) if err != nil { return nil, err } - step, err := p.Plan(fold.GetLoopStep()) + step, err := p.Plan(fold.LoopStep()) if err != nil { return nil, err } - result, err := p.Plan(fold.GetResult()) + result, err := p.Plan(fold.Result()) if err != nil { return nil, err } return &evalFold{ - id: expr.GetId(), - accuVar: fold.AccuVar, + id: expr.ID(), + accuVar: fold.AccuVar(), accu: accu, - iterVar: fold.IterVar, + iterVar: fold.IterVar(), iterRange: iterRange, cond: cond, step: step, @@ -635,37 +612,8 @@ func (p *planner) planComprehension(expr *exprpb.Expr) (Interpretable, error) { } // planConst generates a constant valued Interpretable. -func (p *planner) planConst(expr *exprpb.Expr) (Interpretable, error) { - val, err := p.constValue(expr.GetConstExpr()) - if err != nil { - return nil, err - } - return NewConstValue(expr.GetId(), val), nil -} - -// constValue converts a proto Constant value to a ref.Val. -func (p *planner) constValue(c *exprpb.Constant) (ref.Val, error) { - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - return p.adapter.NativeToValue(c.GetBoolValue()), nil - case *exprpb.Constant_BytesValue: - return p.adapter.NativeToValue(c.GetBytesValue()), nil - case *exprpb.Constant_DoubleValue: - return p.adapter.NativeToValue(c.GetDoubleValue()), nil - case *exprpb.Constant_DurationValue: - return p.adapter.NativeToValue(c.GetDurationValue().AsDuration()), nil - case *exprpb.Constant_Int64Value: - return p.adapter.NativeToValue(c.GetInt64Value()), nil - case *exprpb.Constant_NullValue: - return p.adapter.NativeToValue(c.GetNullValue()), nil - case *exprpb.Constant_StringValue: - return p.adapter.NativeToValue(c.GetStringValue()), nil - case *exprpb.Constant_TimestampValue: - return p.adapter.NativeToValue(c.GetTimestampValue().AsTime()), nil - case *exprpb.Constant_Uint64Value: - return p.adapter.NativeToValue(c.GetUint64Value()), nil - } - return nil, fmt.Errorf("unknown constant type: %v", c) +func (p *planner) planConst(expr ast.Expr) (Interpretable, error) { + return NewConstValue(expr.ID(), expr.AsLiteral()), nil } // resolveTypeName takes a qualified string constructed at parse time, applies the proto @@ -687,17 +635,20 @@ func (p *planner) resolveTypeName(typeName string) (string, bool) { // - The target expression may only consist of ident and select expressions. // - The function is declared in the environment using its fully-qualified name. // - The fully-qualified function name matches the string serialized target value. -func (p *planner) resolveFunction(expr *exprpb.Expr) (*exprpb.Expr, string, string) { +func (p *planner) resolveFunction(expr ast.Expr) (ast.Expr, string, string) { // Note: similar logic exists within the `checker/checker.go`. If making changes here // please consider the impact on checker.go and consolidate implementations or mirror code // as appropriate. 
- call := expr.GetCallExpr() - target := call.GetTarget() - fnName := call.GetFunction() + call := expr.AsCall() + var target ast.Expr = nil + if call.IsMemberFunction() { + target = call.Target() + } + fnName := call.FunctionName() // Checked expressions always have a reference map entry, and _should_ have the fully qualified // function name as the fnName value. - oRef, hasOverload := p.refMap[expr.GetId()] + oRef, hasOverload := p.refMap[expr.ID()] if hasOverload { if len(oRef.OverloadIDs) == 1 { return target, fnName, oRef.OverloadIDs[0] @@ -771,16 +722,30 @@ func (p *planner) relativeAttr(id int64, eval Interpretable, opt bool) (Interpre // toQualifiedName converts an expression AST into a qualified name if possible, with a boolean // 'found' value that indicates if the conversion is successful. -func (p *planner) toQualifiedName(operand *exprpb.Expr) (string, bool) { +func (p *planner) toQualifiedName(operand ast.Expr) (string, bool) { // If the checker identified the expression as an attribute by the type-checker, then it can't // possibly be part of qualified name in a namespace. - _, isAttr := p.refMap[operand.GetId()] + _, isAttr := p.refMap[operand.ID()] if isAttr { return "", false } // Since functions cannot be both namespaced and receiver functions, if the operand is not an // qualified variable name, return the (possibly) qualified name given the expressions. - return containers.ToQualifiedName(operand) + switch operand.Kind() { + case ast.IdentKind: + id := operand.AsIdent() + return id, true + case ast.SelectKind: + sel := operand.AsSelect() + // Test only expressions are not valid as qualified names. + if sel.IsTestOnly() { + return "", false + } + if qual, found := p.toQualifiedName(sel.Operand()); found { + return qual + "." + sel.FieldName(), true + } + } + return "", false } func stripLeadingDot(name string) string { diff --git a/vendor/github.com/google/cel-go/interpreter/prune.go b/vendor/github.com/google/cel-go/interpreter/prune.go index b8834b1cb..410d80dc4 100644 --- a/vendor/github.com/google/cel-go/interpreter/prune.go +++ b/vendor/github.com/google/cel-go/interpreter/prune.go @@ -15,19 +15,18 @@ package interpreter import ( + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" - structpb "google.golang.org/protobuf/types/known/structpb" ) type astPruner struct { - expr *exprpb.Expr - macroCalls map[int64]*exprpb.Expr + ast.ExprFactory + expr ast.Expr + macroCalls map[int64]ast.Expr state EvalState nextExprID int64 } @@ -67,84 +66,44 @@ type astPruner struct { // compiled and constant folded expressions, but is not willing to constant // fold(and thus cache results of) some external calls, then they can prepare // the overloads accordingly. 
-func PruneAst(expr *exprpb.Expr, macroCalls map[int64]*exprpb.Expr, state EvalState) *exprpb.ParsedExpr { +func PruneAst(expr ast.Expr, macroCalls map[int64]ast.Expr, state EvalState) *ast.AST { pruneState := NewEvalState() for _, id := range state.IDs() { v, _ := state.Value(id) pruneState.SetValue(id, v) } pruner := &astPruner{ - expr: expr, - macroCalls: macroCalls, - state: pruneState, - nextExprID: getMaxID(expr)} + ExprFactory: ast.NewExprFactory(), + expr: expr, + macroCalls: macroCalls, + state: pruneState, + nextExprID: getMaxID(expr)} newExpr, _ := pruner.maybePrune(expr) - return &exprpb.ParsedExpr{ - Expr: newExpr, - SourceInfo: &exprpb.SourceInfo{MacroCalls: pruner.macroCalls}, - } -} - -func (p *astPruner) createLiteral(id int64, val *exprpb.Constant) *exprpb.Expr { - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_ConstExpr{ - ConstExpr: val, - }, + newInfo := ast.NewSourceInfo(nil) + for id, call := range pruner.macroCalls { + newInfo.SetMacroCall(id, call) } + return ast.NewAST(newExpr, newInfo) } -func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, bool) { +func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (ast.Expr, bool) { switch v := val.(type) { - case types.Bool: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: bool(v)}}), true - case types.Bytes: + case types.Bool, types.Bytes, types.Double, types.Int, types.Null, types.String, types.Uint: p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: []byte(v)}}), true - case types.Double: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: float64(v)}}), true + return p.NewLiteral(id, val), true case types.Duration: p.state.SetValue(id, val) - durationString := string(v.ConvertToType(types.StringType).(types.String)) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Function: overloads.TypeConvertDuration, - Args: []*exprpb.Expr{ - p.createLiteral(p.nextID(), - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: durationString}}), - }, - }, - }, - }, true - case types.Int: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: int64(v)}}), true - case types.Uint: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: uint64(v)}}), true - case types.String: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: string(v)}}), true - case types.Null: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: v.Value().(structpb.NullValue)}}), true + durationString := v.ConvertToType(types.StringType).(types.String) + return p.NewCall(id, overloads.TypeConvertDuration, p.NewLiteral(p.nextID(), durationString)), true + case types.Timestamp: + timestampString := v.ConvertToType(types.StringType).(types.String) + return p.NewCall(id, overloads.TypeConvertTimestamp, p.NewLiteral(p.nextID(), timestampString)), true } // Attempt to build a list literal. 
if list, isList := val.(traits.Lister); isList { sz := list.Size().(types.Int) - elemExprs := make([]*exprpb.Expr, sz) + elemExprs := make([]ast.Expr, sz) for i := types.Int(0); i < sz; i++ { elem := list.Get(i) if types.IsUnknownOrError(elem) { @@ -157,20 +116,13 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo elemExprs[i] = elemExpr } p.state.SetValue(id, val) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: elemExprs, - }, - }, - }, true + return p.NewList(id, elemExprs, []int32{}), true } // Create a map literal if possible. if mp, isMap := val.(traits.Mapper); isMap { it := mp.Iterator() - entries := make([]*exprpb.Expr_CreateStruct_Entry, mp.Size().(types.Int)) + entries := make([]ast.EntryExpr, mp.Size().(types.Int)) i := 0 for it.HasNext() != types.False { key := it.Next() @@ -186,25 +138,12 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo if !ok { return nil, false } - entry := &exprpb.Expr_CreateStruct_Entry{ - Id: p.nextID(), - KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: keyExpr, - }, - Value: valExpr, - } + entry := p.NewMapEntry(p.nextID(), keyExpr, valExpr, false) entries[i] = entry i++ } p.state.SetValue(id, val) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - Entries: entries, - }, - }, - }, true + return p.NewMap(id, entries), true } // TODO(issues/377) To construct message literals, the type provider will need to support @@ -212,215 +151,206 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo return nil, false } -func (p *astPruner) maybePruneOptional(elem *exprpb.Expr) (*exprpb.Expr, bool) { - elemVal, found := p.value(elem.GetId()) +func (p *astPruner) maybePruneOptional(elem ast.Expr) (ast.Expr, bool) { + elemVal, found := p.value(elem.ID()) if found && elemVal.Type() == types.OptionalType { opt := elemVal.(*types.Optional) if !opt.HasValue() { return nil, true } - if newElem, pruned := p.maybeCreateLiteral(elem.GetId(), opt.GetValue()); pruned { + if newElem, pruned := p.maybeCreateLiteral(elem.ID(), opt.GetValue()); pruned { return newElem, true } } return elem, false } -func (p *astPruner) maybePruneIn(node *exprpb.Expr) (*exprpb.Expr, bool) { +func (p *astPruner) maybePruneIn(node ast.Expr) (ast.Expr, bool) { // elem in list - call := node.GetCallExpr() - val, exists := p.maybeValue(call.GetArgs()[1].GetId()) + call := node.AsCall() + val, exists := p.maybeValue(call.Args()[1].ID()) if !exists { return nil, false } if sz, ok := val.(traits.Sizer); ok && sz.Size() == types.IntZero { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } return nil, false } -func (p *astPruner) maybePruneLogicalNot(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() - arg := call.GetArgs()[0] - val, exists := p.maybeValue(arg.GetId()) +func (p *astPruner) maybePruneLogicalNot(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() + arg := call.Args()[0] + val, exists := p.maybeValue(arg.ID()) if !exists { return nil, false } if b, ok := val.(types.Bool); ok { - return p.maybeCreateLiteral(node.GetId(), !b) + return p.maybeCreateLiteral(node.ID(), !b) } return nil, false } -func (p *astPruner) maybePruneOr(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() +func (p *astPruner) maybePruneOr(node ast.Expr) (ast.Expr, bool) { + call := 
node.AsCall() // We know result is unknown, so we have at least one unknown arg // and if one side is a known value, we know we can ignore it. - if v, exists := p.maybeValue(call.GetArgs()[0].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[0].ID()); exists { if v == types.True { - return p.maybeCreateLiteral(node.GetId(), types.True) + return p.maybeCreateLiteral(node.ID(), types.True) } - return call.GetArgs()[1], true + return call.Args()[1], true } - if v, exists := p.maybeValue(call.GetArgs()[1].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[1].ID()); exists { if v == types.True { - return p.maybeCreateLiteral(node.GetId(), types.True) + return p.maybeCreateLiteral(node.ID(), types.True) } - return call.GetArgs()[0], true + return call.Args()[0], true } return nil, false } -func (p *astPruner) maybePruneAnd(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() +func (p *astPruner) maybePruneAnd(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() // We know result is unknown, so we have at least one unknown arg // and if one side is a known value, we know we can ignore it. - if v, exists := p.maybeValue(call.GetArgs()[0].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[0].ID()); exists { if v == types.False { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } - return call.GetArgs()[1], true + return call.Args()[1], true } - if v, exists := p.maybeValue(call.GetArgs()[1].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[1].ID()); exists { if v == types.False { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } - return call.GetArgs()[0], true + return call.Args()[0], true } return nil, false } -func (p *astPruner) maybePruneConditional(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() - cond, exists := p.maybeValue(call.GetArgs()[0].GetId()) +func (p *astPruner) maybePruneConditional(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() + cond, exists := p.maybeValue(call.Args()[0].ID()) if !exists { return nil, false } if cond.Value().(bool) { - return call.GetArgs()[1], true + return call.Args()[1], true } - return call.GetArgs()[2], true + return call.Args()[2], true } -func (p *astPruner) maybePruneFunction(node *exprpb.Expr) (*exprpb.Expr, bool) { - if _, exists := p.value(node.GetId()); !exists { +func (p *astPruner) maybePruneFunction(node ast.Expr) (ast.Expr, bool) { + if _, exists := p.value(node.ID()); !exists { return nil, false } - call := node.GetCallExpr() - if call.Function == operators.LogicalOr { + call := node.AsCall() + if call.FunctionName() == operators.LogicalOr { return p.maybePruneOr(node) } - if call.Function == operators.LogicalAnd { + if call.FunctionName() == operators.LogicalAnd { return p.maybePruneAnd(node) } - if call.Function == operators.Conditional { + if call.FunctionName() == operators.Conditional { return p.maybePruneConditional(node) } - if call.Function == operators.In { + if call.FunctionName() == operators.In { return p.maybePruneIn(node) } - if call.Function == operators.LogicalNot { + if call.FunctionName() == operators.LogicalNot { return p.maybePruneLogicalNot(node) } return nil, false } -func (p *astPruner) maybePrune(node *exprpb.Expr) (*exprpb.Expr, bool) { +func (p *astPruner) maybePrune(node ast.Expr) (ast.Expr, bool) { return p.prune(node) } -func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { 
+func (p *astPruner) prune(node ast.Expr) (ast.Expr, bool) { if node == nil { return node, false } - val, valueExists := p.maybeValue(node.GetId()) + val, valueExists := p.maybeValue(node.ID()) if valueExists { - if newNode, ok := p.maybeCreateLiteral(node.GetId(), val); ok { - delete(p.macroCalls, node.GetId()) + if newNode, ok := p.maybeCreateLiteral(node.ID(), val); ok { + delete(p.macroCalls, node.ID()) return newNode, true } } - if macro, found := p.macroCalls[node.GetId()]; found { + if macro, found := p.macroCalls[node.ID()]; found { // Ensure that intermediate values for the comprehension are cleared during pruning - compre := node.GetComprehensionExpr() - if compre != nil { - visit(macro, clearIterVarVisitor(compre.IterVar, p.state)) + if node.Kind() == ast.ComprehensionKind { + compre := node.AsComprehension() + visit(macro, clearIterVarVisitor(compre.IterVar(), p.state)) } // prune the expression in terms of the macro call instead of the expanded form. if newMacro, pruned := p.prune(macro); pruned { - p.macroCalls[node.GetId()] = newMacro + p.macroCalls[node.ID()] = newMacro } } // We have either an unknown/error value, or something we don't want to // transform, or expression was not evaluated. If possible, drill down // more. - switch node.GetExprKind().(type) { - case *exprpb.Expr_SelectExpr: - if operand, pruned := p.maybePrune(node.GetSelectExpr().GetOperand()); pruned { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{ - Operand: operand, - Field: node.GetSelectExpr().GetField(), - TestOnly: node.GetSelectExpr().GetTestOnly(), - }, - }, - }, true - } - case *exprpb.Expr_CallExpr: - var prunedCall bool - call := node.GetCallExpr() - args := call.GetArgs() - newArgs := make([]*exprpb.Expr, len(args)) - newCall := &exprpb.Expr_Call{ - Function: call.GetFunction(), - Target: call.GetTarget(), - Args: newArgs, - } - for i, arg := range args { - newArgs[i] = arg - if newArg, prunedArg := p.maybePrune(arg); prunedArg { - prunedCall = true - newArgs[i] = newArg + switch node.Kind() { + case ast.SelectKind: + sel := node.AsSelect() + if operand, isPruned := p.maybePrune(sel.Operand()); isPruned { + if sel.IsTestOnly() { + return p.NewPresenceTest(node.ID(), operand, sel.FieldName()), true } + return p.NewSelect(node.ID(), operand, sel.FieldName()), true } - if newTarget, prunedTarget := p.maybePrune(call.GetTarget()); prunedTarget { - prunedCall = true - newCall.Target = newTarget + case ast.CallKind: + argsPruned := false + call := node.AsCall() + args := call.Args() + newArgs := make([]ast.Expr, len(args)) + for i, a := range args { + newArgs[i] = a + if arg, isPruned := p.maybePrune(a); isPruned { + argsPruned = true + newArgs[i] = arg + } } - newNode := &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: newCall, - }, + if !call.IsMemberFunction() { + newCall := p.NewCall(node.ID(), call.FunctionName(), newArgs...) + if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned { + return prunedCall, true + } + return newCall, argsPruned } - if newExpr, pruned := p.maybePruneFunction(newNode); pruned { - newExpr, _ = p.maybePrune(newExpr) - return newExpr, true + newTarget := call.Target() + targetPruned := false + if prunedTarget, isPruned := p.maybePrune(call.Target()); isPruned { + targetPruned = true + newTarget = prunedTarget } - if prunedCall { - return newNode, true + newCall := p.NewMemberCall(node.ID(), call.FunctionName(), newTarget, newArgs...) 
+ if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned { + return prunedCall, true } - case *exprpb.Expr_ListExpr: - elems := node.GetListExpr().GetElements() - optIndices := node.GetListExpr().GetOptionalIndices() + return newCall, targetPruned || argsPruned + case ast.ListKind: + l := node.AsList() + elems := l.Elements() + optIndices := l.OptionalIndices() optIndexMap := map[int32]bool{} for _, i := range optIndices { optIndexMap[i] = true } newOptIndexMap := make(map[int32]bool, len(optIndexMap)) - newElems := make([]*exprpb.Expr, 0, len(elems)) - var prunedList bool - + newElems := make([]ast.Expr, 0, len(elems)) + var listPruned bool prunedIdx := 0 for i, elem := range elems { _, isOpt := optIndexMap[int32(i)] if isOpt { newElem, pruned := p.maybePruneOptional(elem) if pruned { - prunedList = true + listPruned = true if newElem != nil { newElems = append(newElems, newElem) prunedIdx++ @@ -431,7 +361,7 @@ func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { } if newElem, prunedElem := p.maybePrune(elem); prunedElem { newElems = append(newElems, newElem) - prunedList = true + listPruned = true } else { newElems = append(newElems, elem) } @@ -443,76 +373,64 @@ func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { optIndices[idx] = i idx++ } - if prunedList { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: newElems, - OptionalIndices: optIndices, - }, - }, - }, true + if listPruned { + return p.NewList(node.ID(), newElems, optIndices), true } - case *exprpb.Expr_StructExpr: - var prunedStruct bool - entries := node.GetStructExpr().GetEntries() - messageType := node.GetStructExpr().GetMessageName() - newEntries := make([]*exprpb.Expr_CreateStruct_Entry, len(entries)) + case ast.MapKind: + var mapPruned bool + m := node.AsMap() + entries := m.Entries() + newEntries := make([]ast.EntryExpr, len(entries)) for i, entry := range entries { newEntries[i] = entry - newKey, prunedKey := p.maybePrune(entry.GetMapKey()) - newValue, prunedValue := p.maybePrune(entry.GetValue()) - if !prunedKey && !prunedValue { + e := entry.AsMapEntry() + newKey, keyPruned := p.maybePrune(e.Key()) + newValue, valuePruned := p.maybePrune(e.Value()) + if !keyPruned && !valuePruned { continue } - prunedStruct = true - newEntry := &exprpb.Expr_CreateStruct_Entry{ - Value: newValue, - } - if messageType != "" { - newEntry.KeyKind = &exprpb.Expr_CreateStruct_Entry_FieldKey{ - FieldKey: entry.GetFieldKey(), - } - } else { - newEntry.KeyKind = &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: newKey, - } - } - newEntry.OptionalEntry = entry.GetOptionalEntry() + mapPruned = true + newEntry := p.NewMapEntry(entry.ID(), newKey, newValue, e.IsOptional()) newEntries[i] = newEntry } - if prunedStruct { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: messageType, - Entries: newEntries, - }, - }, - }, true + if mapPruned { + return p.NewMap(node.ID(), newEntries), true } - case *exprpb.Expr_ComprehensionExpr: - compre := node.GetComprehensionExpr() + case ast.StructKind: + var structPruned bool + obj := node.AsStruct() + fields := obj.Fields() + newFields := make([]ast.EntryExpr, len(fields)) + for i, field := range fields { + newFields[i] = field + f := field.AsStructField() + newValue, prunedValue := p.maybePrune(f.Value()) + if !prunedValue { + continue + } + structPruned = true + newEntry := p.NewStructField(field.ID(), 
f.Name(), newValue, f.IsOptional()) + newFields[i] = newEntry + } + if structPruned { + return p.NewStruct(node.ID(), obj.TypeName(), newFields), true + } + case ast.ComprehensionKind: + compre := node.AsComprehension() // Only the range of the comprehension is pruned since the state tracking only records // the last iteration of the comprehension and not each step in the evaluation which // means that the any residuals computed in between might be inaccurate. - if newRange, pruned := p.maybePrune(compre.GetIterRange()); pruned { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_ComprehensionExpr{ - ComprehensionExpr: &exprpb.Expr_Comprehension{ - IterVar: compre.GetIterVar(), - IterRange: newRange, - AccuVar: compre.GetAccuVar(), - AccuInit: compre.GetAccuInit(), - LoopCondition: compre.GetLoopCondition(), - LoopStep: compre.GetLoopStep(), - Result: compre.GetResult(), - }, - }, - }, true + if newRange, pruned := p.maybePrune(compre.IterRange()); pruned { + return p.NewComprehension( + node.ID(), + newRange, + compre.IterVar(), + compre.AccuVar(), + compre.AccuInit(), + compre.LoopCondition(), + compre.LoopStep(), + compre.Result(), + ), true } } return node, false @@ -539,12 +457,12 @@ func (p *astPruner) nextID() int64 { type astVisitor struct { // visitEntry is called on every expr node, including those within a map/struct entry. - visitExpr func(expr *exprpb.Expr) + visitExpr func(expr ast.Expr) // visitEntry is called before entering the key, value of a map/struct entry. - visitEntry func(entry *exprpb.Expr_CreateStruct_Entry) + visitEntry func(entry ast.EntryExpr) } -func getMaxID(expr *exprpb.Expr) int64 { +func getMaxID(expr ast.Expr) int64 { maxID := int64(1) visit(expr, maxIDVisitor(&maxID)) return maxID @@ -552,10 +470,9 @@ func getMaxID(expr *exprpb.Expr) int64 { func clearIterVarVisitor(varName string, state EvalState) astVisitor { return astVisitor{ - visitExpr: func(e *exprpb.Expr) { - ident := e.GetIdentExpr() - if ident != nil && ident.GetName() == varName { - state.SetValue(e.GetId(), nil) + visitExpr: func(e ast.Expr) { + if e.Kind() == ast.IdentKind && e.AsIdent() == varName { + state.SetValue(e.ID(), nil) } }, } @@ -563,56 +480,63 @@ func clearIterVarVisitor(varName string, state EvalState) astVisitor { func maxIDVisitor(maxID *int64) astVisitor { return astVisitor{ - visitExpr: func(e *exprpb.Expr) { - if e.GetId() >= *maxID { - *maxID = e.GetId() + 1 + visitExpr: func(e ast.Expr) { + if e.ID() >= *maxID { + *maxID = e.ID() + 1 } }, - visitEntry: func(e *exprpb.Expr_CreateStruct_Entry) { - if e.GetId() >= *maxID { - *maxID = e.GetId() + 1 + visitEntry: func(e ast.EntryExpr) { + if e.ID() >= *maxID { + *maxID = e.ID() + 1 } }, } } -func visit(expr *exprpb.Expr, visitor astVisitor) { - exprs := []*exprpb.Expr{expr} +func visit(expr ast.Expr, visitor astVisitor) { + exprs := []ast.Expr{expr} for len(exprs) != 0 { e := exprs[0] if visitor.visitExpr != nil { visitor.visitExpr(e) } exprs = exprs[1:] - switch e.GetExprKind().(type) { - case *exprpb.Expr_SelectExpr: - exprs = append(exprs, e.GetSelectExpr().GetOperand()) - case *exprpb.Expr_CallExpr: - call := e.GetCallExpr() - if call.GetTarget() != nil { - exprs = append(exprs, call.GetTarget()) + switch e.Kind() { + case ast.SelectKind: + exprs = append(exprs, e.AsSelect().Operand()) + case ast.CallKind: + call := e.AsCall() + if call.Target() != nil { + exprs = append(exprs, call.Target()) } - exprs = append(exprs, call.GetArgs()...) 
- case *exprpb.Expr_ComprehensionExpr: - compre := e.GetComprehensionExpr() + exprs = append(exprs, call.Args()...) + case ast.ComprehensionKind: + compre := e.AsComprehension() exprs = append(exprs, - compre.GetIterRange(), - compre.GetAccuInit(), - compre.GetLoopCondition(), - compre.GetLoopStep(), - compre.GetResult()) - case *exprpb.Expr_ListExpr: - list := e.GetListExpr() - exprs = append(exprs, list.GetElements()...) - case *exprpb.Expr_StructExpr: - for _, entry := range e.GetStructExpr().GetEntries() { + compre.IterRange(), + compre.AccuInit(), + compre.LoopCondition(), + compre.LoopStep(), + compre.Result()) + case ast.ListKind: + list := e.AsList() + exprs = append(exprs, list.Elements()...) + case ast.MapKind: + for _, entry := range e.AsMap().Entries() { + e := entry.AsMapEntry() if visitor.visitEntry != nil { visitor.visitEntry(entry) } - if entry.GetMapKey() != nil { - exprs = append(exprs, entry.GetMapKey()) + exprs = append(exprs, e.Key()) + exprs = append(exprs, e.Value()) + } + case ast.StructKind: + for _, entry := range e.AsStruct().Fields() { + f := entry.AsStructField() + if visitor.visitEntry != nil { + visitor.visitEntry(entry) } - exprs = append(exprs, entry.GetValue()) + exprs = append(exprs, f.Value()) } } } diff --git a/vendor/github.com/google/cel-go/parser/BUILD.bazel b/vendor/github.com/google/cel-go/parser/BUILD.bazel index 67ecc9554..a4a09ad75 100644 --- a/vendor/github.com/google/cel-go/parser/BUILD.bazel +++ b/vendor/github.com/google/cel-go/parser/BUILD.bazel @@ -20,8 +20,11 @@ go_library( visibility = ["//visibility:public"], deps = [ "//common:go_default_library", + "//common/ast:go_default_library", "//common/operators:go_default_library", "//common/runes:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", "//parser/gen:go_default_library", "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", @@ -43,7 +46,9 @@ go_test( ":go_default_library", ], deps = [ + "//common/ast:go_default_library", "//common/debug:go_default_library", + "//common/types:go_default_library", "//parser/gen:go_default_library", "//test:go_default_library", "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", diff --git a/vendor/github.com/google/cel-go/parser/helper.go b/vendor/github.com/google/cel-go/parser/helper.go index a5f29e3d7..07656e139 100644 --- a/vendor/github.com/google/cel-go/parser/helper.go +++ b/vendor/github.com/google/cel-go/parser/helper.go @@ -20,281 +20,206 @@ import ( antlr "github.com/antlr/antlr4/runtime/Go/antlr/v4" "github.com/google/cel-go/common" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) type parserHelper struct { - source common.Source - nextID int64 - positions map[int64]int32 - macroCalls map[int64]*exprpb.Expr + exprFactory ast.ExprFactory + source common.Source + sourceInfo *ast.SourceInfo + nextID int64 } -func newParserHelper(source common.Source) *parserHelper { +func newParserHelper(source common.Source, fac ast.ExprFactory) *parserHelper { return &parserHelper{ - source: source, - nextID: 1, - positions: make(map[int64]int32), - macroCalls: make(map[int64]*exprpb.Expr), + exprFactory: fac, + source: source, + sourceInfo: ast.NewSourceInfo(source), + nextID: 1, } } -func (p *parserHelper) getSourceInfo() *exprpb.SourceInfo { - 
return &exprpb.SourceInfo{ - Location: p.source.Description(), - Positions: p.positions, - LineOffsets: p.source.LineOffsets(), - MacroCalls: p.macroCalls} +func (p *parserHelper) getSourceInfo() *ast.SourceInfo { + return p.sourceInfo } -func (p *parserHelper) newLiteral(ctx any, value *exprpb.Constant) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ConstExpr{ConstExpr: value} - return exprNode +func (p *parserHelper) newLiteral(ctx any, value ref.Val) ast.Expr { + return p.exprFactory.NewLiteral(p.newID(ctx), value) } -func (p *parserHelper) newLiteralBool(ctx any, value bool) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: value}}) +func (p *parserHelper) newLiteralBool(ctx any, value bool) ast.Expr { + return p.newLiteral(ctx, types.Bool(value)) } -func (p *parserHelper) newLiteralString(ctx any, value string) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: value}}) +func (p *parserHelper) newLiteralString(ctx any, value string) ast.Expr { + return p.newLiteral(ctx, types.String(value)) } -func (p *parserHelper) newLiteralBytes(ctx any, value []byte) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: value}}) +func (p *parserHelper) newLiteralBytes(ctx any, value []byte) ast.Expr { + return p.newLiteral(ctx, types.Bytes(value)) } -func (p *parserHelper) newLiteralInt(ctx any, value int64) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: value}}) +func (p *parserHelper) newLiteralInt(ctx any, value int64) ast.Expr { + return p.newLiteral(ctx, types.Int(value)) } -func (p *parserHelper) newLiteralUint(ctx any, value uint64) *exprpb.Expr { - return p.newLiteral(ctx, &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: value}}) +func (p *parserHelper) newLiteralUint(ctx any, value uint64) ast.Expr { + return p.newLiteral(ctx, types.Uint(value)) } -func (p *parserHelper) newLiteralDouble(ctx any, value float64) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: value}}) +func (p *parserHelper) newLiteralDouble(ctx any, value float64) ast.Expr { + return p.newLiteral(ctx, types.Double(value)) } -func (p *parserHelper) newIdent(ctx any, name string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_IdentExpr{IdentExpr: &exprpb.Expr_Ident{Name: name}} - return exprNode +func (p *parserHelper) newIdent(ctx any, name string) ast.Expr { + return p.exprFactory.NewIdent(p.newID(ctx), name) } -func (p *parserHelper) newSelect(ctx any, operand *exprpb.Expr, field string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{Operand: operand, Field: field}} - return exprNode +func (p *parserHelper) newSelect(ctx any, operand ast.Expr, field string) ast.Expr { + return p.exprFactory.NewSelect(p.newID(ctx), operand, field) } -func (p *parserHelper) newPresenceTest(ctx any, operand *exprpb.Expr, field string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{Operand: operand, Field: field, TestOnly: true}} - return exprNode +func (p *parserHelper) newPresenceTest(ctx any, operand ast.Expr, field string) ast.Expr { + return 
p.exprFactory.NewPresenceTest(p.newID(ctx), operand, field) } -func (p *parserHelper) newGlobalCall(ctx any, function string, args ...*exprpb.Expr) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{Function: function, Args: args}} - return exprNode +func (p *parserHelper) newGlobalCall(ctx any, function string, args ...ast.Expr) ast.Expr { + return p.exprFactory.NewCall(p.newID(ctx), function, args...) } -func (p *parserHelper) newReceiverCall(ctx any, function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{Function: function, Target: target, Args: args}} - return exprNode +func (p *parserHelper) newReceiverCall(ctx any, function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return p.exprFactory.NewMemberCall(p.newID(ctx), function, target, args...) } -func (p *parserHelper) newList(ctx any, elements []*exprpb.Expr, optionals ...int32) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: elements, - OptionalIndices: optionals, - }} - return exprNode +func (p *parserHelper) newList(ctx any, elements []ast.Expr, optionals ...int32) ast.Expr { + return p.exprFactory.NewList(p.newID(ctx), elements, optionals) } -func (p *parserHelper) newMap(ctx any, entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{Entries: entries}} - return exprNode +func (p *parserHelper) newMap(ctx any, entries ...ast.EntryExpr) ast.Expr { + return p.exprFactory.NewMap(p.newID(ctx), entries) } -func (p *parserHelper) newMapEntry(entryID int64, key *exprpb.Expr, value *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return &exprpb.Expr_CreateStruct_Entry{ - Id: entryID, - KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{MapKey: key}, - Value: value, - OptionalEntry: optional, - } +func (p *parserHelper) newMapEntry(entryID int64, key ast.Expr, value ast.Expr, optional bool) ast.EntryExpr { + return p.exprFactory.NewMapEntry(entryID, key, value, optional) } -func (p *parserHelper) newObject(ctx any, typeName string, entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: typeName, - Entries: entries, - }, - } - return exprNode +func (p *parserHelper) newObject(ctx any, typeName string, fields ...ast.EntryExpr) ast.Expr { + return p.exprFactory.NewStruct(p.newID(ctx), typeName, fields) } -func (p *parserHelper) newObjectField(fieldID int64, field string, value *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return &exprpb.Expr_CreateStruct_Entry{ - Id: fieldID, - KeyKind: &exprpb.Expr_CreateStruct_Entry_FieldKey{FieldKey: field}, - Value: value, - OptionalEntry: optional, - } +func (p *parserHelper) newObjectField(fieldID int64, field string, value ast.Expr, optional bool) ast.EntryExpr { + return p.exprFactory.NewStructField(fieldID, field, value, optional) } -func (p *parserHelper) newComprehension(ctx any, iterVar string, - iterRange *exprpb.Expr, +func (p *parserHelper) newComprehension(ctx any, + iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) 
*exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ComprehensionExpr{ - ComprehensionExpr: &exprpb.Expr_Comprehension{ - AccuVar: accuVar, - AccuInit: accuInit, - IterVar: iterVar, - IterRange: iterRange, - LoopCondition: condition, - LoopStep: step, - Result: result}} - return exprNode -} - -func (p *parserHelper) newExpr(ctx any) *exprpb.Expr { - id, isID := ctx.(int64) - if isID { - return &exprpb.Expr{Id: id} + accuInit ast.Expr, + condition ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr { + return p.exprFactory.NewComprehension( + p.newID(ctx), iterRange, iterVar, accuVar, accuInit, condition, step, result) +} + +func (p *parserHelper) newID(ctx any) int64 { + if id, isID := ctx.(int64); isID { + return id } - return &exprpb.Expr{Id: p.id(ctx)} + return p.id(ctx) +} + +func (p *parserHelper) newExpr(ctx any) ast.Expr { + return p.exprFactory.NewUnspecifiedExpr(p.newID(ctx)) } func (p *parserHelper) id(ctx any) int64 { - var location common.Location + var offset ast.OffsetRange switch c := ctx.(type) { case antlr.ParserRuleContext: - token := c.GetStart() - location = p.source.NewLocation(token.GetLine(), token.GetColumn()) + start, stop := c.GetStart(), c.GetStop() + if stop == nil { + stop = start + } + offset.Start = p.sourceInfo.ComputeOffset(int32(start.GetLine()), int32(start.GetColumn())) + offset.Stop = p.sourceInfo.ComputeOffset(int32(stop.GetLine()), int32(stop.GetColumn())) case antlr.Token: - token := c - location = p.source.NewLocation(token.GetLine(), token.GetColumn()) + offset.Start = p.sourceInfo.ComputeOffset(int32(c.GetLine()), int32(c.GetColumn())) + offset.Stop = offset.Start case common.Location: - location = c + offset.Start = p.sourceInfo.ComputeOffset(int32(c.Line()), int32(c.Column())) + offset.Stop = offset.Start + case ast.OffsetRange: + offset = c default: // This should only happen if the ctx is nil return -1 } id := p.nextID - p.positions[id], _ = p.source.LocationOffset(location) + p.sourceInfo.SetOffsetRange(id, offset) p.nextID++ return id } func (p *parserHelper) getLocation(id int64) common.Location { - characterOffset := p.positions[id] - location, _ := p.source.OffsetLocation(characterOffset) - return location + return p.sourceInfo.GetStartLocation(id) } // buildMacroCallArg iterates the expression and returns a new expression // where all macros have been replaced by their IDs in MacroCalls -func (p *parserHelper) buildMacroCallArg(expr *exprpb.Expr) *exprpb.Expr { - if _, found := p.macroCalls[expr.GetId()]; found { - return &exprpb.Expr{Id: expr.GetId()} +func (p *parserHelper) buildMacroCallArg(expr ast.Expr) ast.Expr { + if _, found := p.sourceInfo.GetMacroCall(expr.ID()); found { + return p.exprFactory.NewUnspecifiedExpr(expr.ID()) } - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: + switch expr.Kind() { + case ast.CallKind: // Iterate the AST from `expr` recursively looking for macros. Because we are at most // starting from the top level macro, this recursion is bounded by the size of the AST. This // means that the depth check on the AST during parsing will catch recursion overflows // before we get to here. 
- macroTarget := expr.GetCallExpr().GetTarget() - if macroTarget != nil { - macroTarget = p.buildMacroCallArg(macroTarget) - } - macroArgs := make([]*exprpb.Expr, len(expr.GetCallExpr().GetArgs())) - for index, arg := range expr.GetCallExpr().GetArgs() { + call := expr.AsCall() + macroArgs := make([]ast.Expr, len(call.Args())) + for index, arg := range call.Args() { macroArgs[index] = p.buildMacroCallArg(arg) } - return &exprpb.Expr{ - Id: expr.GetId(), - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Target: macroTarget, - Function: expr.GetCallExpr().GetFunction(), - Args: macroArgs, - }, - }, + if !call.IsMemberFunction() { + return p.exprFactory.NewCall(expr.ID(), call.FunctionName(), macroArgs...) } - case *exprpb.Expr_ListExpr: - listExpr := expr.GetListExpr() - macroListArgs := make([]*exprpb.Expr, len(listExpr.GetElements())) - for i, elem := range listExpr.GetElements() { + macroTarget := p.buildMacroCallArg(call.Target()) + return p.exprFactory.NewMemberCall(expr.ID(), call.FunctionName(), macroTarget, macroArgs...) + case ast.ListKind: + list := expr.AsList() + macroListArgs := make([]ast.Expr, list.Size()) + for i, elem := range list.Elements() { macroListArgs[i] = p.buildMacroCallArg(elem) } - return &exprpb.Expr{ - Id: expr.GetId(), - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: macroListArgs, - OptionalIndices: listExpr.GetOptionalIndices(), - }, - }, - } + return p.exprFactory.NewList(expr.ID(), macroListArgs, list.OptionalIndices()) } - return expr } // addMacroCall adds the macro the the MacroCalls map in source info. If a macro has args/subargs/target // that are macros, their ID will be stored instead for later self-lookups. -func (p *parserHelper) addMacroCall(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) { - macroTarget := target - if target != nil { - if _, found := p.macroCalls[target.GetId()]; found { - macroTarget = &exprpb.Expr{Id: target.GetId()} - } else { - macroTarget = p.buildMacroCallArg(target) - } - } - - macroArgs := make([]*exprpb.Expr, len(args)) +func (p *parserHelper) addMacroCall(exprID int64, function string, target ast.Expr, args ...ast.Expr) { + macroArgs := make([]ast.Expr, len(args)) for index, arg := range args { macroArgs[index] = p.buildMacroCallArg(arg) } - - p.macroCalls[exprID] = &exprpb.Expr{ - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Target: macroTarget, - Function: function, - Args: macroArgs, - }, - }, + if target == nil { + p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewCall(0, function, macroArgs...)) + return } + macroTarget := target + if _, found := p.sourceInfo.GetMacroCall(target.ID()); found { + macroTarget = p.exprFactory.NewUnspecifiedExpr(target.ID()) + } else { + macroTarget = p.buildMacroCallArg(target) + } + p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewMemberCall(0, function, macroTarget, macroArgs...)) } // logicManager compacts logical trees into a more efficient structure which is semantically @@ -309,71 +234,71 @@ func (p *parserHelper) addMacroCall(exprID int64, function string, target *exprp // controversial choice as it alters the traditional order of execution assumptions present in most // expressions. type logicManager struct { - helper *parserHelper + exprFactory ast.ExprFactory function string - terms []*exprpb.Expr + terms []ast.Expr ops []int64 variadicASTs bool } // newVariadicLogicManager creates a logic manager instance bound to a specific function and its first term. 
-func newVariadicLogicManager(h *parserHelper, function string, term *exprpb.Expr) *logicManager { +func newVariadicLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager { return &logicManager{ - helper: h, + exprFactory: fac, function: function, - terms: []*exprpb.Expr{term}, + terms: []ast.Expr{term}, ops: []int64{}, variadicASTs: true, } } // newBalancingLogicManager creates a logic manager instance bound to a specific function and its first term. -func newBalancingLogicManager(h *parserHelper, function string, term *exprpb.Expr) *logicManager { +func newBalancingLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager { return &logicManager{ - helper: h, + exprFactory: fac, function: function, - terms: []*exprpb.Expr{term}, + terms: []ast.Expr{term}, ops: []int64{}, variadicASTs: false, } } // addTerm adds an operation identifier and term to the set of terms to be balanced. -func (l *logicManager) addTerm(op int64, term *exprpb.Expr) { +func (l *logicManager) addTerm(op int64, term ast.Expr) { l.terms = append(l.terms, term) l.ops = append(l.ops, op) } // toExpr renders the logic graph into an Expr value, either balancing a tree of logical // operations or creating a variadic representation of the logical operator. -func (l *logicManager) toExpr() *exprpb.Expr { +func (l *logicManager) toExpr() ast.Expr { if len(l.terms) == 1 { return l.terms[0] } if l.variadicASTs { - return l.helper.newGlobalCall(l.ops[0], l.function, l.terms...) + return l.exprFactory.NewCall(l.ops[0], l.function, l.terms...) } return l.balancedTree(0, len(l.ops)-1) } // balancedTree recursively balances the terms provided to a commutative operator. -func (l *logicManager) balancedTree(lo, hi int) *exprpb.Expr { +func (l *logicManager) balancedTree(lo, hi int) ast.Expr { mid := (lo + hi + 1) / 2 - var left *exprpb.Expr + var left ast.Expr if mid == lo { left = l.terms[mid] } else { left = l.balancedTree(lo, mid-1) } - var right *exprpb.Expr + var right ast.Expr if mid == hi { right = l.terms[mid+1] } else { right = l.balancedTree(mid+1, hi) } - return l.helper.newGlobalCall(l.ops[mid], l.function, left, right) + return l.exprFactory.NewCall(l.ops[mid], l.function, left, right) } type exprHelper struct { @@ -387,202 +312,151 @@ func (e *exprHelper) nextMacroID() int64 { // Copy implements the ExprHelper interface method by producing a copy of the input Expr value // with a fresh set of numeric identifiers the Expr and all its descendants. 
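Before the exprHelper.Copy changes below, the balancing strategy used by logicManager is easier to see with a small standalone sketch. This is plain Go written for illustration only, not code from this change; it mirrors balancedTree's index arithmetic and prints how four conjuncts are grouped when variadic ASTs are disabled (the "_&&_" rendering is just for display).

package main

import "fmt"

// balance mirrors logicManager.balancedTree: lo and hi index the operator
// positions, and terms always has one more element than there are operators.
func balance(terms []string, lo, hi int) string {
	mid := (lo + hi + 1) / 2
	var left string
	if mid == lo {
		left = terms[mid]
	} else {
		left = balance(terms, lo, mid-1)
	}
	var right string
	if mid == hi {
		right = terms[mid+1]
	} else {
		right = balance(terms, mid+1, hi)
	}
	return fmt.Sprintf("_&&_(%s, %s)", left, right)
}

func main() {
	terms := []string{"a", "b", "c", "d"}
	// Prints _&&_(_&&_(a, b), _&&_(c, d)). With variadic ASTs enabled the same
	// input becomes a single flat _&&_(a, b, c, d) call via exprFactory.NewCall.
	fmt.Println(balance(terms, 0, len(terms)-2))
}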
-func (e *exprHelper) Copy(expr *exprpb.Expr) *exprpb.Expr { - copy := e.parserHelper.newExpr(e.parserHelper.getLocation(expr.GetId())) - switch expr.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - copy.ExprKind = &exprpb.Expr_ConstExpr{ConstExpr: expr.GetConstExpr()} - case *exprpb.Expr_IdentExpr: - copy.ExprKind = &exprpb.Expr_IdentExpr{IdentExpr: expr.GetIdentExpr()} - case *exprpb.Expr_SelectExpr: - op := expr.GetSelectExpr().GetOperand() - copy.ExprKind = &exprpb.Expr_SelectExpr{SelectExpr: &exprpb.Expr_Select{ - Operand: e.Copy(op), - Field: expr.GetSelectExpr().GetField(), - TestOnly: expr.GetSelectExpr().GetTestOnly(), - }} - case *exprpb.Expr_CallExpr: - call := expr.GetCallExpr() - target := call.GetTarget() - if target != nil { - target = e.Copy(target) +func (e *exprHelper) Copy(expr ast.Expr) ast.Expr { + offsetRange, _ := e.parserHelper.sourceInfo.GetOffsetRange(expr.ID()) + copyID := e.parserHelper.newID(offsetRange) + switch expr.Kind() { + case ast.LiteralKind: + return e.exprFactory.NewLiteral(copyID, expr.AsLiteral()) + case ast.IdentKind: + return e.exprFactory.NewIdent(copyID, expr.AsIdent()) + case ast.SelectKind: + sel := expr.AsSelect() + op := e.Copy(sel.Operand()) + if sel.IsTestOnly() { + return e.exprFactory.NewPresenceTest(copyID, op, sel.FieldName()) } - args := call.GetArgs() - argsCopy := make([]*exprpb.Expr, len(args)) + return e.exprFactory.NewSelect(copyID, op, sel.FieldName()) + case ast.CallKind: + call := expr.AsCall() + args := call.Args() + argsCopy := make([]ast.Expr, len(args)) for i, arg := range args { argsCopy[i] = e.Copy(arg) } - copy.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Function: call.GetFunction(), - Target: target, - Args: argsCopy, - }, + if !call.IsMemberFunction() { + return e.exprFactory.NewCall(copyID, call.FunctionName(), argsCopy...) } - case *exprpb.Expr_ListExpr: - elems := expr.GetListExpr().GetElements() - elemsCopy := make([]*exprpb.Expr, len(elems)) + return e.exprFactory.NewMemberCall(copyID, call.FunctionName(), e.Copy(call.Target()), argsCopy...) 
+ case ast.ListKind: + list := expr.AsList() + elems := list.Elements() + elemsCopy := make([]ast.Expr, len(elems)) for i, elem := range elems { elemsCopy[i] = e.Copy(elem) } - copy.ExprKind = &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{Elements: elemsCopy}, - } - case *exprpb.Expr_StructExpr: - entries := expr.GetStructExpr().GetEntries() - entriesCopy := make([]*exprpb.Expr_CreateStruct_Entry, len(entries)) - for i, entry := range entries { - entryCopy := &exprpb.Expr_CreateStruct_Entry{} - entryCopy.Id = e.nextMacroID() - switch entry.GetKeyKind().(type) { - case *exprpb.Expr_CreateStruct_Entry_FieldKey: - entryCopy.KeyKind = &exprpb.Expr_CreateStruct_Entry_FieldKey{ - FieldKey: entry.GetFieldKey(), - } - case *exprpb.Expr_CreateStruct_Entry_MapKey: - entryCopy.KeyKind = &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: e.Copy(entry.GetMapKey()), - } - } - entryCopy.Value = e.Copy(entry.GetValue()) - entriesCopy[i] = entryCopy + return e.exprFactory.NewList(copyID, elemsCopy, list.OptionalIndices()) + case ast.MapKind: + m := expr.AsMap() + entries := m.Entries() + entriesCopy := make([]ast.EntryExpr, len(entries)) + for i, en := range entries { + entry := en.AsMapEntry() + entryID := e.nextMacroID() + entriesCopy[i] = e.exprFactory.NewMapEntry(entryID, + e.Copy(entry.Key()), e.Copy(entry.Value()), entry.IsOptional()) } - copy.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: expr.GetStructExpr().GetMessageName(), - Entries: entriesCopy, - }, - } - case *exprpb.Expr_ComprehensionExpr: - iterRange := e.Copy(expr.GetComprehensionExpr().GetIterRange()) - accuInit := e.Copy(expr.GetComprehensionExpr().GetAccuInit()) - cond := e.Copy(expr.GetComprehensionExpr().GetLoopCondition()) - step := e.Copy(expr.GetComprehensionExpr().GetLoopStep()) - result := e.Copy(expr.GetComprehensionExpr().GetResult()) - copy.ExprKind = &exprpb.Expr_ComprehensionExpr{ - ComprehensionExpr: &exprpb.Expr_Comprehension{ - IterRange: iterRange, - IterVar: expr.GetComprehensionExpr().GetIterVar(), - AccuInit: accuInit, - AccuVar: expr.GetComprehensionExpr().GetAccuVar(), - LoopCondition: cond, - LoopStep: step, - Result: result, - }, + return e.exprFactory.NewMap(copyID, entriesCopy) + case ast.StructKind: + s := expr.AsStruct() + fields := s.Fields() + fieldsCopy := make([]ast.EntryExpr, len(fields)) + for i, f := range fields { + field := f.AsStructField() + fieldID := e.nextMacroID() + fieldsCopy[i] = e.exprFactory.NewStructField(fieldID, + field.Name(), e.Copy(field.Value()), field.IsOptional()) } + return e.exprFactory.NewStruct(copyID, s.TypeName(), fieldsCopy) + case ast.ComprehensionKind: + compre := expr.AsComprehension() + iterRange := e.Copy(compre.IterRange()) + accuInit := e.Copy(compre.AccuInit()) + cond := e.Copy(compre.LoopCondition()) + step := e.Copy(compre.LoopStep()) + result := e.Copy(compre.Result()) + return e.exprFactory.NewComprehension(copyID, + iterRange, compre.IterVar(), compre.AccuVar(), accuInit, cond, step, result) } - return copy -} - -// LiteralBool implements the ExprHelper interface method. -func (e *exprHelper) LiteralBool(value bool) *exprpb.Expr { - return e.parserHelper.newLiteralBool(e.nextMacroID(), value) -} - -// LiteralBytes implements the ExprHelper interface method. -func (e *exprHelper) LiteralBytes(value []byte) *exprpb.Expr { - return e.parserHelper.newLiteralBytes(e.nextMacroID(), value) -} - -// LiteralDouble implements the ExprHelper interface method. 
-func (e *exprHelper) LiteralDouble(value float64) *exprpb.Expr { - return e.parserHelper.newLiteralDouble(e.nextMacroID(), value) -} - -// LiteralInt implements the ExprHelper interface method. -func (e *exprHelper) LiteralInt(value int64) *exprpb.Expr { - return e.parserHelper.newLiteralInt(e.nextMacroID(), value) + return e.exprFactory.NewUnspecifiedExpr(copyID) } -// LiteralString implements the ExprHelper interface method. -func (e *exprHelper) LiteralString(value string) *exprpb.Expr { - return e.parserHelper.newLiteralString(e.nextMacroID(), value) -} - -// LiteralUint implements the ExprHelper interface method. -func (e *exprHelper) LiteralUint(value uint64) *exprpb.Expr { - return e.parserHelper.newLiteralUint(e.nextMacroID(), value) +// NewLiteral implements the ExprHelper interface method. +func (e *exprHelper) NewLiteral(value ref.Val) ast.Expr { + return e.exprFactory.NewLiteral(e.nextMacroID(), value) } // NewList implements the ExprHelper interface method. -func (e *exprHelper) NewList(elems ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newList(e.nextMacroID(), elems) +func (e *exprHelper) NewList(elems ...ast.Expr) ast.Expr { + return e.exprFactory.NewList(e.nextMacroID(), elems, []int32{}) } // NewMap implements the ExprHelper interface method. -func (e *exprHelper) NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - return e.parserHelper.newMap(e.nextMacroID(), entries...) +func (e *exprHelper) NewMap(entries ...ast.EntryExpr) ast.Expr { + return e.exprFactory.NewMap(e.nextMacroID(), entries) } // NewMapEntry implements the ExprHelper interface method. -func (e *exprHelper) NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return e.parserHelper.newMapEntry(e.nextMacroID(), key, val, optional) +func (e *exprHelper) NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr { + return e.exprFactory.NewMapEntry(e.nextMacroID(), key, val, optional) } -// NewObject implements the ExprHelper interface method. -func (e *exprHelper) NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - return e.parserHelper.newObject(e.nextMacroID(), typeName, fieldInits...) +// NewStruct implements the ExprHelper interface method. +func (e *exprHelper) NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr { + return e.exprFactory.NewStruct(e.nextMacroID(), typeName, fieldInits) } -// NewObjectFieldInit implements the ExprHelper interface method. -func (e *exprHelper) NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return e.parserHelper.newObjectField(e.nextMacroID(), field, init, optional) +// NewStructField implements the ExprHelper interface method. +func (e *exprHelper) NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr { + return e.exprFactory.NewStructField(e.nextMacroID(), field, init, optional) } -// Fold implements the ExprHelper interface method. -func (e *exprHelper) Fold(iterVar string, - iterRange *exprpb.Expr, +// NewComprehension implements the ExprHelper interface method. 
+func (e *exprHelper) NewComprehension( + iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newComprehension( - e.nextMacroID(), iterVar, iterRange, accuVar, accuInit, condition, step, result) + accuInit ast.Expr, + condition ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr { + return e.exprFactory.NewComprehension( + e.nextMacroID(), iterRange, iterVar, accuVar, accuInit, condition, step, result) } -// Ident implements the ExprHelper interface method. -func (e *exprHelper) Ident(name string) *exprpb.Expr { - return e.parserHelper.newIdent(e.nextMacroID(), name) +// NewIdent implements the ExprHelper interface method. +func (e *exprHelper) NewIdent(name string) ast.Expr { + return e.exprFactory.NewIdent(e.nextMacroID(), name) } -// AccuIdent implements the ExprHelper interface method. -func (e *exprHelper) AccuIdent() *exprpb.Expr { - return e.parserHelper.newIdent(e.nextMacroID(), AccumulatorName) +// NewAccuIdent implements the ExprHelper interface method. +func (e *exprHelper) NewAccuIdent() ast.Expr { + return e.exprFactory.NewAccuIdent(e.nextMacroID()) } -// GlobalCall implements the ExprHelper interface method. -func (e *exprHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newGlobalCall(e.nextMacroID(), function, args...) +// NewGlobalCall implements the ExprHelper interface method. +func (e *exprHelper) NewCall(function string, args ...ast.Expr) ast.Expr { + return e.exprFactory.NewCall(e.nextMacroID(), function, args...) } -// ReceiverCall implements the ExprHelper interface method. -func (e *exprHelper) ReceiverCall(function string, - target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newReceiverCall(e.nextMacroID(), function, target, args...) +// NewMemberCall implements the ExprHelper interface method. +func (e *exprHelper) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return e.exprFactory.NewMemberCall(e.nextMacroID(), function, target, args...) } -// PresenceTest implements the ExprHelper interface method. -func (e *exprHelper) PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr { - return e.parserHelper.newPresenceTest(e.nextMacroID(), operand, field) +// NewPresenceTest implements the ExprHelper interface method. +func (e *exprHelper) NewPresenceTest(operand ast.Expr, field string) ast.Expr { + return e.exprFactory.NewPresenceTest(e.nextMacroID(), operand, field) } -// Select implements the ExprHelper interface method. -func (e *exprHelper) Select(operand *exprpb.Expr, field string) *exprpb.Expr { - return e.parserHelper.newSelect(e.nextMacroID(), operand, field) +// NewSelect implements the ExprHelper interface method. +func (e *exprHelper) NewSelect(operand ast.Expr, field string) ast.Expr { + return e.exprFactory.NewSelect(e.nextMacroID(), operand, field) } // OffsetLocation implements the ExprHelper interface method. func (e *exprHelper) OffsetLocation(exprID int64) common.Location { - offset, found := e.parserHelper.positions[exprID] - if !found { - return common.NoLocation - } - location, found := e.parserHelper.source.OffsetLocation(offset) - if !found { - return common.NoLocation - } - return location + return e.parserHelper.sourceInfo.GetStartLocation(exprID) } // NewError associates an error message with a given expression id, populating the source offset location of the error if possible. 
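These exprHelper methods back the refactored ExprHelper interface declared in macro.go below. As a rough illustration of how a macro author consumes the new ast.Expr-based surface, here is a sketch of a hypothetical global macro, written as if inside the parser package. The macro name "twice" is made up, and the NewGlobalMacro (name, arg count, expander) signature and operators.Add constant are assumed to behave as they do for the built-in macros.

// Hypothetical macro "twice": twice(x) expands to x + x. The second operand is
// produced with eh.Copy so it gets fresh expression ids and offset ranges
// instead of aliasing args[0].
var twiceMacro = NewGlobalMacro("twice", 1,
	func(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
		return eh.NewCall(operators.Add, args[0], eh.Copy(args[0])), nil
	})

Registered alongside the defaults, for example via the Macros option with append(AllMacros, twiceMacro), twice(x) should parse to the call x + x, with each copy carrying its own ids.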
diff --git a/vendor/github.com/google/cel-go/parser/macro.go b/vendor/github.com/google/cel-go/parser/macro.go index 6066e8ef4..5b1775bed 100644 --- a/vendor/github.com/google/cel-go/parser/macro.go +++ b/vendor/github.com/google/cel-go/parser/macro.go @@ -18,9 +18,10 @@ import ( "fmt" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) // NewGlobalMacro creates a Macro for a global function with the specified arg count. @@ -142,58 +143,38 @@ func makeVarArgMacroKey(name string, receiverStyle bool) string { // and produces as output an Expr ast node. // // Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil. -type MacroExpander func(eh ExprHelper, - target *exprpb.Expr, - args []*exprpb.Expr) (*exprpb.Expr, *common.Error) +type MacroExpander func(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) -// ExprHelper assists with the manipulation of proto-based Expr values in a manner which is -// consistent with the source position and expression id generation code leveraged by both -// the parser and type-checker. +// ExprHelper assists with the creation of Expr values in a manner which is consistent +// the internal semantics and id generation behaviors of the parser and checker libraries. type ExprHelper interface { // Copy the input expression with a brand new set of identifiers. - Copy(*exprpb.Expr) *exprpb.Expr - - // LiteralBool creates an Expr value for a bool literal. - LiteralBool(value bool) *exprpb.Expr - - // LiteralBytes creates an Expr value for a byte literal. - LiteralBytes(value []byte) *exprpb.Expr - - // LiteralDouble creates an Expr value for double literal. - LiteralDouble(value float64) *exprpb.Expr + Copy(ast.Expr) ast.Expr - // LiteralInt creates an Expr value for an int literal. - LiteralInt(value int64) *exprpb.Expr + // Literal creates an Expr value for a scalar literal value. + NewLiteral(value ref.Val) ast.Expr - // LiteralString creates am Expr value for a string literal. - LiteralString(value string) *exprpb.Expr - - // LiteralUint creates an Expr value for a uint literal. - LiteralUint(value uint64) *exprpb.Expr - - // NewList creates a CreateList instruction where the list is comprised of the optional set - // of elements provided as arguments. - NewList(elems ...*exprpb.Expr) *exprpb.Expr + // NewList creates a list literal instruction with an optional set of elements. + NewList(elems ...ast.Expr) ast.Expr // NewMap creates a CreateStruct instruction for a map where the map is comprised of the // optional set of key, value entries. - NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + NewMap(entries ...ast.EntryExpr) ast.Expr // NewMapEntry creates a Map Entry for the key, value pair. - NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr - // NewObject creates a CreateStruct instruction for an object with a given type name and - // optional set of field initializers. - NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + // NewStruct creates a struct literal expression with an optional set of field initializers. 
+ NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr - // NewObjectFieldInit creates a new Object field initializer from the field name and value. - NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + // NewStructField creates a new struct field initializer from the field name and value. + NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr - // Fold creates a fold comprehension instruction. + // NewComprehension creates a new comprehension instruction. // - // - iterVar is the iteration variable name. // - iterRange represents the expression that resolves to a list or map where the elements or // keys (respectively) will be iterated over. + // - iterVar is the iteration variable name. // - accuVar is the accumulation variable name, typically parser.AccumulatorName. // - accuInit is the initial expression whose value will be set for the accuVar prior to // folding. @@ -204,31 +185,31 @@ type ExprHelper interface { // The accuVar should not shadow variable names that you would like to reference within the // environment in the step and condition expressions. Presently, the name __result__ is commonly // used by built-in macros but this may change in the future. - Fold(iterVar string, - iterRange *exprpb.Expr, + NewComprehension(iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) *exprpb.Expr + accuInit ast.Expr, + condition ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr - // Ident creates an identifier Expr value. - Ident(name string) *exprpb.Expr + // NewIdent creates an identifier Expr value. + NewIdent(name string) ast.Expr - // AccuIdent returns an accumulator identifier for use with comprehension results. - AccuIdent() *exprpb.Expr + // NewAccuIdent returns an accumulator identifier for use with comprehension results. + NewAccuIdent() ast.Expr - // GlobalCall creates a function call Expr value for a global (free) function. - GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr + // NewCall creates a function call Expr value for a global (free) function. + NewCall(function string, args ...ast.Expr) ast.Expr - // ReceiverCall creates a function call Expr value for a receiver-style function. - ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr + // NewMemberCall creates a function call Expr value for a receiver-style function. + NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr - // PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. - PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr + // NewPresenceTest creates a Select TestOnly Expr value for modelling has() semantics. + NewPresenceTest(operand ast.Expr, field string) ast.Expr - // Select create a field traversal Expr value. - Select(operand *exprpb.Expr, field string) *exprpb.Expr + // NewSelect create a field traversal Expr value. + NewSelect(operand ast.Expr, field string) ast.Expr // OffsetLocation returns the Location of the expression identifier. 
OffsetLocation(exprID int64) common.Location @@ -296,21 +277,21 @@ const ( // MakeAll expands the input call arguments into a comprehension that returns true if all of the // elements in the range match the predicate expressions: // .all(, ) -func MakeAll(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeAll(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierAll, eh, target, args) } // MakeExists expands the input call arguments into a comprehension that returns true if any of the // elements in the range match the predicate expressions: // .exists(, ) -func MakeExists(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeExists(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierExists, eh, target, args) } // MakeExistsOne expands the input call arguments into a comprehension that returns true if exactly // one of the elements in the range match the predicate expressions: // .exists_one(, ) -func MakeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeExistsOne(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierExistsOne, eh, target, args) } @@ -324,14 +305,14 @@ func MakeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*ex // // In the second form only iterVar values which return true when provided to the predicate expression // are transformed. -func MakeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeMap(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - return nil, eh.NewError(args[0].GetId(), "argument is not an identifier") + return nil, eh.NewError(args[0].ID(), "argument is not an identifier") } - var fn *exprpb.Expr - var filter *exprpb.Expr + var fn ast.Expr + var filter ast.Expr if len(args) == 3 { filter = args[1] @@ -341,84 +322,85 @@ func MakeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.E fn = args[1] } - accuExpr := eh.Ident(AccumulatorName) + accuExpr := eh.NewAccuIdent() init := eh.NewList() - condition := eh.LiteralBool(true) - step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(fn)) + condition := eh.NewLiteral(types.True) + step := eh.NewCall(operators.Add, accuExpr, eh.NewList(fn)) if filter != nil { - step = eh.GlobalCall(operators.Conditional, filter, step, accuExpr) + step = eh.NewCall(operators.Conditional, filter, step, accuExpr) } - return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil } // MakeFilter expands the input call arguments into a comprehension which produces a list which contains // only elements which match the provided predicate expression: // .filter(, ) -func MakeFilter(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeFilter(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - return nil, eh.NewError(args[0].GetId(), "argument is not an identifier") + return nil, eh.NewError(args[0].ID(), "argument is not an identifier") } filter := args[1] - accuExpr := eh.Ident(AccumulatorName) + accuExpr := eh.NewAccuIdent() 
init := eh.NewList() - condition := eh.LiteralBool(true) - step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(args[0])) - step = eh.GlobalCall(operators.Conditional, filter, step, accuExpr) - return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil + condition := eh.NewLiteral(types.True) + step := eh.NewCall(operators.Add, accuExpr, eh.NewList(args[0])) + step = eh.NewCall(operators.Conditional, filter, step, accuExpr) + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil } // MakeHas expands the input call arguments into a presence test, e.g. has(.field) -func MakeHas(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - if s, ok := args[0].ExprKind.(*exprpb.Expr_SelectExpr); ok { - return eh.PresenceTest(s.SelectExpr.GetOperand(), s.SelectExpr.GetField()), nil +func MakeHas(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { + if args[0].Kind() == ast.SelectKind { + s := args[0].AsSelect() + return eh.NewPresenceTest(s.Operand(), s.FieldName()), nil } - return nil, eh.NewError(args[0].GetId(), "invalid argument to has() macro") + return nil, eh.NewError(args[0].ID(), "invalid argument to has() macro") } -func makeQuantifier(kind quantifierKind, eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func makeQuantifier(kind quantifierKind, eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - return nil, eh.NewError(args[0].GetId(), "argument must be a simple name") + return nil, eh.NewError(args[0].ID(), "argument must be a simple name") } - var init *exprpb.Expr - var condition *exprpb.Expr - var step *exprpb.Expr - var result *exprpb.Expr + var init ast.Expr + var condition ast.Expr + var step ast.Expr + var result ast.Expr switch kind { case quantifierAll: - init = eh.LiteralBool(true) - condition = eh.GlobalCall(operators.NotStrictlyFalse, eh.AccuIdent()) - step = eh.GlobalCall(operators.LogicalAnd, eh.AccuIdent(), args[1]) - result = eh.AccuIdent() + init = eh.NewLiteral(types.True) + condition = eh.NewCall(operators.NotStrictlyFalse, eh.NewAccuIdent()) + step = eh.NewCall(operators.LogicalAnd, eh.NewAccuIdent(), args[1]) + result = eh.NewAccuIdent() case quantifierExists: - init = eh.LiteralBool(false) - condition = eh.GlobalCall( + init = eh.NewLiteral(types.False) + condition = eh.NewCall( operators.NotStrictlyFalse, - eh.GlobalCall(operators.LogicalNot, eh.AccuIdent())) - step = eh.GlobalCall(operators.LogicalOr, eh.AccuIdent(), args[1]) - result = eh.AccuIdent() + eh.NewCall(operators.LogicalNot, eh.NewAccuIdent())) + step = eh.NewCall(operators.LogicalOr, eh.NewAccuIdent(), args[1]) + result = eh.NewAccuIdent() case quantifierExistsOne: - zeroExpr := eh.LiteralInt(0) - oneExpr := eh.LiteralInt(1) + zeroExpr := eh.NewLiteral(types.Int(0)) + oneExpr := eh.NewLiteral(types.Int(1)) init = zeroExpr - condition = eh.LiteralBool(true) - step = eh.GlobalCall(operators.Conditional, args[1], - eh.GlobalCall(operators.Add, eh.AccuIdent(), oneExpr), eh.AccuIdent()) - result = eh.GlobalCall(operators.Equals, eh.AccuIdent(), oneExpr) + condition = eh.NewLiteral(types.True) + step = eh.NewCall(operators.Conditional, args[1], + eh.NewCall(operators.Add, eh.NewAccuIdent(), oneExpr), eh.NewAccuIdent()) + result = eh.NewCall(operators.Equals, eh.NewAccuIdent(), oneExpr) default: - return nil, eh.NewError(args[0].GetId(), fmt.Sprintf("unrecognized quantifier 
'%v'", kind)) + return nil, eh.NewError(args[0].ID(), fmt.Sprintf("unrecognized quantifier '%v'", kind)) } - return eh.Fold(v, target, AccumulatorName, init, condition, step, result), nil + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, result), nil } -func extractIdent(e *exprpb.Expr) (string, bool) { - switch e.ExprKind.(type) { - case *exprpb.Expr_IdentExpr: - return e.GetIdentExpr().GetName(), true +func extractIdent(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.IdentKind: + return e.AsIdent(), true } return "", false } diff --git a/vendor/github.com/google/cel-go/parser/parser.go b/vendor/github.com/google/cel-go/parser/parser.go index 109326a93..1363d1c09 100644 --- a/vendor/github.com/google/cel-go/parser/parser.go +++ b/vendor/github.com/google/cel-go/parser/parser.go @@ -21,17 +21,15 @@ import ( "regexp" "strconv" "strings" - "sync" antlr "github.com/antlr/antlr4/runtime/Go/antlr/v4" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/runes" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser/gen" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" - structpb "google.golang.org/protobuf/types/known/structpb" ) // Parser encapsulates the context necessary to perform parsing for different expressions. @@ -88,11 +86,13 @@ func mustNewParser(opts ...Option) *Parser { } // Parse parses the expression represented by source and returns the result. -func (p *Parser) Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors) { +func (p *Parser) Parse(source common.Source) (*ast.AST, *common.Errors) { errs := common.NewErrors(source) + fac := ast.NewExprFactory() impl := parser{ errors: &parseErrors{errs}, - helper: newParserHelper(source), + exprFactory: fac, + helper: newParserHelper(source, fac), macros: p.macros, maxRecursionDepth: p.maxRecursionDepth, errorReportingLimit: p.errorReportingLimit, @@ -106,18 +106,15 @@ func (p *Parser) Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors if !ok { buf = runes.NewBuffer(source.Content()) } - var e *exprpb.Expr + var out ast.Expr if buf.Len() > p.expressionSizeCodePointLimit { - e = impl.reportError(common.NoLocation, + out = impl.reportError(common.NoLocation, "expression code point size exceeds limit: size: %d, limit %d", buf.Len(), p.expressionSizeCodePointLimit) } else { - e = impl.parse(buf, source.Description()) + out = impl.parse(buf, source.Description()) } - return &exprpb.ParsedExpr{ - Expr: e, - SourceInfo: impl.helper.getSourceInfo(), - }, errs + return ast.NewAST(out, impl.helper.getSourceInfo()), errs } // reservedIds are not legal to use as variables. We exclude them post-parse, as they *are* valid @@ -150,7 +147,7 @@ var reservedIds = map[string]struct{}{ // This function calls ParseWithMacros with AllMacros. // // Deprecated: Use NewParser().Parse() instead. 
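As a usage sketch of the replacement path, written as if inside the parser package: the helper name parseAndUnparse is made up, imports are abbreviated, and NewParser, common.NewTextSource, the *common.Errors accessors, and the *ast.AST accessors Expr() and SourceInfo() are assumed to behave as they do elsewhere in cel-go.

// Sketch only: parse an expression with the default macros, then round-trip it
// through the unparser using the new *ast.AST return type.
func parseAndUnparse(src string) (string, error) {
	p, err := NewParser(Macros(AllMacros...))
	if err != nil {
		return "", err
	}
	parsed, issues := p.Parse(common.NewTextSource(src))
	if len(issues.GetErrors()) != 0 {
		return "", fmt.Errorf("parse failed: %s", issues.ToDisplayString())
	}
	// parsed is an *ast.AST; its expression and source info feed the unparser
	// (and the checker) directly, with no protobuf round-trip.
	return Unparse(parsed.Expr(), parsed.SourceInfo())
}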
-func Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors) { +func Parse(source common.Source) (*ast.AST, *common.Errors) { return mustNewParser(Macros(AllMacros...)).Parse(source) } @@ -287,6 +284,7 @@ var _ antlr.ErrorStrategy = &recoveryLimitErrorStrategy{} type parser struct { gen.BaseCELVisitor errors *parseErrors + exprFactory ast.ExprFactory helper *parserHelper macros map[string]Macro recursionDepth int @@ -300,53 +298,21 @@ type parser struct { enableVariadicOperatorASTs bool } -var ( - _ gen.CELVisitor = (*parser)(nil) - - lexerPool *sync.Pool = &sync.Pool{ - New: func() any { - l := gen.NewCELLexer(nil) - l.RemoveErrorListeners() - return l - }, - } +var _ gen.CELVisitor = (*parser)(nil) - parserPool *sync.Pool = &sync.Pool{ - New: func() any { - p := gen.NewCELParser(nil) - p.RemoveErrorListeners() - return p - }, - } -) +func (p *parser) parse(expr runes.Buffer, desc string) ast.Expr { + lexer := gen.NewCELLexer(newCharStream(expr, desc)) + lexer.RemoveErrorListeners() + lexer.AddErrorListener(p) -func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr { - // TODO: get rid of these pools once https://github.com/antlr/antlr4/pull/3571 is in a release - lexer := lexerPool.Get().(*gen.CELLexer) - prsr := parserPool.Get().(*gen.CELParser) + prsr := gen.NewCELParser(antlr.NewCommonTokenStream(lexer, 0)) + prsr.RemoveErrorListeners() prsrListener := &recursionListener{ maxDepth: p.maxRecursionDepth, ruleTypeDepth: map[int]*int{}, } - defer func() { - // Unfortunately ANTLR Go runtime is missing (*antlr.BaseParser).RemoveParseListeners, - // so this is good enough until that is exported. - // Reset the lexer and parser before putting them back in the pool. - lexer.RemoveErrorListeners() - prsr.RemoveParseListener(prsrListener) - prsr.RemoveErrorListeners() - lexer.SetInputStream(nil) - prsr.SetInputStream(nil) - lexerPool.Put(lexer) - parserPool.Put(prsr) - }() - - lexer.SetInputStream(newCharStream(expr, desc)) - prsr.SetInputStream(antlr.NewCommonTokenStream(lexer, 0)) - - lexer.AddErrorListener(p) prsr.AddErrorListener(p) prsr.AddParseListener(prsrListener) @@ -373,7 +339,7 @@ func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr { } }() - return p.Visit(prsr.Start()).(*exprpb.Expr) + return p.Visit(prsr.Start()).(ast.Expr) } // Visitor implementations. @@ -470,26 +436,26 @@ func (p *parser) VisitStart(ctx *gen.StartContext) any { // Visit a parse tree produced by CELParser#expr. func (p *parser) VisitExpr(ctx *gen.ExprContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) + result := p.Visit(ctx.GetE()).(ast.Expr) if ctx.GetOp() == nil { return result } opID := p.helper.id(ctx.GetOp()) - ifTrue := p.Visit(ctx.GetE1()).(*exprpb.Expr) - ifFalse := p.Visit(ctx.GetE2()).(*exprpb.Expr) + ifTrue := p.Visit(ctx.GetE1()).(ast.Expr) + ifFalse := p.Visit(ctx.GetE2()).(ast.Expr) return p.globalCallOrMacro(opID, operators.Conditional, result, ifTrue, ifFalse) } // Visit a parse tree produced by CELParser#conditionalOr. 
func (p *parser) VisitConditionalOr(ctx *gen.ConditionalOrContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) + result := p.Visit(ctx.GetE()).(ast.Expr) l := p.newLogicManager(operators.LogicalOr, result) rest := ctx.GetE1() for i, op := range ctx.GetOps() { if i >= len(rest) { return p.reportError(ctx, "unexpected character, wanted '||'") } - next := p.Visit(rest[i]).(*exprpb.Expr) + next := p.Visit(rest[i]).(ast.Expr) opID := p.helper.id(op) l.addTerm(opID, next) } @@ -498,14 +464,14 @@ func (p *parser) VisitConditionalOr(ctx *gen.ConditionalOrContext) any { // Visit a parse tree produced by CELParser#conditionalAnd. func (p *parser) VisitConditionalAnd(ctx *gen.ConditionalAndContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) + result := p.Visit(ctx.GetE()).(ast.Expr) l := p.newLogicManager(operators.LogicalAnd, result) rest := ctx.GetE1() for i, op := range ctx.GetOps() { if i >= len(rest) { return p.reportError(ctx, "unexpected character, wanted '&&'") } - next := p.Visit(rest[i]).(*exprpb.Expr) + next := p.Visit(rest[i]).(ast.Expr) opID := p.helper.id(op) l.addTerm(opID, next) } @@ -519,9 +485,9 @@ func (p *parser) VisitRelation(ctx *gen.RelationContext) any { opText = ctx.GetOp().GetText() } if op, found := operators.Find(opText); found { - lhs := p.Visit(ctx.Relation(0)).(*exprpb.Expr) + lhs := p.Visit(ctx.Relation(0)).(ast.Expr) opID := p.helper.id(ctx.GetOp()) - rhs := p.Visit(ctx.Relation(1)).(*exprpb.Expr) + rhs := p.Visit(ctx.Relation(1)).(ast.Expr) return p.globalCallOrMacro(opID, op, lhs, rhs) } return p.reportError(ctx, "operator not found") @@ -534,9 +500,9 @@ func (p *parser) VisitCalc(ctx *gen.CalcContext) any { opText = ctx.GetOp().GetText() } if op, found := operators.Find(opText); found { - lhs := p.Visit(ctx.Calc(0)).(*exprpb.Expr) + lhs := p.Visit(ctx.Calc(0)).(ast.Expr) opID := p.helper.id(ctx.GetOp()) - rhs := p.Visit(ctx.Calc(1)).(*exprpb.Expr) + rhs := p.Visit(ctx.Calc(1)).(ast.Expr) return p.globalCallOrMacro(opID, op, lhs, rhs) } return p.reportError(ctx, "operator not found") @@ -552,7 +518,7 @@ func (p *parser) VisitLogicalNot(ctx *gen.LogicalNotContext) any { return p.Visit(ctx.Member()) } opID := p.helper.id(ctx.GetOps()[0]) - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) return p.globalCallOrMacro(opID, operators.LogicalNot, target) } @@ -561,13 +527,13 @@ func (p *parser) VisitNegate(ctx *gen.NegateContext) any { return p.Visit(ctx.Member()) } opID := p.helper.id(ctx.GetOps()[0]) - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) return p.globalCallOrMacro(opID, operators.Negate, target) } // VisitSelect visits a parse tree produced by CELParser#Select. func (p *parser) VisitSelect(ctx *gen.SelectContext) any { - operand := p.Visit(ctx.Member()).(*exprpb.Expr) + operand := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. if ctx.GetId() == nil || ctx.GetOp() == nil { return p.helper.newExpr(ctx) @@ -588,7 +554,7 @@ func (p *parser) VisitSelect(ctx *gen.SelectContext) any { // VisitMemberCall visits a parse tree produced by CELParser#MemberCall. func (p *parser) VisitMemberCall(ctx *gen.MemberCallContext) any { - operand := p.Visit(ctx.Member()).(*exprpb.Expr) + operand := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. 
if ctx.GetId() == nil { return p.helper.newExpr(ctx) @@ -600,13 +566,13 @@ func (p *parser) VisitMemberCall(ctx *gen.MemberCallContext) any { // Visit a parse tree produced by CELParser#Index. func (p *parser) VisitIndex(ctx *gen.IndexContext) any { - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. if ctx.GetOp() == nil { return p.helper.newExpr(ctx) } opID := p.helper.id(ctx.GetOp()) - index := p.Visit(ctx.GetIndex()).(*exprpb.Expr) + index := p.Visit(ctx.GetIndex()).(ast.Expr) operator := operators.Index if ctx.GetOpt() != nil { if !p.enableOptionalSyntax { @@ -630,7 +596,7 @@ func (p *parser) VisitCreateMessage(ctx *gen.CreateMessageContext) any { messageName = "." + messageName } objID := p.helper.id(ctx.GetOp()) - entries := p.VisitIFieldInitializerList(ctx.GetEntries()).([]*exprpb.Expr_CreateStruct_Entry) + entries := p.VisitIFieldInitializerList(ctx.GetEntries()).([]ast.EntryExpr) return p.helper.newObject(objID, messageName, entries...) } @@ -638,16 +604,16 @@ func (p *parser) VisitCreateMessage(ctx *gen.CreateMessageContext) any { func (p *parser) VisitIFieldInitializerList(ctx gen.IFieldInitializerListContext) any { if ctx == nil || ctx.GetFields() == nil { // This is the result of a syntax error handled elswhere, return empty. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } - result := make([]*exprpb.Expr_CreateStruct_Entry, len(ctx.GetFields())) + result := make([]ast.EntryExpr, len(ctx.GetFields())) cols := ctx.GetCols() vals := ctx.GetValues() for i, f := range ctx.GetFields() { if i >= len(cols) || i >= len(vals) { // This is the result of a syntax error detected elsewhere. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } initID := p.helper.id(cols[i]) optField := f.(*gen.OptFieldContext) @@ -659,10 +625,10 @@ func (p *parser) VisitIFieldInitializerList(ctx gen.IFieldInitializerListContext // The field may be empty due to a prior error. id := optField.IDENTIFIER() if id == nil { - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } fieldName := id.GetText() - value := p.Visit(vals[i]).(*exprpb.Expr) + value := p.Visit(vals[i]).(ast.Expr) field := p.helper.newObjectField(initID, fieldName, value, optional) result[i] = field } @@ -702,9 +668,9 @@ func (p *parser) VisitCreateList(ctx *gen.CreateListContext) any { // Visit a parse tree produced by CELParser#CreateStruct. func (p *parser) VisitCreateStruct(ctx *gen.CreateStructContext) any { structID := p.helper.id(ctx.GetOp()) - entries := []*exprpb.Expr_CreateStruct_Entry{} + entries := []ast.EntryExpr{} if ctx.GetEntries() != nil { - entries = p.Visit(ctx.GetEntries()).([]*exprpb.Expr_CreateStruct_Entry) + entries = p.Visit(ctx.GetEntries()).([]ast.EntryExpr) } return p.helper.newMap(structID, entries...) } @@ -713,17 +679,17 @@ func (p *parser) VisitCreateStruct(ctx *gen.CreateStructContext) any { func (p *parser) VisitMapInitializerList(ctx *gen.MapInitializerListContext) any { if ctx == nil || ctx.GetKeys() == nil { // This is the result of a syntax error handled elswhere, return empty. 
- return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } - result := make([]*exprpb.Expr_CreateStruct_Entry, len(ctx.GetCols())) + result := make([]ast.EntryExpr, len(ctx.GetCols())) keys := ctx.GetKeys() vals := ctx.GetValues() for i, col := range ctx.GetCols() { colID := p.helper.id(col) if i >= len(keys) || i >= len(vals) { // This is the result of a syntax error detected elsewhere. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } optKey := keys[i] optional := optKey.GetOpt() != nil @@ -731,8 +697,8 @@ func (p *parser) VisitMapInitializerList(ctx *gen.MapInitializerListContext) any p.reportError(optKey, "unsupported syntax '?'") continue } - key := p.Visit(optKey.GetE()).(*exprpb.Expr) - value := p.Visit(vals[i]).(*exprpb.Expr) + key := p.Visit(optKey.GetE()).(ast.Expr) + value := p.Visit(vals[i]).(ast.Expr) entry := p.helper.newMapEntry(colID, key, value, optional) result[i] = entry } @@ -812,30 +778,27 @@ func (p *parser) VisitBoolFalse(ctx *gen.BoolFalseContext) any { // Visit a parse tree produced by CELParser#Null. func (p *parser) VisitNull(ctx *gen.NullContext) any { - return p.helper.newLiteral(ctx, - &exprpb.Constant{ - ConstantKind: &exprpb.Constant_NullValue{ - NullValue: structpb.NullValue_NULL_VALUE}}) + return p.helper.exprFactory.NewLiteral(p.helper.newID(ctx), types.NullValue) } -func (p *parser) visitExprList(ctx gen.IExprListContext) []*exprpb.Expr { +func (p *parser) visitExprList(ctx gen.IExprListContext) []ast.Expr { if ctx == nil { - return []*exprpb.Expr{} + return []ast.Expr{} } return p.visitSlice(ctx.GetE()) } -func (p *parser) visitListInit(ctx gen.IListInitContext) ([]*exprpb.Expr, []int32) { +func (p *parser) visitListInit(ctx gen.IListInitContext) ([]ast.Expr, []int32) { if ctx == nil { - return []*exprpb.Expr{}, []int32{} + return []ast.Expr{}, []int32{} } elements := ctx.GetElems() - result := make([]*exprpb.Expr, len(elements)) + result := make([]ast.Expr, len(elements)) optionals := []int32{} for i, e := range elements { - ex := p.Visit(e.GetE()).(*exprpb.Expr) + ex := p.Visit(e.GetE()).(ast.Expr) if ex == nil { - return []*exprpb.Expr{}, []int32{} + return []ast.Expr{}, []int32{} } result[i] = ex if e.GetOpt() != nil { @@ -849,13 +812,13 @@ func (p *parser) visitListInit(ctx gen.IListInitContext) ([]*exprpb.Expr, []int3 return result, optionals } -func (p *parser) visitSlice(expressions []gen.IExprContext) []*exprpb.Expr { +func (p *parser) visitSlice(expressions []gen.IExprContext) []ast.Expr { if expressions == nil { - return []*exprpb.Expr{} + return []ast.Expr{} } - result := make([]*exprpb.Expr, len(expressions)) + result := make([]ast.Expr, len(expressions)) for i, e := range expressions { - ex := p.Visit(e).(*exprpb.Expr) + ex := p.Visit(e).(ast.Expr) result[i] = ex } return result @@ -870,24 +833,24 @@ func (p *parser) unquote(ctx any, value string, isBytes bool) string { return text } -func (p *parser) newLogicManager(function string, term *exprpb.Expr) *logicManager { +func (p *parser) newLogicManager(function string, term ast.Expr) *logicManager { if p.enableVariadicOperatorASTs { - return newVariadicLogicManager(p.helper, function, term) + return newVariadicLogicManager(p.exprFactory, function, term) } - return newBalancingLogicManager(p.helper, function, term) + return newBalancingLogicManager(p.exprFactory, function, term) } -func (p *parser) reportError(ctx any, format string, args ...any) *exprpb.Expr { +func (p *parser) reportError(ctx any, format string, args ...any) ast.Expr { var location 
common.Location err := p.helper.newExpr(ctx) switch c := ctx.(type) { case common.Location: location = c case antlr.Token, antlr.ParserRuleContext: - location = p.helper.getLocation(err.GetId()) + location = p.helper.getLocation(err.ID()) } // Provide arguments to the report error. - p.errors.reportErrorAtID(err.GetId(), location, format, args...) + p.errors.reportErrorAtID(err.ID(), location, format, args...) return err } @@ -924,21 +887,21 @@ func (p *parser) ReportContextSensitivity(recognizer antlr.Parser, dfa *antlr.DF // Intentional } -func (p *parser) globalCallOrMacro(exprID int64, function string, args ...*exprpb.Expr) *exprpb.Expr { +func (p *parser) globalCallOrMacro(exprID int64, function string, args ...ast.Expr) ast.Expr { if expr, found := p.expandMacro(exprID, function, nil, args...); found { return expr } return p.helper.newGlobalCall(exprID, function, args...) } -func (p *parser) receiverCallOrMacro(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { +func (p *parser) receiverCallOrMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) ast.Expr { if expr, found := p.expandMacro(exprID, function, target, args...); found { return expr } return p.helper.newReceiverCall(exprID, function, target, args...) } -func (p *parser) expandMacro(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) (*exprpb.Expr, bool) { +func (p *parser) expandMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) (ast.Expr, bool) { macro, found := p.macros[makeMacroKey(function, len(args), target != nil)] if !found { macro, found = p.macros[makeVarArgMacroKey(function, target != nil)] @@ -964,7 +927,7 @@ func (p *parser) expandMacro(exprID int64, function string, target *exprpb.Expr, return nil, false } if p.populateMacroCalls { - p.helper.addMacroCall(expr.GetId(), function, target, args...) + p.helper.addMacroCall(expr.ID(), function, target, args...) } return expr, true } diff --git a/vendor/github.com/google/cel-go/parser/unparser.go b/vendor/github.com/google/cel-go/parser/unparser.go index c3c40a0dd..91cf72944 100644 --- a/vendor/github.com/google/cel-go/parser/unparser.go +++ b/vendor/github.com/google/cel-go/parser/unparser.go @@ -20,9 +20,9 @@ import ( "strconv" "strings" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" ) // Unparse takes an input expression and source position information and generates a human-readable @@ -39,7 +39,7 @@ import ( // // This function optionally takes in one or more UnparserOption to alter the unparsing behavior, such as // performing word wrapping on expressions. -func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo, opts ...UnparserOption) (string, error) { +func Unparse(expr ast.Expr, info *ast.SourceInfo, opts ...UnparserOption) (string, error) { unparserOpts := &unparserOption{ wrapOnColumn: defaultWrapOnColumn, wrapAfterColumnLimit: defaultWrapAfterColumnLimit, @@ -68,12 +68,12 @@ func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo, opts ...UnparserOption) // unparser visits an expression to reconstruct a human-readable string from an AST. 
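Before the struct definition, a minimal sketch of the new Unparse signature. The helper name and the ids 1 through 3 are illustrative, and ast.NewExprFactory, ast.NewSourceInfo, and common.NewTextSource are assumed to behave as used earlier in this diff and in the wider cel-go ast package.

// Hypothetical helper, written as if inside the parser package: build 1 + 2
// by hand and render it back to text.
func exampleUnparse() (string, error) {
	fac := ast.NewExprFactory()
	sum := fac.NewCall(1, operators.Add,
		fac.NewLiteral(2, types.Int(1)),
		fac.NewLiteral(3, types.Int(2)))
	// Expected to yield "1 + 2". A types.Uint literal would render with a
	// trailing "u", and a types.Double with a decimal point, per visitConst below.
	return Unparse(sum, ast.NewSourceInfo(common.NewTextSource("1 + 2")))
}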
type unparser struct { str strings.Builder - info *exprpb.SourceInfo + info *ast.SourceInfo options *unparserOption lastWrappedIndex int } -func (un *unparser) visit(expr *exprpb.Expr) error { +func (un *unparser) visit(expr ast.Expr) error { if expr == nil { return errors.New("unsupported expression") } @@ -81,27 +81,29 @@ func (un *unparser) visit(expr *exprpb.Expr) error { if visited || err != nil { return err } - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: + switch expr.Kind() { + case ast.CallKind: return un.visitCall(expr) - case *exprpb.Expr_ConstExpr: + case ast.LiteralKind: return un.visitConst(expr) - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: return un.visitIdent(expr) - case *exprpb.Expr_ListExpr: + case ast.ListKind: return un.visitList(expr) - case *exprpb.Expr_SelectExpr: + case ast.MapKind: + return un.visitStructMap(expr) + case ast.SelectKind: return un.visitSelect(expr) - case *exprpb.Expr_StructExpr: - return un.visitStruct(expr) + case ast.StructKind: + return un.visitStructMsg(expr) default: return fmt.Errorf("unsupported expression: %v", expr) } } -func (un *unparser) visitCall(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() +func (un *unparser) visitCall(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() switch fun { // ternary operator case operators.Conditional: @@ -141,10 +143,10 @@ func (un *unparser) visitCall(expr *exprpb.Expr) error { } } -func (un *unparser) visitCallBinary(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() +func (un *unparser) visitCallBinary(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() lhs := args[0] // add parens if the current operator is lower precedence than the lhs expr operator. lhsParen := isComplexOperatorWithRespectTo(fun, lhs) @@ -168,9 +170,9 @@ func (un *unparser) visitCallBinary(expr *exprpb.Expr) error { return un.visitMaybeNested(rhs, rhsParen) } -func (un *unparser) visitCallConditional(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitCallConditional(expr ast.Expr) error { + c := expr.AsCall() + args := c.Args() // add parens if operand is a conditional itself. 
nested := isSamePrecedence(operators.Conditional, args[0]) || isComplexOperator(args[0]) @@ -196,13 +198,13 @@ func (un *unparser) visitCallConditional(expr *exprpb.Expr) error { return un.visitMaybeNested(args[2], nested) } -func (un *unparser) visitCallFunc(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() - if c.GetTarget() != nil { - nested := isBinaryOrTernaryOperator(c.GetTarget()) - err := un.visitMaybeNested(c.GetTarget(), nested) +func (un *unparser) visitCallFunc(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() + if c.IsMemberFunction() { + nested := isBinaryOrTernaryOperator(c.Target()) + err := un.visitMaybeNested(c.Target(), nested) if err != nil { return err } @@ -223,17 +225,17 @@ func (un *unparser) visitCallFunc(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitCallIndex(expr *exprpb.Expr) error { +func (un *unparser) visitCallIndex(expr ast.Expr) error { return un.visitCallIndexInternal(expr, "[") } -func (un *unparser) visitCallOptIndex(expr *exprpb.Expr) error { +func (un *unparser) visitCallOptIndex(expr ast.Expr) error { return un.visitCallIndexInternal(expr, "[?") } -func (un *unparser) visitCallIndexInternal(expr *exprpb.Expr, op string) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitCallIndexInternal(expr ast.Expr, op string) error { + c := expr.AsCall() + args := c.Args() nested := isBinaryOrTernaryOperator(args[0]) err := un.visitMaybeNested(args[0], nested) if err != nil { @@ -248,10 +250,10 @@ func (un *unparser) visitCallIndexInternal(expr *exprpb.Expr, op string) error { return nil } -func (un *unparser) visitCallUnary(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() +func (un *unparser) visitCallUnary(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() unmangled, found := operators.FindReverse(fun) if !found { return fmt.Errorf("cannot unmangle operator: %s", fun) @@ -261,35 +263,34 @@ func (un *unparser) visitCallUnary(expr *exprpb.Expr) error { return un.visitMaybeNested(args[0], nested) } -func (un *unparser) visitConst(expr *exprpb.Expr) error { - c := expr.GetConstExpr() - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - un.str.WriteString(strconv.FormatBool(c.GetBoolValue())) - case *exprpb.Constant_BytesValue: +func (un *unparser) visitConst(expr ast.Expr) error { + val := expr.AsLiteral() + switch val := val.(type) { + case types.Bool: + un.str.WriteString(strconv.FormatBool(bool(val))) + case types.Bytes: // bytes constants are surrounded with b"" - b := c.GetBytesValue() un.str.WriteString(`b"`) - un.str.WriteString(bytesToOctets(b)) + un.str.WriteString(bytesToOctets([]byte(val))) un.str.WriteString(`"`) - case *exprpb.Constant_DoubleValue: + case types.Double: // represent the float using the minimum required digits - d := strconv.FormatFloat(c.GetDoubleValue(), 'g', -1, 64) + d := strconv.FormatFloat(float64(val), 'g', -1, 64) un.str.WriteString(d) if !strings.Contains(d, ".") { un.str.WriteString(".0") } - case *exprpb.Constant_Int64Value: - i := strconv.FormatInt(c.GetInt64Value(), 10) + case types.Int: + i := strconv.FormatInt(int64(val), 10) un.str.WriteString(i) - case *exprpb.Constant_NullValue: + case types.Null: un.str.WriteString("null") - case *exprpb.Constant_StringValue: + case types.String: // strings will be double quoted with quotes escaped. 
- un.str.WriteString(strconv.Quote(c.GetStringValue())) - case *exprpb.Constant_Uint64Value: + un.str.WriteString(strconv.Quote(string(val))) + case types.Uint: // uint literals have a 'u' suffix. - ui := strconv.FormatUint(c.GetUint64Value(), 10) + ui := strconv.FormatUint(uint64(val), 10) un.str.WriteString(ui) un.str.WriteString("u") default: @@ -298,16 +299,16 @@ func (un *unparser) visitConst(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitIdent(expr *exprpb.Expr) error { - un.str.WriteString(expr.GetIdentExpr().GetName()) +func (un *unparser) visitIdent(expr ast.Expr) error { + un.str.WriteString(expr.AsIdent()) return nil } -func (un *unparser) visitList(expr *exprpb.Expr) error { - l := expr.GetListExpr() - elems := l.GetElements() +func (un *unparser) visitList(expr ast.Expr) error { + l := expr.AsList() + elems := l.Elements() optIndices := make(map[int]bool, len(elems)) - for _, idx := range l.GetOptionalIndices() { + for _, idx := range l.OptionalIndices() { optIndices[int(idx)] = true } un.str.WriteString("[") @@ -327,20 +328,20 @@ func (un *unparser) visitList(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitOptSelect(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitOptSelect(expr ast.Expr) error { + c := expr.AsCall() + args := c.Args() operand := args[0] - field := args[1].GetConstExpr().GetStringValue() - return un.visitSelectInternal(operand, false, ".?", field) + field := args[1].AsLiteral().(types.String) + return un.visitSelectInternal(operand, false, ".?", string(field)) } -func (un *unparser) visitSelect(expr *exprpb.Expr) error { - sel := expr.GetSelectExpr() - return un.visitSelectInternal(sel.GetOperand(), sel.GetTestOnly(), ".", sel.GetField()) +func (un *unparser) visitSelect(expr ast.Expr) error { + sel := expr.AsSelect() + return un.visitSelectInternal(sel.Operand(), sel.IsTestOnly(), ".", sel.FieldName()) } -func (un *unparser) visitSelectInternal(operand *exprpb.Expr, testOnly bool, op string, field string) error { +func (un *unparser) visitSelectInternal(operand ast.Expr, testOnly bool, op string, field string) error { // handle the case when the select expression was generated by the has() macro. if testOnly { un.str.WriteString("has(") @@ -358,34 +359,25 @@ func (un *unparser) visitSelectInternal(operand *exprpb.Expr, testOnly bool, op return nil } -func (un *unparser) visitStruct(expr *exprpb.Expr) error { - s := expr.GetStructExpr() - // If the message name is non-empty, then this should be treated as message construction. - if s.GetMessageName() != "" { - return un.visitStructMsg(expr) - } - // Otherwise, build a map. 
- return un.visitStructMap(expr) -} - -func (un *unparser) visitStructMsg(expr *exprpb.Expr) error { - m := expr.GetStructExpr() - entries := m.GetEntries() - un.str.WriteString(m.GetMessageName()) +func (un *unparser) visitStructMsg(expr ast.Expr) error { + m := expr.AsStruct() + fields := m.Fields() + un.str.WriteString(m.TypeName()) un.str.WriteString("{") - for i, entry := range entries { - f := entry.GetFieldKey() - if entry.GetOptionalEntry() { + for i, f := range fields { + field := f.AsStructField() + f := field.Name() + if field.IsOptional() { un.str.WriteString("?") } un.str.WriteString(f) un.str.WriteString(": ") - v := entry.GetValue() + v := field.Value() err := un.visit(v) if err != nil { return err } - if i < len(entries)-1 { + if i < len(fields)-1 { un.str.WriteString(", ") } } @@ -393,13 +385,14 @@ func (un *unparser) visitStructMsg(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitStructMap(expr *exprpb.Expr) error { - m := expr.GetStructExpr() - entries := m.GetEntries() +func (un *unparser) visitStructMap(expr ast.Expr) error { + m := expr.AsMap() + entries := m.Entries() un.str.WriteString("{") - for i, entry := range entries { - k := entry.GetMapKey() - if entry.GetOptionalEntry() { + for i, e := range entries { + entry := e.AsMapEntry() + k := entry.Key() + if entry.IsOptional() { un.str.WriteString("?") } err := un.visit(k) @@ -407,7 +400,7 @@ func (un *unparser) visitStructMap(expr *exprpb.Expr) error { return err } un.str.WriteString(": ") - v := entry.GetValue() + v := entry.Value() err = un.visit(v) if err != nil { return err @@ -420,16 +413,15 @@ func (un *unparser) visitStructMap(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitMaybeMacroCall(expr *exprpb.Expr) (bool, error) { - macroCalls := un.info.GetMacroCalls() - call, found := macroCalls[expr.GetId()] +func (un *unparser) visitMaybeMacroCall(expr ast.Expr) (bool, error) { + call, found := un.info.GetMacroCall(expr.ID()) if !found { return false, nil } return true, un.visit(call) } -func (un *unparser) visitMaybeNested(expr *exprpb.Expr, nested bool) error { +func (un *unparser) visitMaybeNested(expr ast.Expr, nested bool) error { if nested { un.str.WriteString("(") } @@ -453,12 +445,12 @@ func isLeftRecursive(op string) bool { // precedence of the (possible) operation represented in the input Expr. // // If the expr is not a Call, the result is false. -func isSamePrecedence(op string, expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil { +func isSamePrecedence(op string, expr ast.Expr) bool { + if expr.Kind() != ast.CallKind { return false } - c := expr.GetCallExpr() - other := c.GetFunction() + c := expr.AsCall() + other := c.FunctionName() return operators.Precedence(op) == operators.Precedence(other) } @@ -466,16 +458,16 @@ func isSamePrecedence(op string, expr *exprpb.Expr) bool { // than the (possible) operation represented in the input Expr. // // If the expr is not a Call, the result is false. -func isLowerPrecedence(op string, expr *exprpb.Expr) bool { - c := expr.GetCallExpr() - other := c.GetFunction() +func isLowerPrecedence(op string, expr ast.Expr) bool { + c := expr.AsCall() + other := c.FunctionName() return operators.Precedence(op) < operators.Precedence(other) } // Indicates whether the expr is a complex operator, i.e., a call expression // with 2 or more arguments. 
-func isComplexOperator(expr *exprpb.Expr) bool { - if expr.GetCallExpr() != nil && len(expr.GetCallExpr().GetArgs()) >= 2 { +func isComplexOperator(expr ast.Expr) bool { + if expr.Kind() == ast.CallKind && len(expr.AsCall().Args()) >= 2 { return true } return false @@ -484,19 +476,19 @@ func isComplexOperator(expr *exprpb.Expr) bool { // Indicates whether it is a complex operation compared to another. // expr is *not* considered complex if it is not a call expression or has // less than two arguments, or if it has a higher precedence than op. -func isComplexOperatorWithRespectTo(op string, expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil || len(expr.GetCallExpr().GetArgs()) < 2 { +func isComplexOperatorWithRespectTo(op string, expr ast.Expr) bool { + if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 { return false } return isLowerPrecedence(op, expr) } // Indicate whether this is a binary or ternary operator. -func isBinaryOrTernaryOperator(expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil || len(expr.GetCallExpr().GetArgs()) < 2 { +func isBinaryOrTernaryOperator(expr ast.Expr) bool { + if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 { return false } - _, isBinaryOp := operators.FindReverseBinaryOperator(expr.GetCallExpr().GetFunction()) + _, isBinaryOp := operators.FindReverseBinaryOperator(expr.AsCall().FunctionName()) return isBinaryOp || isSamePrecedence(operators.Conditional, expr) } diff --git a/vendor/github.com/google/gnostic-models/LICENSE b/vendor/github.com/google/gnostic-models/LICENSE new file mode 100644 index 000000000..6b0b1270f --- /dev/null +++ b/vendor/github.com/google/gnostic-models/LICENSE @@ -0,0 +1,203 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/github.com/google/gnostic-models/compiler/README.md b/vendor/github.com/google/gnostic-models/compiler/README.md new file mode 100644 index 000000000..ee9783d23 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/README.md @@ -0,0 +1,4 @@ +# Compiler support code + +This directory contains compiler support code used by Gnostic and Gnostic +extensions. diff --git a/vendor/github.com/google/gnostic-models/compiler/context.go b/vendor/github.com/google/gnostic-models/compiler/context.go new file mode 100644 index 000000000..1bfe96121 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/context.go @@ -0,0 +1,49 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + yaml "gopkg.in/yaml.v3" +) + +// Context contains state of the compiler as it traverses a document. +type Context struct { + Parent *Context + Name string + Node *yaml.Node + ExtensionHandlers *[]ExtensionHandler +} + +// NewContextWithExtensions returns a new object representing the compiler state +func NewContextWithExtensions(name string, node *yaml.Node, parent *Context, extensionHandlers *[]ExtensionHandler) *Context { + return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: extensionHandlers} +} + +// NewContext returns a new object representing the compiler state +func NewContext(name string, node *yaml.Node, parent *Context) *Context { + if parent != nil { + return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: parent.ExtensionHandlers} + } + return &Context{Name: name, Parent: parent, ExtensionHandlers: nil} +} + +// Description returns a text description of the compiler state +func (context *Context) Description() string { + name := context.Name + if context.Parent != nil { + name = context.Parent.Description() + "." + name + } + return name +} diff --git a/vendor/github.com/google/gnostic-models/compiler/error.go b/vendor/github.com/google/gnostic-models/compiler/error.go new file mode 100644 index 000000000..6f40515d6 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/error.go @@ -0,0 +1,70 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import "fmt" + +// Error represents compiler errors and their location in the document. +type Error struct { + Context *Context + Message string +} + +// NewError creates an Error. +func NewError(context *Context, message string) *Error { + return &Error{Context: context, Message: message} +} + +func (err *Error) locationDescription() string { + if err.Context.Node != nil { + return fmt.Sprintf("[%d,%d] %s", err.Context.Node.Line, err.Context.Node.Column, err.Context.Description()) + } + return err.Context.Description() +} + +// Error returns the string value of an Error. +func (err *Error) Error() string { + if err.Context == nil { + return err.Message + } + return err.locationDescription() + " " + err.Message +} + +// ErrorGroup is a container for groups of Error values. +type ErrorGroup struct { + Errors []error +} + +// NewErrorGroupOrNil returns a new ErrorGroup for a slice of errors or nil if the slice is empty. 
+func NewErrorGroupOrNil(errors []error) error { + if len(errors) == 0 { + return nil + } else if len(errors) == 1 { + return errors[0] + } else { + return &ErrorGroup{Errors: errors} + } +} + +func (group *ErrorGroup) Error() string { + result := "" + for i, err := range group.Errors { + if i > 0 { + result += "\n" + } + result += err.Error() + } + return result +} diff --git a/vendor/github.com/google/gnostic-models/compiler/extensions.go b/vendor/github.com/google/gnostic-models/compiler/extensions.go new file mode 100644 index 000000000..16ae66faa --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/extensions.go @@ -0,0 +1,86 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "bytes" + "fmt" + "os/exec" + "strings" + + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/anypb" + yaml "gopkg.in/yaml.v3" + + extensions "github.com/google/gnostic-models/extensions" +) + +// ExtensionHandler describes a binary that is called by the compiler to handle specification extensions. +type ExtensionHandler struct { + Name string +} + +// CallExtension calls a binary extension handler. +func CallExtension(context *Context, in *yaml.Node, extensionName string) (handled bool, response *anypb.Any, err error) { + if context == nil || context.ExtensionHandlers == nil { + return false, nil, nil + } + handled = false + for _, handler := range *(context.ExtensionHandlers) { + response, err = handler.handle(in, extensionName) + if response == nil { + continue + } else { + handled = true + break + } + } + return handled, response, err +} + +func (extensionHandlers *ExtensionHandler) handle(in *yaml.Node, extensionName string) (*anypb.Any, error) { + if extensionHandlers.Name != "" { + yamlData, _ := yaml.Marshal(in) + request := &extensions.ExtensionHandlerRequest{ + CompilerVersion: &extensions.Version{ + Major: 0, + Minor: 1, + Patch: 0, + }, + Wrapper: &extensions.Wrapper{ + Version: "unknown", // TODO: set this to the type/version of spec being parsed. + Yaml: string(yamlData), + ExtensionName: extensionName, + }, + } + requestBytes, _ := proto.Marshal(request) + cmd := exec.Command(extensionHandlers.Name) + cmd.Stdin = bytes.NewReader(requestBytes) + output, err := cmd.Output() + if err != nil { + return nil, err + } + response := &extensions.ExtensionHandlerResponse{} + err = proto.Unmarshal(output, response) + if err != nil || !response.Handled { + return nil, err + } + if len(response.Errors) != 0 { + return nil, fmt.Errorf("Errors when parsing: %+v for field %s by vendor extension handler %s. 
Details %+v", in, extensionName, extensionHandlers.Name, strings.Join(response.Errors, ",")) + } + return response.Value, nil + } + return nil, nil +} diff --git a/vendor/github.com/google/gnostic-models/compiler/helpers.go b/vendor/github.com/google/gnostic-models/compiler/helpers.go new file mode 100644 index 000000000..975d65e8f --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/helpers.go @@ -0,0 +1,397 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "regexp" + "sort" + "strconv" + + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/jsonschema" +) + +// compiler helper functions, usually called from generated code + +// UnpackMap gets a *yaml.Node if possible. +func UnpackMap(in *yaml.Node) (*yaml.Node, bool) { + if in == nil { + return nil, false + } + return in, true +} + +// SortedKeysForMap returns the sorted keys of a yamlv2.MapSlice. +func SortedKeysForMap(m *yaml.Node) []string { + keys := make([]string, 0) + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + keys = append(keys, m.Content[i].Value) + } + } + sort.Strings(keys) + return keys +} + +// MapHasKey returns true if a yamlv2.MapSlice contains a specified key. +func MapHasKey(m *yaml.Node, key string) bool { + if m == nil { + return false + } + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + itemKey := m.Content[i].Value + if key == itemKey { + return true + } + } + } + return false +} + +// MapValueForKey gets the value of a map value for a specified key. +func MapValueForKey(m *yaml.Node, key string) *yaml.Node { + if m == nil { + return nil + } + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + itemKey := m.Content[i].Value + if key == itemKey { + return m.Content[i+1] + } + } + } + return nil +} + +// ConvertInterfaceArrayToStringArray converts an array of interfaces to an array of strings, if possible. +func ConvertInterfaceArrayToStringArray(interfaceArray []interface{}) []string { + stringArray := make([]string, 0) + for _, item := range interfaceArray { + v, ok := item.(string) + if ok { + stringArray = append(stringArray, v) + } + } + return stringArray +} + +// SequenceNodeForNode returns a node if it is a SequenceNode. +func SequenceNodeForNode(node *yaml.Node) (*yaml.Node, bool) { + if node.Kind != yaml.SequenceNode { + return nil, false + } + return node, true +} + +// BoolForScalarNode returns the bool value of a node. +func BoolForScalarNode(node *yaml.Node) (bool, bool) { + if node == nil { + return false, false + } + if node.Kind == yaml.DocumentNode { + return BoolForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return false, false + } + if node.Tag != "!!bool" { + return false, false + } + v, err := strconv.ParseBool(node.Value) + if err != nil { + return false, false + } + return v, true +} + +// IntForScalarNode returns the integer value of a node. 
+func IntForScalarNode(node *yaml.Node) (int64, bool) { + if node == nil { + return 0, false + } + if node.Kind == yaml.DocumentNode { + return IntForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return 0, false + } + if node.Tag != "!!int" { + return 0, false + } + v, err := strconv.ParseInt(node.Value, 10, 64) + if err != nil { + return 0, false + } + return v, true +} + +// FloatForScalarNode returns the float value of a node. +func FloatForScalarNode(node *yaml.Node) (float64, bool) { + if node == nil { + return 0.0, false + } + if node.Kind == yaml.DocumentNode { + return FloatForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return 0.0, false + } + if (node.Tag != "!!int") && (node.Tag != "!!float") { + return 0.0, false + } + v, err := strconv.ParseFloat(node.Value, 64) + if err != nil { + return 0.0, false + } + return v, true +} + +// StringForScalarNode returns the string value of a node. +func StringForScalarNode(node *yaml.Node) (string, bool) { + if node == nil { + return "", false + } + if node.Kind == yaml.DocumentNode { + return StringForScalarNode(node.Content[0]) + } + switch node.Kind { + case yaml.ScalarNode: + switch node.Tag { + case "!!int": + return node.Value, true + case "!!str": + return node.Value, true + case "!!timestamp": + return node.Value, true + case "!!null": + return "", true + default: + return "", false + } + default: + return "", false + } +} + +// StringArrayForSequenceNode converts a sequence node to an array of strings, if possible. +func StringArrayForSequenceNode(node *yaml.Node) []string { + stringArray := make([]string, 0) + for _, item := range node.Content { + v, ok := StringForScalarNode(item) + if ok { + stringArray = append(stringArray, v) + } + } + return stringArray +} + +// MissingKeysInMap identifies which keys from a list of required keys are not in a map. +func MissingKeysInMap(m *yaml.Node, requiredKeys []string) []string { + missingKeys := make([]string, 0) + for _, k := range requiredKeys { + if !MapHasKey(m, k) { + missingKeys = append(missingKeys, k) + } + } + return missingKeys +} + +// InvalidKeysInMap returns keys in a map that don't match a list of allowed keys and patterns. +func InvalidKeysInMap(m *yaml.Node, allowedKeys []string, allowedPatterns []*regexp.Regexp) []string { + invalidKeys := make([]string, 0) + if m == nil || m.Kind != yaml.MappingNode { + return invalidKeys + } + for i := 0; i < len(m.Content); i += 2 { + key := m.Content[i].Value + found := false + // does the key match an allowed key? + for _, allowedKey := range allowedKeys { + if key == allowedKey { + found = true + break + } + } + if !found { + // does the key match an allowed pattern? + for _, allowedPattern := range allowedPatterns { + if allowedPattern.MatchString(key) { + found = true + break + } + } + if !found { + invalidKeys = append(invalidKeys, key) + } + } + } + return invalidKeys +} + +// NewNullNode creates a new Null node. +func NewNullNode() *yaml.Node { + node := &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!null", + } + return node +} + +// NewMappingNode creates a new Mapping node. +func NewMappingNode() *yaml.Node { + return &yaml.Node{ + Kind: yaml.MappingNode, + Content: make([]*yaml.Node, 0), + } +} + +// NewSequenceNode creates a new Sequence node. +func NewSequenceNode() *yaml.Node { + node := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: make([]*yaml.Node, 0), + } + return node +} + +// NewScalarNodeForString creates a new node to hold a string. 
+func NewScalarNodeForString(s string) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: s, + } +} + +// NewSequenceNodeForStringArray creates a new node to hold an array of strings. +func NewSequenceNodeForStringArray(strings []string) *yaml.Node { + node := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: make([]*yaml.Node, 0), + } + for _, s := range strings { + node.Content = append(node.Content, NewScalarNodeForString(s)) + } + return node +} + +// NewScalarNodeForBool creates a new node to hold a bool. +func NewScalarNodeForBool(b bool) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!bool", + Value: fmt.Sprintf("%t", b), + } +} + +// NewScalarNodeForFloat creates a new node to hold a float. +func NewScalarNodeForFloat(f float64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!float", + Value: fmt.Sprintf("%g", f), + } +} + +// NewScalarNodeForInt creates a new node to hold an integer. +func NewScalarNodeForInt(i int64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!int", + Value: fmt.Sprintf("%d", i), + } +} + +// PluralProperties returns the string "properties" pluralized. +func PluralProperties(count int) string { + if count == 1 { + return "property" + } + return "properties" +} + +// StringArrayContainsValue returns true if a string array contains a specified value. +func StringArrayContainsValue(array []string, value string) bool { + for _, item := range array { + if item == value { + return true + } + } + return false +} + +// StringArrayContainsValues returns true if a string array contains all of a list of specified values. +func StringArrayContainsValues(array []string, values []string) bool { + for _, value := range values { + if !StringArrayContainsValue(array, value) { + return false + } + } + return true +} + +// StringValue returns the string value of an item. +func StringValue(item interface{}) (value string, ok bool) { + value, ok = item.(string) + if ok { + return value, ok + } + intValue, ok := item.(int) + if ok { + return strconv.Itoa(intValue), true + } + return "", false +} + +// Description returns a human-readable represention of an item. +func Description(item interface{}) string { + value, ok := item.(*yaml.Node) + if ok { + return jsonschema.Render(value) + } + return fmt.Sprintf("%+v", item) +} + +// Display returns a description of a node for use in error messages. +func Display(node *yaml.Node) string { + switch node.Kind { + case yaml.ScalarNode: + switch node.Tag { + case "!!str": + return fmt.Sprintf("%s (string)", node.Value) + } + } + return fmt.Sprintf("%+v (%T)", node, node) +} + +// Marshal creates a yaml version of a structure in our preferred style +func Marshal(in *yaml.Node) []byte { + clearStyle(in) + //bytes, _ := yaml.Marshal(&yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{in}}) + bytes, _ := yaml.Marshal(in) + + return bytes +} + +func clearStyle(node *yaml.Node) { + node.Style = 0 + for _, c := range node.Content { + clearStyle(c) + } +} diff --git a/vendor/github.com/google/gnostic-models/compiler/main.go b/vendor/github.com/google/gnostic-models/compiler/main.go new file mode 100644 index 000000000..ce9fcc456 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/main.go @@ -0,0 +1,16 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package compiler provides support functions to generated compiler code. +package compiler diff --git a/vendor/github.com/google/gnostic-models/compiler/reader.go b/vendor/github.com/google/gnostic-models/compiler/reader.go new file mode 100644 index 000000000..be0e8b40c --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/reader.go @@ -0,0 +1,307 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "io/ioutil" + "log" + "net/http" + "net/url" + "path/filepath" + "strings" + "sync" + + yaml "gopkg.in/yaml.v3" +) + +var verboseReader = false + +var fileCache map[string][]byte +var infoCache map[string]*yaml.Node + +var fileCacheEnable = true +var infoCacheEnable = true + +// These locks are used to synchronize accesses to the fileCache and infoCache +// maps (above). They are global state and can throw thread-related errors +// when modified from separate goroutines. The general strategy is to protect +// all public functions in this file with mutex Lock() calls. As a result, to +// avoid deadlock, these public functions should not call other public +// functions, so some public functions have private equivalents. +// In the future, we might consider replacing the maps with sync.Map and +// eliminating these mutexes. +var fileCacheMutex sync.Mutex +var infoCacheMutex sync.Mutex + +func initializeFileCache() { + if fileCache == nil { + fileCache = make(map[string][]byte, 0) + } +} + +func initializeInfoCache() { + if infoCache == nil { + infoCache = make(map[string]*yaml.Node, 0) + } +} + +// EnableFileCache turns on file caching. +func EnableFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCacheEnable = true +} + +// EnableInfoCache turns on parsed info caching. +func EnableInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCacheEnable = true +} + +// DisableFileCache turns off file caching. +func DisableFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCacheEnable = false +} + +// DisableInfoCache turns off parsed info caching. +func DisableInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCacheEnable = false +} + +// RemoveFromFileCache removes an entry from the file cache. +func RemoveFromFileCache(fileurl string) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + if !fileCacheEnable { + return + } + initializeFileCache() + delete(fileCache, fileurl) +} + +// RemoveFromInfoCache removes an entry from the info cache. 
+func RemoveFromInfoCache(filename string) { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + if !infoCacheEnable { + return + } + initializeInfoCache() + delete(infoCache, filename) +} + +// GetInfoCache returns the info cache map. +func GetInfoCache() map[string]*yaml.Node { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + if infoCache == nil { + initializeInfoCache() + } + return infoCache +} + +// ClearFileCache clears the file cache. +func ClearFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCache = make(map[string][]byte, 0) +} + +// ClearInfoCache clears the info cache. +func ClearInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCache = make(map[string]*yaml.Node) +} + +// ClearCaches clears all caches. +func ClearCaches() { + ClearFileCache() + ClearInfoCache() +} + +// FetchFile gets a specified file from the local filesystem or a remote location. +func FetchFile(fileurl string) ([]byte, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + return fetchFile(fileurl) +} + +func fetchFile(fileurl string) ([]byte, error) { + var bytes []byte + initializeFileCache() + if fileCacheEnable { + bytes, ok := fileCache[fileurl] + if ok { + if verboseReader { + log.Printf("Cache hit %s", fileurl) + } + return bytes, nil + } + if verboseReader { + log.Printf("Fetching %s", fileurl) + } + } + response, err := http.Get(fileurl) + if err != nil { + return nil, err + } + defer response.Body.Close() + if response.StatusCode != 200 { + return nil, fmt.Errorf("Error downloading %s: %s", fileurl, response.Status) + } + bytes, err = ioutil.ReadAll(response.Body) + if fileCacheEnable && err == nil { + fileCache[fileurl] = bytes + } + return bytes, err +} + +// ReadBytesForFile reads the bytes of a file. +func ReadBytesForFile(filename string) ([]byte, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + return readBytesForFile(filename) +} + +func readBytesForFile(filename string) ([]byte, error) { + // is the filename a url? + fileurl, _ := url.Parse(filename) + if fileurl.Scheme != "" { + // yes, fetch it + bytes, err := fetchFile(filename) + if err != nil { + return nil, err + } + return bytes, nil + } + // no, it's a local filename + bytes, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + return bytes, nil +} + +// ReadInfoFromBytes unmarshals a file as a *yaml.Node. +func ReadInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + return readInfoFromBytes(filename, bytes) +} + +func readInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { + initializeInfoCache() + if infoCacheEnable { + cachedInfo, ok := infoCache[filename] + if ok { + if verboseReader { + log.Printf("Cache hit info for file %s", filename) + } + return cachedInfo, nil + } + if verboseReader { + log.Printf("Reading info for file %s", filename) + } + } + var info yaml.Node + err := yaml.Unmarshal(bytes, &info) + if err != nil { + return nil, err + } + if infoCacheEnable && len(filename) > 0 { + infoCache[filename] = &info + } + return &info, nil +} + +// ReadInfoForRef reads a file and return the fragment needed to resolve a $ref. 
+func ReadInfoForRef(basefile string, ref string) (*yaml.Node, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + initializeInfoCache() + if infoCacheEnable { + info, ok := infoCache[ref] + if ok { + if verboseReader { + log.Printf("Cache hit for ref %s#%s", basefile, ref) + } + return info, nil + } + if verboseReader { + log.Printf("Reading info for ref %s#%s", basefile, ref) + } + } + basedir, _ := filepath.Split(basefile) + parts := strings.Split(ref, "#") + var filename string + if parts[0] != "" { + filename = parts[0] + if _, err := url.ParseRequestURI(parts[0]); err != nil { + // It is not an URL, so the file is local + filename = basedir + parts[0] + } + } else { + filename = basefile + } + bytes, err := readBytesForFile(filename) + if err != nil { + return nil, err + } + info, err := readInfoFromBytes(filename, bytes) + if info != nil && info.Kind == yaml.DocumentNode { + info = info.Content[0] + } + if err != nil { + log.Printf("File error: %v\n", err) + } else { + if info == nil { + return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) + } + if len(parts) > 1 { + path := strings.Split(parts[1], "/") + for i, key := range path { + if i > 0 { + m := info + if true { + found := false + for i := 0; i < len(m.Content); i += 2 { + if m.Content[i].Value == key { + info = m.Content[i+1] + found = true + } + } + if !found { + infoCache[ref] = nil + return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) + } + } + } + } + } + } + if infoCacheEnable { + infoCache[ref] = info + } + return info, nil +} diff --git a/vendor/github.com/google/gnostic/extensions/README.md b/vendor/github.com/google/gnostic-models/extensions/README.md similarity index 100% rename from vendor/github.com/google/gnostic/extensions/README.md rename to vendor/github.com/google/gnostic-models/extensions/README.md diff --git a/vendor/github.com/google/gnostic/extensions/extension.pb.go b/vendor/github.com/google/gnostic-models/extensions/extension.pb.go similarity index 99% rename from vendor/github.com/google/gnostic/extensions/extension.pb.go rename to vendor/github.com/google/gnostic-models/extensions/extension.pb.go index a6a4ccca6..a71df8abe 100644 --- a/vendor/github.com/google/gnostic/extensions/extension.pb.go +++ b/vendor/github.com/google/gnostic-models/extensions/extension.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.26.0 -// protoc v3.18.1 +// protoc-gen-go v1.27.1 +// protoc v3.19.3 // source: extensions/extension.proto package gnostic_extension_v1 diff --git a/vendor/github.com/google/gnostic/extensions/extension.proto b/vendor/github.com/google/gnostic-models/extensions/extension.proto similarity index 100% rename from vendor/github.com/google/gnostic/extensions/extension.proto rename to vendor/github.com/google/gnostic-models/extensions/extension.proto diff --git a/vendor/github.com/google/gnostic/extensions/extensions.go b/vendor/github.com/google/gnostic-models/extensions/extensions.go similarity index 93% rename from vendor/github.com/google/gnostic/extensions/extensions.go rename to vendor/github.com/google/gnostic-models/extensions/extensions.go index ec8afd009..0768163e5 100644 --- a/vendor/github.com/google/gnostic/extensions/extensions.go +++ b/vendor/github.com/google/gnostic-models/extensions/extensions.go @@ -19,8 +19,8 @@ import ( "log" "os" - "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/anypb" ) type extensionHandler func(name string, yamlInput string) (bool, proto.Message, error) @@ -54,7 +54,7 @@ func Main(handler extensionHandler) { response.Errors = append(response.Errors, err.Error()) } else if handled { response.Handled = true - response.Value, err = ptypes.MarshalAny(output) + response.Value, err = anypb.New(output) if err != nil { response.Errors = append(response.Errors, err.Error()) } diff --git a/vendor/github.com/google/gnostic/jsonschema/README.md b/vendor/github.com/google/gnostic-models/jsonschema/README.md similarity index 100% rename from vendor/github.com/google/gnostic/jsonschema/README.md rename to vendor/github.com/google/gnostic-models/jsonschema/README.md diff --git a/vendor/github.com/google/gnostic/jsonschema/base.go b/vendor/github.com/google/gnostic-models/jsonschema/base.go similarity index 90% rename from vendor/github.com/google/gnostic/jsonschema/base.go rename to vendor/github.com/google/gnostic-models/jsonschema/base.go index 0af8b148b..5fcc4885a 100644 --- a/vendor/github.com/google/gnostic/jsonschema/base.go +++ b/vendor/github.com/google/gnostic-models/jsonschema/base.go @@ -1,3 +1,16 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. // THIS FILE IS AUTOMATICALLY GENERATED. 
@@ -81,4 +94,4 @@ YXkiIH0sCiAgICAgICAgImFueU9mIjogeyAiJHJlZiI6ICIjL2RlZmluaXRpb25zL3NjaGVtYUFycmF5 IiB9LAogICAgICAgICJvbmVPZiI6IHsgIiRyZWYiOiAiIy9kZWZpbml0aW9ucy9zY2hlbWFBcnJheSIg fSwKICAgICAgICAibm90IjogeyAiJHJlZiI6ICIjIiB9CiAgICB9LAogICAgImRlcGVuZGVuY2llcyI6 IHsKICAgICAgICAiZXhjbHVzaXZlTWF4aW11bSI6IFsgIm1heGltdW0iIF0sCiAgICAgICAgImV4Y2x1 -c2l2ZU1pbmltdW0iOiBbICJtaW5pbXVtIiBdCiAgICB9LAogICAgImRlZmF1bHQiOiB7fQp9Cg==`)} \ No newline at end of file +c2l2ZU1pbmltdW0iOiBbICJtaW5pbXVtIiBdCiAgICB9LAogICAgImRlZmF1bHQiOiB7fQp9Cg==`)} diff --git a/vendor/github.com/google/gnostic/jsonschema/display.go b/vendor/github.com/google/gnostic-models/jsonschema/display.go similarity index 92% rename from vendor/github.com/google/gnostic/jsonschema/display.go rename to vendor/github.com/google/gnostic-models/jsonschema/display.go index 8677ed49a..028a760a9 100644 --- a/vendor/github.com/google/gnostic/jsonschema/display.go +++ b/vendor/github.com/google/gnostic-models/jsonschema/display.go @@ -46,23 +46,8 @@ func (schema *Schema) describeSchema(indent string) string { if schema.Schema != nil { result += indent + "$schema: " + *(schema.Schema) + "\n" } - if schema.ReadOnly != nil && *schema.ReadOnly { - result += indent + fmt.Sprintf("readOnly: %+v\n", *(schema.ReadOnly)) - } - if schema.WriteOnly != nil && *schema.WriteOnly { - result += indent + fmt.Sprintf("writeOnly: %+v\n", *(schema.WriteOnly)) - } if schema.ID != nil { - switch strings.TrimSuffix(*schema.Schema, "#") { - case "http://json-schema.org/draft-04/schema#": - fallthrough - case "#": - fallthrough - case "": - result += indent + "id: " + *(schema.ID) + "\n" - default: - result += indent + "$id: " + *(schema.ID) + "\n" - } + result += indent + "id: " + *(schema.ID) + "\n" } if schema.MultipleOf != nil { result += indent + fmt.Sprintf("multipleOf: %+v\n", *(schema.MultipleOf)) diff --git a/vendor/github.com/google/gnostic/jsonschema/models.go b/vendor/github.com/google/gnostic-models/jsonschema/models.go similarity index 97% rename from vendor/github.com/google/gnostic/jsonschema/models.go rename to vendor/github.com/google/gnostic-models/jsonschema/models.go index 0d877249a..4781bdc5f 100644 --- a/vendor/github.com/google/gnostic/jsonschema/models.go +++ b/vendor/github.com/google/gnostic-models/jsonschema/models.go @@ -23,11 +23,9 @@ import "gopkg.in/yaml.v3" // All fields are pointers and are nil if the associated values // are not specified. type Schema struct { - Schema *string // $schema - ID *string // id keyword used for $ref resolution scope - Ref *string // $ref, i.e. JSON Pointers - ReadOnly *bool - WriteOnly *bool + Schema *string // $schema + ID *string // id keyword used for $ref resolution scope + Ref *string // $ref, i.e. JSON Pointers // http://json-schema.org/latest/json-schema-validation.html // 5.1. 
Validation keywords for numeric instances (number and integer) diff --git a/vendor/github.com/google/gnostic/jsonschema/operations.go b/vendor/github.com/google/gnostic-models/jsonschema/operations.go similarity index 100% rename from vendor/github.com/google/gnostic/jsonschema/operations.go rename to vendor/github.com/google/gnostic-models/jsonschema/operations.go diff --git a/vendor/github.com/google/gnostic/jsonschema/reader.go b/vendor/github.com/google/gnostic-models/jsonschema/reader.go similarity index 99% rename from vendor/github.com/google/gnostic/jsonschema/reader.go rename to vendor/github.com/google/gnostic-models/jsonschema/reader.go index a909a3412..b8583d466 100644 --- a/vendor/github.com/google/gnostic/jsonschema/reader.go +++ b/vendor/github.com/google/gnostic-models/jsonschema/reader.go @@ -165,6 +165,7 @@ func NewSchemaFromObject(jsonData *yaml.Node) *Schema { default: fmt.Printf("schemaValue: unexpected node %+v\n", jsonData) + return nil } return nil diff --git a/vendor/github.com/google/gnostic/jsonschema/schema.json b/vendor/github.com/google/gnostic-models/jsonschema/schema.json similarity index 100% rename from vendor/github.com/google/gnostic/jsonschema/schema.json rename to vendor/github.com/google/gnostic-models/jsonschema/schema.json diff --git a/vendor/github.com/google/gnostic/jsonschema/writer.go b/vendor/github.com/google/gnostic-models/jsonschema/writer.go similarity index 92% rename from vendor/github.com/google/gnostic/jsonschema/writer.go rename to vendor/github.com/google/gnostic-models/jsonschema/writer.go index 15b1f9050..340dc5f93 100644 --- a/vendor/github.com/google/gnostic/jsonschema/writer.go +++ b/vendor/github.com/google/gnostic-models/jsonschema/writer.go @@ -16,7 +16,6 @@ package jsonschema import ( "fmt" - "strings" "gopkg.in/yaml.v3" ) @@ -34,11 +33,7 @@ func renderMappingNode(node *yaml.Node, indent string) (result string) { value := node.Content[i+1] switch value.Kind { case yaml.ScalarNode: - if value.Tag == "!!bool" { - result += value.Value - } else { - result += "\"" + value.Value + "\"" - } + result += "\"" + value.Value + "\"" case yaml.MappingNode: result += renderMappingNode(value, innerIndent) case yaml.SequenceNode: @@ -63,11 +58,7 @@ func renderSequenceNode(node *yaml.Node, indent string) (result string) { item := node.Content[i] switch item.Kind { case yaml.ScalarNode: - if item.Tag == "!!bool" { - result += innerIndent + item.Value - } else { - result += innerIndent + "\"" + item.Value + "\"" - } + result += innerIndent + "\"" + item.Value + "\"" case yaml.MappingNode: result += innerIndent + renderMappingNode(item, innerIndent) + "" default: @@ -269,26 +260,11 @@ func (schema *Schema) nodeValue() *yaml.Node { content = appendPair(content, "title", nodeForString(*schema.Title)) } if schema.ID != nil { - switch strings.TrimSuffix(*schema.Schema, "#") { - case "http://json-schema.org/draft-04/schema": - fallthrough - case "#": - fallthrough - case "": - content = appendPair(content, "id", nodeForString(*schema.ID)) - default: - content = appendPair(content, "$id", nodeForString(*schema.ID)) - } + content = appendPair(content, "id", nodeForString(*schema.ID)) } if schema.Schema != nil { content = appendPair(content, "$schema", nodeForString(*schema.Schema)) } - if schema.ReadOnly != nil && *schema.ReadOnly { - content = appendPair(content, "readOnly", nodeForBoolean(*schema.ReadOnly)) - } - if schema.WriteOnly != nil && *schema.WriteOnly { - content = appendPair(content, "writeOnly", nodeForBoolean(*schema.WriteOnly)) - } if 
schema.Type != nil { content = appendPair(content, "type", schema.Type.nodeValue()) } diff --git a/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go new file mode 100644 index 000000000..d71fe6d54 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go @@ -0,0 +1,8820 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package openapi_v2 + +import ( + "fmt" + "regexp" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "openapi_v2" +} + +// NewAdditionalPropertiesItem creates an object of type AdditionalPropertiesItem if possible, returning an error if not. +func NewAdditionalPropertiesItem(in *yaml.Node, context *compiler.Context) (*AdditionalPropertiesItem, error) { + errors := make([]error, 0) + x := &AdditionalPropertiesItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", m, context)) + if matchingError == nil { + x.Oneof = &AdditionalPropertiesItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // bool boolean = 2; + boolValue, ok := compiler.BoolForScalarNode(in) + if ok { + x.Oneof = &AdditionalPropertiesItem_Boolean{Boolean: boolValue} + matched = true + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid AdditionalPropertiesItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in *yaml.Node, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes := compiler.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewApiKeySecurity creates an object of type ApiKeySecurity if possible, returning an error if not. 
+func NewApiKeySecurity(in *yaml.Node, context *compiler.Context) (*ApiKeySecurity, error) { + errors := make([]error, 0) + x := &ApiKeySecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [apiKey] + if ok && !compiler.StringArrayContainsValue([]string{"apiKey"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header query] + if ok && !compiler.StringArrayContainsValue([]string{"header", "query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, 
err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBasicAuthenticationSecurity creates an object of type BasicAuthenticationSecurity if possible, returning an error if not. +func NewBasicAuthenticationSecurity(in *yaml.Node, context *compiler.Context) (*BasicAuthenticationSecurity, error) { + errors := make([]error, 0) + x := &BasicAuthenticationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [basic] + if ok && !compiler.StringArrayContainsValue([]string{"basic"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBodyParameter creates an object of type BodyParameter if possible, returning an error if not. 
+func NewBodyParameter(in *yaml.Node, context *compiler.Context) (*BodyParameter, error) { + errors := make([]error, 0) + x := &BodyParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "schema"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "required", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [body] + if ok && !compiler.StringArrayContainsValue([]string{"body"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 4; + v4 := compiler.MapValueForKey(m, "required") + if v4 != nil { + x.Required, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema schema = 5; + v5 := compiler.MapValueForKey(m, "schema") + if v5 != nil { + var err error + x.Schema, err = NewSchema(v5, compiler.NewContext("schema", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = 
append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewContact creates an object of type Contact if possible, returning an error if not. +func NewContact(in *yaml.Node, context *compiler.Context) (*Contact, error) { + errors := make([]error, 0) + x := &Contact{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"email", "name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string email = 3; + v3 := compiler.MapValueForKey(m, "email") + if v3 != nil { + x.Email, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for email: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefault creates an object of type Default if possible, returning an error if not. 
+func NewDefault(in *yaml.Node, context *compiler.Context) (*Default, error) { + errors := make([]error, 0) + x := &Default{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefinitions creates an object of type Definitions if possible, returning an error if not. +func NewDefinitions(in *yaml.Node, context *compiler.Context) (*Definitions, error) { + errors := make([]error, 0) + x := &Definitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. 
+func NewDocument(in *yaml.Node, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"info", "paths", "swagger"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"basePath", "consumes", "definitions", "externalDocs", "host", "info", "parameters", "paths", "produces", "responses", "schemes", "security", "securityDefinitions", "swagger", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string swagger = 1; + v1 := compiler.MapValueForKey(m, "swagger") + if v1 != nil { + x.Swagger, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for swagger: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [2.0] + if ok && !compiler.StringArrayContainsValue([]string{"2.0"}, x.Swagger) { + message := fmt.Sprintf("has unexpected value for swagger: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Info info = 2; + v2 := compiler.MapValueForKey(m, "info") + if v2 != nil { + var err error + x.Info, err = NewInfo(v2, compiler.NewContext("info", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // string host = 3; + v3 := compiler.MapValueForKey(m, "host") + if v3 != nil { + x.Host, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for host: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string base_path = 4; + v4 := compiler.MapValueForKey(m, "basePath") + if v4 != nil { + x.BasePath, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for basePath: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string schemes = 5; + v5 := compiler.MapValueForKey(m, "schemes") + if v5 != nil { + v, ok := compiler.SequenceNodeForNode(v5) + if ok { + x.Schemes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 6; + v6 := compiler.MapValueForKey(m, "consumes") + if v6 != nil { + v, ok := compiler.SequenceNodeForNode(v6) + if ok { + x.Consumes = compiler.StringArrayForSequenceNode(v) + } else { + message := 
fmt.Sprintf("has unexpected value for consumes: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string produces = 7; + v7 := compiler.MapValueForKey(m, "produces") + if v7 != nil { + v, ok := compiler.SequenceNodeForNode(v7) + if ok { + x.Produces = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Paths paths = 8; + v8 := compiler.MapValueForKey(m, "paths") + if v8 != nil { + var err error + x.Paths, err = NewPaths(v8, compiler.NewContext("paths", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Definitions definitions = 9; + v9 := compiler.MapValueForKey(m, "definitions") + if v9 != nil { + var err error + x.Definitions, err = NewDefinitions(v9, compiler.NewContext("definitions", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // ParameterDefinitions parameters = 10; + v10 := compiler.MapValueForKey(m, "parameters") + if v10 != nil { + var err error + x.Parameters, err = NewParameterDefinitions(v10, compiler.NewContext("parameters", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // ResponseDefinitions responses = 11; + v11 := compiler.MapValueForKey(m, "responses") + if v11 != nil { + var err error + x.Responses, err = NewResponseDefinitions(v11, compiler.NewContext("responses", v11, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // SecurityDefinitions security_definitions = 13; + v13 := compiler.MapValueForKey(m, "securityDefinitions") + if v13 != nil { + var err error + x.SecurityDefinitions, err = NewSecurityDefinitions(v13, compiler.NewContext("securityDefinitions", v13, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Tag tags = 14; + v14 := compiler.MapValueForKey(m, "tags") + if v14 != nil { + // repeated Tag + x.Tags = make([]*Tag, 0) + a, ok := compiler.SequenceNodeForNode(v14) + if ok { + for _, item := range a.Content { + y, err := NewTag(item, compiler.NewContext("tags", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Tags = append(x.Tags, y) + } + } + } + // ExternalDocs external_docs = 15; + v15 := compiler.MapValueForKey(m, "externalDocs") + if v15 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v15, compiler.NewContext("externalDocs", v15, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 16; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + 
result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamples creates an object of type Examples if possible, returning an error if not. +func NewExamples(in *yaml.Node, context *compiler.Context) (*Examples, error) { + errors := make([]error, 0) + x := &Examples{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExternalDocs creates an object of type ExternalDocs if possible, returning an error if not. +func NewExternalDocs(in *yaml.Node, context *compiler.Context) (*ExternalDocs, error) { + errors := make([]error, 0) + x := &ExternalDocs{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := 
m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFileSchema creates an object of type FileSchema if possible, returning an error if not. +func NewFileSchema(in *yaml.Node, context *compiler.Context) (*FileSchema, error) { + errors := make([]error, 0) + x := &FileSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"default", "description", "example", "externalDocs", "format", "readOnly", "required", "title", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string format = 1; + v1 := compiler.MapValueForKey(m, "format") + if v1 != nil { + x.Format, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 2; + v2 := compiler.MapValueForKey(m, "title") + if v2 != nil { + x.Title, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 4; + v4 := compiler.MapValueForKey(m, "default") + if v4 != nil { + var err error + x.Default, err = NewAny(v4, compiler.NewContext("default", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string required = 5; + v5 := compiler.MapValueForKey(m, "required") + if v5 != nil { + v, ok := compiler.SequenceNodeForNode(v5) + if ok { + x.Required = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has 
unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [file] + if ok && !compiler.StringArrayContainsValue([]string{"file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 7; + v7 := compiler.MapValueForKey(m, "readOnly") + if v7 != nil { + x.ReadOnly, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 8; + v8 := compiler.MapValueForKey(m, "externalDocs") + if v8 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v8, compiler.NewContext("externalDocs", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 9; + v9 := compiler.MapValueForKey(m, "example") + if v9 != nil { + var err error + x.Example, err = NewAny(v9, compiler.NewContext("example", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFormDataParameterSubSchema creates an object of type FormDataParameterSubSchema if possible, returning an error if not. 
+func NewFormDataParameterSubSchema(in *yaml.Node, context *compiler.Context) (*FormDataParameterSubSchema, error) { + errors := make([]error, 0) + x := &FormDataParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [formData] + if ok && !compiler.StringArrayContainsValue([]string{"formData"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array file] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array", "file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := 
compiler.MapValueForKey(m, "format") + if v7 != nil { + x.Format, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + v, ok := compiler.FloatForScalarNode(v11) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + v, ok := compiler.FloatForScalarNode(v13) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v14) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := compiler.IntForScalarNode(v16) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, 
"pattern") + if v17 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v17) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := compiler.IntForScalarNode(v19) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v20) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v20)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v21) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + v, ok := compiler.FloatForScalarNode(v22) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v22)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeader creates an object of type Header if possible, returning an error if not. 
+func NewHeader(in *yaml.Node, context *compiler.Context) (*Header, error) { + errors := make([]error, 0) + x := &Header{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for 
maximum: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v7 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + v, ok := compiler.FloatForScalarNode(v8) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := compiler.IntForScalarNode(v10) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := compiler.IntForScalarNode(v13) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v15) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v16) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := 
compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + v, ok := compiler.FloatForScalarNode(v17) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 18; + v18 := compiler.MapValueForKey(m, "description") + if v18 != nil { + x.Description, ok = compiler.StringForScalarNode(v18) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 19; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaderParameterSubSchema creates an object of type HeaderParameterSubSchema if possible, returning an error if not. +func NewHeaderParameterSubSchema(in *yaml.Node, context *compiler.Context) (*HeaderParameterSubSchema, error) { + errors := make([]error, 0) + x := &HeaderParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header] + if ok && !compiler.StringArrayContainsValue([]string{"header"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != 
nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, "collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + v, ok := compiler.FloatForScalarNode(v10) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + v, ok := compiler.FloatForScalarNode(v12) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", 
compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v16) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v19) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + v, ok := compiler.FloatForScalarNode(v21) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v21)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := 
compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaders creates an object of type Headers if possible, returning an error if not. +func NewHeaders(in *yaml.Node, context *compiler.Context) (*Headers, error) { + errors := make([]error, 0) + x := &Headers{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedHeader additional_properties = 1; + // MAP: Header + x.AdditionalProperties = make([]*NamedHeader, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedHeader{} + pair.Name = k + var err error + pair.Value, err = NewHeader(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewInfo creates an object of type Info if possible, returning an error if not. +func NewInfo(in *yaml.Node, context *compiler.Context) (*Info, error) { + errors := make([]error, 0) + x := &Info{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"title", "version"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"contact", "description", "license", "termsOfService", "title", "version"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string title = 1; + v1 := compiler.MapValueForKey(m, "title") + if v1 != nil { + x.Title, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string version = 2; + v2 := compiler.MapValueForKey(m, "version") + if v2 != nil { + x.Version, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
string terms_of_service = 4; + v4 := compiler.MapValueForKey(m, "termsOfService") + if v4 != nil { + x.TermsOfService, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for termsOfService: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Contact contact = 5; + v5 := compiler.MapValueForKey(m, "contact") + if v5 != nil { + var err error + x.Contact, err = NewContact(v5, compiler.NewContext("contact", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // License license = 6; + v6 := compiler.MapValueForKey(m, "license") + if v6 != nil { + var err error + x.License, err = NewLicense(v6, compiler.NewContext("license", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewItemsItem creates an object of type ItemsItem if possible, returning an error if not. +func NewItemsItem(in *yaml.Node, context *compiler.Context) (*ItemsItem, error) { + errors := make([]error, 0) + x := &ItemsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value for item array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Schema = make([]*Schema, 0) + y, err := NewSchema(m, compiler.NewContext("", m, context)) + if err != nil { + return nil, err + } + x.Schema = append(x.Schema, y) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewJsonReference creates an object of type JsonReference if possible, returning an error if not. 
+func NewJsonReference(in *yaml.Node, context *compiler.Context) (*JsonReference, error) { + errors := make([]error, 0) + x := &JsonReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"$ref"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLicense creates an object of type License if possible, returning an error if not. +func NewLicense(in *yaml.Node, context *compiler.Context) (*License, error) { + errors := make([]error, 0) + x := &License{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = 
string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedAny creates an object of type NamedAny if possible, returning an error if not. +func NewNamedAny(in *yaml.Node, context *compiler.Context) (*NamedAny, error) { + errors := make([]error, 0) + x := &NamedAny{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewAny(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedHeader creates an object of type NamedHeader if possible, returning an error if not. +func NewNamedHeader(in *yaml.Node, context *compiler.Context) (*NamedHeader, error) { + errors := make([]error, 0) + x := &NamedHeader{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Header value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewHeader(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameter creates an object of type NamedParameter if possible, returning an error if not. 
+func NewNamedParameter(in *yaml.Node, context *compiler.Context) (*NamedParameter, error) { + errors := make([]error, 0) + x := &NamedParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameter value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameter(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedPathItem creates an object of type NamedPathItem if possible, returning an error if not. +func NewNamedPathItem(in *yaml.Node, context *compiler.Context) (*NamedPathItem, error) { + errors := make([]error, 0) + x := &NamedPathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PathItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewPathItem(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponse creates an object of type NamedResponse if possible, returning an error if not. 
+func NewNamedResponse(in *yaml.Node, context *compiler.Context) (*NamedResponse, error) { + errors := make([]error, 0) + x := &NamedResponse{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Response value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponse(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponseValue creates an object of type NamedResponseValue if possible, returning an error if not. +func NewNamedResponseValue(in *yaml.Node, context *compiler.Context) (*NamedResponseValue, error) { + errors := make([]error, 0) + x := &NamedResponseValue{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ResponseValue value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponseValue(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchema creates an object of type NamedSchema if possible, returning an error if not. 
+func NewNamedSchema(in *yaml.Node, context *compiler.Context) (*NamedSchema, error) { + errors := make([]error, 0) + x := &NamedSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchema(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSecurityDefinitionsItem creates an object of type NamedSecurityDefinitionsItem if possible, returning an error if not. +func NewNamedSecurityDefinitionsItem(in *yaml.Node, context *compiler.Context) (*NamedSecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &NamedSecurityDefinitionsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SecurityDefinitionsItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSecurityDefinitionsItem(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedString creates an object of type NamedString if possible, returning an error if not. 
+func NewNamedString(in *yaml.Node, context *compiler.Context) (*NamedString, error) { + errors := make([]error, 0) + x := &NamedString{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + x.Value, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for value: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedStringArray creates an object of type NamedStringArray if possible, returning an error if not. +func NewNamedStringArray(in *yaml.Node, context *compiler.Context) (*NamedStringArray, error) { + errors := make([]error, 0) + x := &NamedStringArray{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // StringArray value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewStringArray(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNonBodyParameter creates an object of type NonBodyParameter if possible, returning an error if not. 
+func NewNonBodyParameter(in *yaml.Node, context *compiler.Context) (*NonBodyParameter, error) { + errors := make([]error, 0) + x := &NonBodyParameter{} + matched := false + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // HeaderParameterSubSchema header_parameter_sub_schema = 1; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewHeaderParameterSubSchema(m, compiler.NewContext("headerParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_HeaderParameterSubSchema{HeaderParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFormDataParameterSubSchema(m, compiler.NewContext("formDataParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_FormDataParameterSubSchema{FormDataParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // QueryParameterSubSchema query_parameter_sub_schema = 3; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewQueryParameterSubSchema(m, compiler.NewContext("queryParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_QueryParameterSubSchema{QueryParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // PathParameterSubSchema path_parameter_sub_schema = 4; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewPathParameterSubSchema(m, compiler.NewContext("pathParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_PathParameterSubSchema{PathParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid NonBodyParameter") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2AccessCodeSecurity creates an object of type Oauth2AccessCodeSecurity if possible, returning an error if not. 
+func NewOauth2AccessCodeSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2AccessCodeSecurity, error) { + errors := make([]error, 0) + x := &Oauth2AccessCodeSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [accessCode] + if ok && !compiler.StringArrayContainsValue([]string{"accessCode"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string token_url = 5; + v5 := compiler.MapValueForKey(m, "tokenUrl") + if v5 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 6; + v6 := compiler.MapValueForKey(m, "description") + if v6 != nil { + x.Description, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ApplicationSecurity creates an object of type Oauth2ApplicationSecurity if possible, returning an error if not. +func NewOauth2ApplicationSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2ApplicationSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ApplicationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [application] + if ok && !compiler.StringArrayContainsValue([]string{"application"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := 
compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ImplicitSecurity creates an object of type Oauth2ImplicitSecurity if possible, returning an error if not. +func NewOauth2ImplicitSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2ImplicitSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ImplicitSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, 
compiler.NewError(context, message)) + } + // check for valid enum values + // [implicit] + if ok && !compiler.StringArrayContainsValue([]string{"implicit"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2PasswordSecurity creates an object of type Oauth2PasswordSecurity if possible, returning an error if not. 
+func NewOauth2PasswordSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2PasswordSecurity, error) { + errors := make([]error, 0) + x := &Oauth2PasswordSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [password] + if ok && !compiler.StringArrayContainsValue([]string{"password"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if 
handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2Scopes creates an object of type Oauth2Scopes if possible, returning an error if not. +func NewOauth2Scopes(in *yaml.Node, context *compiler.Context) (*Oauth2Scopes, error) { + errors := make([]error, 0) + x := &Oauth2Scopes{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedString additional_properties = 1; + // MAP: string + x.AdditionalProperties = make([]*NamedString, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedString{} + pair.Name = k + pair.Value, _ = compiler.StringForScalarNode(v) + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOperation creates an object of type Operation if possible, returning an error if not. +func NewOperation(in *yaml.Node, context *compiler.Context) (*Operation, error) { + errors := make([]error, 0) + x := &Operation{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"responses"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"consumes", "deprecated", "description", "externalDocs", "operationId", "parameters", "produces", "responses", "schemes", "security", "summary", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string tags = 1; + v1 := compiler.MapValueForKey(m, "tags") + if v1 != nil { + v, ok := compiler.SequenceNodeForNode(v1) + if ok { + x.Tags = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for tags: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, 
compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 4; + v4 := compiler.MapValueForKey(m, "externalDocs") + if v4 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v4, compiler.NewContext("externalDocs", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // string operation_id = 5; + v5 := compiler.MapValueForKey(m, "operationId") + if v5 != nil { + x.OperationId, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string produces = 6; + v6 := compiler.MapValueForKey(m, "produces") + if v6 != nil { + v, ok := compiler.SequenceNodeForNode(v6) + if ok { + x.Produces = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 7; + v7 := compiler.MapValueForKey(m, "consumes") + if v7 != nil { + v, ok := compiler.SequenceNodeForNode(v7) + if ok { + x.Consumes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for consumes: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated ParametersItem parameters = 8; + v8 := compiler.MapValueForKey(m, "parameters") + if v8 != nil { + // repeated ParametersItem + x.Parameters = make([]*ParametersItem, 0) + a, ok := compiler.SequenceNodeForNode(v8) + if ok { + for _, item := range a.Content { + y, err := NewParametersItem(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // Responses responses = 9; + v9 := compiler.MapValueForKey(m, "responses") + if v9 != nil { + var err error + x.Responses, err = NewResponses(v9, compiler.NewContext("responses", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string schemes = 10; + v10 := compiler.MapValueForKey(m, "schemes") + if v10 != nil { + v, ok := compiler.SequenceNodeForNode(v10) + if ok { + x.Schemes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 11; + v11 := compiler.MapValueForKey(m, "deprecated") + if v11 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } 
+ x.Security = append(x.Security, y) + } + } + } + // repeated NamedAny vendor_extension = 13; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameter creates an object of type Parameter if possible, returning an error if not. +func NewParameter(in *yaml.Node, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + matched := false + // BodyParameter body_parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBodyParameter(m, compiler.NewContext("bodyParameter", m, context)) + if matchingError == nil { + x.Oneof = &Parameter_BodyParameter{BodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // NonBodyParameter non_body_parameter = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewNonBodyParameter(m, compiler.NewContext("nonBodyParameter", m, context)) + if matchingError == nil { + x.Oneof = &Parameter_NonBodyParameter{NonBodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid Parameter") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameterDefinitions creates an object of type ParameterDefinitions if possible, returning an error if not. +func NewParameterDefinitions(in *yaml.Node, context *compiler.Context) (*ParameterDefinitions, error) { + errors := make([]error, 0) + x := &ParameterDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedParameter additional_properties = 1; + // MAP: Parameter + x.AdditionalProperties = make([]*NamedParameter, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedParameter{} + pair.Name = k + var err error + pair.Value, err = NewParameter(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParametersItem creates an object of type ParametersItem if possible, returning an error if not. 
+func NewParametersItem(in *yaml.Node, context *compiler.Context) (*ParametersItem, error) { + errors := make([]error, 0) + x := &ParametersItem{} + matched := false + // Parameter parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewParameter(m, compiler.NewContext("parameter", m, context)) + if matchingError == nil { + x.Oneof = &ParametersItem_Parameter{Parameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // JsonReference json_reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", m, context)) + if matchingError == nil { + x.Oneof = &ParametersItem_JsonReference{JsonReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid ParametersItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathItem creates an object of type PathItem if possible, returning an error if not. +func NewPathItem(in *yaml.Node, context *compiler.Context) (*PathItem, error) { + errors := make([]error, 0) + x := &PathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "delete", "get", "head", "options", "parameters", "patch", "post", "put"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Operation get = 2; + v2 := compiler.MapValueForKey(m, "get") + if v2 != nil { + var err error + x.Get, err = NewOperation(v2, compiler.NewContext("get", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation put = 3; + v3 := compiler.MapValueForKey(m, "put") + if v3 != nil { + var err error + x.Put, err = NewOperation(v3, compiler.NewContext("put", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation post = 4; + v4 := compiler.MapValueForKey(m, "post") + if v4 != nil { + var err error + x.Post, err = NewOperation(v4, compiler.NewContext("post", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation delete = 5; + v5 := compiler.MapValueForKey(m, "delete") + if v5 != nil { + var err error + x.Delete, err = NewOperation(v5, compiler.NewContext("delete", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation options = 6; + v6 := compiler.MapValueForKey(m, "options") + if v6 != nil { + var err error + x.Options, err = NewOperation(v6, 
compiler.NewContext("options", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation head = 7; + v7 := compiler.MapValueForKey(m, "head") + if v7 != nil { + var err error + x.Head, err = NewOperation(v7, compiler.NewContext("head", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation patch = 8; + v8 := compiler.MapValueForKey(m, "patch") + if v8 != nil { + var err error + x.Patch, err = NewOperation(v8, compiler.NewContext("patch", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated ParametersItem parameters = 9; + v9 := compiler.MapValueForKey(m, "parameters") + if v9 != nil { + // repeated ParametersItem + x.Parameters = make([]*ParametersItem, 0) + a, ok := compiler.SequenceNodeForNode(v9) + if ok { + for _, item := range a.Content { + y, err := NewParametersItem(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathParameterSubSchema creates an object of type PathParameterSubSchema if possible, returning an error if not. 
+func NewPathParameterSubSchema(in *yaml.Node, context *compiler.Context) (*PathParameterSubSchema, error) { + errors := make([]error, 0) + x := &PathParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"required"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [path] + if ok && !compiler.StringArrayContainsValue([]string{"path"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = 
compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, "collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + v, ok := compiler.FloatForScalarNode(v10) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + v, ok := compiler.FloatForScalarNode(v12) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v16) + if !ok { + 
message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v19) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + v, ok := compiler.FloatForScalarNode(v21) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v21)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPaths creates an object of type Paths if possible, returning an error if not. 
+func NewPaths(in *yaml.Node, context *compiler.Context) (*Paths, error) { + errors := make([]error, 0) + x := &Paths{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern0, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedAny vendor_extension = 1; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + // repeated NamedPathItem path = 2; + // MAP: PathItem ^/ + x.Path = make([]*NamedPathItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "/") { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPrimitivesItems creates an object of type PrimitivesItems if possible, returning an error if not. 
+func NewPrimitivesItems(in *yaml.Node, context *compiler.Context) (*PrimitivesItems, error) { + errors := make([]error, 0) + x := &PrimitivesItems{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v7 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has 
unexpected value for exclusiveMaximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + v, ok := compiler.FloatForScalarNode(v8) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := compiler.IntForScalarNode(v10) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := compiler.IntForScalarNode(v13) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v15) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v16) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + v, ok := compiler.FloatForScalarNode(v17) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
repeated NamedAny vendor_extension = 18; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProperties creates an object of type Properties if possible, returning an error if not. +func NewProperties(in *yaml.Node, context *compiler.Context) (*Properties, error) { + errors := make([]error, 0) + x := &Properties{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewQueryParameterSubSchema creates an object of type QueryParameterSubSchema if possible, returning an error if not. 
+func NewQueryParameterSubSchema(in *yaml.Node, context *compiler.Context) (*QueryParameterSubSchema, error) { + errors := make([]error, 0) + x := &QueryParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [query] + if ok && !compiler.StringArrayContainsValue([]string{"query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := compiler.MapValueForKey(m, "format") + if v7 != nil 
{ + x.Format, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + v, ok := compiler.FloatForScalarNode(v11) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + v, ok := compiler.FloatForScalarNode(v13) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v14) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := compiler.IntForScalarNode(v16) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = 
compiler.StringForScalarNode(v17) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := compiler.IntForScalarNode(v19) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v20) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v20)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v21) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + v, ok := compiler.FloatForScalarNode(v22) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v22)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. 
+func NewResponse(in *yaml.Node, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"description"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "examples", "headers", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaItem schema = 2; + v2 := compiler.MapValueForKey(m, "schema") + if v2 != nil { + var err error + x.Schema, err = NewSchemaItem(v2, compiler.NewContext("schema", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // Headers headers = 3; + v3 := compiler.MapValueForKey(m, "headers") + if v3 != nil { + var err error + x.Headers, err = NewHeaders(v3, compiler.NewContext("headers", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // Examples examples = 4; + v4 := compiler.MapValueForKey(m, "examples") + if v4 != nil { + var err error + x.Examples, err = NewExamples(v4, compiler.NewContext("examples", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseDefinitions creates an object of type ResponseDefinitions if possible, returning an error if not. 
+func NewResponseDefinitions(in *yaml.Node, context *compiler.Context) (*ResponseDefinitions, error) { + errors := make([]error, 0) + x := &ResponseDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedResponse additional_properties = 1; + // MAP: Response + x.AdditionalProperties = make([]*NamedResponse, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedResponse{} + pair.Name = k + var err error + pair.Value, err = NewResponse(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseValue creates an object of type ResponseValue if possible, returning an error if not. +func NewResponseValue(in *yaml.Node, context *compiler.Context) (*ResponseValue, error) { + errors := make([]error, 0) + x := &ResponseValue{} + matched := false + // Response response = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewResponse(m, compiler.NewContext("response", m, context)) + if matchingError == nil { + x.Oneof = &ResponseValue_Response{Response: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // JsonReference json_reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", m, context)) + if matchingError == nil { + x.Oneof = &ResponseValue_JsonReference{JsonReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid ResponseValue") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponses creates an object of type Responses if possible, returning an error if not. 
+func NewResponses(in *yaml.Node, context *compiler.Context) (*Responses, error) { + errors := make([]error, 0) + x := &Responses{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern2, pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedResponseValue response_code = 1; + // MAP: ResponseValue ^([0-9]{3})$|^(default)$ + x.ResponseCode = make([]*NamedResponseValue, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if pattern2.MatchString(k) { + pair := &NamedResponseValue{} + pair.Name = k + var err error + pair.Value, err = NewResponseValue(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.ResponseCode = append(x.ResponseCode, pair) + } + } + } + // repeated NamedAny vendor_extension = 2; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchema creates an object of type Schema if possible, returning an error if not. 
+func NewSchema(in *yaml.Node, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "additionalProperties", "allOf", "default", "description", "discriminator", "enum", "example", "exclusiveMaximum", "exclusiveMinimum", "externalDocs", "format", "items", "maxItems", "maxLength", "maxProperties", "maximum", "minItems", "minLength", "minProperties", "minimum", "multipleOf", "pattern", "properties", "readOnly", "required", "title", "type", "uniqueItems", "xml"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 3; + v3 := compiler.MapValueForKey(m, "title") + if v3 != nil { + x.Title, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float multiple_of = 6; + v6 := compiler.MapValueForKey(m, "multipleOf") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float maximum = 7; + v7 := compiler.MapValueForKey(m, "maximum") + if v7 != nil { + v, ok := compiler.FloatForScalarNode(v7) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 8; + v8 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v8 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 9; + 
v9 := compiler.MapValueForKey(m, "minimum") + if v9 != nil { + v, ok := compiler.FloatForScalarNode(v9) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 10; + v10 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v10 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v10) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 11; + v11 := compiler.MapValueForKey(m, "maxLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 12; + v12 := compiler.MapValueForKey(m, "minLength") + if v12 != nil { + t, ok := compiler.IntForScalarNode(v12) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 13; + v13 := compiler.MapValueForKey(m, "pattern") + if v13 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 14; + v14 := compiler.MapValueForKey(m, "maxItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 15; + v15 := compiler.MapValueForKey(m, "minItems") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 16; + v16 := compiler.MapValueForKey(m, "uniqueItems") + if v16 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v16) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_properties = 17; + v17 := compiler.MapValueForKey(m, "maxProperties") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxProperties: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_properties = 18; + v18 := compiler.MapValueForKey(m, "minProperties") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minProperties: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string required = 19; + v19 := compiler.MapValueForKey(m, "required") + if v19 != nil { + v, ok := compiler.SequenceNodeForNode(v19) + if ok { + x.Required = 
compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // AdditionalPropertiesItem additional_properties = 21; + v21 := compiler.MapValueForKey(m, "additionalProperties") + if v21 != nil { + var err error + x.AdditionalProperties, err = NewAdditionalPropertiesItem(v21, compiler.NewContext("additionalProperties", v21, context)) + if err != nil { + errors = append(errors, err) + } + } + // TypeItem type = 22; + v22 := compiler.MapValueForKey(m, "type") + if v22 != nil { + var err error + x.Type, err = NewTypeItem(v22, compiler.NewContext("type", v22, context)) + if err != nil { + errors = append(errors, err) + } + } + // ItemsItem items = 23; + v23 := compiler.MapValueForKey(m, "items") + if v23 != nil { + var err error + x.Items, err = NewItemsItem(v23, compiler.NewContext("items", v23, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Schema all_of = 24; + v24 := compiler.MapValueForKey(m, "allOf") + if v24 != nil { + // repeated Schema + x.AllOf = make([]*Schema, 0) + a, ok := compiler.SequenceNodeForNode(v24) + if ok { + for _, item := range a.Content { + y, err := NewSchema(item, compiler.NewContext("allOf", item, context)) + if err != nil { + errors = append(errors, err) + } + x.AllOf = append(x.AllOf, y) + } + } + } + // Properties properties = 25; + v25 := compiler.MapValueForKey(m, "properties") + if v25 != nil { + var err error + x.Properties, err = NewProperties(v25, compiler.NewContext("properties", v25, context)) + if err != nil { + errors = append(errors, err) + } + } + // string discriminator = 26; + v26 := compiler.MapValueForKey(m, "discriminator") + if v26 != nil { + x.Discriminator, ok = compiler.StringForScalarNode(v26) + if !ok { + message := fmt.Sprintf("has unexpected value for discriminator: %s", compiler.Display(v26)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 27; + v27 := compiler.MapValueForKey(m, "readOnly") + if v27 != nil { + x.ReadOnly, ok = compiler.BoolForScalarNode(v27) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %s", compiler.Display(v27)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Xml xml = 28; + v28 := compiler.MapValueForKey(m, "xml") + if v28 != nil { + var err error + x.Xml, err = NewXml(v28, compiler.NewContext("xml", v28, context)) + if err != nil { + errors = append(errors, err) + } + } + // ExternalDocs external_docs = 29; + v29 := compiler.MapValueForKey(m, "externalDocs") + if v29 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v29, compiler.NewContext("externalDocs", v29, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 30; + v30 := compiler.MapValueForKey(m, "example") + if v30 != nil { + var err error + x.Example, err = NewAny(v30, compiler.NewContext("example", v30, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 31; + // MAP: Any ^x- + x.VendorExtension 
= make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemaItem creates an object of type SchemaItem if possible, returning an error if not. +func NewSchemaItem(in *yaml.Node, context *compiler.Context) (*SchemaItem, error) { + errors := make([]error, 0) + x := &SchemaItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", m, context)) + if matchingError == nil { + x.Oneof = &SchemaItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // FileSchema file_schema = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFileSchema(m, compiler.NewContext("fileSchema", m, context)) + if matchingError == nil { + x.Oneof = &SchemaItem_FileSchema{FileSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SchemaItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitions creates an object of type SecurityDefinitions if possible, returning an error if not. +func NewSecurityDefinitions(in *yaml.Node, context *compiler.Context) (*SecurityDefinitions, error) { + errors := make([]error, 0) + x := &SecurityDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSecurityDefinitionsItem additional_properties = 1; + // MAP: SecurityDefinitionsItem + x.AdditionalProperties = make([]*NamedSecurityDefinitionsItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSecurityDefinitionsItem{} + pair.Name = k + var err error + pair.Value, err = NewSecurityDefinitionsItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitionsItem creates an object of type SecurityDefinitionsItem if possible, returning an error if not. 
+func NewSecurityDefinitionsItem(in *yaml.Node, context *compiler.Context) (*SecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &SecurityDefinitionsItem{} + matched := false + // BasicAuthenticationSecurity basic_authentication_security = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBasicAuthenticationSecurity(m, compiler.NewContext("basicAuthenticationSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_BasicAuthenticationSecurity{BasicAuthenticationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // ApiKeySecurity api_key_security = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewApiKeySecurity(m, compiler.NewContext("apiKeySecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_ApiKeySecurity{ApiKeySecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ImplicitSecurity oauth2_implicit_security = 3; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ImplicitSecurity(m, compiler.NewContext("oauth2ImplicitSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ImplicitSecurity{Oauth2ImplicitSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2PasswordSecurity oauth2_password_security = 4; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2PasswordSecurity(m, compiler.NewContext("oauth2PasswordSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2PasswordSecurity{Oauth2PasswordSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ApplicationSecurity oauth2_application_security = 5; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ApplicationSecurity(m, compiler.NewContext("oauth2ApplicationSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ApplicationSecurity{Oauth2ApplicationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2AccessCodeSecurity(m, compiler.NewContext("oauth2AccessCodeSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2AccessCodeSecurity{Oauth2AccessCodeSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SecurityDefinitionsItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityRequirement creates an object of type SecurityRequirement if 
possible, returning an error if not. +func NewSecurityRequirement(in *yaml.Node, context *compiler.Context) (*SecurityRequirement, error) { + errors := make([]error, 0) + x := &SecurityRequirement{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedStringArray additional_properties = 1; + // MAP: StringArray + x.AdditionalProperties = make([]*NamedStringArray, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedStringArray{} + pair.Name = k + var err error + pair.Value, err = NewStringArray(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in *yaml.Node, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + x.Value = make([]string, 0) + for _, node := range in.Content { + s, _ := compiler.StringForScalarNode(node) + x.Value = append(x.Value, s) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTag creates an object of type Tag if possible, returning an error if not. +func NewTag(in *yaml.Node, context *compiler.Context) (*Tag, error) { + errors := make([]error, 0) + x := &Tag{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "externalDocs", "name"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 3; + v3 := compiler.MapValueForKey(m, "externalDocs") + if v3 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v3, compiler.NewContext("externalDocs", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 
2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTypeItem creates an object of type TypeItem if possible, returning an error if not. +func NewTypeItem(in *yaml.Node, context *compiler.Context) (*TypeItem, error) { + errors := make([]error, 0) + x := &TypeItem{} + v1 := in + switch v1.Kind { + case yaml.ScalarNode: + x.Value = make([]string, 0) + x.Value = append(x.Value, v1.Value) + case yaml.SequenceNode: + x.Value = make([]string, 0) + for _, v := range v1.Content { + value := v.Value + ok := v.Kind == yaml.ScalarNode + if ok { + x.Value = append(x.Value, value) + } else { + message := fmt.Sprintf("has unexpected value for string array element: %+v (%T)", value, value) + errors = append(errors, compiler.NewError(context, message)) + } + } + default: + message := fmt.Sprintf("has unexpected value for string array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewVendorExtension creates an object of type VendorExtension if possible, returning an error if not. +func NewVendorExtension(in *yaml.Node, context *compiler.Context) (*VendorExtension, error) { + errors := make([]error, 0) + x := &VendorExtension{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewXml creates an object of type Xml if possible, returning an error if not. 
+func NewXml(in *yaml.Node, context *compiler.Context) (*Xml, error) {
+	errors := make([]error, 0)
+	x := &Xml{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		allowedKeys := []string{"attribute", "name", "namespace", "prefix", "wrapped"}
+		allowedPatterns := []*regexp.Regexp{pattern0}
+		invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns)
+		if len(invalidKeys) > 0 {
+			message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", "))
+			errors = append(errors, compiler.NewError(context, message))
+		}
+		// string name = 1;
+		v1 := compiler.MapValueForKey(m, "name")
+		if v1 != nil {
+			x.Name, ok = compiler.StringForScalarNode(v1)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// string namespace = 2;
+		v2 := compiler.MapValueForKey(m, "namespace")
+		if v2 != nil {
+			x.Namespace, ok = compiler.StringForScalarNode(v2)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for namespace: %s", compiler.Display(v2))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// string prefix = 3;
+		v3 := compiler.MapValueForKey(m, "prefix")
+		if v3 != nil {
+			x.Prefix, ok = compiler.StringForScalarNode(v3)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for prefix: %s", compiler.Display(v3))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// bool attribute = 4;
+		v4 := compiler.MapValueForKey(m, "attribute")
+		if v4 != nil {
+			x.Attribute, ok = compiler.BoolForScalarNode(v4)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for attribute: %s", compiler.Display(v4))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// bool wrapped = 5;
+		v5 := compiler.MapValueForKey(m, "wrapped")
+		if v5 != nil {
+			x.Wrapped, ok = compiler.BoolForScalarNode(v5)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for wrapped: %s", compiler.Display(v5))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// repeated NamedAny vendor_extension = 6;
+		// MAP: Any ^x-
+		x.VendorExtension = make([]*NamedAny, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				if strings.HasPrefix(k, "x-") {
+					pair := &NamedAny{}
+					pair.Name = k
+					result := &Any{}
+					handled, resultFromExt, err := compiler.CallExtension(context, v, k)
+					if handled {
+						if err != nil {
+							errors = append(errors, err)
+						} else {
+							bytes := compiler.Marshal(v)
+							result.Yaml = string(bytes)
+							result.Value = resultFromExt
+							pair.Value = result
+						}
+					} else {
+						pair.Value, err = NewAny(v, compiler.NewContext(k, v, context))
+						if err != nil {
+							errors = append(errors, err)
+						}
+					}
+					x.VendorExtension = append(x.VendorExtension, pair)
+				}
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside AdditionalPropertiesItem objects.
+func (m *AdditionalPropertiesItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*AdditionalPropertiesItem_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Any objects. +func (m *Any) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ApiKeySecurity objects. +func (m *ApiKeySecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside BasicAuthenticationSecurity objects. +func (m *BasicAuthenticationSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside BodyParameter objects. +func (m *BodyParameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Contact objects. +func (m *Contact) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Default objects. +func (m *Default) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Definitions objects. +func (m *Definitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Document objects. 
+func (m *Document) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Info != nil { + _, err := m.Info.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Paths != nil { + _, err := m.Paths.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Definitions != nil { + _, err := m.Definitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.SecurityDefinitions != nil { + _, err := m.SecurityDefinitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Tags { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Examples objects. +func (m *Examples) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExternalDocs objects. +func (m *ExternalDocs) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FileSchema objects. +func (m *FileSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FormDataParameterSubSchema objects. 
+func (m *FormDataParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Header objects. +func (m *Header) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeaderParameterSubSchema objects. +func (m *HeaderParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Headers objects. +func (m *Headers) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Info objects. +func (m *Info) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Contact != nil { + _, err := m.Contact.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.License != nil { + _, err := m.License.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ItemsItem objects. 
+func (m *ItemsItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.Schema { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside JsonReference objects. +func (m *JsonReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewJsonReference(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside License objects. +func (m *License) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedAny objects. +func (m *NamedAny) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedHeader objects. +func (m *NamedHeader) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedParameter objects. +func (m *NamedParameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedPathItem objects. +func (m *NamedPathItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponse objects. +func (m *NamedResponse) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponseValue objects. +func (m *NamedResponseValue) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSchema objects. 
+func (m *NamedSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSecurityDefinitionsItem objects. +func (m *NamedSecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedString objects. +func (m *NamedString) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedStringArray objects. +func (m *NamedStringArray) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NonBodyParameter objects. +func (m *NonBodyParameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*NonBodyParameter_HeaderParameterSubSchema) + if ok { + _, err := p.HeaderParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_FormDataParameterSubSchema) + if ok { + _, err := p.FormDataParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_QueryParameterSubSchema) + if ok { + _, err := p.QueryParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*NonBodyParameter_PathParameterSubSchema) + if ok { + _, err := p.PathParameterSubSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2AccessCodeSecurity objects. +func (m *Oauth2AccessCodeSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2ApplicationSecurity objects. +func (m *Oauth2ApplicationSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2ImplicitSecurity objects. 
+func (m *Oauth2ImplicitSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2PasswordSecurity objects. +func (m *Oauth2PasswordSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2Scopes objects. +func (m *Oauth2Scopes) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Operation objects. +func (m *Operation) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*Parameter_BodyParameter) + if ok { + _, err := p.BodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*Parameter_NonBodyParameter) + if ok { + _, err := p.NonBodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParameterDefinitions objects. +func (m *ParameterDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParametersItem objects. 
+func (m *ParametersItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ParametersItem_Parameter) + if ok { + _, err := p.Parameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ParametersItem_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewParametersItem(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathItem objects. +func (m *PathItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewPathItem(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Get != nil { + _, err := m.Get.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Put != nil { + _, err := m.Put.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Post != nil { + _, err := m.Post.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Delete != nil { + _, err := m.Delete.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Options != nil { + _, err := m.Options.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Head != nil { + _, err := m.Head.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Patch != nil { + _, err := m.Patch.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathParameterSubSchema objects. +func (m *PathParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Paths objects. 
+func (m *Paths) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PrimitivesItems objects. +func (m *PrimitivesItems) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Properties objects. +func (m *Properties) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside QueryParameterSubSchema objects. +func (m *QueryParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseDefinitions objects. 
+func (m *ResponseDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseValue objects. +func (m *ResponseValue) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ResponseValue_Response) + if ok { + _, err := p.Response.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ResponseValue_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewResponseValue(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Responses objects. +func (m *Responses) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.ResponseCode { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewSchema(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Type != nil { + _, err := m.Type.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.AllOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Xml != nil { + _, err := m.Xml.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = 
append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemaItem objects. +func (m *SchemaItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SchemaItem_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SchemaItem_FileSchema) + if ok { + _, err := p.FileSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitions objects. +func (m *SecurityDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitionsItem objects. +func (m *SecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_BasicAuthenticationSecurity) + if ok { + _, err := p.BasicAuthenticationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_ApiKeySecurity) + if ok { + _, err := p.ApiKeySecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ImplicitSecurity) + if ok { + _, err := p.Oauth2ImplicitSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2PasswordSecurity) + if ok { + _, err := p.Oauth2PasswordSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ApplicationSecurity) + if ok { + _, err := p.Oauth2ApplicationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) + if ok { + _, err := p.Oauth2AccessCodeSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityRequirement objects. +func (m *SecurityRequirement) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Tag objects. 
+func (m *Tag) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside TypeItem objects. +func (m *TypeItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside VendorExtension objects. +func (m *VendorExtension) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Xml objects. +func (m *Xml) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. +func (m *AdditionalPropertiesItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // AdditionalPropertiesItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return compiler.NewScalarNodeForBool(v1.Boolean) + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. +func (m *Any) ToRawInfo() *yaml.Node { + var err error + var node yaml.Node + err = yaml.Unmarshal([]byte(m.Yaml), &node) + if err == nil { + if node.Kind == yaml.DocumentNode { + return node.Content[0] + } + return &node + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ApiKeySecurity suitable for JSON or YAML export. +func (m *ApiKeySecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + // always include this required field. 
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("in"))
+	info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In))
+	if m.Description != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("description"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description))
+	}
+	if m.VendorExtension != nil {
+		for _, item := range m.VendorExtension {
+			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
+			info.Content = append(info.Content, item.Value.ToRawInfo())
+		}
+	}
+	return info
+}
+
+// ToRawInfo returns a description of BasicAuthenticationSecurity suitable for JSON or YAML export.
+func (m *BasicAuthenticationSecurity) ToRawInfo() *yaml.Node {
+	info := compiler.NewMappingNode()
+	if m == nil {
+		return info
+	}
+	// always include this required field.
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("type"))
+	info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type))
+	if m.Description != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("description"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description))
+	}
+	if m.VendorExtension != nil {
+		for _, item := range m.VendorExtension {
+			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
+			info.Content = append(info.Content, item.Value.ToRawInfo())
+		}
+	}
+	return info
+}
+
+// ToRawInfo returns a description of BodyParameter suitable for JSON or YAML export.
+func (m *BodyParameter) ToRawInfo() *yaml.Node {
+	info := compiler.NewMappingNode()
+	if m == nil {
+		return info
+	}
+	if m.Description != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("description"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description))
+	}
+	// always include this required field.
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("name"))
+	info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name))
+	// always include this required field.
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("in"))
+	info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In))
+	if m.Required != false {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("required"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required))
+	}
+	// always include this required field.
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("schema"))
+	info.Content = append(info.Content, m.Schema.ToRawInfo())
+	if m.VendorExtension != nil {
+		for _, item := range m.VendorExtension {
+			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
+			info.Content = append(info.Content, item.Value.ToRawInfo())
+		}
+	}
+	return info
+}
+
+// ToRawInfo returns a description of Contact suitable for JSON or YAML export.
+func (m *Contact) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.Email != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("email")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Email)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Default suitable for JSON or YAML export. +func (m *Default) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Definitions suitable for JSON or YAML export. +func (m *Definitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. +func (m *Document) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("swagger")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Swagger)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("info")) + info.Content = append(info.Content, m.Info.ToRawInfo()) + if m.Host != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("host")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Host)) + } + if m.BasePath != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("basePath")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.BasePath)) + } + if len(m.Schemes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) + } + if len(m.Consumes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) + } + if len(m.Produces) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("paths")) + info.Content = append(info.Content, m.Paths.ToRawInfo()) + if m.Definitions != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("definitions")) + info.Content = append(info.Content, m.Definitions.ToRawInfo()) + } + if m.Parameters != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, m.Parameters.ToRawInfo()) + } + if m.Responses != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if m.SecurityDefinitions != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("securityDefinitions")) + info.Content = append(info.Content, m.SecurityDefinitions.ToRawInfo()) + } + if len(m.Tags) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Tags { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, items) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Examples suitable for JSON or YAML export. +func (m *Examples) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. +func (m *ExternalDocs) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of FileSchema suitable for JSON or YAML export. 
+func (m *FileSchema) ToRawInfo() *yaml.Node {
+	info := compiler.NewMappingNode()
+	if m == nil {
+		return info
+	}
+	if m.Format != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("format"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format))
+	}
+	if m.Title != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("title"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title))
+	}
+	if m.Description != "" {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("description"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description))
+	}
+	if m.Default != nil {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("default"))
+		info.Content = append(info.Content, m.Default.ToRawInfo())
+	}
+	if len(m.Required) != 0 {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("required"))
+		info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required))
+	}
+	// always include this required field.
+	info.Content = append(info.Content, compiler.NewScalarNodeForString("type"))
+	info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type))
+	if m.ReadOnly != false {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly"))
+		info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly))
+	}
+	if m.ExternalDocs != nil {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs"))
+		info.Content = append(info.Content, m.ExternalDocs.ToRawInfo())
+	}
+	if m.Example != nil {
+		info.Content = append(info.Content, compiler.NewScalarNodeForString("example"))
+		info.Content = append(info.Content, m.Example.ToRawInfo())
+	}
+	if m.VendorExtension != nil {
+		for _, item := range m.VendorExtension {
+			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
+			info.Content = append(info.Content, item.Value.ToRawInfo())
+		}
+	}
+	return info
+}
+
+// ToRawInfo returns a description of FormDataParameterSubSchema suitable for JSON or YAML export.
+func (m *FormDataParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = 
append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Header suitable for JSON or YAML export. +func (m *Header) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of HeaderParameterSubSchema suitable for JSON or YAML export. 
+func (m *HeaderParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Headers suitable for JSON or YAML export. +func (m *Headers) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Info suitable for JSON or YAML export. +func (m *Info) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("version")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Version)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.TermsOfService != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("termsOfService")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TermsOfService)) + } + if m.Contact != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("contact")) + info.Content = append(info.Content, m.Contact.ToRawInfo()) + } + if m.License != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("license")) + info.Content = append(info.Content, m.License.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. 
+func (m *ItemsItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Schema) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Schema { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, items) + } + return info +} + +// ToRawInfo returns a description of JsonReference suitable for JSON or YAML export. +func (m *JsonReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + return info +} + +// ToRawInfo returns a description of License suitable for JSON or YAML export. +func (m *License) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. +func (m *NamedAny) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, m.Value.ToRawInfo()) + } + return info +} + +// ToRawInfo returns a description of NamedHeader suitable for JSON or YAML export. +func (m *NamedHeader) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export. 
+func (m *NamedParameter) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. +func (m *NamedPathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponse suitable for JSON or YAML export. +func (m *NamedResponse) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponseValue suitable for JSON or YAML export. +func (m *NamedResponseValue) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ResponseValue StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export. +func (m *NamedSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSecurityDefinitionsItem suitable for JSON or YAML export. +func (m *NamedSecurityDefinitionsItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:SecurityDefinitionsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. 
+func (m *NamedString) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Value)) + } + return info +} + +// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. +func (m *NamedStringArray) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NonBodyParameter suitable for JSON or YAML export. +func (m *NonBodyParameter) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // NonBodyParameter + // {Name:headerParameterSubSchema Type:HeaderParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetHeaderParameterSubSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:formDataParameterSubSchema Type:FormDataParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFormDataParameterSubSchema() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:queryParameterSubSchema Type:QueryParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetQueryParameterSubSchema() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:pathParameterSubSchema Type:PathParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetPathParameterSubSchema() + if v3 != nil { + return v3.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Oauth2AccessCodeSecurity suitable for JSON or YAML export. +func (m *Oauth2AccessCodeSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2ApplicationSecurity suitable for JSON or YAML export. +func (m *Oauth2ApplicationSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2ImplicitSecurity suitable for JSON or YAML export. +func (m *Oauth2ImplicitSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2PasswordSecurity suitable for JSON or YAML export. +func (m *Oauth2PasswordSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2Scopes suitable for JSON or YAML export. +func (m *Oauth2Scopes) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
+func (m *Operation) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Tags) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Tags)) + } + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.OperationId != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) + } + if len(m.Produces) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) + } + if len(m.Consumes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + if len(m.Schemes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. 
+func (m *Parameter) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // Parameter + // {Name:bodyParameter Type:BodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBodyParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:nonBodyParameter Type:NonBodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetNonBodyParameter() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ParameterDefinitions suitable for JSON or YAML export. +func (m *ParameterDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ParametersItem suitable for JSON or YAML export. +func (m *ParametersItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ParametersItem + // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. +func (m *PathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.XRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + } + if m.Get != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("get")) + info.Content = append(info.Content, m.Get.ToRawInfo()) + } + if m.Put != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("put")) + info.Content = append(info.Content, m.Put.ToRawInfo()) + } + if m.Post != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("post")) + info.Content = append(info.Content, m.Post.ToRawInfo()) + } + if m.Delete != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("delete")) + info.Content = append(info.Content, m.Delete.ToRawInfo()) + } + if m.Options != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("options")) + info.Content = append(info.Content, m.Options.ToRawInfo()) + } + if m.Head != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("head")) + info.Content = append(info.Content, m.Head.ToRawInfo()) + } + if m.Patch != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("patch")) + info.Content = append(info.Content, m.Patch.ToRawInfo()) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, 
compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of PathParameterSubSchema suitable for JSON or YAML export. +func (m *PathParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + 
info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Paths suitable for JSON or YAML export. +func (m *Paths) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.Path != nil { + for _, item := range m.Path { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of PrimitivesItems suitable for JSON or YAML export. 
+func (m *PrimitivesItems) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, 
item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Properties suitable for JSON or YAML export. +func (m *Properties) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of QueryParameterSubSchema suitable for JSON or YAML export. +func (m *QueryParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. +func (m *Response) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + if m.Schema != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + } + if m.Headers != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) + info.Content = append(info.Content, m.Headers.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponseDefinitions suitable for JSON or YAML export. 
+func (m *ResponseDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponseValue suitable for JSON or YAML export. +func (m *ResponseValue) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ResponseValue + // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetResponse() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Responses suitable for JSON or YAML export. +func (m *Responses) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.ResponseCode != nil { + for _, item := range m.ResponseCode { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. +func (m *Schema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.XRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Title != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if m.MaxProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxProperties)) + } + if m.MinProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinProperties)) + } + if len(m.Required) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.AdditionalProperties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("additionalProperties")) + info.Content = append(info.Content, m.AdditionalProperties.ToRawInfo()) + } + if m.Type != nil { + if len(m.Type.Value) == 1 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type.Value[0])) + } else { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Type.Value)) + } + } + if m.Items != nil { + items := compiler.NewSequenceNode() + for _, item := range m.Items.Schema { + items.Content = append(items.Content, item.ToRawInfo()) + } + if len(items.Content) == 1 { + items = items.Content[0] + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, items) + } + if len(m.AllOf) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.AllOf { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("allOf")) + info.Content = 
append(info.Content, items) + } + if m.Properties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("properties")) + info.Content = append(info.Content, m.Properties.ToRawInfo()) + } + if m.Discriminator != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("discriminator")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Discriminator)) + } + if m.ReadOnly != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) + } + if m.Xml != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("xml")) + info.Content = append(info.Content, m.Xml.ToRawInfo()) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SchemaItem suitable for JSON or YAML export. +func (m *SchemaItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SchemaItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:fileSchema Type:FileSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFileSchema() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SecurityDefinitions suitable for JSON or YAML export. +func (m *SecurityDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SecurityDefinitionsItem suitable for JSON or YAML export. 
+func (m *SecurityDefinitionsItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SecurityDefinitionsItem + // {Name:basicAuthenticationSecurity Type:BasicAuthenticationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBasicAuthenticationSecurity() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:apiKeySecurity Type:ApiKeySecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetApiKeySecurity() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:oauth2ImplicitSecurity Type:Oauth2ImplicitSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetOauth2ImplicitSecurity() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:oauth2PasswordSecurity Type:Oauth2PasswordSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetOauth2PasswordSecurity() + if v3 != nil { + return v3.ToRawInfo() + } + // {Name:oauth2ApplicationSecurity Type:Oauth2ApplicationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v4 := m.GetOauth2ApplicationSecurity() + if v4 != nil { + return v4.ToRawInfo() + } + // {Name:oauth2AccessCodeSecurity Type:Oauth2AccessCodeSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v5 := m.GetOauth2AccessCodeSecurity() + if v5 != nil { + return v5.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. +func (m *SecurityRequirement) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() *yaml.Node { + return compiler.NewSequenceNodeForStringArray(m.Value) +} + +// ToRawInfo returns a description of Tag suitable for JSON or YAML export. +func (m *Tag) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of TypeItem suitable for JSON or YAML export. 
+func (m *TypeItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Value) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Value)) + } + return info +} + +// ToRawInfo returns a description of VendorExtension suitable for JSON or YAML export. +func (m *VendorExtension) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Xml suitable for JSON or YAML export. +func (m *Xml) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Namespace != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("namespace")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Namespace)) + } + if m.Prefix != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("prefix")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Prefix)) + } + if m.Attribute != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("attribute")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Attribute)) + } + if m.Wrapped != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("wrapped")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Wrapped)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +var ( + pattern0 = regexp.MustCompile("^x-") + pattern1 = regexp.MustCompile("^/") + pattern2 = regexp.MustCompile("^([0-9]{3})$|^(default)$") +) diff --git a/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pb.go b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go similarity index 99% rename from vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pb.go rename to vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go index 06b60157c..65c4c913c 100644 --- a/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pb.go +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go @@ -16,8 +16,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.26.0 -// protoc v3.18.1 +// protoc-gen-go v1.27.1 +// protoc v3.19.3 // source: openapiv2/OpenAPIv2.proto package openapi_v2 diff --git a/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto new file mode 100644 index 000000000..1c59b2f4a --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto @@ -0,0 +1,666 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +syntax = "proto3"; + +package openapi.v2; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v2"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +// The Go package name. +option go_package = "./openapiv2;openapi_v2"; + +message AdditionalPropertiesItem { + oneof oneof { + Schema schema = 1; + bool boolean = 2; + } +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message ApiKeySecurity { + string type = 1; + string name = 2; + string in = 3; + string description = 4; + repeated NamedAny vendor_extension = 5; +} + +message BasicAuthenticationSecurity { + string type = 1; + string description = 2; + repeated NamedAny vendor_extension = 3; +} + +message BodyParameter { + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 1; + // The name of the parameter. + string name = 2; + // Determines the location of the parameter. + string in = 3; + // Determines whether or not this parameter is required or optional. + bool required = 4; + Schema schema = 5; + repeated NamedAny vendor_extension = 6; +} + +// Contact information for the owners of the API. +message Contact { + // The identifying name of the contact person/organization. + string name = 1; + // The URL pointing to the contact information. + string url = 2; + // The email address of the contact person/organization. + string email = 3; + repeated NamedAny vendor_extension = 4; +} + +message Default { + repeated NamedAny additional_properties = 1; +} + +// One or more JSON objects describing the schemas being consumed and produced by the API. +message Definitions { + repeated NamedSchema additional_properties = 1; +} + +message Document { + // The Swagger version of this document. + string swagger = 1; + Info info = 2; + // The host (name or ip) of the API. Example: 'swagger.io' + string host = 3; + // The base path to the API. Example: '/api'. + string base_path = 4; + // The transfer protocol of the API. 
+ repeated string schemes = 5; + // A list of MIME types accepted by the API. + repeated string consumes = 6; + // A list of MIME types the API can produce. + repeated string produces = 7; + Paths paths = 8; + Definitions definitions = 9; + ParameterDefinitions parameters = 10; + ResponseDefinitions responses = 11; + repeated SecurityRequirement security = 12; + SecurityDefinitions security_definitions = 13; + repeated Tag tags = 14; + ExternalDocs external_docs = 15; + repeated NamedAny vendor_extension = 16; +} + +message Examples { + repeated NamedAny additional_properties = 1; +} + +// information about external documentation +message ExternalDocs { + string description = 1; + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// A deterministic version of a JSON Schema object. +message FileSchema { + string format = 1; + string title = 2; + string description = 3; + Any default = 4; + repeated string required = 5; + string type = 6; + bool read_only = 7; + ExternalDocs external_docs = 8; + Any example = 9; + repeated NamedAny vendor_extension = 10; +} + +message FormDataParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. + bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Header { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + string description = 18; + repeated NamedAny vendor_extension = 19; +} + +message HeaderParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +message Headers { + repeated NamedHeader additional_properties = 1; +} + +// General information about the API. 
+message Info { + // A unique and precise title of the API. + string title = 1; + // A semantic version number of the API. + string version = 2; + // A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + string description = 3; + // The terms of service for the API. + string terms_of_service = 4; + Contact contact = 5; + License license = 6; + repeated NamedAny vendor_extension = 7; +} + +message ItemsItem { + repeated Schema schema = 1; +} + +message JsonReference { + string _ref = 1; + string description = 2; +} + +message License { + // The name of the license type. It's encouraged to use an OSI compatible license. + string name = 1; + // The URL pointing to the license. + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +message NamedAny { + // Map key + string name = 1; + // Mapped value + Any value = 2; +} + +// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +message NamedHeader { + // Map key + string name = 1; + // Mapped value + Header value = 2; +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +message NamedParameter { + // Map key + string name = 1; + // Mapped value + Parameter value = 2; +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +message NamedPathItem { + // Map key + string name = 1; + // Mapped value + PathItem value = 2; +} + +// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +message NamedResponse { + // Map key + string name = 1; + // Mapped value + Response value = 2; +} + +// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +message NamedResponseValue { + // Map key + string name = 1; + // Mapped value + ResponseValue value = 2; +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +message NamedSchema { + // Map key + string name = 1; + // Mapped value + Schema value = 2; +} + +// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. +message NamedSecurityDefinitionsItem { + // Map key + string name = 1; + // Mapped value + SecurityDefinitionsItem value = 2; +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +message NamedString { + // Map key + string name = 1; + // Mapped value + string value = 2; +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. 
+message NamedStringArray { + // Map key + string name = 1; + // Mapped value + StringArray value = 2; +} + +message NonBodyParameter { + oneof oneof { + HeaderParameterSubSchema header_parameter_sub_schema = 1; + FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + QueryParameterSubSchema query_parameter_sub_schema = 3; + PathParameterSubSchema path_parameter_sub_schema = 4; + } +} + +message Oauth2AccessCodeSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string token_url = 5; + string description = 6; + repeated NamedAny vendor_extension = 7; +} + +message Oauth2ApplicationSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2ImplicitSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2PasswordSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2Scopes { + repeated NamedString additional_properties = 1; +} + +message Operation { + repeated string tags = 1; + // A brief summary of the operation. + string summary = 2; + // A longer description of the operation, GitHub Flavored Markdown is allowed. + string description = 3; + ExternalDocs external_docs = 4; + // A unique identifier of the operation. + string operation_id = 5; + // A list of MIME types the API can produce. + repeated string produces = 6; + // A list of MIME types the API can consume. + repeated string consumes = 7; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 8; + Responses responses = 9; + // The transfer protocol of the API. + repeated string schemes = 10; + bool deprecated = 11; + repeated SecurityRequirement security = 12; + repeated NamedAny vendor_extension = 13; +} + +message Parameter { + oneof oneof { + BodyParameter body_parameter = 1; + NonBodyParameter non_body_parameter = 2; + } +} + +// One or more JSON representations for parameters +message ParameterDefinitions { + repeated NamedParameter additional_properties = 1; +} + +message ParametersItem { + oneof oneof { + Parameter parameter = 1; + JsonReference json_reference = 2; + } +} + +message PathItem { + string _ref = 1; + Operation get = 2; + Operation put = 3; + Operation post = 4; + Operation delete = 5; + Operation options = 6; + Operation head = 7; + Operation patch = 8; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 9; + repeated NamedAny vendor_extension = 10; +} + +message PathParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. 
+ string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +message Paths { + repeated NamedAny vendor_extension = 1; + repeated NamedPathItem path = 2; +} + +message PrimitivesItems { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + repeated NamedAny vendor_extension = 18; +} + +message Properties { + repeated NamedSchema additional_properties = 1; +} + +message QueryParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. + bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Response { + string description = 1; + SchemaItem schema = 2; + Headers headers = 3; + Examples examples = 4; + repeated NamedAny vendor_extension = 5; +} + +// One or more JSON representations for responses +message ResponseDefinitions { + repeated NamedResponse additional_properties = 1; +} + +message ResponseValue { + oneof oneof { + Response response = 1; + JsonReference json_reference = 2; + } +} + +// Response objects names can either be any valid HTTP status code or 'default'. +message Responses { + repeated NamedResponseValue response_code = 1; + repeated NamedAny vendor_extension = 2; +} + +// A deterministic version of a JSON Schema object. 
+message Schema { + string _ref = 1; + string format = 2; + string title = 3; + string description = 4; + Any default = 5; + double multiple_of = 6; + double maximum = 7; + bool exclusive_maximum = 8; + double minimum = 9; + bool exclusive_minimum = 10; + int64 max_length = 11; + int64 min_length = 12; + string pattern = 13; + int64 max_items = 14; + int64 min_items = 15; + bool unique_items = 16; + int64 max_properties = 17; + int64 min_properties = 18; + repeated string required = 19; + repeated Any enum = 20; + AdditionalPropertiesItem additional_properties = 21; + TypeItem type = 22; + ItemsItem items = 23; + repeated Schema all_of = 24; + Properties properties = 25; + string discriminator = 26; + bool read_only = 27; + Xml xml = 28; + ExternalDocs external_docs = 29; + Any example = 30; + repeated NamedAny vendor_extension = 31; +} + +message SchemaItem { + oneof oneof { + Schema schema = 1; + FileSchema file_schema = 2; + } +} + +message SecurityDefinitions { + repeated NamedSecurityDefinitionsItem additional_properties = 1; +} + +message SecurityDefinitionsItem { + oneof oneof { + BasicAuthenticationSecurity basic_authentication_security = 1; + ApiKeySecurity api_key_security = 2; + Oauth2ImplicitSecurity oauth2_implicit_security = 3; + Oauth2PasswordSecurity oauth2_password_security = 4; + Oauth2ApplicationSecurity oauth2_application_security = 5; + Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + } +} + +message SecurityRequirement { + repeated NamedStringArray additional_properties = 1; +} + +message StringArray { + repeated string value = 1; +} + +message Tag { + string name = 1; + string description = 2; + ExternalDocs external_docs = 3; + repeated NamedAny vendor_extension = 4; +} + +message TypeItem { + repeated string value = 1; +} + +// Any property starting with x- is valid. +message VendorExtension { + repeated NamedAny additional_properties = 1; +} + +message Xml { + string name = 1; + string namespace = 2; + string prefix = 3; + bool attribute = 4; + bool wrapped = 5; + repeated NamedAny vendor_extension = 6; +} + diff --git a/vendor/github.com/google/gnostic-models/openapiv2/README.md b/vendor/github.com/google/gnostic-models/openapiv2/README.md new file mode 100644 index 000000000..5276128d3 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/README.md @@ -0,0 +1,14 @@ +# OpenAPI v2 Protocol Buffer Models + +This directory contains a Protocol Buffer-language model and related code for +supporting OpenAPI v2. + +Gnostic applications and plugins can use OpenAPIv2.proto to generate Protocol +Buffer support code for their preferred languages. + +OpenAPIv2.go is used by Gnostic to read JSON and YAML OpenAPI descriptions into +the Protocol Buffer-based datastructures generated from OpenAPIv2.proto. + +OpenAPIv2.proto and OpenAPIv2.go are generated by the Gnostic compiler +generator, and OpenAPIv2.pb.go is generated by protoc, the Protocol Buffer +compiler, and protoc-gen-go, the Protocol Buffer Go code generation plugin. diff --git a/vendor/github.com/google/gnostic-models/openapiv2/document.go b/vendor/github.com/google/gnostic-models/openapiv2/document.go new file mode 100644 index 000000000..e96ac0d6d --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/document.go @@ -0,0 +1,42 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi_v2 + +import ( + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// ParseDocument reads an OpenAPI v2 description from a YAML/JSON representation. +func ParseDocument(b []byte) (*Document, error) { + info, err := compiler.ReadInfoFromBytes("", b) + if err != nil { + return nil, err + } + root := info.Content[0] + return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil)) +} + +// YAMLValue produces a serialized YAML representation of the document. +func (d *Document) YAMLValue(comment string) ([]byte, error) { + rawInfo := d.ToRawInfo() + rawInfo = &yaml.Node{ + Kind: yaml.DocumentNode, + Content: []*yaml.Node{rawInfo}, + HeadComment: comment, + } + return yaml.Marshal(rawInfo) +} diff --git a/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json b/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json new file mode 100644 index 000000000..afa12b79b --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json @@ -0,0 +1,1610 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." 
+ }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." 
+ }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for responses" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + 
"type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + }, + "description": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go new file mode 100644 index 000000000..4b1131ce1 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go @@ -0,0 
+1,8633 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package openapi_v3 + +import ( + "fmt" + "regexp" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "openapi_v3" +} + +// NewAdditionalPropertiesItem creates an object of type AdditionalPropertiesItem if possible, returning an error if not. +func NewAdditionalPropertiesItem(in *yaml.Node, context *compiler.Context) (*AdditionalPropertiesItem, error) { + errors := make([]error, 0) + x := &AdditionalPropertiesItem{} + matched := false + // SchemaOrReference schema_or_reference = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchemaOrReference(m, compiler.NewContext("schemaOrReference", m, context)) + if matchingError == nil { + x.Oneof = &AdditionalPropertiesItem_SchemaOrReference{SchemaOrReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // bool boolean = 2; + boolValue, ok := compiler.BoolForScalarNode(in) + if ok { + x.Oneof = &AdditionalPropertiesItem_Boolean{Boolean: boolValue} + matched = true + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid AdditionalPropertiesItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in *yaml.Node, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes := compiler.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAnyOrExpression creates an object of type AnyOrExpression if possible, returning an error if not. 
+func NewAnyOrExpression(in *yaml.Node, context *compiler.Context) (*AnyOrExpression, error) { + errors := make([]error, 0) + x := &AnyOrExpression{} + matched := false + // Any any = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewAny(m, compiler.NewContext("any", m, context)) + if matchingError == nil { + x.Oneof = &AnyOrExpression_Any{Any: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Expression expression = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewExpression(m, compiler.NewContext("expression", m, context)) + if matchingError == nil { + x.Oneof = &AnyOrExpression_Expression{Expression: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid AnyOrExpression") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallback creates an object of type Callback if possible, returning an error if not. +func NewCallback(in *yaml.Node, context *compiler.Context) (*Callback, error) { + errors := make([]error, 0) + x := &Callback{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern0, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedPathItem path = 1; + // MAP: PathItem ^ + x.Path = make([]*NamedPathItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if true { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + // repeated NamedAny specification_extension = 2; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallbackOrReference creates an object of type CallbackOrReference if possible, returning an error if not. 
+func NewCallbackOrReference(in *yaml.Node, context *compiler.Context) (*CallbackOrReference, error) { + errors := make([]error, 0) + x := &CallbackOrReference{} + matched := false + // Callback callback = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewCallback(m, compiler.NewContext("callback", m, context)) + if matchingError == nil { + x.Oneof = &CallbackOrReference_Callback{Callback: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &CallbackOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid CallbackOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewCallbacksOrReferences creates an object of type CallbacksOrReferences if possible, returning an error if not. +func NewCallbacksOrReferences(in *yaml.Node, context *compiler.Context) (*CallbacksOrReferences, error) { + errors := make([]error, 0) + x := &CallbacksOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedCallbackOrReference additional_properties = 1; + // MAP: CallbackOrReference + x.AdditionalProperties = make([]*NamedCallbackOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedCallbackOrReference{} + pair.Name = k + var err error + pair.Value, err = NewCallbackOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewComponents creates an object of type Components if possible, returning an error if not. 
+func NewComponents(in *yaml.Node, context *compiler.Context) (*Components, error) { + errors := make([]error, 0) + x := &Components{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"callbacks", "examples", "headers", "links", "parameters", "requestBodies", "responses", "schemas", "securitySchemes"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // SchemasOrReferences schemas = 1; + v1 := compiler.MapValueForKey(m, "schemas") + if v1 != nil { + var err error + x.Schemas, err = NewSchemasOrReferences(v1, compiler.NewContext("schemas", v1, context)) + if err != nil { + errors = append(errors, err) + } + } + // ResponsesOrReferences responses = 2; + v2 := compiler.MapValueForKey(m, "responses") + if v2 != nil { + var err error + x.Responses, err = NewResponsesOrReferences(v2, compiler.NewContext("responses", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // ParametersOrReferences parameters = 3; + v3 := compiler.MapValueForKey(m, "parameters") + if v3 != nil { + var err error + x.Parameters, err = NewParametersOrReferences(v3, compiler.NewContext("parameters", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 4; + v4 := compiler.MapValueForKey(m, "examples") + if v4 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v4, compiler.NewContext("examples", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // RequestBodiesOrReferences request_bodies = 5; + v5 := compiler.MapValueForKey(m, "requestBodies") + if v5 != nil { + var err error + x.RequestBodies, err = NewRequestBodiesOrReferences(v5, compiler.NewContext("requestBodies", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // HeadersOrReferences headers = 6; + v6 := compiler.MapValueForKey(m, "headers") + if v6 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v6, compiler.NewContext("headers", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // SecuritySchemesOrReferences security_schemes = 7; + v7 := compiler.MapValueForKey(m, "securitySchemes") + if v7 != nil { + var err error + x.SecuritySchemes, err = NewSecuritySchemesOrReferences(v7, compiler.NewContext("securitySchemes", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // LinksOrReferences links = 8; + v8 := compiler.MapValueForKey(m, "links") + if v8 != nil { + var err error + x.Links, err = NewLinksOrReferences(v8, compiler.NewContext("links", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // CallbacksOrReferences callbacks = 9; + v9 := compiler.MapValueForKey(m, "callbacks") + if v9 != nil { + var err error + x.Callbacks, err = NewCallbacksOrReferences(v9, compiler.NewContext("callbacks", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 10; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := 
m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewContact creates an object of type Contact if possible, returning an error if not. +func NewContact(in *yaml.Node, context *compiler.Context) (*Contact, error) { + errors := make([]error, 0) + x := &Contact{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"email", "name", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string email = 3; + v3 := compiler.MapValueForKey(m, "email") + if v3 != nil { + x.Email, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for email: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefaultType creates an object of type DefaultType if possible, returning an error if not. 
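The key/value walk that repeats throughout these generated constructors relies on how gopkg.in/yaml.v3 lays out mapping nodes: for a mapping, Content holds keys and values alternately, which is why the loops step by two and filter "x-" keys into specification extensions. A standalone sketch of that layout, separate from the vendored file; the sample document and variable names are illustrative only:

package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v3"
)

func main() {
	src := []byte("title: Example\nversion: \"1.0\"\nx-owner: platform-team\n")

	// Unmarshalling into a yaml.Node yields a document node whose first child
	// is the mapping itself; this mirrors the *yaml.Node the constructors take.
	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	m := doc.Content[0]

	// For a mapping node, Content alternates key, value, key, value, ...
	// which is exactly why the generated code iterates with i += 2.
	for i := 0; i < len(m.Content); i += 2 {
		k, v := m.Content[i], m.Content[i+1]
		if strings.HasPrefix(k.Value, "x-") {
			fmt.Printf("specification extension %s = %s\n", k.Value, v.Value)
			continue
		}
		fmt.Printf("field %s = %s\n", k.Value, v.Value)
	}
}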
+func NewDefaultType(in *yaml.Node, context *compiler.Context) (*DefaultType, error) { + errors := make([]error, 0) + x := &DefaultType{} + matched := false + switch in.Tag { + case "!!bool": + var v bool + v, matched = compiler.BoolForScalarNode(in) + x.Oneof = &DefaultType_Boolean{Boolean: v} + case "!!str": + var v string + v, matched = compiler.StringForScalarNode(in) + x.Oneof = &DefaultType_String_{String_: v} + case "!!float": + var v float64 + v, matched = compiler.FloatForScalarNode(in) + x.Oneof = &DefaultType_Number{Number: v} + case "!!int": + var v int64 + v, matched = compiler.IntForScalarNode(in) + x.Oneof = &DefaultType_Number{Number: float64(v)} + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDiscriminator creates an object of type Discriminator if possible, returning an error if not. +func NewDiscriminator(in *yaml.Node, context *compiler.Context) (*Discriminator, error) { + errors := make([]error, 0) + x := &Discriminator{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"propertyName"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"mapping", "propertyName"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string property_name = 1; + v1 := compiler.MapValueForKey(m, "propertyName") + if v1 != nil { + x.PropertyName, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for propertyName: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Strings mapping = 2; + v2 := compiler.MapValueForKey(m, "mapping") + if v2 != nil { + var err error + x.Mapping, err = NewStrings(v2, compiler.NewContext("mapping", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. 
+func NewDocument(in *yaml.Node, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"info", "openapi", "paths"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"components", "externalDocs", "info", "openapi", "paths", "security", "servers", "tags"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string openapi = 1; + v1 := compiler.MapValueForKey(m, "openapi") + if v1 != nil { + x.Openapi, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for openapi: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Info info = 2; + v2 := compiler.MapValueForKey(m, "info") + if v2 != nil { + var err error + x.Info, err = NewInfo(v2, compiler.NewContext("info", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Server servers = 3; + v3 := compiler.MapValueForKey(m, "servers") + if v3 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := compiler.SequenceNodeForNode(v3) + if ok { + for _, item := range a.Content { + y, err := NewServer(item, compiler.NewContext("servers", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // Paths paths = 4; + v4 := compiler.MapValueForKey(m, "paths") + if v4 != nil { + var err error + x.Paths, err = NewPaths(v4, compiler.NewContext("paths", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // Components components = 5; + v5 := compiler.MapValueForKey(m, "components") + if v5 != nil { + var err error + x.Components, err = NewComponents(v5, compiler.NewContext("components", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated SecurityRequirement security = 6; + v6 := compiler.MapValueForKey(m, "security") + if v6 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v6) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // repeated Tag tags = 7; + v7 := compiler.MapValueForKey(m, "tags") + if v7 != nil { + // repeated Tag + x.Tags = make([]*Tag, 0) + a, ok := compiler.SequenceNodeForNode(v7) + if ok { + for _, item := range a.Content { + y, err := NewTag(item, compiler.NewContext("tags", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Tags = append(x.Tags, y) + } + } + } + // ExternalDocs external_docs = 8; + v8 := compiler.MapValueForKey(m, "externalDocs") + if v8 != nil { + var err error + x.ExternalDocs, err 
= NewExternalDocs(v8, compiler.NewContext("externalDocs", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 9; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewEncoding creates an object of type Encoding if possible, returning an error if not. +func NewEncoding(in *yaml.Node, context *compiler.Context) (*Encoding, error) { + errors := make([]error, 0) + x := &Encoding{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowReserved", "contentType", "explode", "headers", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string content_type = 1; + v1 := compiler.MapValueForKey(m, "contentType") + if v1 != nil { + x.ContentType, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for contentType: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeadersOrReferences headers = 2; + v2 := compiler.MapValueForKey(m, "headers") + if v2 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v2, compiler.NewContext("headers", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // string style = 3; + v3 := compiler.MapValueForKey(m, "style") + if v3 != nil { + x.Style, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 4; + v4 := compiler.MapValueForKey(m, "explode") + if v4 != nil { + x.Explode, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 5; + v5 := compiler.MapValueForKey(m, "allowReserved") + if v5 != nil { + x.AllowReserved, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 6; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := 
compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewEncodings creates an object of type Encodings if possible, returning an error if not. +func NewEncodings(in *yaml.Node, context *compiler.Context) (*Encodings, error) { + errors := make([]error, 0) + x := &Encodings{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedEncoding additional_properties = 1; + // MAP: Encoding + x.AdditionalProperties = make([]*NamedEncoding, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedEncoding{} + pair.Name = k + var err error + pair.Value, err = NewEncoding(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExample creates an object of type Example if possible, returning an error if not. +func NewExample(in *yaml.Node, context *compiler.Context) (*Example, error) { + errors := make([]error, 0) + x := &Example{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description", "externalValue", "summary", "value"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string summary = 1; + v1 := compiler.MapValueForKey(m, "summary") + if v1 != nil { + x.Summary, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 3; + v3 := compiler.MapValueForKey(m, "value") + if v3 != nil { + var err error + x.Value, err = NewAny(v3, compiler.NewContext("value", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string external_value = 4; + v4 := compiler.MapValueForKey(m, "externalValue") + if v4 != nil { + x.ExternalValue, ok = compiler.StringForScalarNode(v4) + if !ok { + message := 
fmt.Sprintf("has unexpected value for externalValue: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExampleOrReference creates an object of type ExampleOrReference if possible, returning an error if not. +func NewExampleOrReference(in *yaml.Node, context *compiler.Context) (*ExampleOrReference, error) { + errors := make([]error, 0) + x := &ExampleOrReference{} + matched := false + // Example example = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewExample(m, compiler.NewContext("example", m, context)) + if matchingError == nil { + x.Oneof = &ExampleOrReference_Example{Example: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &ExampleOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid ExampleOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamplesOrReferences creates an object of type ExamplesOrReferences if possible, returning an error if not. 
+func NewExamplesOrReferences(in *yaml.Node, context *compiler.Context) (*ExamplesOrReferences, error) { + errors := make([]error, 0) + x := &ExamplesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedExampleOrReference additional_properties = 1; + // MAP: ExampleOrReference + x.AdditionalProperties = make([]*NamedExampleOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedExampleOrReference{} + pair.Name = k + var err error + pair.Value, err = NewExampleOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExpression creates an object of type Expression if possible, returning an error if not. +func NewExpression(in *yaml.Node, context *compiler.Context) (*Expression, error) { + errors := make([]error, 0) + x := &Expression{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExternalDocs creates an object of type ExternalDocs if possible, returning an error if not. 
+func NewExternalDocs(in *yaml.Node, context *compiler.Context) (*ExternalDocs, error) { + errors := make([]error, 0) + x := &ExternalDocs{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeader creates an object of type Header if possible, returning an error if not. 
+func NewHeader(in *yaml.Node, context *compiler.Context) (*Header, error) { + errors := make([]error, 0) + x := &Header{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "allowReserved", "content", "deprecated", "description", "example", "examples", "explode", "required", "schema", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 2; + v2 := compiler.MapValueForKey(m, "required") + if v2 != nil { + x.Required, ok = compiler.BoolForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 3; + v3 := compiler.MapValueForKey(m, "deprecated") + if v3 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 4; + v4 := compiler.MapValueForKey(m, "allowEmptyValue") + if v4 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string style = 5; + v5 := compiler.MapValueForKey(m, "style") + if v5 != nil { + x.Style, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 6; + v6 := compiler.MapValueForKey(m, "explode") + if v6 != nil { + x.Explode, ok = compiler.BoolForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 7; + v7 := compiler.MapValueForKey(m, "allowReserved") + if v7 != nil { + x.AllowReserved, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference schema = 8; + v8 := compiler.MapValueForKey(m, "schema") + if v8 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v8, compiler.NewContext("schema", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 9; + v9 := compiler.MapValueForKey(m, "example") + if v9 != nil { + var err error + x.Example, err = NewAny(v9, compiler.NewContext("example", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // 
ExamplesOrReferences examples = 10; + v10 := compiler.MapValueForKey(m, "examples") + if v10 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v10, compiler.NewContext("examples", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // MediaTypes content = 11; + v11 := compiler.MapValueForKey(m, "content") + if v11 != nil { + var err error + x.Content, err = NewMediaTypes(v11, compiler.NewContext("content", v11, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 12; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaderOrReference creates an object of type HeaderOrReference if possible, returning an error if not. +func NewHeaderOrReference(in *yaml.Node, context *compiler.Context) (*HeaderOrReference, error) { + errors := make([]error, 0) + x := &HeaderOrReference{} + matched := false + // Header header = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewHeader(m, compiler.NewContext("header", m, context)) + if matchingError == nil { + x.Oneof = &HeaderOrReference_Header{Header: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &HeaderOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid HeaderOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeadersOrReferences creates an object of type HeadersOrReferences if possible, returning an error if not. 
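The *OrReference wrappers in this file all follow the same oneof pattern: try each arm, record whether any of them parsed, and only surface the accumulated errors when none did. A small sketch of inspecting which arm matched; the $ref value and variable names are illustrative only:

package main

import (
	"fmt"
	"log"

	"github.com/google/gnostic-models/compiler"
	openapi_v3 "github.com/google/gnostic-models/openapiv3"
	"gopkg.in/yaml.v3"
)

func main() {
	// A mapping containing $ref cannot be an inline Header (invalid key),
	// so the Reference arm of the oneof is the one that matches.
	src := []byte(`{"$ref": "#/components/headers/X-Rate-Limit"}`)

	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		log.Fatal(err)
	}
	node := doc.Content[0]

	hr, err := openapi_v3.NewHeaderOrReference(node, compiler.NewContext("header", node, nil))
	if err != nil {
		log.Fatal(err)
	}
	switch hr.Oneof.(type) {
	case *openapi_v3.HeaderOrReference_Header:
		fmt.Println("parsed as an inline Header")
	case *openapi_v3.HeaderOrReference_Reference:
		fmt.Println("parsed as a $ref to a reusable Header")
	}
}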
+func NewHeadersOrReferences(in *yaml.Node, context *compiler.Context) (*HeadersOrReferences, error) { + errors := make([]error, 0) + x := &HeadersOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedHeaderOrReference additional_properties = 1; + // MAP: HeaderOrReference + x.AdditionalProperties = make([]*NamedHeaderOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedHeaderOrReference{} + pair.Name = k + var err error + pair.Value, err = NewHeaderOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewInfo creates an object of type Info if possible, returning an error if not. +func NewInfo(in *yaml.Node, context *compiler.Context) (*Info, error) { + errors := make([]error, 0) + x := &Info{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"title", "version"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"contact", "description", "license", "summary", "termsOfService", "title", "version"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string title = 1; + v1 := compiler.MapValueForKey(m, "title") + if v1 != nil { + x.Title, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string terms_of_service = 3; + v3 := compiler.MapValueForKey(m, "termsOfService") + if v3 != nil { + x.TermsOfService, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for termsOfService: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Contact contact = 4; + v4 := compiler.MapValueForKey(m, "contact") + if v4 != nil { + var err error + x.Contact, err = NewContact(v4, compiler.NewContext("contact", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // License license = 5; + v5 := compiler.MapValueForKey(m, "license") + if v5 != nil { + var err error + x.License, err = NewLicense(v5, compiler.NewContext("license", v5, context)) + if err != nil { + errors 
= append(errors, err) + } + } + // string version = 6; + v6 := compiler.MapValueForKey(m, "version") + if v6 != nil { + x.Version, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 7; + v7 := compiler.MapValueForKey(m, "summary") + if v7 != nil { + x.Summary, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 8; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewItemsItem creates an object of type ItemsItem if possible, returning an error if not. +func NewItemsItem(in *yaml.Node, context *compiler.Context) (*ItemsItem, error) { + errors := make([]error, 0) + x := &ItemsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value for item array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.SchemaOrReference = make([]*SchemaOrReference, 0) + y, err := NewSchemaOrReference(m, compiler.NewContext("", m, context)) + if err != nil { + return nil, err + } + x.SchemaOrReference = append(x.SchemaOrReference, y) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLicense creates an object of type License if possible, returning an error if not. 
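Validation in these constructors is reported through compiler error groups rather than panics. A sketch, under the same nil-context assumption as above, of what a caller sees when a required key is missing and an unknown key is present; the sample fragment is illustrative only:

package main

import (
	"fmt"
	"log"

	"github.com/google/gnostic-models/compiler"
	openapi_v3 "github.com/google/gnostic-models/openapiv3"
	"gopkg.in/yaml.v3"
)

func main() {
	// "version" is required and "licence" is not an allowed key, so NewInfo
	// returns a non-nil error group describing both problems.
	src := []byte("title: Petstore\nlicence:\n  name: Apache-2.0\n")

	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		log.Fatal(err)
	}
	node := doc.Content[0]

	info, err := openapi_v3.NewInfo(node, compiler.NewContext("info", node, nil))
	if err != nil {
		fmt.Println("validation errors:", err)
	}
	// Fields that did parse are still populated alongside the error group.
	fmt.Println("title:", info.Title)
}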
+func NewLicense(in *yaml.Node, context *compiler.Context) (*License, error) { + errors := make([]error, 0) + x := &License{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"name", "url"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLink creates an object of type Link if possible, returning an error if not. 
+func NewLink(in *yaml.Node, context *compiler.Context) (*Link, error) { + errors := make([]error, 0) + x := &Link{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"description", "operationId", "operationRef", "parameters", "requestBody", "server"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string operation_ref = 1; + v1 := compiler.MapValueForKey(m, "operationRef") + if v1 != nil { + x.OperationRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for operationRef: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string operation_id = 2; + v2 := compiler.MapValueForKey(m, "operationId") + if v2 != nil { + x.OperationId, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // AnyOrExpression parameters = 3; + v3 := compiler.MapValueForKey(m, "parameters") + if v3 != nil { + var err error + x.Parameters, err = NewAnyOrExpression(v3, compiler.NewContext("parameters", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // AnyOrExpression request_body = 4; + v4 := compiler.MapValueForKey(m, "requestBody") + if v4 != nil { + var err error + x.RequestBody, err = NewAnyOrExpression(v4, compiler.NewContext("requestBody", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Server server = 6; + v6 := compiler.MapValueForKey(m, "server") + if v6 != nil { + var err error + x.Server, err = NewServer(v6, compiler.NewContext("server", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 7; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLinkOrReference creates an object of type LinkOrReference if possible, returning an error if not. 
+func NewLinkOrReference(in *yaml.Node, context *compiler.Context) (*LinkOrReference, error) { + errors := make([]error, 0) + x := &LinkOrReference{} + matched := false + // Link link = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewLink(m, compiler.NewContext("link", m, context)) + if matchingError == nil { + x.Oneof = &LinkOrReference_Link{Link: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &LinkOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid LinkOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLinksOrReferences creates an object of type LinksOrReferences if possible, returning an error if not. +func NewLinksOrReferences(in *yaml.Node, context *compiler.Context) (*LinksOrReferences, error) { + errors := make([]error, 0) + x := &LinksOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedLinkOrReference additional_properties = 1; + // MAP: LinkOrReference + x.AdditionalProperties = make([]*NamedLinkOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedLinkOrReference{} + pair.Name = k + var err error + pair.Value, err = NewLinkOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMediaType creates an object of type MediaType if possible, returning an error if not. 
+func NewMediaType(in *yaml.Node, context *compiler.Context) (*MediaType, error) { + errors := make([]error, 0) + x := &MediaType{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"encoding", "example", "examples", "schema"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // SchemaOrReference schema = 1; + v1 := compiler.MapValueForKey(m, "schema") + if v1 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v1, compiler.NewContext("schema", v1, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 2; + v2 := compiler.MapValueForKey(m, "example") + if v2 != nil { + var err error + x.Example, err = NewAny(v2, compiler.NewContext("example", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 3; + v3 := compiler.MapValueForKey(m, "examples") + if v3 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v3, compiler.NewContext("examples", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // Encodings encoding = 4; + v4 := compiler.MapValueForKey(m, "encoding") + if v4 != nil { + var err error + x.Encoding, err = NewEncodings(v4, compiler.NewContext("encoding", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewMediaTypes creates an object of type MediaTypes if possible, returning an error if not. 
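Free-form leaves such as example values are not modeled structurally; NewAny re-marshals the node and stores it as YAML text, which is what MediaType.Example ends up carrying. A sketch of reading that back out; the sample payload is illustrative only:

package main

import (
	"fmt"
	"log"

	"github.com/google/gnostic-models/compiler"
	openapi_v3 "github.com/google/gnostic-models/openapiv3"
	"gopkg.in/yaml.v3"
)

func main() {
	src := []byte("example:\n  id: 42\n  name: fido\n")

	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		log.Fatal(err)
	}
	node := doc.Content[0]

	mt, err := openapi_v3.NewMediaType(node, compiler.NewContext("mediaType", node, nil))
	if err != nil {
		log.Fatal(err)
	}
	// The example is kept as re-marshaled YAML text on the Any message.
	fmt.Print(mt.Example.Yaml)
}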
+func NewMediaTypes(in *yaml.Node, context *compiler.Context) (*MediaTypes, error) {
+	errors := make([]error, 0)
+	x := &MediaTypes{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		// repeated NamedMediaType additional_properties = 1;
+		// MAP: MediaType
+		x.AdditionalProperties = make([]*NamedMediaType, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				pair := &NamedMediaType{}
+				pair.Name = k
+				var err error
+				pair.Value, err = NewMediaType(v, compiler.NewContext(k, v, context))
+				if err != nil {
+					errors = append(errors, err)
+				}
+				x.AdditionalProperties = append(x.AdditionalProperties, pair)
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewNamedAny creates an object of type NamedAny if possible, returning an error if not.
+func NewNamedAny(in *yaml.Node, context *compiler.Context) (*NamedAny, error) {
+	errors := make([]error, 0)
+	x := &NamedAny{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		allowedKeys := []string{"name", "value"}
+		var allowedPatterns []*regexp.Regexp
+		invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns)
+		if len(invalidKeys) > 0 {
+			message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", "))
+			errors = append(errors, compiler.NewError(context, message))
+		}
+		// string name = 1;
+		v1 := compiler.MapValueForKey(m, "name")
+		if v1 != nil {
+			x.Name, ok = compiler.StringForScalarNode(v1)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// Any value = 2;
+		v2 := compiler.MapValueForKey(m, "value")
+		if v2 != nil {
+			var err error
+			x.Value, err = NewAny(v2, compiler.NewContext("value", v2, context))
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewNamedCallbackOrReference creates an object of type NamedCallbackOrReference if possible, returning an error if not.
+func NewNamedCallbackOrReference(in *yaml.Node, context *compiler.Context) (*NamedCallbackOrReference, error) { + errors := make([]error, 0) + x := &NamedCallbackOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // CallbackOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewCallbackOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedEncoding creates an object of type NamedEncoding if possible, returning an error if not. +func NewNamedEncoding(in *yaml.Node, context *compiler.Context) (*NamedEncoding, error) { + errors := make([]error, 0) + x := &NamedEncoding{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Encoding value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewEncoding(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedExampleOrReference creates an object of type NamedExampleOrReference if possible, returning an error if not. 
+func NewNamedExampleOrReference(in *yaml.Node, context *compiler.Context) (*NamedExampleOrReference, error) { + errors := make([]error, 0) + x := &NamedExampleOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExampleOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewExampleOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedHeaderOrReference creates an object of type NamedHeaderOrReference if possible, returning an error if not. +func NewNamedHeaderOrReference(in *yaml.Node, context *compiler.Context) (*NamedHeaderOrReference, error) { + errors := make([]error, 0) + x := &NamedHeaderOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeaderOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewHeaderOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedLinkOrReference creates an object of type NamedLinkOrReference if possible, returning an error if not. 
+func NewNamedLinkOrReference(in *yaml.Node, context *compiler.Context) (*NamedLinkOrReference, error) { + errors := make([]error, 0) + x := &NamedLinkOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // LinkOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewLinkOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedMediaType creates an object of type NamedMediaType if possible, returning an error if not. +func NewNamedMediaType(in *yaml.Node, context *compiler.Context) (*NamedMediaType, error) { + errors := make([]error, 0) + x := &NamedMediaType{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // MediaType value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewMediaType(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameterOrReference creates an object of type NamedParameterOrReference if possible, returning an error if not. 
+func NewNamedParameterOrReference(in *yaml.Node, context *compiler.Context) (*NamedParameterOrReference, error) { + errors := make([]error, 0) + x := &NamedParameterOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ParameterOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameterOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedPathItem creates an object of type NamedPathItem if possible, returning an error if not. +func NewNamedPathItem(in *yaml.Node, context *compiler.Context) (*NamedPathItem, error) { + errors := make([]error, 0) + x := &NamedPathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PathItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewPathItem(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedRequestBodyOrReference creates an object of type NamedRequestBodyOrReference if possible, returning an error if not. 
+func NewNamedRequestBodyOrReference(in *yaml.Node, context *compiler.Context) (*NamedRequestBodyOrReference, error) { + errors := make([]error, 0) + x := &NamedRequestBodyOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // RequestBodyOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewRequestBodyOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponseOrReference creates an object of type NamedResponseOrReference if possible, returning an error if not. +func NewNamedResponseOrReference(in *yaml.Node, context *compiler.Context) (*NamedResponseOrReference, error) { + errors := make([]error, 0) + x := &NamedResponseOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ResponseOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponseOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchemaOrReference creates an object of type NamedSchemaOrReference if possible, returning an error if not. 
+func NewNamedSchemaOrReference(in *yaml.Node, context *compiler.Context) (*NamedSchemaOrReference, error) { + errors := make([]error, 0) + x := &NamedSchemaOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchemaOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSecuritySchemeOrReference creates an object of type NamedSecuritySchemeOrReference if possible, returning an error if not. +func NewNamedSecuritySchemeOrReference(in *yaml.Node, context *compiler.Context) (*NamedSecuritySchemeOrReference, error) { + errors := make([]error, 0) + x := &NamedSecuritySchemeOrReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SecuritySchemeOrReference value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSecuritySchemeOrReference(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedServerVariable creates an object of type NamedServerVariable if possible, returning an error if not. 
+func NewNamedServerVariable(in *yaml.Node, context *compiler.Context) (*NamedServerVariable, error) { + errors := make([]error, 0) + x := &NamedServerVariable{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ServerVariable value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewServerVariable(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedString creates an object of type NamedString if possible, returning an error if not. +func NewNamedString(in *yaml.Node, context *compiler.Context) (*NamedString, error) { + errors := make([]error, 0) + x := &NamedString{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + x.Value, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for value: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedStringArray creates an object of type NamedStringArray if possible, returning an error if not. 
+func NewNamedStringArray(in *yaml.Node, context *compiler.Context) (*NamedStringArray, error) { + errors := make([]error, 0) + x := &NamedStringArray{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // StringArray value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewStringArray(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauthFlow creates an object of type OauthFlow if possible, returning an error if not. +func NewOauthFlow(in *yaml.Node, context *compiler.Context) (*OauthFlow, error) { + errors := make([]error, 0) + x := &OauthFlow{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"authorizationUrl", "refreshUrl", "scopes", "tokenUrl"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string authorization_url = 1; + v1 := compiler.MapValueForKey(m, "authorizationUrl") + if v1 != nil { + x.AuthorizationUrl, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string token_url = 2; + v2 := compiler.MapValueForKey(m, "tokenUrl") + if v2 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string refresh_url = 3; + v3 := compiler.MapValueForKey(m, "refreshUrl") + if v3 != nil { + x.RefreshUrl, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for refreshUrl: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Strings scopes = 4; + v4 := compiler.MapValueForKey(m, "scopes") + if v4 != nil { + var err error + x.Scopes, err = NewStrings(v4, compiler.NewContext("scopes", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := 
compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauthFlows creates an object of type OauthFlows if possible, returning an error if not. +func NewOauthFlows(in *yaml.Node, context *compiler.Context) (*OauthFlows, error) { + errors := make([]error, 0) + x := &OauthFlows{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"authorizationCode", "clientCredentials", "implicit", "password"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // OauthFlow implicit = 1; + v1 := compiler.MapValueForKey(m, "implicit") + if v1 != nil { + var err error + x.Implicit, err = NewOauthFlow(v1, compiler.NewContext("implicit", v1, context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow password = 2; + v2 := compiler.MapValueForKey(m, "password") + if v2 != nil { + var err error + x.Password, err = NewOauthFlow(v2, compiler.NewContext("password", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow client_credentials = 3; + v3 := compiler.MapValueForKey(m, "clientCredentials") + if v3 != nil { + var err error + x.ClientCredentials, err = NewOauthFlow(v3, compiler.NewContext("clientCredentials", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // OauthFlow authorization_code = 4; + v4 := compiler.MapValueForKey(m, "authorizationCode") + if v4 != nil { + var err error + x.AuthorizationCode, err = NewOauthFlow(v4, compiler.NewContext("authorizationCode", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewObject creates an object of type Object if 
possible, returning an error if not. +func NewObject(in *yaml.Node, context *compiler.Context) (*Object, error) { + errors := make([]error, 0) + x := &Object{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOperation creates an object of type Operation if possible, returning an error if not. +func NewOperation(in *yaml.Node, context *compiler.Context) (*Operation, error) { + errors := make([]error, 0) + x := &Operation{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"responses"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"callbacks", "deprecated", "description", "externalDocs", "operationId", "parameters", "requestBody", "responses", "security", "servers", "summary", "tags"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string tags = 1; + v1 := compiler.MapValueForKey(m, "tags") + if v1 != nil { + v, ok := compiler.SequenceNodeForNode(v1) + if ok { + x.Tags = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for tags: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 4; + v4 := compiler.MapValueForKey(m, "externalDocs") + if v4 != nil { + var err error + 
x.ExternalDocs, err = NewExternalDocs(v4, compiler.NewContext("externalDocs", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // string operation_id = 5; + v5 := compiler.MapValueForKey(m, "operationId") + if v5 != nil { + x.OperationId, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated ParameterOrReference parameters = 6; + v6 := compiler.MapValueForKey(m, "parameters") + if v6 != nil { + // repeated ParameterOrReference + x.Parameters = make([]*ParameterOrReference, 0) + a, ok := compiler.SequenceNodeForNode(v6) + if ok { + for _, item := range a.Content { + y, err := NewParameterOrReference(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // RequestBodyOrReference request_body = 7; + v7 := compiler.MapValueForKey(m, "requestBody") + if v7 != nil { + var err error + x.RequestBody, err = NewRequestBodyOrReference(v7, compiler.NewContext("requestBody", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // Responses responses = 8; + v8 := compiler.MapValueForKey(m, "responses") + if v8 != nil { + var err error + x.Responses, err = NewResponses(v8, compiler.NewContext("responses", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // CallbacksOrReferences callbacks = 9; + v9 := compiler.MapValueForKey(m, "callbacks") + if v9 != nil { + var err error + x.Callbacks, err = NewCallbacksOrReferences(v9, compiler.NewContext("callbacks", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // bool deprecated = 10; + v10 := compiler.MapValueForKey(m, "deprecated") + if v10 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v10) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SecurityRequirement security = 11; + v11 := compiler.MapValueForKey(m, "security") + if v11 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v11) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // repeated Server servers = 12; + v12 := compiler.MapValueForKey(m, "servers") + if v12 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewServer(item, compiler.NewContext("servers", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // repeated NamedAny specification_extension = 13; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + 
result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameter creates an object of type Parameter if possible, returning an error if not. +func NewParameter(in *yaml.Node, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"allowEmptyValue", "allowReserved", "content", "deprecated", "description", "example", "examples", "explode", "in", "name", "required", "schema", "style"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 4; + v4 := compiler.MapValueForKey(m, "required") + if v4 != nil { + x.Required, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 5; + v5 := compiler.MapValueForKey(m, "deprecated") + if v5 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 6; + v6 := compiler.MapValueForKey(m, "allowEmptyValue") + if v6 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string style = 7; + v7 := compiler.MapValueForKey(m, "style") + if v7 != nil { + 
x.Style, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for style: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool explode = 8; + v8 := compiler.MapValueForKey(m, "explode") + if v8 != nil { + x.Explode, ok = compiler.BoolForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for explode: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_reserved = 9; + v9 := compiler.MapValueForKey(m, "allowReserved") + if v9 != nil { + x.AllowReserved, ok = compiler.BoolForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for allowReserved: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaOrReference schema = 10; + v10 := compiler.MapValueForKey(m, "schema") + if v10 != nil { + var err error + x.Schema, err = NewSchemaOrReference(v10, compiler.NewContext("schema", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 11; + v11 := compiler.MapValueForKey(m, "example") + if v11 != nil { + var err error + x.Example, err = NewAny(v11, compiler.NewContext("example", v11, context)) + if err != nil { + errors = append(errors, err) + } + } + // ExamplesOrReferences examples = 12; + v12 := compiler.MapValueForKey(m, "examples") + if v12 != nil { + var err error + x.Examples, err = NewExamplesOrReferences(v12, compiler.NewContext("examples", v12, context)) + if err != nil { + errors = append(errors, err) + } + } + // MediaTypes content = 13; + v13 := compiler.MapValueForKey(m, "content") + if v13 != nil { + var err error + x.Content, err = NewMediaTypes(v13, compiler.NewContext("content", v13, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 14; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameterOrReference creates an object of type ParameterOrReference if possible, returning an error if not. 
+func NewParameterOrReference(in *yaml.Node, context *compiler.Context) (*ParameterOrReference, error) {
+	errors := make([]error, 0)
+	x := &ParameterOrReference{}
+	matched := false
+	// Parameter parameter = 1;
+	{
+		m, ok := compiler.UnpackMap(in)
+		if ok {
+			// errors might be ok here, they mean we just don't have the right subtype
+			t, matchingError := NewParameter(m, compiler.NewContext("parameter", m, context))
+			if matchingError == nil {
+				x.Oneof = &ParameterOrReference_Parameter{Parameter: t}
+				matched = true
+			} else {
+				errors = append(errors, matchingError)
+			}
+		}
+	}
+	// Reference reference = 2;
+	{
+		m, ok := compiler.UnpackMap(in)
+		if ok {
+			// errors might be ok here, they mean we just don't have the right subtype
+			t, matchingError := NewReference(m, compiler.NewContext("reference", m, context))
+			if matchingError == nil {
+				x.Oneof = &ParameterOrReference_Reference{Reference: t}
+				matched = true
+			} else {
+				errors = append(errors, matchingError)
+			}
+		}
+	}
+	if matched {
+		// since the oneof matched one of its possibilities, discard any matching errors
+		errors = make([]error, 0)
+	} else {
+		message := fmt.Sprintf("contains an invalid ParameterOrReference")
+		err := compiler.NewError(context, message)
+		errors = []error{err}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewParametersOrReferences creates an object of type ParametersOrReferences if possible, returning an error if not.
+func NewParametersOrReferences(in *yaml.Node, context *compiler.Context) (*ParametersOrReferences, error) {
+	errors := make([]error, 0)
+	x := &ParametersOrReferences{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		// repeated NamedParameterOrReference additional_properties = 1;
+		// MAP: ParameterOrReference
+		x.AdditionalProperties = make([]*NamedParameterOrReference, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				pair := &NamedParameterOrReference{}
+				pair.Name = k
+				var err error
+				pair.Value, err = NewParameterOrReference(v, compiler.NewContext(k, v, context))
+				if err != nil {
+					errors = append(errors, err)
+				}
+				x.AdditionalProperties = append(x.AdditionalProperties, pair)
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewPathItem creates an object of type PathItem if possible, returning an error if not.
+func NewPathItem(in *yaml.Node, context *compiler.Context) (*PathItem, error) { + errors := make([]error, 0) + x := &PathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "delete", "description", "get", "head", "options", "parameters", "patch", "post", "put", "servers", "summary", "trace"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Operation get = 4; + v4 := compiler.MapValueForKey(m, "get") + if v4 != nil { + var err error + x.Get, err = NewOperation(v4, compiler.NewContext("get", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation put = 5; + v5 := compiler.MapValueForKey(m, "put") + if v5 != nil { + var err error + x.Put, err = NewOperation(v5, compiler.NewContext("put", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation post = 6; + v6 := compiler.MapValueForKey(m, "post") + if v6 != nil { + var err error + x.Post, err = NewOperation(v6, compiler.NewContext("post", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation delete = 7; + v7 := compiler.MapValueForKey(m, "delete") + if v7 != nil { + var err error + x.Delete, err = NewOperation(v7, compiler.NewContext("delete", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation options = 8; + v8 := compiler.MapValueForKey(m, "options") + if v8 != nil { + var err error + x.Options, err = NewOperation(v8, compiler.NewContext("options", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation head = 9; + v9 := compiler.MapValueForKey(m, "head") + if v9 != nil { + var err error + x.Head, err = NewOperation(v9, compiler.NewContext("head", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation patch = 10; + v10 := compiler.MapValueForKey(m, "patch") + if v10 != nil { + var err error + x.Patch, err = NewOperation(v10, compiler.NewContext("patch", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation trace = 11; + v11 := compiler.MapValueForKey(m, "trace") + if v11 != nil { + var err error + x.Trace, err = NewOperation(v11, 
compiler.NewContext("trace", v11, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Server servers = 12; + v12 := compiler.MapValueForKey(m, "servers") + if v12 != nil { + // repeated Server + x.Servers = make([]*Server, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewServer(item, compiler.NewContext("servers", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Servers = append(x.Servers, y) + } + } + } + // repeated ParameterOrReference parameters = 13; + v13 := compiler.MapValueForKey(m, "parameters") + if v13 != nil { + // repeated ParameterOrReference + x.Parameters = make([]*ParameterOrReference, 0) + a, ok := compiler.SequenceNodeForNode(v13) + if ok { + for _, item := range a.Content { + y, err := NewParameterOrReference(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // repeated NamedAny specification_extension = 14; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPaths creates an object of type Paths if possible, returning an error if not. 
+func NewPaths(in *yaml.Node, context *compiler.Context) (*Paths, error) { + errors := make([]error, 0) + x := &Paths{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern2, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedPathItem path = 1; + // MAP: PathItem ^/ + x.Path = make([]*NamedPathItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "/") { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + // repeated NamedAny specification_extension = 2; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProperties creates an object of type Properties if possible, returning an error if not. +func NewProperties(in *yaml.Node, context *compiler.Context) (*Properties, error) { + errors := make([]error, 0) + x := &Properties{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchemaOrReference additional_properties = 1; + // MAP: SchemaOrReference + x.AdditionalProperties = make([]*NamedSchemaOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchemaOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSchemaOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewReference creates an object of type Reference if possible, returning an error if not. 
+func NewReference(in *yaml.Node, context *compiler.Context) (*Reference, error) {
+	errors := make([]error, 0)
+	x := &Reference{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		requiredKeys := []string{"$ref"}
+		missingKeys := compiler.MissingKeysInMap(m, requiredKeys)
+		if len(missingKeys) > 0 {
+			message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", "))
+			errors = append(errors, compiler.NewError(context, message))
+		}
+		// string _ref = 1;
+		v1 := compiler.MapValueForKey(m, "$ref")
+		if v1 != nil {
+			x.XRef, ok = compiler.StringForScalarNode(v1)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// string summary = 2;
+		v2 := compiler.MapValueForKey(m, "summary")
+		if v2 != nil {
+			x.Summary, ok = compiler.StringForScalarNode(v2)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v2))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+		// string description = 3;
+		v3 := compiler.MapValueForKey(m, "description")
+		if v3 != nil {
+			x.Description, ok = compiler.StringForScalarNode(v3)
+			if !ok {
+				message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3))
+				errors = append(errors, compiler.NewError(context, message))
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewRequestBodiesOrReferences creates an object of type RequestBodiesOrReferences if possible, returning an error if not.
+func NewRequestBodiesOrReferences(in *yaml.Node, context *compiler.Context) (*RequestBodiesOrReferences, error) {
+	errors := make([]error, 0)
+	x := &RequestBodiesOrReferences{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		// repeated NamedRequestBodyOrReference additional_properties = 1;
+		// MAP: RequestBodyOrReference
+		x.AdditionalProperties = make([]*NamedRequestBodyOrReference, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				pair := &NamedRequestBodyOrReference{}
+				pair.Name = k
+				var err error
+				pair.Value, err = NewRequestBodyOrReference(v, compiler.NewContext(k, v, context))
+				if err != nil {
+					errors = append(errors, err)
+				}
+				x.AdditionalProperties = append(x.AdditionalProperties, pair)
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewRequestBody creates an object of type RequestBody if possible, returning an error if not.
+func NewRequestBody(in *yaml.Node, context *compiler.Context) (*RequestBody, error) { + errors := make([]error, 0) + x := &RequestBody{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"content"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"content", "description", "required"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // MediaTypes content = 2; + v2 := compiler.MapValueForKey(m, "content") + if v2 != nil { + var err error + x.Content, err = NewMediaTypes(v2, compiler.NewContext("content", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // bool required = 3; + v3 := compiler.MapValueForKey(m, "required") + if v3 != nil { + x.Required, ok = compiler.BoolForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewRequestBodyOrReference creates an object of type RequestBodyOrReference if possible, returning an error if not. 
+func NewRequestBodyOrReference(in *yaml.Node, context *compiler.Context) (*RequestBodyOrReference, error) { + errors := make([]error, 0) + x := &RequestBodyOrReference{} + matched := false + // RequestBody request_body = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewRequestBody(m, compiler.NewContext("requestBody", m, context)) + if matchingError == nil { + x.Oneof = &RequestBodyOrReference_RequestBody{RequestBody: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &RequestBodyOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid RequestBodyOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. +func NewResponse(in *yaml.Node, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"description"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"content", "description", "headers", "links"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // HeadersOrReferences headers = 2; + v2 := compiler.MapValueForKey(m, "headers") + if v2 != nil { + var err error + x.Headers, err = NewHeadersOrReferences(v2, compiler.NewContext("headers", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // MediaTypes content = 3; + v3 := compiler.MapValueForKey(m, "content") + if v3 != nil { + var err error + x.Content, err = NewMediaTypes(v3, compiler.NewContext("content", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // LinksOrReferences links = 4; + v4 := compiler.MapValueForKey(m, "links") + if v4 != nil { + var err error + x.Links, err = NewLinksOrReferences(v4, compiler.NewContext("links", v4, context)) + if err != nil 
{ + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 5; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseOrReference creates an object of type ResponseOrReference if possible, returning an error if not. +func NewResponseOrReference(in *yaml.Node, context *compiler.Context) (*ResponseOrReference, error) { + errors := make([]error, 0) + x := &ResponseOrReference{} + matched := false + // Response response = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewResponse(m, compiler.NewContext("response", m, context)) + if matchingError == nil { + x.Oneof = &ResponseOrReference_Response{Response: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &ResponseOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid ResponseOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponses creates an object of type Responses if possible, returning an error if not. 
+func NewResponses(in *yaml.Node, context *compiler.Context) (*Responses, error) { + errors := make([]error, 0) + x := &Responses{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"default"} + allowedPatterns := []*regexp.Regexp{pattern3, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // ResponseOrReference default = 1; + v1 := compiler.MapValueForKey(m, "default") + if v1 != nil { + var err error + x.Default, err = NewResponseOrReference(v1, compiler.NewContext("default", v1, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedResponseOrReference response_or_reference = 2; + // MAP: ResponseOrReference ^([0-9X]{3})$ + x.ResponseOrReference = make([]*NamedResponseOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if pattern3.MatchString(k) { + pair := &NamedResponseOrReference{} + pair.Name = k + var err error + pair.Value, err = NewResponseOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.ResponseOrReference = append(x.ResponseOrReference, pair) + } + } + } + // repeated NamedAny specification_extension = 3; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponsesOrReferences creates an object of type ResponsesOrReferences if possible, returning an error if not. 
+func NewResponsesOrReferences(in *yaml.Node, context *compiler.Context) (*ResponsesOrReferences, error) { + errors := make([]error, 0) + x := &ResponsesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedResponseOrReference additional_properties = 1; + // MAP: ResponseOrReference + x.AdditionalProperties = make([]*NamedResponseOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedResponseOrReference{} + pair.Name = k + var err error + pair.Value, err = NewResponseOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchema creates an object of type Schema if possible, returning an error if not. +func NewSchema(in *yaml.Node, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"additionalProperties", "allOf", "anyOf", "default", "deprecated", "description", "discriminator", "enum", "example", "exclusiveMaximum", "exclusiveMinimum", "externalDocs", "format", "items", "maxItems", "maxLength", "maxProperties", "maximum", "minItems", "minLength", "minProperties", "minimum", "multipleOf", "not", "nullable", "oneOf", "pattern", "properties", "readOnly", "required", "title", "type", "uniqueItems", "writeOnly", "xml"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool nullable = 1; + v1 := compiler.MapValueForKey(m, "nullable") + if v1 != nil { + x.Nullable, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for nullable: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Discriminator discriminator = 2; + v2 := compiler.MapValueForKey(m, "discriminator") + if v2 != nil { + var err error + x.Discriminator, err = NewDiscriminator(v2, compiler.NewContext("discriminator", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // bool read_only = 3; + v3 := compiler.MapValueForKey(m, "readOnly") + if v3 != nil { + x.ReadOnly, ok = compiler.BoolForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool write_only = 4; + v4 := compiler.MapValueForKey(m, "writeOnly") + if v4 != nil { + x.WriteOnly, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for writeOnly: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Xml xml = 5; + v5 := compiler.MapValueForKey(m, "xml") + if v5 != nil { + var err error + x.Xml, err = NewXml(v5, compiler.NewContext("xml", v5, context)) + 
if err != nil { + errors = append(errors, err) + } + } + // ExternalDocs external_docs = 6; + v6 := compiler.MapValueForKey(m, "externalDocs") + if v6 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v6, compiler.NewContext("externalDocs", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 7; + v7 := compiler.MapValueForKey(m, "example") + if v7 != nil { + var err error + x.Example, err = NewAny(v7, compiler.NewContext("example", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // bool deprecated = 8; + v8 := compiler.MapValueForKey(m, "deprecated") + if v8 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 9; + v9 := compiler.MapValueForKey(m, "title") + if v9 != nil { + x.Title, ok = compiler.StringForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float multiple_of = 10; + v10 := compiler.MapValueForKey(m, "multipleOf") + if v10 != nil { + v, ok := compiler.FloatForScalarNode(v10) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + v, ok := compiler.FloatForScalarNode(v11) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + v, ok := compiler.FloatForScalarNode(v13) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v14) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := compiler.IntForScalarNode(v16) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } 
+ } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v17) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := compiler.IntForScalarNode(v19) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v20) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v20)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_properties = 21; + v21 := compiler.MapValueForKey(m, "maxProperties") + if v21 != nil { + t, ok := compiler.IntForScalarNode(v21) + if ok { + x.MaxProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxProperties: %s", compiler.Display(v21)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_properties = 22; + v22 := compiler.MapValueForKey(m, "minProperties") + if v22 != nil { + t, ok := compiler.IntForScalarNode(v22) + if ok { + x.MinProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minProperties: %s", compiler.Display(v22)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string required = 23; + v23 := compiler.MapValueForKey(m, "required") + if v23 != nil { + v, ok := compiler.SequenceNodeForNode(v23) + if ok { + x.Required = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v23)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 24; + v24 := compiler.MapValueForKey(m, "enum") + if v24 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v24) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // string type = 25; + v25 := compiler.MapValueForKey(m, "type") + if v25 != nil { + x.Type, ok = compiler.StringForScalarNode(v25) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v25)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SchemaOrReference all_of = 26; + v26 := compiler.MapValueForKey(m, "allOf") + if v26 != nil { + // repeated SchemaOrReference + x.AllOf = make([]*SchemaOrReference, 0) + a, ok := compiler.SequenceNodeForNode(v26) + if ok { + for _, item := range a.Content { + y, err := NewSchemaOrReference(item, compiler.NewContext("allOf", item, context)) + if err != nil { + errors = append(errors, err) + } + x.AllOf = append(x.AllOf, y) 
+ } + } + } + // repeated SchemaOrReference one_of = 27; + v27 := compiler.MapValueForKey(m, "oneOf") + if v27 != nil { + // repeated SchemaOrReference + x.OneOf = make([]*SchemaOrReference, 0) + a, ok := compiler.SequenceNodeForNode(v27) + if ok { + for _, item := range a.Content { + y, err := NewSchemaOrReference(item, compiler.NewContext("oneOf", item, context)) + if err != nil { + errors = append(errors, err) + } + x.OneOf = append(x.OneOf, y) + } + } + } + // repeated SchemaOrReference any_of = 28; + v28 := compiler.MapValueForKey(m, "anyOf") + if v28 != nil { + // repeated SchemaOrReference + x.AnyOf = make([]*SchemaOrReference, 0) + a, ok := compiler.SequenceNodeForNode(v28) + if ok { + for _, item := range a.Content { + y, err := NewSchemaOrReference(item, compiler.NewContext("anyOf", item, context)) + if err != nil { + errors = append(errors, err) + } + x.AnyOf = append(x.AnyOf, y) + } + } + } + // Schema not = 29; + v29 := compiler.MapValueForKey(m, "not") + if v29 != nil { + var err error + x.Not, err = NewSchema(v29, compiler.NewContext("not", v29, context)) + if err != nil { + errors = append(errors, err) + } + } + // ItemsItem items = 30; + v30 := compiler.MapValueForKey(m, "items") + if v30 != nil { + var err error + x.Items, err = NewItemsItem(v30, compiler.NewContext("items", v30, context)) + if err != nil { + errors = append(errors, err) + } + } + // Properties properties = 31; + v31 := compiler.MapValueForKey(m, "properties") + if v31 != nil { + var err error + x.Properties, err = NewProperties(v31, compiler.NewContext("properties", v31, context)) + if err != nil { + errors = append(errors, err) + } + } + // AdditionalPropertiesItem additional_properties = 32; + v32 := compiler.MapValueForKey(m, "additionalProperties") + if v32 != nil { + var err error + x.AdditionalProperties, err = NewAdditionalPropertiesItem(v32, compiler.NewContext("additionalProperties", v32, context)) + if err != nil { + errors = append(errors, err) + } + } + // DefaultType default = 33; + v33 := compiler.MapValueForKey(m, "default") + if v33 != nil { + var err error + x.Default, err = NewDefaultType(v33, compiler.NewContext("default", v33, context)) + if err != nil { + errors = append(errors, err) + } + } + // string description = 34; + v34 := compiler.MapValueForKey(m, "description") + if v34 != nil { + x.Description, ok = compiler.StringForScalarNode(v34) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v34)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 35; + v35 := compiler.MapValueForKey(m, "format") + if v35 != nil { + x.Format, ok = compiler.StringForScalarNode(v35) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v35)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 36; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, 
context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemaOrReference creates an object of type SchemaOrReference if possible, returning an error if not. +func NewSchemaOrReference(in *yaml.Node, context *compiler.Context) (*SchemaOrReference, error) { + errors := make([]error, 0) + x := &SchemaOrReference{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", m, context)) + if matchingError == nil { + x.Oneof = &SchemaOrReference_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &SchemaOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SchemaOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemasOrReferences creates an object of type SchemasOrReferences if possible, returning an error if not. +func NewSchemasOrReferences(in *yaml.Node, context *compiler.Context) (*SchemasOrReferences, error) { + errors := make([]error, 0) + x := &SchemasOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchemaOrReference additional_properties = 1; + // MAP: SchemaOrReference + x.AdditionalProperties = make([]*NamedSchemaOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchemaOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSchemaOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityRequirement creates an object of type SecurityRequirement if possible, returning an error if not. 
+func NewSecurityRequirement(in *yaml.Node, context *compiler.Context) (*SecurityRequirement, error) { + errors := make([]error, 0) + x := &SecurityRequirement{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedStringArray additional_properties = 1; + // MAP: StringArray + x.AdditionalProperties = make([]*NamedStringArray, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedStringArray{} + pair.Name = k + var err error + pair.Value, err = NewStringArray(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityScheme creates an object of type SecurityScheme if possible, returning an error if not. +func NewSecurityScheme(in *yaml.Node, context *compiler.Context) (*SecurityScheme, error) { + errors := make([]error, 0) + x := &SecurityScheme{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"bearerFormat", "description", "flows", "in", "name", "openIdConnectUrl", "scheme", "type"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 3; + v3 := compiler.MapValueForKey(m, "name") + if v3 != nil { + x.Name, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 4; + v4 := compiler.MapValueForKey(m, "in") + if v4 != nil { + x.In, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string scheme = 5; + v5 := compiler.MapValueForKey(m, "scheme") + if v5 != nil { + x.Scheme, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value 
for scheme: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string bearer_format = 6; + v6 := compiler.MapValueForKey(m, "bearerFormat") + if v6 != nil { + x.BearerFormat, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for bearerFormat: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // OauthFlows flows = 7; + v7 := compiler.MapValueForKey(m, "flows") + if v7 != nil { + var err error + x.Flows, err = NewOauthFlows(v7, compiler.NewContext("flows", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // string open_id_connect_url = 8; + v8 := compiler.MapValueForKey(m, "openIdConnectUrl") + if v8 != nil { + x.OpenIdConnectUrl, ok = compiler.StringForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for openIdConnectUrl: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 9; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecuritySchemeOrReference creates an object of type SecuritySchemeOrReference if possible, returning an error if not. +func NewSecuritySchemeOrReference(in *yaml.Node, context *compiler.Context) (*SecuritySchemeOrReference, error) { + errors := make([]error, 0) + x := &SecuritySchemeOrReference{} + matched := false + // SecurityScheme security_scheme = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSecurityScheme(m, compiler.NewContext("securityScheme", m, context)) + if matchingError == nil { + x.Oneof = &SecuritySchemeOrReference_SecurityScheme{SecurityScheme: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Reference reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewReference(m, compiler.NewContext("reference", m, context)) + if matchingError == nil { + x.Oneof = &SecuritySchemeOrReference_Reference{Reference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SecuritySchemeOrReference") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecuritySchemesOrReferences creates an object of type SecuritySchemesOrReferences if possible, returning an error if not. 
+func NewSecuritySchemesOrReferences(in *yaml.Node, context *compiler.Context) (*SecuritySchemesOrReferences, error) { + errors := make([]error, 0) + x := &SecuritySchemesOrReferences{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSecuritySchemeOrReference additional_properties = 1; + // MAP: SecuritySchemeOrReference + x.AdditionalProperties = make([]*NamedSecuritySchemeOrReference, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSecuritySchemeOrReference{} + pair.Name = k + var err error + pair.Value, err = NewSecuritySchemeOrReference(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServer creates an object of type Server if possible, returning an error if not. +func NewServer(in *yaml.Node, context *compiler.Context) (*Server, error) { + errors := make([]error, 0) + x := &Server{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url", "variables"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string url = 1; + v1 := compiler.MapValueForKey(m, "url") + if v1 != nil { + x.Url, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ServerVariables variables = 3; + v3 := compiler.MapValueForKey(m, "variables") + if v3 != nil { + var err error + x.Variables, err = NewServerVariables(v3, compiler.NewContext("variables", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + 
result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServerVariable creates an object of type ServerVariable if possible, returning an error if not. +func NewServerVariable(in *yaml.Node, context *compiler.Context) (*ServerVariable, error) { + errors := make([]error, 0) + x := &ServerVariable{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"default"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"default", "description", "enum"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string enum = 1; + v1 := compiler.MapValueForKey(m, "enum") + if v1 != nil { + v, ok := compiler.SequenceNodeForNode(v1) + if ok { + x.Enum = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for enum: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string default = 2; + v2 := compiler.MapValueForKey(m, "default") + if v2 != nil { + x.Default, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for default: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewServerVariables creates an object of type ServerVariables if possible, returning an error if not. 
+func NewServerVariables(in *yaml.Node, context *compiler.Context) (*ServerVariables, error) { + errors := make([]error, 0) + x := &ServerVariables{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedServerVariable additional_properties = 1; + // MAP: ServerVariable + x.AdditionalProperties = make([]*NamedServerVariable, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedServerVariable{} + pair.Name = k + var err error + pair.Value, err = NewServerVariable(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSpecificationExtension creates an object of type SpecificationExtension if possible, returning an error if not. +func NewSpecificationExtension(in *yaml.Node, context *compiler.Context) (*SpecificationExtension, error) { + errors := make([]error, 0) + x := &SpecificationExtension{} + matched := false + switch in.Tag { + case "!!bool": + var v bool + v, matched = compiler.BoolForScalarNode(in) + x.Oneof = &SpecificationExtension_Boolean{Boolean: v} + case "!!str": + var v string + v, matched = compiler.StringForScalarNode(in) + x.Oneof = &SpecificationExtension_String_{String_: v} + case "!!float": + var v float64 + v, matched = compiler.FloatForScalarNode(in) + x.Oneof = &SpecificationExtension_Number{Number: v} + case "!!int": + var v int64 + v, matched = compiler.IntForScalarNode(in) + x.Oneof = &SpecificationExtension_Number{Number: float64(v)} + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in *yaml.Node, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + x.Value = make([]string, 0) + for _, node := range in.Content { + s, _ := compiler.StringForScalarNode(node) + x.Value = append(x.Value, s) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStrings creates an object of type Strings if possible, returning an error if not. +func NewStrings(in *yaml.Node, context *compiler.Context) (*Strings, error) { + errors := make([]error, 0) + x := &Strings{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedString additional_properties = 1; + // MAP: string + x.AdditionalProperties = make([]*NamedString, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedString{} + pair.Name = k + pair.Value, _ = compiler.StringForScalarNode(v) + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTag creates an object of type Tag if possible, returning an error if not. 
+func NewTag(in *yaml.Node, context *compiler.Context) (*Tag, error) { + errors := make([]error, 0) + x := &Tag{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "externalDocs", "name"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 3; + v3 := compiler.MapValueForKey(m, "externalDocs") + if v3 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v3, compiler.NewContext("externalDocs", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny specification_extension = 4; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewXml creates an object of type Xml if possible, returning an error if not. 
+func NewXml(in *yaml.Node, context *compiler.Context) (*Xml, error) { + errors := make([]error, 0) + x := &Xml{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"attribute", "name", "namespace", "prefix", "wrapped"} + allowedPatterns := []*regexp.Regexp{pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string namespace = 2; + v2 := compiler.MapValueForKey(m, "namespace") + if v2 != nil { + x.Namespace, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for namespace: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string prefix = 3; + v3 := compiler.MapValueForKey(m, "prefix") + if v3 != nil { + x.Prefix, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for prefix: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool attribute = 4; + v4 := compiler.MapValueForKey(m, "attribute") + if v4 != nil { + x.Attribute, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for attribute: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool wrapped = 5; + v5 := compiler.MapValueForKey(m, "wrapped") + if v5 != nil { + x.Wrapped, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for wrapped: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny specification_extension = 6; + // MAP: Any ^x- + x.SpecificationExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.SpecificationExtension = append(x.SpecificationExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AdditionalPropertiesItem objects. 
+func (m *AdditionalPropertiesItem) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	{
+		p, ok := m.Oneof.(*AdditionalPropertiesItem_SchemaOrReference)
+		if ok {
+			_, err := p.SchemaOrReference.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Any objects.
+func (m *Any) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside AnyOrExpression objects.
+func (m *AnyOrExpression) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	{
+		p, ok := m.Oneof.(*AnyOrExpression_Any)
+		if ok {
+			_, err := p.Any.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	{
+		p, ok := m.Oneof.(*AnyOrExpression_Expression)
+		if ok {
+			_, err := p.Expression.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Callback objects.
+func (m *Callback) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.Path {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	for _, item := range m.SpecificationExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside CallbackOrReference objects.
+func (m *CallbackOrReference) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	{
+		p, ok := m.Oneof.(*CallbackOrReference_Callback)
+		if ok {
+			_, err := p.Callback.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	{
+		p, ok := m.Oneof.(*CallbackOrReference_Reference)
+		if ok {
+			_, err := p.Reference.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside CallbacksOrReferences objects.
+func (m *CallbacksOrReferences) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.AdditionalProperties {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Components objects.
+func (m *Components) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Schemas != nil {
+		_, err := m.Schemas.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Responses != nil {
+		_, err := m.Responses.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Parameters != nil {
+		_, err := m.Parameters.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Examples != nil {
+		_, err := m.Examples.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.RequestBodies != nil {
+		_, err := m.RequestBodies.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Headers != nil {
+		_, err := m.Headers.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.SecuritySchemes != nil {
+		_, err := m.SecuritySchemes.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Links != nil {
+		_, err := m.Links.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	if m.Callbacks != nil {
+		_, err := m.Callbacks.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	for _, item := range m.SpecificationExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Contact objects.
+func (m *Contact) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.SpecificationExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside DefaultType objects.
+func (m *DefaultType) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Discriminator objects.
+func (m *Discriminator) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Mapping != nil {
+		_, err := m.Mapping.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	for _, item := range m.SpecificationExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Document objects.
+func (m *Document) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Info != nil { + _, err := m.Info.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Paths != nil { + _, err := m.Paths.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Components != nil { + _, err := m.Components.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Tags { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Encoding objects. +func (m *Encoding) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Encodings objects. +func (m *Encodings) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Example objects. +func (m *Example) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExampleOrReference objects. +func (m *ExampleOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ExampleOrReference_Example) + if ok { + _, err := p.Example.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ExampleOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExamplesOrReferences objects. 
+func (m *ExamplesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Expression objects. +func (m *Expression) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExternalDocs objects. +func (m *ExternalDocs) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Header objects. +func (m *Header) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeaderOrReference objects. +func (m *HeaderOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*HeaderOrReference_Header) + if ok { + _, err := p.Header.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*HeaderOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeadersOrReferences objects. +func (m *HeadersOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Info objects. 
+func (m *Info) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Contact != nil { + _, err := m.Contact.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.License != nil { + _, err := m.License.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ItemsItem objects. +func (m *ItemsItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.SchemaOrReference { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside License objects. +func (m *License) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Link objects. +func (m *Link) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.RequestBody != nil { + _, err := m.RequestBody.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Server != nil { + _, err := m.Server.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside LinkOrReference objects. +func (m *LinkOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*LinkOrReference_Link) + if ok { + _, err := p.Link.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*LinkOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside LinksOrReferences objects. +func (m *LinksOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside MediaType objects. 
+func (m *MediaType) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Encoding != nil { + _, err := m.Encoding.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside MediaTypes objects. +func (m *MediaTypes) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedAny objects. +func (m *NamedAny) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedCallbackOrReference objects. +func (m *NamedCallbackOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedEncoding objects. +func (m *NamedEncoding) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedExampleOrReference objects. +func (m *NamedExampleOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedHeaderOrReference objects. +func (m *NamedHeaderOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedLinkOrReference objects. 
+func (m *NamedLinkOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedMediaType objects. +func (m *NamedMediaType) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedParameterOrReference objects. +func (m *NamedParameterOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedPathItem objects. +func (m *NamedPathItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedRequestBodyOrReference objects. +func (m *NamedRequestBodyOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedResponseOrReference objects. +func (m *NamedResponseOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSchemaOrReference objects. +func (m *NamedSchemaOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedSecuritySchemeOrReference objects. +func (m *NamedSecuritySchemeOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedServerVariable objects. +func (m *NamedServerVariable) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedString objects. 
+func (m *NamedString) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside NamedStringArray objects. +func (m *NamedStringArray) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Value != nil { + _, err := m.Value.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside OauthFlow objects. +func (m *OauthFlow) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside OauthFlows objects. +func (m *OauthFlows) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Implicit != nil { + _, err := m.Implicit.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Password != nil { + _, err := m.Password.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ClientCredentials != nil { + _, err := m.ClientCredentials.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AuthorizationCode != nil { + _, err := m.AuthorizationCode.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Object objects. +func (m *Object) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Operation objects. 
+func (m *Operation) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.RequestBody != nil { + _, err := m.RequestBody.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Callbacks != nil { + _, err := m.Callbacks.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParameterOrReference objects. +func (m *ParameterOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ParameterOrReference_Parameter) + if ok { + _, err := p.Parameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ParameterOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParametersOrReferences objects. +func (m *ParametersOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathItem objects. 
+func (m *PathItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewPathItem(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Get != nil { + _, err := m.Get.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Put != nil { + _, err := m.Put.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Post != nil { + _, err := m.Post.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Delete != nil { + _, err := m.Delete.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Options != nil { + _, err := m.Options.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Head != nil { + _, err := m.Head.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Patch != nil { + _, err := m.Patch.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Trace != nil { + _, err := m.Trace.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Servers { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Paths objects. +func (m *Paths) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Properties objects. +func (m *Properties) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Reference objects. +func (m *Reference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewReference(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBodiesOrReferences objects. 
+func (m *RequestBodiesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBody objects. +func (m *RequestBody) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside RequestBodyOrReference objects. +func (m *RequestBodyOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*RequestBodyOrReference_RequestBody) + if ok { + _, err := p.RequestBody.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*RequestBodyOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Content != nil { + _, err := m.Content.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Links != nil { + _, err := m.Links.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseOrReference objects. +func (m *ResponseOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ResponseOrReference_Response) + if ok { + _, err := p.Response.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ResponseOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Responses objects. 
+func (m *Responses) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.ResponseOrReference { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponsesOrReferences objects. +func (m *ResponsesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Discriminator != nil { + _, err := m.Discriminator.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Xml != nil { + _, err := m.Xml.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.AllOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.OneOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.AnyOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Not != nil { + _, err := m.Not.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemaOrReference objects. 
+func (m *SchemaOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SchemaOrReference_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SchemaOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemasOrReferences objects. +func (m *SchemasOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityRequirement objects. +func (m *SecurityRequirement) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityScheme objects. +func (m *SecurityScheme) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Flows != nil { + _, err := m.Flows.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecuritySchemeOrReference objects. +func (m *SecuritySchemeOrReference) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SecuritySchemeOrReference_SecurityScheme) + if ok { + _, err := p.SecurityScheme.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecuritySchemeOrReference_Reference) + if ok { + _, err := p.Reference.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecuritySchemesOrReferences objects. +func (m *SecuritySchemesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Server objects. +func (m *Server) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Variables != nil { + _, err := m.Variables.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ServerVariable objects. 
+func (m *ServerVariable) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ServerVariables objects. +func (m *ServerVariables) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SpecificationExtension objects. +func (m *SpecificationExtension) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Strings objects. +func (m *Strings) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Tag objects. +func (m *Tag) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Xml objects. +func (m *Xml) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.SpecificationExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. +func (m *AdditionalPropertiesItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // AdditionalPropertiesItem + // {Name:schemaOrReference Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchemaOrReference() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return compiler.NewScalarNodeForBool(v1.Boolean) + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. 
+func (m *Any) ToRawInfo() *yaml.Node { + var err error + var node yaml.Node + err = yaml.Unmarshal([]byte(m.Yaml), &node) + if err == nil { + if node.Kind == yaml.DocumentNode { + return node.Content[0] + } + return &node + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of AnyOrExpression suitable for JSON or YAML export. +func (m *AnyOrExpression) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // AnyOrExpression + // {Name:any Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetAny() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:expression Type:Expression StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetExpression() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Callback suitable for JSON or YAML export. +func (m *Callback) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Path != nil { + for _, item := range m.Path { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of CallbackOrReference suitable for JSON or YAML export. +func (m *CallbackOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // CallbackOrReference + // {Name:callback Type:Callback StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetCallback() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of CallbacksOrReferences suitable for JSON or YAML export. +func (m *CallbacksOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Components suitable for JSON or YAML export. 
+func (m *Components) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Schemas != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemas")) + info.Content = append(info.Content, m.Schemas.ToRawInfo()) + } + if m.Responses != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + } + if m.Parameters != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, m.Parameters.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.RequestBodies != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBodies")) + info.Content = append(info.Content, m.RequestBodies.ToRawInfo()) + } + if m.Headers != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) + info.Content = append(info.Content, m.Headers.ToRawInfo()) + } + if m.SecuritySchemes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("securitySchemes")) + info.Content = append(info.Content, m.SecuritySchemes.ToRawInfo()) + } + if m.Links != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("links")) + info.Content = append(info.Content, m.Links.ToRawInfo()) + } + if m.Callbacks != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("callbacks")) + info.Content = append(info.Content, m.Callbacks.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Contact suitable for JSON or YAML export. +func (m *Contact) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.Email != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("email")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Email)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of DefaultType suitable for JSON or YAML export. 
+func (m *DefaultType) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // DefaultType + // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v0, ok := m.GetOneof().(*DefaultType_Number); ok { + return compiler.NewScalarNodeForFloat(v0.Number) + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*DefaultType_Boolean); ok { + return compiler.NewScalarNodeForBool(v1.Boolean) + } + // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v2, ok := m.GetOneof().(*DefaultType_String_); ok { + return compiler.NewScalarNodeForString(v2.String_) + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Discriminator suitable for JSON or YAML export. +func (m *Discriminator) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("propertyName")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.PropertyName)) + if m.Mapping != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("mapping")) + info.Content = append(info.Content, m.Mapping.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. +func (m *Document) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("openapi")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Openapi)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("info")) + info.Content = append(info.Content, m.Info.ToRawInfo()) + if len(m.Servers) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Servers { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) + info.Content = append(info.Content, items) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("paths")) + info.Content = append(info.Content, m.Paths.ToRawInfo()) + if m.Components != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("components")) + info.Content = append(info.Content, m.Components.ToRawInfo()) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if len(m.Tags) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Tags { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, items) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Encoding suitable for JSON or YAML export. +func (m *Encoding) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.ContentType != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("contentType")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.ContentType)) + } + if m.Headers != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) + info.Content = append(info.Content, m.Headers.ToRawInfo()) + } + if m.Style != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) + } + if m.Explode != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) + } + if m.AllowReserved != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Encodings suitable for JSON or YAML export. +func (m *Encodings) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Example suitable for JSON or YAML export. 
+func (m *Example) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Value != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, m.Value.ToRawInfo()) + } + if m.ExternalValue != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.ExternalValue)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ExampleOrReference suitable for JSON or YAML export. +func (m *ExampleOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ExampleOrReference + // {Name:example Type:Example StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetExample() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ExamplesOrReferences suitable for JSON or YAML export. +func (m *ExamplesOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Expression suitable for JSON or YAML export. +func (m *Expression) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. +func (m *ExternalDocs) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Header suitable for JSON or YAML export. +func (m *Header) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Style != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) + } + if m.Explode != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) + } + if m.AllowReserved != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) + } + if m.Schema != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.Content != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) + info.Content = append(info.Content, m.Content.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of HeaderOrReference suitable for JSON or YAML export. 
+func (m *HeaderOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // HeaderOrReference + // {Name:header Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetHeader() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of HeadersOrReferences suitable for JSON or YAML export. +func (m *HeadersOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Info suitable for JSON or YAML export. +func (m *Info) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.TermsOfService != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("termsOfService")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TermsOfService)) + } + if m.Contact != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("contact")) + info.Content = append(info.Content, m.Contact.ToRawInfo()) + } + if m.License != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("license")) + info.Content = append(info.Content, m.License.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("version")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Version)) + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. +func (m *ItemsItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.SchemaOrReference) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.SchemaOrReference { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemaOrReference")) + info.Content = append(info.Content, items) + } + return info +} + +// ToRawInfo returns a description of License suitable for JSON or YAML export. 
+func (m *License) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Link suitable for JSON or YAML export. +func (m *Link) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.OperationRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("operationRef")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationRef)) + } + if m.OperationId != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) + } + if m.Parameters != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, m.Parameters.ToRawInfo()) + } + if m.RequestBody != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBody")) + info.Content = append(info.Content, m.RequestBody.ToRawInfo()) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Server != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("server")) + info.Content = append(info.Content, m.Server.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of LinkOrReference suitable for JSON or YAML export. +func (m *LinkOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // LinkOrReference + // {Name:link Type:Link StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetLink() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of LinksOrReferences suitable for JSON or YAML export. +func (m *LinksOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of MediaType suitable for JSON or YAML export. 
+func (m *MediaType) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Schema != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.Encoding != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("encoding")) + info.Content = append(info.Content, m.Encoding.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of MediaTypes suitable for JSON or YAML export. +func (m *MediaTypes) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. +func (m *NamedAny) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, m.Value.ToRawInfo()) + } + return info +} + +// ToRawInfo returns a description of NamedCallbackOrReference suitable for JSON or YAML export. +func (m *NamedCallbackOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:CallbackOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedEncoding suitable for JSON or YAML export. +func (m *NamedEncoding) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Encoding StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedExampleOrReference suitable for JSON or YAML export. 
+func (m *NamedExampleOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ExampleOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedHeaderOrReference suitable for JSON or YAML export. +func (m *NamedHeaderOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:HeaderOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedLinkOrReference suitable for JSON or YAML export. +func (m *NamedLinkOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:LinkOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedMediaType suitable for JSON or YAML export. +func (m *NamedMediaType) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:MediaType StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameterOrReference suitable for JSON or YAML export. +func (m *NamedParameterOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ParameterOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. +func (m *NamedPathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedRequestBodyOrReference suitable for JSON or YAML export. 
+func (m *NamedRequestBodyOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:RequestBodyOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponseOrReference suitable for JSON or YAML export. +func (m *NamedResponseOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ResponseOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchemaOrReference suitable for JSON or YAML export. +func (m *NamedSchemaOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSecuritySchemeOrReference suitable for JSON or YAML export. +func (m *NamedSecuritySchemeOrReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:SecuritySchemeOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedServerVariable suitable for JSON or YAML export. +func (m *NamedServerVariable) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ServerVariable StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. +func (m *NamedString) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Value)) + } + return info +} + +// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. 
+func (m *NamedStringArray) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of OauthFlow suitable for JSON or YAML export. +func (m *OauthFlow) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AuthorizationUrl != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) + } + if m.TokenUrl != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + } + if m.RefreshUrl != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("refreshUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.RefreshUrl)) + } + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of OauthFlows suitable for JSON or YAML export. +func (m *OauthFlows) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Implicit != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("implicit")) + info.Content = append(info.Content, m.Implicit.ToRawInfo()) + } + if m.Password != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("password")) + info.Content = append(info.Content, m.Password.ToRawInfo()) + } + if m.ClientCredentials != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("clientCredentials")) + info.Content = append(info.Content, m.ClientCredentials.ToRawInfo()) + } + if m.AuthorizationCode != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationCode")) + info.Content = append(info.Content, m.AuthorizationCode.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Object suitable for JSON or YAML export. +func (m *Object) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
+func (m *Operation) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Tags) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Tags)) + } + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.OperationId != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + if m.RequestBody != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBody")) + info.Content = append(info.Content, m.RequestBody.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + if m.Callbacks != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("callbacks")) + info.Content = append(info.Content, m.Callbacks.ToRawInfo()) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if len(m.Servers) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Servers { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) + info.Content = append(info.Content, items) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. +func (m *Parameter) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Style != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) + } + if m.Explode != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) + } + if m.AllowReserved != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) + } + if m.Schema != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.Content != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) + info.Content = append(info.Content, m.Content.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ParameterOrReference suitable for JSON or YAML export. +func (m *ParameterOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ParameterOrReference + // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ParametersOrReferences suitable for JSON or YAML export. 
+func (m *ParametersOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. +func (m *PathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.XRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + } + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Get != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("get")) + info.Content = append(info.Content, m.Get.ToRawInfo()) + } + if m.Put != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("put")) + info.Content = append(info.Content, m.Put.ToRawInfo()) + } + if m.Post != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("post")) + info.Content = append(info.Content, m.Post.ToRawInfo()) + } + if m.Delete != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("delete")) + info.Content = append(info.Content, m.Delete.ToRawInfo()) + } + if m.Options != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("options")) + info.Content = append(info.Content, m.Options.ToRawInfo()) + } + if m.Head != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("head")) + info.Content = append(info.Content, m.Head.ToRawInfo()) + } + if m.Patch != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("patch")) + info.Content = append(info.Content, m.Patch.ToRawInfo()) + } + if m.Trace != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("trace")) + info.Content = append(info.Content, m.Trace.ToRawInfo()) + } + if len(m.Servers) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Servers { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) + info.Content = append(info.Content, items) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Paths suitable for JSON or YAML export. 
+func (m *Paths) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Path != nil { + for _, item := range m.Path { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Properties suitable for JSON or YAML export. +func (m *Properties) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Reference suitable for JSON or YAML export. +func (m *Reference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + return info +} + +// ToRawInfo returns a description of RequestBodiesOrReferences suitable for JSON or YAML export. +func (m *RequestBodiesOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of RequestBody suitable for JSON or YAML export. +func (m *RequestBody) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) + info.Content = append(info.Content, m.Content.ToRawInfo()) + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of RequestBodyOrReference suitable for JSON or YAML export. 
+func (m *RequestBodyOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // RequestBodyOrReference + // {Name:requestBody Type:RequestBody StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetRequestBody() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. +func (m *Response) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + if m.Headers != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) + info.Content = append(info.Content, m.Headers.ToRawInfo()) + } + if m.Content != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) + info.Content = append(info.Content, m.Content.ToRawInfo()) + } + if m.Links != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("links")) + info.Content = append(info.Content, m.Links.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponseOrReference suitable for JSON or YAML export. +func (m *ResponseOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ResponseOrReference + // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetResponse() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Responses suitable for JSON or YAML export. +func (m *Responses) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.ResponseOrReference != nil { + for _, item := range m.ResponseOrReference { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponsesOrReferences suitable for JSON or YAML export. 
+func (m *ResponsesOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. +func (m *Schema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Nullable != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("nullable")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Nullable)) + } + if m.Discriminator != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("discriminator")) + info.Content = append(info.Content, m.Discriminator.ToRawInfo()) + } + if m.ReadOnly != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) + } + if m.WriteOnly != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("writeOnly")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.WriteOnly)) + } + if m.Xml != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("xml")) + info.Content = append(info.Content, m.Xml.ToRawInfo()) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if m.Title != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, 
compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if m.MaxProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxProperties)) + } + if m.MinProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinProperties)) + } + if len(m.Required) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if len(m.AllOf) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.AllOf { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("allOf")) + info.Content = append(info.Content, items) + } + if len(m.OneOf) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.OneOf { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("oneOf")) + info.Content = append(info.Content, items) + } + if len(m.AnyOf) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.AnyOf { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("anyOf")) + info.Content = append(info.Content, items) + } + if m.Not != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("not")) + info.Content = append(info.Content, m.Not.ToRawInfo()) + } + if m.Items != nil { + items := compiler.NewSequenceNode() + for _, item := range m.Items.SchemaOrReference { + items.Content = append(items.Content, item.ToRawInfo()) + } + if len(items.Content) == 1 { + items = items.Content[0] + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, items) + } + if m.Properties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("properties")) + info.Content = append(info.Content, m.Properties.ToRawInfo()) + } + 
if m.AdditionalProperties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("additionalProperties")) + info.Content = append(info.Content, m.AdditionalProperties.ToRawInfo()) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SchemaOrReference suitable for JSON or YAML export. +func (m *SchemaOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SchemaOrReference + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SchemasOrReferences suitable for JSON or YAML export. +func (m *SchemasOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. +func (m *SecurityRequirement) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SecurityScheme suitable for JSON or YAML export. +func (m *SecurityScheme) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Scheme != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scheme")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Scheme)) + } + if m.BearerFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("bearerFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.BearerFormat)) + } + if m.Flows != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("flows")) + info.Content = append(info.Content, m.Flows.ToRawInfo()) + } + if m.OpenIdConnectUrl != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("openIdConnectUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OpenIdConnectUrl)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SecuritySchemeOrReference suitable for JSON or YAML export. +func (m *SecuritySchemeOrReference) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SecuritySchemeOrReference + // {Name:securityScheme Type:SecurityScheme StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSecurityScheme() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SecuritySchemesOrReferences suitable for JSON or YAML export. +func (m *SecuritySchemesOrReferences) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Server suitable for JSON or YAML export. +func (m *Server) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Variables != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("variables")) + info.Content = append(info.Content, m.Variables.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ServerVariable suitable for JSON or YAML export. +func (m *ServerVariable) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Enum) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Enum)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Default)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ServerVariables suitable for JSON or YAML export. +func (m *ServerVariables) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SpecificationExtension suitable for JSON or YAML export. +func (m *SpecificationExtension) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SpecificationExtension + // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v0, ok := m.GetOneof().(*SpecificationExtension_Number); ok { + return compiler.NewScalarNodeForFloat(v0.Number) + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*SpecificationExtension_Boolean); ok { + return compiler.NewScalarNodeForBool(v1.Boolean) + } + // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v2, ok := m.GetOneof().(*SpecificationExtension_String_); ok { + return compiler.NewScalarNodeForString(v2.String_) + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() *yaml.Node { + return compiler.NewSequenceNodeForStringArray(m.Value) +} + +// ToRawInfo returns a description of Strings suitable for JSON or YAML export. 
+func (m *Strings) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Tag suitable for JSON or YAML export. +func (m *Tag) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Xml suitable for JSON or YAML export. +func (m *Xml) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Namespace != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("namespace")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Namespace)) + } + if m.Prefix != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("prefix")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Prefix)) + } + if m.Attribute != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("attribute")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Attribute)) + } + if m.Wrapped != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("wrapped")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Wrapped)) + } + if m.SpecificationExtension != nil { + for _, item := range m.SpecificationExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +var ( + pattern0 = regexp.MustCompile("^") + pattern1 = regexp.MustCompile("^x-") + pattern2 = regexp.MustCompile("^/") + pattern3 = regexp.MustCompile("^([0-9X]{3})$") +) diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go similarity index 99% rename from vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go rename to vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go index 90a56f552..945b8d11f 100644 --- a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go @@ -16,8 +16,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.28.0 -// protoc v3.19.4 +// protoc-gen-go v1.27.1 +// protoc v3.19.3 // source: openapiv3/OpenAPIv3.proto package openapi_v3 @@ -6760,13 +6760,12 @@ var file_openapiv3_OpenAPIv3_proto_rawDesc = []byte{ 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x16, 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x56, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x3e, 0x0a, 0x0e, 0x6f, 0x72, 0x67, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0x42, 0x0c, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, - 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2f, 0x6f, 0x70, 0x65, 0x6e, - 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, - 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x5a, 0x16, 0x2e, 0x2f, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto new file mode 100644 index 000000000..1be335b89 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto @@ -0,0 +1,672 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +syntax = "proto3"; + +package openapi.v3; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v3"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. 
Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +// The Go package name. +option go_package = "./openapiv3;openapi_v3"; + +message AdditionalPropertiesItem { + oneof oneof { + SchemaOrReference schema_or_reference = 1; + bool boolean = 2; + } +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message AnyOrExpression { + oneof oneof { + Any any = 1; + Expression expression = 2; + } +} + +// A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation. +message Callback { + repeated NamedPathItem path = 1; + repeated NamedAny specification_extension = 2; +} + +message CallbackOrReference { + oneof oneof { + Callback callback = 1; + Reference reference = 2; + } +} + +message CallbacksOrReferences { + repeated NamedCallbackOrReference additional_properties = 1; +} + +// Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object. +message Components { + SchemasOrReferences schemas = 1; + ResponsesOrReferences responses = 2; + ParametersOrReferences parameters = 3; + ExamplesOrReferences examples = 4; + RequestBodiesOrReferences request_bodies = 5; + HeadersOrReferences headers = 6; + SecuritySchemesOrReferences security_schemes = 7; + LinksOrReferences links = 8; + CallbacksOrReferences callbacks = 9; + repeated NamedAny specification_extension = 10; +} + +// Contact information for the exposed API. +message Contact { + string name = 1; + string url = 2; + string email = 3; + repeated NamedAny specification_extension = 4; +} + +message DefaultType { + oneof oneof { + double number = 1; + bool boolean = 2; + string string = 3; + } +} + +// When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered. +message Discriminator { + string property_name = 1; + Strings mapping = 2; + repeated NamedAny specification_extension = 3; +} + +message Document { + string openapi = 1; + Info info = 2; + repeated Server servers = 3; + Paths paths = 4; + Components components = 5; + repeated SecurityRequirement security = 6; + repeated Tag tags = 7; + ExternalDocs external_docs = 8; + repeated NamedAny specification_extension = 9; +} + +// A single encoding definition applied to a single schema property. 
+message Encoding { + string content_type = 1; + HeadersOrReferences headers = 2; + string style = 3; + bool explode = 4; + bool allow_reserved = 5; + repeated NamedAny specification_extension = 6; +} + +message Encodings { + repeated NamedEncoding additional_properties = 1; +} + +message Example { + string summary = 1; + string description = 2; + Any value = 3; + string external_value = 4; + repeated NamedAny specification_extension = 5; +} + +message ExampleOrReference { + oneof oneof { + Example example = 1; + Reference reference = 2; + } +} + +message ExamplesOrReferences { + repeated NamedExampleOrReference additional_properties = 1; +} + +message Expression { + repeated NamedAny additional_properties = 1; +} + +// Allows referencing an external resource for extended documentation. +message ExternalDocs { + string description = 1; + string url = 2; + repeated NamedAny specification_extension = 3; +} + +// The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`). +message Header { + string description = 1; + bool required = 2; + bool deprecated = 3; + bool allow_empty_value = 4; + string style = 5; + bool explode = 6; + bool allow_reserved = 7; + SchemaOrReference schema = 8; + Any example = 9; + ExamplesOrReferences examples = 10; + MediaTypes content = 11; + repeated NamedAny specification_extension = 12; +} + +message HeaderOrReference { + oneof oneof { + Header header = 1; + Reference reference = 2; + } +} + +message HeadersOrReferences { + repeated NamedHeaderOrReference additional_properties = 1; +} + +// The object provides metadata about the API. The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience. +message Info { + string title = 1; + string description = 2; + string terms_of_service = 3; + Contact contact = 4; + License license = 5; + string version = 6; + repeated NamedAny specification_extension = 7; + string summary = 8; +} + +message ItemsItem { + repeated SchemaOrReference schema_or_reference = 1; +} + +// License information for the exposed API. +message License { + string name = 1; + string url = 2; + repeated NamedAny specification_extension = 3; +} + +// The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation. 
+message Link { + string operation_ref = 1; + string operation_id = 2; + AnyOrExpression parameters = 3; + AnyOrExpression request_body = 4; + string description = 5; + Server server = 6; + repeated NamedAny specification_extension = 7; +} + +message LinkOrReference { + oneof oneof { + Link link = 1; + Reference reference = 2; + } +} + +message LinksOrReferences { + repeated NamedLinkOrReference additional_properties = 1; +} + +// Each Media Type Object provides schema and examples for the media type identified by its key. +message MediaType { + SchemaOrReference schema = 1; + Any example = 2; + ExamplesOrReferences examples = 3; + Encodings encoding = 4; + repeated NamedAny specification_extension = 5; +} + +message MediaTypes { + repeated NamedMediaType additional_properties = 1; +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +message NamedAny { + // Map key + string name = 1; + // Mapped value + Any value = 2; +} + +// Automatically-generated message used to represent maps of CallbackOrReference as ordered (name,value) pairs. +message NamedCallbackOrReference { + // Map key + string name = 1; + // Mapped value + CallbackOrReference value = 2; +} + +// Automatically-generated message used to represent maps of Encoding as ordered (name,value) pairs. +message NamedEncoding { + // Map key + string name = 1; + // Mapped value + Encoding value = 2; +} + +// Automatically-generated message used to represent maps of ExampleOrReference as ordered (name,value) pairs. +message NamedExampleOrReference { + // Map key + string name = 1; + // Mapped value + ExampleOrReference value = 2; +} + +// Automatically-generated message used to represent maps of HeaderOrReference as ordered (name,value) pairs. +message NamedHeaderOrReference { + // Map key + string name = 1; + // Mapped value + HeaderOrReference value = 2; +} + +// Automatically-generated message used to represent maps of LinkOrReference as ordered (name,value) pairs. +message NamedLinkOrReference { + // Map key + string name = 1; + // Mapped value + LinkOrReference value = 2; +} + +// Automatically-generated message used to represent maps of MediaType as ordered (name,value) pairs. +message NamedMediaType { + // Map key + string name = 1; + // Mapped value + MediaType value = 2; +} + +// Automatically-generated message used to represent maps of ParameterOrReference as ordered (name,value) pairs. +message NamedParameterOrReference { + // Map key + string name = 1; + // Mapped value + ParameterOrReference value = 2; +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. +message NamedPathItem { + // Map key + string name = 1; + // Mapped value + PathItem value = 2; +} + +// Automatically-generated message used to represent maps of RequestBodyOrReference as ordered (name,value) pairs. +message NamedRequestBodyOrReference { + // Map key + string name = 1; + // Mapped value + RequestBodyOrReference value = 2; +} + +// Automatically-generated message used to represent maps of ResponseOrReference as ordered (name,value) pairs. +message NamedResponseOrReference { + // Map key + string name = 1; + // Mapped value + ResponseOrReference value = 2; +} + +// Automatically-generated message used to represent maps of SchemaOrReference as ordered (name,value) pairs. 
+message NamedSchemaOrReference { + // Map key + string name = 1; + // Mapped value + SchemaOrReference value = 2; +} + +// Automatically-generated message used to represent maps of SecuritySchemeOrReference as ordered (name,value) pairs. +message NamedSecuritySchemeOrReference { + // Map key + string name = 1; + // Mapped value + SecuritySchemeOrReference value = 2; +} + +// Automatically-generated message used to represent maps of ServerVariable as ordered (name,value) pairs. +message NamedServerVariable { + // Map key + string name = 1; + // Mapped value + ServerVariable value = 2; +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +message NamedString { + // Map key + string name = 1; + // Mapped value + string value = 2; +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. +message NamedStringArray { + // Map key + string name = 1; + // Mapped value + StringArray value = 2; +} + +// Configuration details for a supported OAuth Flow +message OauthFlow { + string authorization_url = 1; + string token_url = 2; + string refresh_url = 3; + Strings scopes = 4; + repeated NamedAny specification_extension = 5; +} + +// Allows configuration of the supported OAuth Flows. +message OauthFlows { + OauthFlow implicit = 1; + OauthFlow password = 2; + OauthFlow client_credentials = 3; + OauthFlow authorization_code = 4; + repeated NamedAny specification_extension = 5; +} + +message Object { + repeated NamedAny additional_properties = 1; +} + +// Describes a single API operation on a path. +message Operation { + repeated string tags = 1; + string summary = 2; + string description = 3; + ExternalDocs external_docs = 4; + string operation_id = 5; + repeated ParameterOrReference parameters = 6; + RequestBodyOrReference request_body = 7; + Responses responses = 8; + CallbacksOrReferences callbacks = 9; + bool deprecated = 10; + repeated SecurityRequirement security = 11; + repeated Server servers = 12; + repeated NamedAny specification_extension = 13; +} + +// Describes a single operation parameter. A unique parameter is defined by a combination of a name and location. +message Parameter { + string name = 1; + string in = 2; + string description = 3; + bool required = 4; + bool deprecated = 5; + bool allow_empty_value = 6; + string style = 7; + bool explode = 8; + bool allow_reserved = 9; + SchemaOrReference schema = 10; + Any example = 11; + ExamplesOrReferences examples = 12; + MediaTypes content = 13; + repeated NamedAny specification_extension = 14; +} + +message ParameterOrReference { + oneof oneof { + Parameter parameter = 1; + Reference reference = 2; + } +} + +message ParametersOrReferences { + repeated NamedParameterOrReference additional_properties = 1; +} + +// Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available. +message PathItem { + string _ref = 1; + string summary = 2; + string description = 3; + Operation get = 4; + Operation put = 5; + Operation post = 6; + Operation delete = 7; + Operation options = 8; + Operation head = 9; + Operation patch = 10; + Operation trace = 11; + repeated Server servers = 12; + repeated ParameterOrReference parameters = 13; + repeated NamedAny specification_extension = 14; +} + +// Holds the relative paths to the individual endpoints and their operations. 
The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints. +message Paths { + repeated NamedPathItem path = 1; + repeated NamedAny specification_extension = 2; +} + +message Properties { + repeated NamedSchemaOrReference additional_properties = 1; +} + +// A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification. +message Reference { + string _ref = 1; + string summary = 2; + string description = 3; +} + +message RequestBodiesOrReferences { + repeated NamedRequestBodyOrReference additional_properties = 1; +} + +// Describes a single request body. +message RequestBody { + string description = 1; + MediaTypes content = 2; + bool required = 3; + repeated NamedAny specification_extension = 4; +} + +message RequestBodyOrReference { + oneof oneof { + RequestBody request_body = 1; + Reference reference = 2; + } +} + +// Describes a single response from an API Operation, including design-time, static `links` to operations based on the response. +message Response { + string description = 1; + HeadersOrReferences headers = 2; + MediaTypes content = 3; + LinksOrReferences links = 4; + repeated NamedAny specification_extension = 5; +} + +message ResponseOrReference { + oneof oneof { + Response response = 1; + Reference reference = 2; + } +} + +// A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call. +message Responses { + ResponseOrReference default = 1; + repeated NamedResponseOrReference response_or_reference = 2; + repeated NamedAny specification_extension = 3; +} + +message ResponsesOrReferences { + repeated NamedResponseOrReference additional_properties = 1; +} + +// The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. Unless stated otherwise, the property definitions follow the JSON Schema. 
+message Schema { + bool nullable = 1; + Discriminator discriminator = 2; + bool read_only = 3; + bool write_only = 4; + Xml xml = 5; + ExternalDocs external_docs = 6; + Any example = 7; + bool deprecated = 8; + string title = 9; + double multiple_of = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + int64 max_properties = 21; + int64 min_properties = 22; + repeated string required = 23; + repeated Any enum = 24; + string type = 25; + repeated SchemaOrReference all_of = 26; + repeated SchemaOrReference one_of = 27; + repeated SchemaOrReference any_of = 28; + Schema not = 29; + ItemsItem items = 30; + Properties properties = 31; + AdditionalPropertiesItem additional_properties = 32; + DefaultType default = 33; + string description = 34; + string format = 35; + repeated NamedAny specification_extension = 36; +} + +message SchemaOrReference { + oneof oneof { + Schema schema = 1; + Reference reference = 2; + } +} + +message SchemasOrReferences { + repeated NamedSchemaOrReference additional_properties = 1; +} + +// Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the OpenAPI Object or Operation Object, only one of the Security Requirement Objects in the list needs to be satisfied to authorize the request. +message SecurityRequirement { + repeated NamedStringArray additional_properties = 1; +} + +// Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header, a cookie parameter or as a query parameter), mutual TLS (use of a client certificate), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect. Please note that currently (2019) the implicit flow is about to be deprecated OAuth 2.0 Security Best Current Practice. Recommended for most use case is Authorization Code Grant flow with PKCE. +message SecurityScheme { + string type = 1; + string description = 2; + string name = 3; + string in = 4; + string scheme = 5; + string bearer_format = 6; + OauthFlows flows = 7; + string open_id_connect_url = 8; + repeated NamedAny specification_extension = 9; +} + +message SecuritySchemeOrReference { + oneof oneof { + SecurityScheme security_scheme = 1; + Reference reference = 2; + } +} + +message SecuritySchemesOrReferences { + repeated NamedSecuritySchemeOrReference additional_properties = 1; +} + +// An object representing a Server. +message Server { + string url = 1; + string description = 2; + ServerVariables variables = 3; + repeated NamedAny specification_extension = 4; +} + +// An object representing a Server Variable for server URL template substitution. 
+message ServerVariable { + repeated string enum = 1; + string default = 2; + string description = 3; + repeated NamedAny specification_extension = 4; +} + +message ServerVariables { + repeated NamedServerVariable additional_properties = 1; +} + +// Any property starting with x- is valid. +message SpecificationExtension { + oneof oneof { + double number = 1; + bool boolean = 2; + string string = 3; + } +} + +message StringArray { + repeated string value = 1; +} + +message Strings { + repeated NamedString additional_properties = 1; +} + +// Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances. +message Tag { + string name = 1; + string description = 2; + ExternalDocs external_docs = 3; + repeated NamedAny specification_extension = 4; +} + +// A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior. +message Xml { + string name = 1; + string namespace = 2; + string prefix = 3; + bool attribute = 4; + bool wrapped = 5; + repeated NamedAny specification_extension = 6; +} + diff --git a/vendor/github.com/google/gnostic-models/openapiv3/README.md b/vendor/github.com/google/gnostic-models/openapiv3/README.md new file mode 100644 index 000000000..5ee12d92e --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv3/README.md @@ -0,0 +1,21 @@ +# OpenAPI v3 Protocol Buffer Models + +This directory contains a Protocol Buffer-language model and related code for +supporting OpenAPI v3. + +Gnostic applications and plugins can use OpenAPIv3.proto to generate Protocol +Buffer support code for their preferred languages. + +OpenAPIv3.go is used by Gnostic to read JSON and YAML OpenAPI descriptions into +the Protocol Buffer-based datastructures generated from OpenAPIv3.proto. + +OpenAPIv3.proto and OpenAPIv3.go are generated by the Gnostic compiler +generator, and OpenAPIv3.pb.go is generated by protoc, the Protocol Buffer +compiler, and protoc-gen-go, the Protocol Buffer Go code generation plugin. + +openapi-3.1.json is a JSON schema for OpenAPI 3.1 that is automatically +generated from the OpenAPI 3.1 specification. It is not an official JSON Schema +for OpenAPI. + +The schema-generator directory contains support code which generates +openapi-3.1.json from the OpenAPI 3.1 specification document (Markdown). diff --git a/vendor/github.com/google/gnostic/openapiv3/annotations.pb.go b/vendor/github.com/google/gnostic-models/openapiv3/annotations.pb.go similarity index 90% rename from vendor/github.com/google/gnostic/openapiv3/annotations.pb.go rename to vendor/github.com/google/gnostic-models/openapiv3/annotations.pb.go index ae242f304..5c2b63e26 100644 --- a/vendor/github.com/google/gnostic/openapiv3/annotations.pb.go +++ b/vendor/github.com/google/gnostic-models/openapiv3/annotations.pb.go @@ -14,8 +14,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.28.0 -// protoc v3.19.4 +// protoc-gen-go v1.27.1 +// protoc v3.19.3 // source: openapiv3/annotations.proto package openapi_v3 @@ -98,10 +98,10 @@ var File_openapiv3_annotations_proto protoreflect.FileDescriptor var file_openapiv3_annotations_proto_rawDesc = []byte{ 0x0a, 0x1b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x6f, - 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x1a, 0x19, 0x6f, 0x70, 0x65, 0x6e, 0x61, - 0x70, 0x69, 0x76, 0x33, 0x2f, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x76, 0x33, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x1a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x2f, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x76, 0x33, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3a, 0x4f, 0x0a, 0x08, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, @@ -122,13 +122,12 @@ var file_openapiv3_annotations_proto_rawDesc = []byte{ 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xf7, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x42, 0x5a, 0x0a, 0x0e, 0x6f, + 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x42, 0x42, 0x0a, 0x0e, 0x6f, 0x72, 0x67, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0x42, 0x10, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, - 0x01, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2f, 0x6f, 0x70, 0x65, - 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, - 0x33, 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x01, 0x5a, 0x16, 0x2e, 0x2f, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var file_openapiv3_annotations_proto_goTypes = []interface{}{ diff --git a/vendor/github.com/google/gnostic-models/openapiv3/annotations.proto b/vendor/github.com/google/gnostic-models/openapiv3/annotations.proto new file mode 100644 index 000000000..09ee0aac5 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv3/annotations.proto @@ -0,0 +1,56 @@ +// Copyright 2022 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package openapi.v3; + +import "google/protobuf/descriptor.proto"; +import "openapiv3/OpenAPIv3.proto"; + +// The Go package name. +option go_package = "./openapiv3;openapi_v3"; +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "AnnotationsProto"; +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v3"; +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +extend google.protobuf.FileOptions { + Document document = 1143; +} + +extend google.protobuf.MethodOptions { + Operation operation = 1143; +} + +extend google.protobuf.MessageOptions { + Schema schema = 1143; +} + +extend google.protobuf.FieldOptions { + Schema property = 1143; +} diff --git a/vendor/github.com/google/gnostic-models/openapiv3/document.go b/vendor/github.com/google/gnostic-models/openapiv3/document.go new file mode 100644 index 000000000..1cee46773 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv3/document.go @@ -0,0 +1,42 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi_v3 + +import ( + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// ParseDocument reads an OpenAPI v3 description from a YAML/JSON representation. +func ParseDocument(b []byte) (*Document, error) { + info, err := compiler.ReadInfoFromBytes("", b) + if err != nil { + return nil, err + } + root := info.Content[0] + return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil)) +} + +// YAMLValue produces a serialized YAML representation of the document. 
+func (d *Document) YAMLValue(comment string) ([]byte, error) { + rawInfo := d.ToRawInfo() + rawInfo = &yaml.Node{ + Kind: yaml.DocumentNode, + Content: []*yaml.Node{rawInfo}, + HeadComment: comment, + } + return yaml.Marshal(rawInfo) +} diff --git a/vendor/github.com/google/gnostic/compiler/context.go b/vendor/github.com/google/gnostic/compiler/context.go index 1bfe96121..7ded14eed 100644 --- a/vendor/github.com/google/gnostic/compiler/context.go +++ b/vendor/github.com/google/gnostic/compiler/context.go @@ -15,35 +15,14 @@ package compiler import ( - yaml "gopkg.in/yaml.v3" + "github.com/google/gnostic-models/compiler" ) // Context contains state of the compiler as it traverses a document. -type Context struct { - Parent *Context - Name string - Node *yaml.Node - ExtensionHandlers *[]ExtensionHandler -} +type Context = compiler.Context // NewContextWithExtensions returns a new object representing the compiler state -func NewContextWithExtensions(name string, node *yaml.Node, parent *Context, extensionHandlers *[]ExtensionHandler) *Context { - return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: extensionHandlers} -} +var NewContextWithExtensions = compiler.NewContextWithExtensions // NewContext returns a new object representing the compiler state -func NewContext(name string, node *yaml.Node, parent *Context) *Context { - if parent != nil { - return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: parent.ExtensionHandlers} - } - return &Context{Name: name, Parent: parent, ExtensionHandlers: nil} -} - -// Description returns a text description of the compiler state -func (context *Context) Description() string { - name := context.Name - if context.Parent != nil { - name = context.Parent.Description() + "." + name - } - return name -} +var NewContext = compiler.NewContext diff --git a/vendor/github.com/google/gnostic/compiler/error.go b/vendor/github.com/google/gnostic/compiler/error.go index 6f40515d6..1272d9f19 100644 --- a/vendor/github.com/google/gnostic/compiler/error.go +++ b/vendor/github.com/google/gnostic/compiler/error.go @@ -14,57 +14,18 @@ package compiler -import "fmt" +import ( + "github.com/google/gnostic-models/compiler" +) // Error represents compiler errors and their location in the document. -type Error struct { - Context *Context - Message string -} +type Error = compiler.Error // NewError creates an Error. -func NewError(context *Context, message string) *Error { - return &Error{Context: context, Message: message} -} - -func (err *Error) locationDescription() string { - if err.Context.Node != nil { - return fmt.Sprintf("[%d,%d] %s", err.Context.Node.Line, err.Context.Node.Column, err.Context.Description()) - } - return err.Context.Description() -} - -// Error returns the string value of an Error. -func (err *Error) Error() string { - if err.Context == nil { - return err.Message - } - return err.locationDescription() + " " + err.Message -} +var NewError = compiler.NewError // ErrorGroup is a container for groups of Error values. -type ErrorGroup struct { - Errors []error -} +type ErrorGroup = compiler.ErrorGroup // NewErrorGroupOrNil returns a new ErrorGroup for a slice of errors or nil if the slice is empty. 
-func NewErrorGroupOrNil(errors []error) error { - if len(errors) == 0 { - return nil - } else if len(errors) == 1 { - return errors[0] - } else { - return &ErrorGroup{Errors: errors} - } -} - -func (group *ErrorGroup) Error() string { - result := "" - for i, err := range group.Errors { - if i > 0 { - result += "\n" - } - result += err.Error() - } - return result -} +var NewErrorGroupOrNil = compiler.NewErrorGroupOrNil diff --git a/vendor/github.com/google/gnostic/compiler/extensions.go b/vendor/github.com/google/gnostic/compiler/extensions.go index 5b5a916d2..cd751159e 100644 --- a/vendor/github.com/google/gnostic/compiler/extensions.go +++ b/vendor/github.com/google/gnostic/compiler/extensions.go @@ -15,72 +15,11 @@ package compiler import ( - "bytes" - "fmt" - "os/exec" - "strings" - - "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes/any" - yaml "gopkg.in/yaml.v3" - - extensions "github.com/google/gnostic/extensions" + "github.com/google/gnostic-models/compiler" ) // ExtensionHandler describes a binary that is called by the compiler to handle specification extensions. -type ExtensionHandler struct { - Name string -} +type ExtensionHandler = compiler.ExtensionHandler // CallExtension calls a binary extension handler. -func CallExtension(context *Context, in *yaml.Node, extensionName string) (handled bool, response *any.Any, err error) { - if context == nil || context.ExtensionHandlers == nil { - return false, nil, nil - } - handled = false - for _, handler := range *(context.ExtensionHandlers) { - response, err = handler.handle(in, extensionName) - if response == nil { - continue - } else { - handled = true - break - } - } - return handled, response, err -} - -func (extensionHandlers *ExtensionHandler) handle(in *yaml.Node, extensionName string) (*any.Any, error) { - if extensionHandlers.Name != "" { - yamlData, _ := yaml.Marshal(in) - request := &extensions.ExtensionHandlerRequest{ - CompilerVersion: &extensions.Version{ - Major: 0, - Minor: 1, - Patch: 0, - }, - Wrapper: &extensions.Wrapper{ - Version: "unknown", // TODO: set this to the type/version of spec being parsed. - Yaml: string(yamlData), - ExtensionName: extensionName, - }, - } - requestBytes, _ := proto.Marshal(request) - cmd := exec.Command(extensionHandlers.Name) - cmd.Stdin = bytes.NewReader(requestBytes) - output, err := cmd.Output() - if err != nil { - return nil, err - } - response := &extensions.ExtensionHandlerResponse{} - err = proto.Unmarshal(output, response) - if err != nil || !response.Handled { - return nil, err - } - if len(response.Errors) != 0 { - return nil, fmt.Errorf("Errors when parsing: %+v for field %s by vendor extension handler %s. Details %+v", in, extensionName, extensionHandlers.Name, strings.Join(response.Errors, ",")) - } - return response.Value, nil - } - return nil, nil -} +var CallExtension = compiler.CallExtension diff --git a/vendor/github.com/google/gnostic/compiler/helpers.go b/vendor/github.com/google/gnostic/compiler/helpers.go index 97ffaa513..490acc5f0 100644 --- a/vendor/github.com/google/gnostic/compiler/helpers.go +++ b/vendor/github.com/google/gnostic/compiler/helpers.go @@ -15,383 +15,91 @@ package compiler import ( - "fmt" - "regexp" - "sort" - "strconv" - - "gopkg.in/yaml.v3" - - "github.com/google/gnostic/jsonschema" + "github.com/google/gnostic-models/compiler" ) // compiler helper functions, usually called from generated code // UnpackMap gets a *yaml.Node if possible. 
-func UnpackMap(in *yaml.Node) (*yaml.Node, bool) { - if in == nil { - return nil, false - } - return in, true -} +var UnpackMap = compiler.UnpackMap // SortedKeysForMap returns the sorted keys of a yamlv2.MapSlice. -func SortedKeysForMap(m *yaml.Node) []string { - keys := make([]string, 0) - if m.Kind == yaml.MappingNode { - for i := 0; i < len(m.Content); i += 2 { - keys = append(keys, m.Content[i].Value) - } - } - sort.Strings(keys) - return keys -} +var SortedKeysForMap = compiler.SortedKeysForMap // MapHasKey returns true if a yamlv2.MapSlice contains a specified key. -func MapHasKey(m *yaml.Node, key string) bool { - if m == nil { - return false - } - if m.Kind == yaml.MappingNode { - for i := 0; i < len(m.Content); i += 2 { - itemKey := m.Content[i].Value - if key == itemKey { - return true - } - } - } - return false -} +var MapHasKey = compiler.MapHasKey // MapValueForKey gets the value of a map value for a specified key. -func MapValueForKey(m *yaml.Node, key string) *yaml.Node { - if m == nil { - return nil - } - if m.Kind == yaml.MappingNode { - for i := 0; i < len(m.Content); i += 2 { - itemKey := m.Content[i].Value - if key == itemKey { - return m.Content[i+1] - } - } - } - return nil -} +var MapValueForKey = compiler.MapValueForKey // ConvertInterfaceArrayToStringArray converts an array of interfaces to an array of strings, if possible. -func ConvertInterfaceArrayToStringArray(interfaceArray []interface{}) []string { - stringArray := make([]string, 0) - for _, item := range interfaceArray { - v, ok := item.(string) - if ok { - stringArray = append(stringArray, v) - } - } - return stringArray -} +var ConvertInterfaceArrayToStringArray = compiler.ConvertInterfaceArrayToStringArray // SequenceNodeForNode returns a node if it is a SequenceNode. -func SequenceNodeForNode(node *yaml.Node) (*yaml.Node, bool) { - if node.Kind != yaml.SequenceNode { - return nil, false - } - return node, true -} +var SequenceNodeForNode = compiler.SequenceNodeForNode // BoolForScalarNode returns the bool value of a node. -func BoolForScalarNode(node *yaml.Node) (bool, bool) { - if node == nil { - return false, false - } - if node.Kind == yaml.DocumentNode { - return BoolForScalarNode(node.Content[0]) - } - if node.Kind != yaml.ScalarNode { - return false, false - } - if node.Tag != "!!bool" { - return false, false - } - v, err := strconv.ParseBool(node.Value) - if err != nil { - return false, false - } - return v, true -} +var BoolForScalarNode = compiler.BoolForScalarNode // IntForScalarNode returns the integer value of a node. -func IntForScalarNode(node *yaml.Node) (int64, bool) { - if node == nil { - return 0, false - } - if node.Kind == yaml.DocumentNode { - return IntForScalarNode(node.Content[0]) - } - if node.Kind != yaml.ScalarNode { - return 0, false - } - if node.Tag != "!!int" { - return 0, false - } - v, err := strconv.ParseInt(node.Value, 10, 64) - if err != nil { - return 0, false - } - return v, true -} +var IntForScalarNode = compiler.IntForScalarNode // FloatForScalarNode returns the float value of a node. 
-func FloatForScalarNode(node *yaml.Node) (float64, bool) { - if node == nil { - return 0.0, false - } - if node.Kind == yaml.DocumentNode { - return FloatForScalarNode(node.Content[0]) - } - if node.Kind != yaml.ScalarNode { - return 0.0, false - } - if (node.Tag != "!!int") && (node.Tag != "!!float") { - return 0.0, false - } - v, err := strconv.ParseFloat(node.Value, 64) - if err != nil { - return 0.0, false - } - return v, true -} +var FloatForScalarNode = compiler.FloatForScalarNode // StringForScalarNode returns the string value of a node. -func StringForScalarNode(node *yaml.Node) (string, bool) { - if node == nil { - return "", false - } - if node.Kind == yaml.DocumentNode { - return StringForScalarNode(node.Content[0]) - } - switch node.Kind { - case yaml.ScalarNode: - switch node.Tag { - case "!!int": - return node.Value, true - case "!!str": - return node.Value, true - case "!!timestamp": - return node.Value, true - case "!!null": - return "", true - default: - return "", false - } - default: - return "", false - } -} +var StringForScalarNode = compiler.StringForScalarNode // StringArrayForSequenceNode converts a sequence node to an array of strings, if possible. -func StringArrayForSequenceNode(node *yaml.Node) []string { - stringArray := make([]string, 0) - for _, item := range node.Content { - v, ok := StringForScalarNode(item) - if ok { - stringArray = append(stringArray, v) - } - } - return stringArray -} +var StringArrayForSequenceNode = compiler.StringArrayForSequenceNode // MissingKeysInMap identifies which keys from a list of required keys are not in a map. -func MissingKeysInMap(m *yaml.Node, requiredKeys []string) []string { - missingKeys := make([]string, 0) - for _, k := range requiredKeys { - if !MapHasKey(m, k) { - missingKeys = append(missingKeys, k) - } - } - return missingKeys -} +var MissingKeysInMap = compiler.MissingKeysInMap // InvalidKeysInMap returns keys in a map that don't match a list of allowed keys and patterns. -func InvalidKeysInMap(m *yaml.Node, allowedKeys []string, allowedPatterns []*regexp.Regexp) []string { - invalidKeys := make([]string, 0) - if m == nil || m.Kind != yaml.MappingNode { - return invalidKeys - } - for i := 0; i < len(m.Content); i += 2 { - key := m.Content[i].Value - found := false - // does the key match an allowed key? - for _, allowedKey := range allowedKeys { - if key == allowedKey { - found = true - break - } - } - if !found { - // does the key match an allowed pattern? - for _, allowedPattern := range allowedPatterns { - if allowedPattern.MatchString(key) { - found = true - break - } - } - if !found { - invalidKeys = append(invalidKeys, key) - } - } - } - return invalidKeys -} +var InvalidKeysInMap = compiler.InvalidKeysInMap // NewNullNode creates a new Null node. -func NewNullNode() *yaml.Node { - node := &yaml.Node{ - Kind: yaml.ScalarNode, - Tag: "!!null", - } - return node -} +var NewNullNode = compiler.NewNullNode // NewMappingNode creates a new Mapping node. -func NewMappingNode() *yaml.Node { - return &yaml.Node{ - Kind: yaml.MappingNode, - Content: make([]*yaml.Node, 0), - } -} +var NewMappingNode = compiler.NewMappingNode // NewSequenceNode creates a new Sequence node. -func NewSequenceNode() *yaml.Node { - node := &yaml.Node{ - Kind: yaml.SequenceNode, - Content: make([]*yaml.Node, 0), - } - return node -} +var NewSequenceNode = compiler.NewSequenceNode // NewScalarNodeForString creates a new node to hold a string. 
-func NewScalarNodeForString(s string) *yaml.Node { - return &yaml.Node{ - Kind: yaml.ScalarNode, - Tag: "!!str", - Value: s, - } -} +var NewScalarNodeForString = compiler.NewScalarNodeForString // NewSequenceNodeForStringArray creates a new node to hold an array of strings. -func NewSequenceNodeForStringArray(strings []string) *yaml.Node { - node := &yaml.Node{ - Kind: yaml.SequenceNode, - Content: make([]*yaml.Node, 0), - } - for _, s := range strings { - node.Content = append(node.Content, NewScalarNodeForString(s)) - } - return node -} +var NewSequenceNodeForStringArray = compiler.NewSequenceNodeForStringArray // NewScalarNodeForBool creates a new node to hold a bool. -func NewScalarNodeForBool(b bool) *yaml.Node { - return &yaml.Node{ - Kind: yaml.ScalarNode, - Tag: "!!bool", - Value: fmt.Sprintf("%t", b), - } -} +var NewScalarNodeForBool = compiler.NewScalarNodeForBool // NewScalarNodeForFloat creates a new node to hold a float. -func NewScalarNodeForFloat(f float64) *yaml.Node { - return &yaml.Node{ - Kind: yaml.ScalarNode, - Tag: "!!float", - Value: fmt.Sprintf("%g", f), - } -} +var NewScalarNodeForFloat = compiler.NewScalarNodeForFloat // NewScalarNodeForInt creates a new node to hold an integer. -func NewScalarNodeForInt(i int64) *yaml.Node { - return &yaml.Node{ - Kind: yaml.ScalarNode, - Tag: "!!int", - Value: fmt.Sprintf("%d", i), - } -} +var NewScalarNodeForInt = compiler.NewScalarNodeForInt // PluralProperties returns the string "properties" pluralized. -func PluralProperties(count int) string { - if count == 1 { - return "property" - } - return "properties" -} +var PluralProperties = compiler.PluralProperties // StringArrayContainsValue returns true if a string array contains a specified value. -func StringArrayContainsValue(array []string, value string) bool { - for _, item := range array { - if item == value { - return true - } - } - return false -} +var StringArrayContainsValue = compiler.StringArrayContainsValue // StringArrayContainsValues returns true if a string array contains all of a list of specified values. -func StringArrayContainsValues(array []string, values []string) bool { - for _, value := range values { - if !StringArrayContainsValue(array, value) { - return false - } - } - return true -} +var StringArrayContainsValues = compiler.StringArrayContainsValues // StringValue returns the string value of an item. -func StringValue(item interface{}) (value string, ok bool) { - value, ok = item.(string) - if ok { - return value, ok - } - intValue, ok := item.(int) - if ok { - return strconv.Itoa(intValue), true - } - return "", false -} +var StringValue = compiler.StringValue // Description returns a human-readable represention of an item. -func Description(item interface{}) string { - value, ok := item.(*yaml.Node) - if ok { - return jsonschema.Render(value) - } - return fmt.Sprintf("%+v", item) -} +var Description = compiler.Description // Display returns a description of a node for use in error messages. 
-func Display(node *yaml.Node) string { - switch node.Kind { - case yaml.ScalarNode: - switch node.Tag { - case "!!str": - return fmt.Sprintf("%s (string)", node.Value) - } - } - return fmt.Sprintf("%+v (%T)", node, node) -} +var Display = compiler.Display // Marshal creates a yaml version of a structure in our preferred style -func Marshal(in *yaml.Node) []byte { - clearStyle(in) - //bytes, _ := yaml.Marshal(&yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{in}}) - bytes, _ := yaml.Marshal(in) - - return bytes -} - -func clearStyle(node *yaml.Node) { - node.Style = 0 - for _, c := range node.Content { - clearStyle(c) - } -} +var Marshal = compiler.Marshal diff --git a/vendor/github.com/google/gnostic/compiler/reader.go b/vendor/github.com/google/gnostic/compiler/reader.go index be0e8b40c..b4b7c2870 100644 --- a/vendor/github.com/google/gnostic/compiler/reader.go +++ b/vendor/github.com/google/gnostic/compiler/reader.go @@ -15,293 +15,47 @@ package compiler import ( - "fmt" - "io/ioutil" - "log" - "net/http" - "net/url" - "path/filepath" - "strings" - "sync" - - yaml "gopkg.in/yaml.v3" + "github.com/google/gnostic-models/compiler" ) -var verboseReader = false - -var fileCache map[string][]byte -var infoCache map[string]*yaml.Node - -var fileCacheEnable = true -var infoCacheEnable = true - -// These locks are used to synchronize accesses to the fileCache and infoCache -// maps (above). They are global state and can throw thread-related errors -// when modified from separate goroutines. The general strategy is to protect -// all public functions in this file with mutex Lock() calls. As a result, to -// avoid deadlock, these public functions should not call other public -// functions, so some public functions have private equivalents. -// In the future, we might consider replacing the maps with sync.Map and -// eliminating these mutexes. -var fileCacheMutex sync.Mutex -var infoCacheMutex sync.Mutex - -func initializeFileCache() { - if fileCache == nil { - fileCache = make(map[string][]byte, 0) - } -} - -func initializeInfoCache() { - if infoCache == nil { - infoCache = make(map[string]*yaml.Node, 0) - } -} - // EnableFileCache turns on file caching. -func EnableFileCache() { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - fileCacheEnable = true -} +var EnableFileCache = compiler.EnableFileCache // EnableInfoCache turns on parsed info caching. -func EnableInfoCache() { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - infoCacheEnable = true -} +var EnableInfoCache = compiler.EnableInfoCache // DisableFileCache turns off file caching. -func DisableFileCache() { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - fileCacheEnable = false -} +var DisableFileCache = compiler.DisableFileCache // DisableInfoCache turns off parsed info caching. -func DisableInfoCache() { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - infoCacheEnable = false -} +var DisableInfoCache = compiler.DisableInfoCache // RemoveFromFileCache removes an entry from the file cache. -func RemoveFromFileCache(fileurl string) { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - if !fileCacheEnable { - return - } - initializeFileCache() - delete(fileCache, fileurl) -} +var RemoveFromFileCache = compiler.RemoveFromFileCache // RemoveFromInfoCache removes an entry from the info cache. 
-func RemoveFromInfoCache(filename string) { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - if !infoCacheEnable { - return - } - initializeInfoCache() - delete(infoCache, filename) -} +var RemoveFromInfoCache = compiler.RemoveFromInfoCache // GetInfoCache returns the info cache map. -func GetInfoCache() map[string]*yaml.Node { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - if infoCache == nil { - initializeInfoCache() - } - return infoCache -} +var GetInfoCache = compiler.GetInfoCache // ClearFileCache clears the file cache. -func ClearFileCache() { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - fileCache = make(map[string][]byte, 0) -} +var ClearFileCache = compiler.ClearFileCache // ClearInfoCache clears the info cache. -func ClearInfoCache() { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - infoCache = make(map[string]*yaml.Node) -} +var ClearInfoCache = compiler.ClearInfoCache // ClearCaches clears all caches. -func ClearCaches() { - ClearFileCache() - ClearInfoCache() -} +var ClearCaches = compiler.ClearCaches // FetchFile gets a specified file from the local filesystem or a remote location. -func FetchFile(fileurl string) ([]byte, error) { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - return fetchFile(fileurl) -} - -func fetchFile(fileurl string) ([]byte, error) { - var bytes []byte - initializeFileCache() - if fileCacheEnable { - bytes, ok := fileCache[fileurl] - if ok { - if verboseReader { - log.Printf("Cache hit %s", fileurl) - } - return bytes, nil - } - if verboseReader { - log.Printf("Fetching %s", fileurl) - } - } - response, err := http.Get(fileurl) - if err != nil { - return nil, err - } - defer response.Body.Close() - if response.StatusCode != 200 { - return nil, fmt.Errorf("Error downloading %s: %s", fileurl, response.Status) - } - bytes, err = ioutil.ReadAll(response.Body) - if fileCacheEnable && err == nil { - fileCache[fileurl] = bytes - } - return bytes, err -} +var FetchFile = compiler.FetchFile // ReadBytesForFile reads the bytes of a file. -func ReadBytesForFile(filename string) ([]byte, error) { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - return readBytesForFile(filename) -} - -func readBytesForFile(filename string) ([]byte, error) { - // is the filename a url? - fileurl, _ := url.Parse(filename) - if fileurl.Scheme != "" { - // yes, fetch it - bytes, err := fetchFile(filename) - if err != nil { - return nil, err - } - return bytes, nil - } - // no, it's a local filename - bytes, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - return bytes, nil -} +var ReadBytesForFile = compiler.ReadBytesForFile // ReadInfoFromBytes unmarshals a file as a *yaml.Node. 
-func ReadInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - return readInfoFromBytes(filename, bytes) -} - -func readInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { - initializeInfoCache() - if infoCacheEnable { - cachedInfo, ok := infoCache[filename] - if ok { - if verboseReader { - log.Printf("Cache hit info for file %s", filename) - } - return cachedInfo, nil - } - if verboseReader { - log.Printf("Reading info for file %s", filename) - } - } - var info yaml.Node - err := yaml.Unmarshal(bytes, &info) - if err != nil { - return nil, err - } - if infoCacheEnable && len(filename) > 0 { - infoCache[filename] = &info - } - return &info, nil -} +var ReadInfoFromBytes = compiler.ReadInfoFromBytes // ReadInfoForRef reads a file and return the fragment needed to resolve a $ref. -func ReadInfoForRef(basefile string, ref string) (*yaml.Node, error) { - fileCacheMutex.Lock() - defer fileCacheMutex.Unlock() - infoCacheMutex.Lock() - defer infoCacheMutex.Unlock() - initializeInfoCache() - if infoCacheEnable { - info, ok := infoCache[ref] - if ok { - if verboseReader { - log.Printf("Cache hit for ref %s#%s", basefile, ref) - } - return info, nil - } - if verboseReader { - log.Printf("Reading info for ref %s#%s", basefile, ref) - } - } - basedir, _ := filepath.Split(basefile) - parts := strings.Split(ref, "#") - var filename string - if parts[0] != "" { - filename = parts[0] - if _, err := url.ParseRequestURI(parts[0]); err != nil { - // It is not an URL, so the file is local - filename = basedir + parts[0] - } - } else { - filename = basefile - } - bytes, err := readBytesForFile(filename) - if err != nil { - return nil, err - } - info, err := readInfoFromBytes(filename, bytes) - if info != nil && info.Kind == yaml.DocumentNode { - info = info.Content[0] - } - if err != nil { - log.Printf("File error: %v\n", err) - } else { - if info == nil { - return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) - } - if len(parts) > 1 { - path := strings.Split(parts[1], "/") - for i, key := range path { - if i > 0 { - m := info - if true { - found := false - for i := 0; i < len(m.Content); i += 2 { - if m.Content[i].Value == key { - info = m.Content[i+1] - found = true - } - } - if !found { - infoCache[ref] = nil - return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) - } - } - } - } - } - } - if infoCacheEnable { - infoCache[ref] = info - } - return info, nil -} +var ReadInfoForRef = compiler.ReadInfoForRef diff --git a/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.go b/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.go index 28c2777d5..0dc5ed529 100644 --- a/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.go +++ b/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.go @@ -5459,3365 +5459,6 @@ func NewXml(in *yaml.Node, context *compiler.Context) (*Xml, error) { return x, compiler.NewErrorGroupOrNil(errors) } -// ResolveReferences resolves references found inside AdditionalPropertiesItem objects. -func (m *AdditionalPropertiesItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*AdditionalPropertiesItem_Schema) - if ok { - _, err := p.Schema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Any objects. 
-func (m *Any) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ApiKeySecurity objects. -func (m *ApiKeySecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside BasicAuthenticationSecurity objects. -func (m *BasicAuthenticationSecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside BodyParameter objects. -func (m *BodyParameter) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schema != nil { - _, err := m.Schema.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Contact objects. -func (m *Contact) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Default objects. -func (m *Default) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Definitions objects. -func (m *Definitions) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Document objects. 
-func (m *Document) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Info != nil { - _, err := m.Info.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Paths != nil { - _, err := m.Paths.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Definitions != nil { - _, err := m.Definitions.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Parameters != nil { - _, err := m.Parameters.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Responses != nil { - _, err := m.Responses.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Security { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.SecurityDefinitions != nil { - _, err := m.SecurityDefinitions.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Tags { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Examples objects. -func (m *Examples) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ExternalDocs objects. -func (m *ExternalDocs) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside FileSchema objects. -func (m *FileSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside FormDataParameterSubSchema objects. 
-func (m *FormDataParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Header objects. -func (m *Header) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside HeaderParameterSubSchema objects. -func (m *HeaderParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Headers objects. -func (m *Headers) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Info objects. -func (m *Info) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Contact != nil { - _, err := m.Contact.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.License != nil { - _, err := m.License.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ItemsItem objects. 
-func (m *ItemsItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.Schema { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside JsonReference objects. -func (m *JsonReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.XRef != "" { - info, err := compiler.ReadInfoForRef(root, m.XRef) - if err != nil { - return nil, err - } - if info != nil { - replacement, err := NewJsonReference(info, nil) - if err == nil { - *m = *replacement - return m.ResolveReferences(root) - } - } - return info, nil - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside License objects. -func (m *License) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedAny objects. -func (m *NamedAny) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedHeader objects. -func (m *NamedHeader) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedParameter objects. -func (m *NamedParameter) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedPathItem objects. -func (m *NamedPathItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedResponse objects. -func (m *NamedResponse) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedResponseValue objects. -func (m *NamedResponseValue) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedSchema objects. 
-func (m *NamedSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedSecurityDefinitionsItem objects. -func (m *NamedSecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedString objects. -func (m *NamedString) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedStringArray objects. -func (m *NamedStringArray) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NonBodyParameter objects. -func (m *NonBodyParameter) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*NonBodyParameter_HeaderParameterSubSchema) - if ok { - _, err := p.HeaderParameterSubSchema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*NonBodyParameter_FormDataParameterSubSchema) - if ok { - _, err := p.FormDataParameterSubSchema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*NonBodyParameter_QueryParameterSubSchema) - if ok { - _, err := p.QueryParameterSubSchema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*NonBodyParameter_PathParameterSubSchema) - if ok { - _, err := p.PathParameterSubSchema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Oauth2AccessCodeSecurity objects. -func (m *Oauth2AccessCodeSecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Scopes != nil { - _, err := m.Scopes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Oauth2ApplicationSecurity objects. -func (m *Oauth2ApplicationSecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Scopes != nil { - _, err := m.Scopes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Oauth2ImplicitSecurity objects. 
-func (m *Oauth2ImplicitSecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Scopes != nil { - _, err := m.Scopes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Oauth2PasswordSecurity objects. -func (m *Oauth2PasswordSecurity) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Scopes != nil { - _, err := m.Scopes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Oauth2Scopes objects. -func (m *Oauth2Scopes) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Operation objects. -func (m *Operation) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Parameters { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.Responses != nil { - _, err := m.Responses.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Security { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Parameter objects. -func (m *Parameter) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*Parameter_BodyParameter) - if ok { - _, err := p.BodyParameter.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*Parameter_NonBodyParameter) - if ok { - _, err := p.NonBodyParameter.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ParameterDefinitions objects. -func (m *ParameterDefinitions) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ParametersItem objects. 
-func (m *ParametersItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*ParametersItem_Parameter) - if ok { - _, err := p.Parameter.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*ParametersItem_JsonReference) - if ok { - info, err := p.JsonReference.ResolveReferences(root) - if err != nil { - return nil, err - } else if info != nil { - n, err := NewParametersItem(info, nil) - if err != nil { - return nil, err - } else if n != nil { - *m = *n - return nil, nil - } - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside PathItem objects. -func (m *PathItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.XRef != "" { - info, err := compiler.ReadInfoForRef(root, m.XRef) - if err != nil { - return nil, err - } - if info != nil { - replacement, err := NewPathItem(info, nil) - if err == nil { - *m = *replacement - return m.ResolveReferences(root) - } - } - return info, nil - } - if m.Get != nil { - _, err := m.Get.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Put != nil { - _, err := m.Put.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Post != nil { - _, err := m.Post.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Delete != nil { - _, err := m.Delete.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Options != nil { - _, err := m.Options.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Head != nil { - _, err := m.Head.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Patch != nil { - _, err := m.Patch.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Parameters { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside PathParameterSubSchema objects. -func (m *PathParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Paths objects. 
-func (m *Paths) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.Path { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside PrimitivesItems objects. -func (m *PrimitivesItems) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Properties objects. -func (m *Properties) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside QueryParameterSubSchema objects. -func (m *QueryParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Response objects. -func (m *Response) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schema != nil { - _, err := m.Schema.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Headers != nil { - _, err := m.Headers.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Examples != nil { - _, err := m.Examples.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ResponseDefinitions objects. 
-func (m *ResponseDefinitions) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ResponseValue objects. -func (m *ResponseValue) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*ResponseValue_Response) - if ok { - _, err := p.Response.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*ResponseValue_JsonReference) - if ok { - info, err := p.JsonReference.ResolveReferences(root) - if err != nil { - return nil, err - } else if info != nil { - n, err := NewResponseValue(info, nil) - if err != nil { - return nil, err - } else if n != nil { - *m = *n - return nil, nil - } - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Responses objects. -func (m *Responses) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.ResponseCode { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Schema objects. -func (m *Schema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.XRef != "" { - info, err := compiler.ReadInfoForRef(root, m.XRef) - if err != nil { - return nil, err - } - if info != nil { - replacement, err := NewSchema(info, nil) - if err == nil { - *m = *replacement - return m.ResolveReferences(root) - } - } - return info, nil - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.AdditionalProperties != nil { - _, err := m.AdditionalProperties.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Type != nil { - _, err := m.Type.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.AllOf { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.Properties != nil { - _, err := m.Properties.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Xml != nil { - _, err := m.Xml.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = 
append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SchemaItem objects. -func (m *SchemaItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*SchemaItem_Schema) - if ok { - _, err := p.Schema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SchemaItem_FileSchema) - if ok { - _, err := p.FileSchema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecurityDefinitions objects. -func (m *SecurityDefinitions) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecurityDefinitionsItem objects. -func (m *SecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_BasicAuthenticationSecurity) - if ok { - _, err := p.BasicAuthenticationSecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_ApiKeySecurity) - if ok { - _, err := p.ApiKeySecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ImplicitSecurity) - if ok { - _, err := p.Oauth2ImplicitSecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2PasswordSecurity) - if ok { - _, err := p.Oauth2PasswordSecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ApplicationSecurity) - if ok { - _, err := p.Oauth2ApplicationSecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) - if ok { - _, err := p.Oauth2AccessCodeSecurity.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecurityRequirement objects. -func (m *SecurityRequirement) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside StringArray objects. -func (m *StringArray) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Tag objects. 
-func (m *Tag) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside TypeItem objects. -func (m *TypeItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside VendorExtension objects. -func (m *VendorExtension) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Xml objects. -func (m *Xml) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.VendorExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. -func (m *AdditionalPropertiesItem) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // AdditionalPropertiesItem - // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetSchema() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { - return compiler.NewScalarNodeForBool(v1.Boolean) - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Any suitable for JSON or YAML export. -func (m *Any) ToRawInfo() *yaml.Node { - var err error - var node yaml.Node - err = yaml.Unmarshal([]byte(m.Yaml), &node) - if err == nil { - if node.Kind == yaml.DocumentNode { - return node.Content[0] - } - return &node - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of ApiKeySecurity suitable for JSON or YAML export. -func (m *ApiKeySecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of BasicAuthenticationSecurity suitable for JSON or YAML export. -func (m *BasicAuthenticationSecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of BodyParameter suitable for JSON or YAML export. -func (m *BodyParameter) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, m.Schema.ToRawInfo()) - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Contact suitable for JSON or YAML export. 
-func (m *Contact) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Url != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - } - if m.Email != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("email")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Email)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Default suitable for JSON or YAML export. -func (m *Default) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Definitions suitable for JSON or YAML export. -func (m *Definitions) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Document suitable for JSON or YAML export. -func (m *Document) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("swagger")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Swagger)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("info")) - info.Content = append(info.Content, m.Info.ToRawInfo()) - if m.Host != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("host")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Host)) - } - if m.BasePath != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("basePath")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.BasePath)) - } - if len(m.Schemes) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) - } - if len(m.Consumes) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) - } - if len(m.Produces) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("paths")) - info.Content = append(info.Content, m.Paths.ToRawInfo()) - if m.Definitions != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("definitions")) - info.Content = append(info.Content, m.Definitions.ToRawInfo()) - } - if m.Parameters != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, m.Parameters.ToRawInfo()) - } - if m.Responses != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) - info.Content = append(info.Content, m.Responses.ToRawInfo()) - } - if len(m.Security) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Security { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) - info.Content = append(info.Content, items) - } - if m.SecurityDefinitions != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("securityDefinitions")) - info.Content = append(info.Content, m.SecurityDefinitions.ToRawInfo()) - } - if len(m.Tags) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Tags { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) - info.Content = append(info.Content, items) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Examples suitable for JSON or YAML export. -func (m *Examples) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. -func (m *ExternalDocs) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of FileSchema suitable for JSON or YAML export. 
-func (m *FileSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Title != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if len(m.Required) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - if m.ReadOnly != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of FormDataParameterSubSchema suitable for JSON or YAML export. 
-func (m *FormDataParameterSubSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.In != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.AllowEmptyValue != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = 
append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Header suitable for JSON or YAML export. -func (m *Header) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of HeaderParameterSubSchema suitable for JSON or YAML export. 
-func (m *HeaderParameterSubSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.In != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, 
compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Headers suitable for JSON or YAML export. -func (m *Headers) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Info suitable for JSON or YAML export. -func (m *Info) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("version")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Version)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.TermsOfService != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("termsOfService")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TermsOfService)) - } - if m.Contact != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("contact")) - info.Content = append(info.Content, m.Contact.ToRawInfo()) - } - if m.License != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("license")) - info.Content = append(info.Content, m.License.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. 
-func (m *ItemsItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if len(m.Schema) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Schema { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, items) - } - return info -} - -// ToRawInfo returns a description of JsonReference suitable for JSON or YAML export. -func (m *JsonReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - return info -} - -// ToRawInfo returns a description of License suitable for JSON or YAML export. -func (m *License) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - if m.Url != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. -func (m *NamedAny) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Value != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) - info.Content = append(info.Content, m.Value.ToRawInfo()) - } - return info -} - -// ToRawInfo returns a description of NamedHeader suitable for JSON or YAML export. -func (m *NamedHeader) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export. 
-func (m *NamedParameter) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. -func (m *NamedPathItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedResponse suitable for JSON or YAML export. -func (m *NamedResponse) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedResponseValue suitable for JSON or YAML export. -func (m *NamedResponseValue) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:ResponseValue StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export. -func (m *NamedSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedSecurityDefinitionsItem suitable for JSON or YAML export. -func (m *NamedSecurityDefinitionsItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:SecurityDefinitionsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. 
-func (m *NamedString) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Value != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Value)) - } - return info -} - -// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. -func (m *NamedStringArray) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NonBodyParameter suitable for JSON or YAML export. -func (m *NonBodyParameter) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // NonBodyParameter - // {Name:headerParameterSubSchema Type:HeaderParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetHeaderParameterSubSchema() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:formDataParameterSubSchema Type:FormDataParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetFormDataParameterSubSchema() - if v1 != nil { - return v1.ToRawInfo() - } - // {Name:queryParameterSubSchema Type:QueryParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v2 := m.GetQueryParameterSubSchema() - if v2 != nil { - return v2.ToRawInfo() - } - // {Name:pathParameterSubSchema Type:PathParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v3 := m.GetPathParameterSubSchema() - if v3 != nil { - return v3.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Oauth2AccessCodeSecurity suitable for JSON or YAML export. -func (m *Oauth2AccessCodeSecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) - if m.Scopes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) - info.Content = append(info.Content, m.Scopes.ToRawInfo()) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Oauth2ApplicationSecurity suitable for JSON or YAML export. -func (m *Oauth2ApplicationSecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) - if m.Scopes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) - info.Content = append(info.Content, m.Scopes.ToRawInfo()) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Oauth2ImplicitSecurity suitable for JSON or YAML export. -func (m *Oauth2ImplicitSecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) - if m.Scopes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) - info.Content = append(info.Content, m.Scopes.ToRawInfo()) - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Oauth2PasswordSecurity suitable for JSON or YAML export. -func (m *Oauth2PasswordSecurity) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) - if m.Scopes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) - info.Content = append(info.Content, m.Scopes.ToRawInfo()) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Oauth2Scopes suitable for JSON or YAML export. -func (m *Oauth2Scopes) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Value)) - } - } - return info -} - -// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
-func (m *Operation) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if len(m.Tags) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Tags)) - } - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.OperationId != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) - } - if len(m.Produces) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) - } - if len(m.Consumes) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) - } - if len(m.Parameters) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Parameters { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, items) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) - info.Content = append(info.Content, m.Responses.ToRawInfo()) - if len(m.Schemes) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) - } - if m.Deprecated != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) - } - if len(m.Security) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Security { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) - info.Content = append(info.Content, items) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. 
-func (m *Parameter) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // Parameter - // {Name:bodyParameter Type:BodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetBodyParameter() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:nonBodyParameter Type:NonBodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetNonBodyParameter() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of ParameterDefinitions suitable for JSON or YAML export. -func (m *ParameterDefinitions) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ParametersItem suitable for JSON or YAML export. -func (m *ParametersItem) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // ParametersItem - // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetParameter() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetJsonReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. -func (m *PathItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.XRef != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) - } - if m.Get != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("get")) - info.Content = append(info.Content, m.Get.ToRawInfo()) - } - if m.Put != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("put")) - info.Content = append(info.Content, m.Put.ToRawInfo()) - } - if m.Post != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("post")) - info.Content = append(info.Content, m.Post.ToRawInfo()) - } - if m.Delete != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("delete")) - info.Content = append(info.Content, m.Delete.ToRawInfo()) - } - if m.Options != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("options")) - info.Content = append(info.Content, m.Options.ToRawInfo()) - } - if m.Head != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("head")) - info.Content = append(info.Content, m.Head.ToRawInfo()) - } - if m.Patch != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("patch")) - info.Content = append(info.Content, m.Patch.ToRawInfo()) - } - if len(m.Parameters) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Parameters { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, items) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, 
compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of PathParameterSubSchema suitable for JSON or YAML export. -func (m *PathParameterSubSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - if m.In != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - 
info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Paths suitable for JSON or YAML export. -func (m *Paths) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - if m.Path != nil { - for _, item := range m.Path { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of PrimitivesItems suitable for JSON or YAML export. 
-func (m *PrimitivesItems) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.VendorExtension != nil { - for _, 
item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Properties suitable for JSON or YAML export. -func (m *Properties) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of QueryParameterSubSchema suitable for JSON or YAML export. -func (m *QueryParameterSubSchema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.In != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.AllowEmptyValue != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Items != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, m.Items.ToRawInfo()) - } - if m.CollectionFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, 
compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Response suitable for JSON or YAML export. -func (m *Response) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - if m.Schema != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, m.Schema.ToRawInfo()) - } - if m.Headers != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) - info.Content = append(info.Content, m.Headers.ToRawInfo()) - } - if m.Examples != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) - info.Content = append(info.Content, m.Examples.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ResponseDefinitions suitable for JSON or YAML export. 
-func (m *ResponseDefinitions) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ResponseValue suitable for JSON or YAML export. -func (m *ResponseValue) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // ResponseValue - // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetResponse() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetJsonReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Responses suitable for JSON or YAML export. -func (m *Responses) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.ResponseCode != nil { - for _, item := range m.ResponseCode { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Schema suitable for JSON or YAML export. -func (m *Schema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.XRef != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.Title != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, 
compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if m.MaxProperties != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxProperties")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxProperties)) - } - if m.MinProperties != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minProperties")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinProperties)) - } - if len(m.Required) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.AdditionalProperties != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("additionalProperties")) - info.Content = append(info.Content, m.AdditionalProperties.ToRawInfo()) - } - if m.Type != nil { - if len(m.Type.Value) == 1 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type.Value[0])) - } else { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Type.Value)) - } - } - if m.Items != nil { - items := compiler.NewSequenceNode() - for _, item := range m.Items.Schema { - items.Content = append(items.Content, item.ToRawInfo()) - } - if len(items.Content) == 1 { - items = items.Content[0] - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, items) - } - if len(m.AllOf) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.AllOf { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("allOf")) - info.Content = 
append(info.Content, items) - } - if m.Properties != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("properties")) - info.Content = append(info.Content, m.Properties.ToRawInfo()) - } - if m.Discriminator != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("discriminator")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Discriminator)) - } - if m.ReadOnly != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) - } - if m.Xml != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("xml")) - info.Content = append(info.Content, m.Xml.ToRawInfo()) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SchemaItem suitable for JSON or YAML export. -func (m *SchemaItem) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // SchemaItem - // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetSchema() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:fileSchema Type:FileSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetFileSchema() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of SecurityDefinitions suitable for JSON or YAML export. -func (m *SecurityDefinitions) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SecurityDefinitionsItem suitable for JSON or YAML export. 
-func (m *SecurityDefinitionsItem) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // SecurityDefinitionsItem - // {Name:basicAuthenticationSecurity Type:BasicAuthenticationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetBasicAuthenticationSecurity() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:apiKeySecurity Type:ApiKeySecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetApiKeySecurity() - if v1 != nil { - return v1.ToRawInfo() - } - // {Name:oauth2ImplicitSecurity Type:Oauth2ImplicitSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v2 := m.GetOauth2ImplicitSecurity() - if v2 != nil { - return v2.ToRawInfo() - } - // {Name:oauth2PasswordSecurity Type:Oauth2PasswordSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v3 := m.GetOauth2PasswordSecurity() - if v3 != nil { - return v3.ToRawInfo() - } - // {Name:oauth2ApplicationSecurity Type:Oauth2ApplicationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v4 := m.GetOauth2ApplicationSecurity() - if v4 != nil { - return v4.ToRawInfo() - } - // {Name:oauth2AccessCodeSecurity Type:Oauth2AccessCodeSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v5 := m.GetOauth2AccessCodeSecurity() - if v5 != nil { - return v5.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. -func (m *SecurityRequirement) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. -func (m *StringArray) ToRawInfo() *yaml.Node { - return compiler.NewSequenceNodeForStringArray(m.Value) -} - -// ToRawInfo returns a description of Tag suitable for JSON or YAML export. -func (m *Tag) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.VendorExtension != nil { - for _, item := range m.VendorExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of TypeItem suitable for JSON or YAML export. 
-func (m *TypeItem) ToRawInfo() *yaml.Node {
-	info := compiler.NewMappingNode()
-	if m == nil {
-		return info
-	}
-	if len(m.Value) != 0 {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("value"))
-		info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Value))
-	}
-	return info
-}
-
-// ToRawInfo returns a description of VendorExtension suitable for JSON or YAML export.
-func (m *VendorExtension) ToRawInfo() *yaml.Node {
-	info := compiler.NewMappingNode()
-	if m == nil {
-		return info
-	}
-	if m.AdditionalProperties != nil {
-		for _, item := range m.AdditionalProperties {
-			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
-			info.Content = append(info.Content, item.Value.ToRawInfo())
-		}
-	}
-	return info
-}
-
-// ToRawInfo returns a description of Xml suitable for JSON or YAML export.
-func (m *Xml) ToRawInfo() *yaml.Node {
-	info := compiler.NewMappingNode()
-	if m == nil {
-		return info
-	}
-	if m.Name != "" {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("name"))
-		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name))
-	}
-	if m.Namespace != "" {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("namespace"))
-		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Namespace))
-	}
-	if m.Prefix != "" {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("prefix"))
-		info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Prefix))
-	}
-	if m.Attribute != false {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("attribute"))
-		info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Attribute))
-	}
-	if m.Wrapped != false {
-		info.Content = append(info.Content, compiler.NewScalarNodeForString("wrapped"))
-		info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Wrapped))
-	}
-	if m.VendorExtension != nil {
-		for _, item := range m.VendorExtension {
-			info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name))
-			info.Content = append(info.Content, item.Value.ToRawInfo())
-		}
-	}
-	return info
-}
-
 var (
 	pattern0 = regexp.MustCompile("^x-")
 	pattern1 = regexp.MustCompile("^/")
diff --git a/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pbalias.go b/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pbalias.go
new file mode 100644
index 000000000..05626db6f
--- /dev/null
+++ b/vendor/github.com/google/gnostic/openapiv2/OpenAPIv2.pbalias.go
@@ -0,0 +1,102 @@
+// Copyright 2020 Google LLC. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openapi_v2
+
+import (
+	openapiv2 "github.com/google/gnostic-models/openapiv2"
+)
+
+type AdditionalPropertiesItem = openapiv2.AdditionalPropertiesItem
+type AdditionalPropertiesItem_Schema = openapiv2.AdditionalPropertiesItem_Schema
+type AdditionalPropertiesItem_Boolean = openapiv2.AdditionalPropertiesItem_Boolean
+type Any = openapiv2.Any
+type ApiKeySecurity = openapiv2.ApiKeySecurity
+type BasicAuthenticationSecurity = openapiv2.BasicAuthenticationSecurity
+type BodyParameter = openapiv2.BodyParameter
+type Contact = openapiv2.Contact
+type Default = openapiv2.Default
+type Definitions = openapiv2.Definitions
+type Document = openapiv2.Document
+type Examples = openapiv2.Examples
+type ExternalDocs = openapiv2.ExternalDocs
+type FileSchema = openapiv2.FileSchema
+type FormDataParameterSubSchema = openapiv2.FormDataParameterSubSchema
+type Header = openapiv2.Header
+type HeaderParameterSubSchema = openapiv2.HeaderParameterSubSchema
+type Headers = openapiv2.Headers
+type Info = openapiv2.Info
+type ItemsItem = openapiv2.ItemsItem
+type JsonReference = openapiv2.JsonReference
+type License = openapiv2.License
+type NamedAny = openapiv2.NamedAny
+type NamedHeader = openapiv2.NamedHeader
+type NamedParameter = openapiv2.NamedParameter
+type NamedPathItem = openapiv2.NamedPathItem
+type NamedResponse = openapiv2.NamedResponse
+type NamedResponseValue = openapiv2.NamedResponseValue
+type NamedSchema = openapiv2.NamedSchema
+type NamedSecurityDefinitionsItem = openapiv2.NamedSecurityDefinitionsItem
+type NamedString = openapiv2.NamedString
+type NamedStringArray = openapiv2.NamedStringArray
+type NonBodyParameter = openapiv2.NonBodyParameter
+type NonBodyParameter_HeaderParameterSubSchema = openapiv2.NonBodyParameter_HeaderParameterSubSchema
+type NonBodyParameter_FormDataParameterSubSchema = openapiv2.NonBodyParameter_FormDataParameterSubSchema
+type NonBodyParameter_QueryParameterSubSchema = openapiv2.NonBodyParameter_QueryParameterSubSchema
+type NonBodyParameter_PathParameterSubSchema = openapiv2.NonBodyParameter_PathParameterSubSchema
+type Oauth2AccessCodeSecurity = openapiv2.Oauth2AccessCodeSecurity
+type Oauth2ApplicationSecurity = openapiv2.Oauth2ApplicationSecurity
+type Oauth2ImplicitSecurity = openapiv2.Oauth2ImplicitSecurity
+type Oauth2PasswordSecurity = openapiv2.Oauth2PasswordSecurity
+type Oauth2Scopes = openapiv2.Oauth2Scopes
+type Operation = openapiv2.Operation
+type Parameter = openapiv2.Parameter
+type Parameter_BodyParameter = openapiv2.Parameter_BodyParameter
+type Parameter_NonBodyParameter = openapiv2.Parameter_NonBodyParameter
+type ParameterDefinitions = openapiv2.ParameterDefinitions
+type ParametersItem = openapiv2.ParametersItem
+type ParametersItem_Parameter = openapiv2.ParametersItem_Parameter
+type ParametersItem_JsonReference = openapiv2.ParametersItem_JsonReference
+type PathItem = openapiv2.PathItem
+type PathParameterSubSchema = openapiv2.PathParameterSubSchema
+type Paths = openapiv2.Paths
+type PrimitivesItems = openapiv2.PrimitivesItems
+type Properties = openapiv2.Properties
+type QueryParameterSubSchema = openapiv2.QueryParameterSubSchema
+type Response = openapiv2.Response
+type ResponseDefinitions = openapiv2.ResponseDefinitions
+type ResponseValue = openapiv2.ResponseValue
+type ResponseValue_Response = openapiv2.ResponseValue_Response
+type ResponseValue_JsonReference = openapiv2.ResponseValue_JsonReference
+type Responses = openapiv2.Responses
+type Schema = openapiv2.Schema
+type SchemaItem = openapiv2.SchemaItem
+type SchemaItem_Schema = openapiv2.SchemaItem_Schema
+type SchemaItem_FileSchema = openapiv2.SchemaItem_FileSchema
+type SecurityDefinitions = openapiv2.SecurityDefinitions
+type SecurityDefinitionsItem = openapiv2.SecurityDefinitionsItem
+type SecurityDefinitionsItem_BasicAuthenticationSecurity = openapiv2.SecurityDefinitionsItem_BasicAuthenticationSecurity
+type SecurityDefinitionsItem_ApiKeySecurity = openapiv2.SecurityDefinitionsItem_ApiKeySecurity
+type SecurityDefinitionsItem_Oauth2ImplicitSecurity = openapiv2.SecurityDefinitionsItem_Oauth2ImplicitSecurity
+type SecurityDefinitionsItem_Oauth2PasswordSecurity = openapiv2.SecurityDefinitionsItem_Oauth2PasswordSecurity
+type SecurityDefinitionsItem_Oauth2ApplicationSecurity = openapiv2.SecurityDefinitionsItem_Oauth2ApplicationSecurity
+type SecurityDefinitionsItem_Oauth2AccessCodeSecurity = openapiv2.SecurityDefinitionsItem_Oauth2AccessCodeSecurity
+type SecurityRequirement = openapiv2.SecurityRequirement
+type StringArray = openapiv2.StringArray
+type Tag = openapiv2.Tag
+type TypeItem = openapiv2.TypeItem
+type VendorExtension = openapiv2.VendorExtension
+type Xml = openapiv2.Xml
+
+var File_openapiv2_OpenAPIv2_proto = openapiv2.File_openapiv2_OpenAPIv2_proto
diff --git a/vendor/github.com/google/gnostic/openapiv2/document.go b/vendor/github.com/google/gnostic/openapiv2/document.go
index 0021ae871..dd43221be 100644
--- a/vendor/github.com/google/gnostic/openapiv2/document.go
+++ b/vendor/github.com/google/gnostic/openapiv2/document.go
@@ -15,7 +15,7 @@
 package openapi_v2
 
 import (
-	"gopkg.in/yaml.v3"
+	"errors"
 
 	"github.com/google/gnostic/compiler"
 )
@@ -26,17 +26,11 @@ func ParseDocument(b []byte) (*Document, error) {
 	if err != nil {
 		return nil, err
 	}
-	root := info.Content[0]
-	return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil))
-}
 
-// YAMLValue produces a serialized YAML representation of the document.
-func (d *Document) YAMLValue(comment string) ([]byte, error) {
-	rawInfo := d.ToRawInfo()
-	rawInfo = &yaml.Node{
-		Kind: yaml.DocumentNode,
-		Content: []*yaml.Node{rawInfo},
-		HeadComment: comment,
+	if len(info.Content) < 1 {
+		return nil, errors.New("document has no content")
 	}
-	return yaml.Marshal(rawInfo)
+
+	root := info.Content[0]
+	return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil))
 }
diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go b/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go
index d54a84db7..2d5092fe8 100644
--- a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go
+++ b/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go
@@ -5028,3608 +5028,6 @@ func NewXml(in *yaml.Node, context *compiler.Context) (*Xml, error) {
 	return x, compiler.NewErrorGroupOrNil(errors)
 }
 
-// ResolveReferences resolves references found inside AdditionalPropertiesItem objects.
-func (m *AdditionalPropertiesItem) ResolveReferences(root string) (*yaml.Node, error) {
-	errors := make([]error, 0)
-	{
-		p, ok := m.Oneof.(*AdditionalPropertiesItem_SchemaOrReference)
-		if ok {
-			_, err := p.SchemaOrReference.ResolveReferences(root)
-			if err != nil {
-				return nil, err
-			}
-		}
-	}
-	return nil, compiler.NewErrorGroupOrNil(errors)
-}
-
-// ResolveReferences resolves references found inside Any objects.
-func (m *Any) ResolveReferences(root string) (*yaml.Node, error) {
-	errors := make([]error, 0)
-	return nil, compiler.NewErrorGroupOrNil(errors)
-}
-
-// ResolveReferences resolves references found inside AnyOrExpression objects.
-func (m *AnyOrExpression) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*AnyOrExpression_Any) - if ok { - _, err := p.Any.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*AnyOrExpression_Expression) - if ok { - _, err := p.Expression.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Callback objects. -func (m *Callback) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.Path { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside CallbackOrReference objects. -func (m *CallbackOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*CallbackOrReference_Callback) - if ok { - _, err := p.Callback.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*CallbackOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside CallbacksOrReferences objects. -func (m *CallbacksOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Components objects. 
-func (m *Components) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schemas != nil { - _, err := m.Schemas.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Responses != nil { - _, err := m.Responses.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Parameters != nil { - _, err := m.Parameters.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Examples != nil { - _, err := m.Examples.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.RequestBodies != nil { - _, err := m.RequestBodies.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Headers != nil { - _, err := m.Headers.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.SecuritySchemes != nil { - _, err := m.SecuritySchemes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Links != nil { - _, err := m.Links.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Callbacks != nil { - _, err := m.Callbacks.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Contact objects. -func (m *Contact) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside DefaultType objects. -func (m *DefaultType) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Discriminator objects. -func (m *Discriminator) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Mapping != nil { - _, err := m.Mapping.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Document objects. 
-func (m *Document) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Info != nil { - _, err := m.Info.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Servers { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.Paths != nil { - _, err := m.Paths.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Components != nil { - _, err := m.Components.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Security { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.Tags { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Encoding objects. -func (m *Encoding) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Headers != nil { - _, err := m.Headers.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Encodings objects. -func (m *Encodings) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Example objects. -func (m *Example) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ExampleOrReference objects. -func (m *ExampleOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*ExampleOrReference_Example) - if ok { - _, err := p.Example.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*ExampleOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ExamplesOrReferences objects. 
-func (m *ExamplesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Expression objects. -func (m *Expression) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ExternalDocs objects. -func (m *ExternalDocs) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Header objects. -func (m *Header) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schema != nil { - _, err := m.Schema.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Examples != nil { - _, err := m.Examples.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Content != nil { - _, err := m.Content.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside HeaderOrReference objects. -func (m *HeaderOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*HeaderOrReference_Header) - if ok { - _, err := p.Header.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*HeaderOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside HeadersOrReferences objects. -func (m *HeadersOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Info objects. 
-func (m *Info) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Contact != nil { - _, err := m.Contact.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.License != nil { - _, err := m.License.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ItemsItem objects. -func (m *ItemsItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SchemaOrReference { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside License objects. -func (m *License) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Link objects. -func (m *Link) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Parameters != nil { - _, err := m.Parameters.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.RequestBody != nil { - _, err := m.RequestBody.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Server != nil { - _, err := m.Server.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside LinkOrReference objects. -func (m *LinkOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*LinkOrReference_Link) - if ok { - _, err := p.Link.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*LinkOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside LinksOrReferences objects. -func (m *LinksOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside MediaType objects. 
-func (m *MediaType) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schema != nil { - _, err := m.Schema.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Examples != nil { - _, err := m.Examples.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Encoding != nil { - _, err := m.Encoding.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside MediaTypes objects. -func (m *MediaTypes) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedAny objects. -func (m *NamedAny) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedCallbackOrReference objects. -func (m *NamedCallbackOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedEncoding objects. -func (m *NamedEncoding) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedExampleOrReference objects. -func (m *NamedExampleOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedHeaderOrReference objects. -func (m *NamedHeaderOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedLinkOrReference objects. 
-func (m *NamedLinkOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedMediaType objects. -func (m *NamedMediaType) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedParameterOrReference objects. -func (m *NamedParameterOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedPathItem objects. -func (m *NamedPathItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedRequestBodyOrReference objects. -func (m *NamedRequestBodyOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedResponseOrReference objects. -func (m *NamedResponseOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedSchemaOrReference objects. -func (m *NamedSchemaOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedSecuritySchemeOrReference objects. -func (m *NamedSecuritySchemeOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedServerVariable objects. -func (m *NamedServerVariable) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedString objects. 
-func (m *NamedString) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside NamedStringArray objects. -func (m *NamedStringArray) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Value != nil { - _, err := m.Value.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside OauthFlow objects. -func (m *OauthFlow) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Scopes != nil { - _, err := m.Scopes.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside OauthFlows objects. -func (m *OauthFlows) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Implicit != nil { - _, err := m.Implicit.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Password != nil { - _, err := m.Password.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.ClientCredentials != nil { - _, err := m.ClientCredentials.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.AuthorizationCode != nil { - _, err := m.AuthorizationCode.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Object objects. -func (m *Object) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Operation objects. 
-func (m *Operation) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Parameters { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.RequestBody != nil { - _, err := m.RequestBody.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Responses != nil { - _, err := m.Responses.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Callbacks != nil { - _, err := m.Callbacks.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Security { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.Servers { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Parameter objects. -func (m *Parameter) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Schema != nil { - _, err := m.Schema.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Examples != nil { - _, err := m.Examples.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Content != nil { - _, err := m.Content.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ParameterOrReference objects. -func (m *ParameterOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*ParameterOrReference_Parameter) - if ok { - _, err := p.Parameter.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*ParameterOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ParametersOrReferences objects. -func (m *ParametersOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside PathItem objects. 
-func (m *PathItem) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.XRef != "" { - info, err := compiler.ReadInfoForRef(root, m.XRef) - if err != nil { - return nil, err - } - if info != nil { - replacement, err := NewPathItem(info, nil) - if err == nil { - *m = *replacement - return m.ResolveReferences(root) - } - } - return info, nil - } - if m.Get != nil { - _, err := m.Get.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Put != nil { - _, err := m.Put.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Post != nil { - _, err := m.Post.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Delete != nil { - _, err := m.Delete.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Options != nil { - _, err := m.Options.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Head != nil { - _, err := m.Head.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Patch != nil { - _, err := m.Patch.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Trace != nil { - _, err := m.Trace.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Servers { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.Parameters { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Paths objects. -func (m *Paths) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.Path { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Properties objects. -func (m *Properties) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Reference objects. -func (m *Reference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.XRef != "" { - info, err := compiler.ReadInfoForRef(root, m.XRef) - if err != nil { - return nil, err - } - if info != nil { - replacement, err := NewReference(info, nil) - if err == nil { - *m = *replacement - return m.ResolveReferences(root) - } - } - return info, nil - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside RequestBodiesOrReferences objects. 
-func (m *RequestBodiesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside RequestBody objects. -func (m *RequestBody) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Content != nil { - _, err := m.Content.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside RequestBodyOrReference objects. -func (m *RequestBodyOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*RequestBodyOrReference_RequestBody) - if ok { - _, err := p.RequestBody.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*RequestBodyOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Response objects. -func (m *Response) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Headers != nil { - _, err := m.Headers.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Content != nil { - _, err := m.Content.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Links != nil { - _, err := m.Links.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ResponseOrReference objects. -func (m *ResponseOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*ResponseOrReference_Response) - if ok { - _, err := p.Response.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*ResponseOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Responses objects. 
-func (m *Responses) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.ResponseOrReference { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ResponsesOrReferences objects. -func (m *ResponsesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Schema objects. -func (m *Schema) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Discriminator != nil { - _, err := m.Discriminator.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Xml != nil { - _, err := m.Xml.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Example != nil { - _, err := m.Example.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.Enum { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.AllOf { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.OneOf { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - for _, item := range m.AnyOf { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - if m.Not != nil { - _, err := m.Not.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Items != nil { - _, err := m.Items.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Properties != nil { - _, err := m.Properties.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.AdditionalProperties != nil { - _, err := m.AdditionalProperties.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - if m.Default != nil { - _, err := m.Default.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SchemaOrReference objects. 
-func (m *SchemaOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*SchemaOrReference_Schema) - if ok { - _, err := p.Schema.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SchemaOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SchemasOrReferences objects. -func (m *SchemasOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecurityRequirement objects. -func (m *SecurityRequirement) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecurityScheme objects. -func (m *SecurityScheme) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Flows != nil { - _, err := m.Flows.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecuritySchemeOrReference objects. -func (m *SecuritySchemeOrReference) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - { - p, ok := m.Oneof.(*SecuritySchemeOrReference_SecurityScheme) - if ok { - _, err := p.SecurityScheme.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - { - p, ok := m.Oneof.(*SecuritySchemeOrReference_Reference) - if ok { - _, err := p.Reference.ResolveReferences(root) - if err != nil { - return nil, err - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SecuritySchemesOrReferences objects. -func (m *SecuritySchemesOrReferences) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Server objects. -func (m *Server) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.Variables != nil { - _, err := m.Variables.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ServerVariable objects. 
-func (m *ServerVariable) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside ServerVariables objects. -func (m *ServerVariables) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside SpecificationExtension objects. -func (m *SpecificationExtension) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside StringArray objects. -func (m *StringArray) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Strings objects. -func (m *Strings) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.AdditionalProperties { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Tag objects. -func (m *Tag) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - if m.ExternalDocs != nil { - _, err := m.ExternalDocs.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ResolveReferences resolves references found inside Xml objects. -func (m *Xml) ResolveReferences(root string) (*yaml.Node, error) { - errors := make([]error, 0) - for _, item := range m.SpecificationExtension { - if item != nil { - _, err := item.ResolveReferences(root) - if err != nil { - errors = append(errors, err) - } - } - } - return nil, compiler.NewErrorGroupOrNil(errors) -} - -// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. -func (m *AdditionalPropertiesItem) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // AdditionalPropertiesItem - // {Name:schemaOrReference Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetSchemaOrReference() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { - return compiler.NewScalarNodeForBool(v1.Boolean) - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Any suitable for JSON or YAML export. 
-func (m *Any) ToRawInfo() *yaml.Node { - var err error - var node yaml.Node - err = yaml.Unmarshal([]byte(m.Yaml), &node) - if err == nil { - if node.Kind == yaml.DocumentNode { - return node.Content[0] - } - return &node - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of AnyOrExpression suitable for JSON or YAML export. -func (m *AnyOrExpression) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // AnyOrExpression - // {Name:any Type:Any StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetAny() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:expression Type:Expression StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetExpression() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Callback suitable for JSON or YAML export. -func (m *Callback) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Path != nil { - for _, item := range m.Path { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of CallbackOrReference suitable for JSON or YAML export. -func (m *CallbackOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // CallbackOrReference - // {Name:callback Type:Callback StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetCallback() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of CallbacksOrReferences suitable for JSON or YAML export. -func (m *CallbacksOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Components suitable for JSON or YAML export. 
-func (m *Components) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Schemas != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schemas")) - info.Content = append(info.Content, m.Schemas.ToRawInfo()) - } - if m.Responses != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) - info.Content = append(info.Content, m.Responses.ToRawInfo()) - } - if m.Parameters != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, m.Parameters.ToRawInfo()) - } - if m.Examples != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) - info.Content = append(info.Content, m.Examples.ToRawInfo()) - } - if m.RequestBodies != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBodies")) - info.Content = append(info.Content, m.RequestBodies.ToRawInfo()) - } - if m.Headers != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) - info.Content = append(info.Content, m.Headers.ToRawInfo()) - } - if m.SecuritySchemes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("securitySchemes")) - info.Content = append(info.Content, m.SecuritySchemes.ToRawInfo()) - } - if m.Links != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("links")) - info.Content = append(info.Content, m.Links.ToRawInfo()) - } - if m.Callbacks != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("callbacks")) - info.Content = append(info.Content, m.Callbacks.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Contact suitable for JSON or YAML export. -func (m *Contact) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Url != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - } - if m.Email != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("email")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Email)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of DefaultType suitable for JSON or YAML export. 
-func (m *DefaultType) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // DefaultType - // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v0, ok := m.GetOneof().(*DefaultType_Number); ok { - return compiler.NewScalarNodeForFloat(v0.Number) - } - // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v1, ok := m.GetOneof().(*DefaultType_Boolean); ok { - return compiler.NewScalarNodeForBool(v1.Boolean) - } - // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v2, ok := m.GetOneof().(*DefaultType_String_); ok { - return compiler.NewScalarNodeForString(v2.String_) - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Discriminator suitable for JSON or YAML export. -func (m *Discriminator) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("propertyName")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.PropertyName)) - if m.Mapping != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("mapping")) - info.Content = append(info.Content, m.Mapping.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Document suitable for JSON or YAML export. -func (m *Document) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("openapi")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Openapi)) - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("info")) - info.Content = append(info.Content, m.Info.ToRawInfo()) - if len(m.Servers) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Servers { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) - info.Content = append(info.Content, items) - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("paths")) - info.Content = append(info.Content, m.Paths.ToRawInfo()) - if m.Components != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("components")) - info.Content = append(info.Content, m.Components.ToRawInfo()) - } - if len(m.Security) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Security { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) - info.Content = append(info.Content, items) - } - if len(m.Tags) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Tags { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) - info.Content = append(info.Content, items) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Encoding suitable for JSON or YAML export. -func (m *Encoding) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.ContentType != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("contentType")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.ContentType)) - } - if m.Headers != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) - info.Content = append(info.Content, m.Headers.ToRawInfo()) - } - if m.Style != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) - } - if m.Explode != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) - } - if m.AllowReserved != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Encodings suitable for JSON or YAML export. -func (m *Encodings) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Example suitable for JSON or YAML export. 
-func (m *Example) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Value != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) - info.Content = append(info.Content, m.Value.ToRawInfo()) - } - if m.ExternalValue != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalValue")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.ExternalValue)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ExampleOrReference suitable for JSON or YAML export. -func (m *ExampleOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // ExampleOrReference - // {Name:example Type:Example StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetExample() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of ExamplesOrReferences suitable for JSON or YAML export. -func (m *ExamplesOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Expression suitable for JSON or YAML export. -func (m *Expression) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. -func (m *ExternalDocs) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Header suitable for JSON or YAML export. -func (m *Header) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.Deprecated != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) - } - if m.AllowEmptyValue != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) - } - if m.Style != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) - } - if m.Explode != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) - } - if m.AllowReserved != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) - } - if m.Schema != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, m.Schema.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.Examples != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) - info.Content = append(info.Content, m.Examples.ToRawInfo()) - } - if m.Content != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) - info.Content = append(info.Content, m.Content.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of HeaderOrReference suitable for JSON or YAML export. 
-func (m *HeaderOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // HeaderOrReference - // {Name:header Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetHeader() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of HeadersOrReferences suitable for JSON or YAML export. -func (m *HeadersOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Info suitable for JSON or YAML export. -func (m *Info) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.TermsOfService != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("termsOfService")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TermsOfService)) - } - if m.Contact != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("contact")) - info.Content = append(info.Content, m.Contact.ToRawInfo()) - } - if m.License != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("license")) - info.Content = append(info.Content, m.License.ToRawInfo()) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("version")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Version)) - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. -func (m *ItemsItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if len(m.SchemaOrReference) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.SchemaOrReference { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("schemaOrReference")) - info.Content = append(info.Content, items) - } - return info -} - -// ToRawInfo returns a description of License suitable for JSON or YAML export. 
-func (m *License) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - if m.Url != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Link suitable for JSON or YAML export. -func (m *Link) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.OperationRef != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("operationRef")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationRef)) - } - if m.OperationId != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) - } - if m.Parameters != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, m.Parameters.ToRawInfo()) - } - if m.RequestBody != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBody")) - info.Content = append(info.Content, m.RequestBody.ToRawInfo()) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Server != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("server")) - info.Content = append(info.Content, m.Server.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of LinkOrReference suitable for JSON or YAML export. -func (m *LinkOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // LinkOrReference - // {Name:link Type:Link StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetLink() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of LinksOrReferences suitable for JSON or YAML export. -func (m *LinksOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of MediaType suitable for JSON or YAML export. 
-func (m *MediaType) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Schema != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, m.Schema.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.Examples != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) - info.Content = append(info.Content, m.Examples.ToRawInfo()) - } - if m.Encoding != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("encoding")) - info.Content = append(info.Content, m.Encoding.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of MediaTypes suitable for JSON or YAML export. -func (m *MediaTypes) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. -func (m *NamedAny) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Value != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) - info.Content = append(info.Content, m.Value.ToRawInfo()) - } - return info -} - -// ToRawInfo returns a description of NamedCallbackOrReference suitable for JSON or YAML export. -func (m *NamedCallbackOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:CallbackOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedEncoding suitable for JSON or YAML export. -func (m *NamedEncoding) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:Encoding StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedExampleOrReference suitable for JSON or YAML export. 
-func (m *NamedExampleOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:ExampleOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedHeaderOrReference suitable for JSON or YAML export. -func (m *NamedHeaderOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:HeaderOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedLinkOrReference suitable for JSON or YAML export. -func (m *NamedLinkOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:LinkOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedMediaType suitable for JSON or YAML export. -func (m *NamedMediaType) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:MediaType StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedParameterOrReference suitable for JSON or YAML export. -func (m *NamedParameterOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:ParameterOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. -func (m *NamedPathItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedRequestBodyOrReference suitable for JSON or YAML export. 
-func (m *NamedRequestBodyOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:RequestBodyOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedResponseOrReference suitable for JSON or YAML export. -func (m *NamedResponseOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:ResponseOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedSchemaOrReference suitable for JSON or YAML export. -func (m *NamedSchemaOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:SchemaOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedSecuritySchemeOrReference suitable for JSON or YAML export. -func (m *NamedSecuritySchemeOrReference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:SecuritySchemeOrReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedServerVariable suitable for JSON or YAML export. -func (m *NamedServerVariable) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:ServerVariable StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. -func (m *NamedString) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Value != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Value)) - } - return info -} - -// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. 
-func (m *NamedStringArray) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} - return info -} - -// ToRawInfo returns a description of OauthFlow suitable for JSON or YAML export. -func (m *OauthFlow) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AuthorizationUrl != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) - } - if m.TokenUrl != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) - } - if m.RefreshUrl != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("refreshUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.RefreshUrl)) - } - if m.Scopes != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) - info.Content = append(info.Content, m.Scopes.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of OauthFlows suitable for JSON or YAML export. -func (m *OauthFlows) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Implicit != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("implicit")) - info.Content = append(info.Content, m.Implicit.ToRawInfo()) - } - if m.Password != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("password")) - info.Content = append(info.Content, m.Password.ToRawInfo()) - } - if m.ClientCredentials != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("clientCredentials")) - info.Content = append(info.Content, m.ClientCredentials.ToRawInfo()) - } - if m.AuthorizationCode != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationCode")) - info.Content = append(info.Content, m.AuthorizationCode.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Object suitable for JSON or YAML export. -func (m *Object) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
-func (m *Operation) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if len(m.Tags) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Tags)) - } - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.OperationId != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) - } - if len(m.Parameters) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Parameters { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, items) - } - if m.RequestBody != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("requestBody")) - info.Content = append(info.Content, m.RequestBody.ToRawInfo()) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) - info.Content = append(info.Content, m.Responses.ToRawInfo()) - if m.Callbacks != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("callbacks")) - info.Content = append(info.Content, m.Callbacks.ToRawInfo()) - } - if m.Deprecated != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) - } - if len(m.Security) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Security { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) - info.Content = append(info.Content, items) - } - if len(m.Servers) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Servers { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) - info.Content = append(info.Content, items) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. -func (m *Parameter) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.Deprecated != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) - } - if m.AllowEmptyValue != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) - } - if m.Style != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("style")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Style)) - } - if m.Explode != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("explode")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Explode)) - } - if m.AllowReserved != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("allowReserved")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowReserved)) - } - if m.Schema != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) - info.Content = append(info.Content, m.Schema.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.Examples != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) - info.Content = append(info.Content, m.Examples.ToRawInfo()) - } - if m.Content != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) - info.Content = append(info.Content, m.Content.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ParameterOrReference suitable for JSON or YAML export. -func (m *ParameterOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // ParameterOrReference - // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetParameter() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of ParametersOrReferences suitable for JSON or YAML export. 
-func (m *ParametersOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. -func (m *PathItem) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.XRef != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) - } - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Get != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("get")) - info.Content = append(info.Content, m.Get.ToRawInfo()) - } - if m.Put != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("put")) - info.Content = append(info.Content, m.Put.ToRawInfo()) - } - if m.Post != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("post")) - info.Content = append(info.Content, m.Post.ToRawInfo()) - } - if m.Delete != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("delete")) - info.Content = append(info.Content, m.Delete.ToRawInfo()) - } - if m.Options != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("options")) - info.Content = append(info.Content, m.Options.ToRawInfo()) - } - if m.Head != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("head")) - info.Content = append(info.Content, m.Head.ToRawInfo()) - } - if m.Patch != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("patch")) - info.Content = append(info.Content, m.Patch.ToRawInfo()) - } - if m.Trace != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("trace")) - info.Content = append(info.Content, m.Trace.ToRawInfo()) - } - if len(m.Servers) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Servers { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("servers")) - info.Content = append(info.Content, items) - } - if len(m.Parameters) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Parameters { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) - info.Content = append(info.Content, items) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Paths suitable for JSON or YAML export. 
-func (m *Paths) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Path != nil { - for _, item := range m.Path { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Properties suitable for JSON or YAML export. -func (m *Properties) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Reference suitable for JSON or YAML export. -func (m *Reference) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) - if m.Summary != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - return info -} - -// ToRawInfo returns a description of RequestBodiesOrReferences suitable for JSON or YAML export. -func (m *RequestBodiesOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of RequestBody suitable for JSON or YAML export. -func (m *RequestBody) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) - info.Content = append(info.Content, m.Content.ToRawInfo()) - if m.Required != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of RequestBodyOrReference suitable for JSON or YAML export. 
-func (m *RequestBodyOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // RequestBodyOrReference - // {Name:requestBody Type:RequestBody StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetRequestBody() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Response suitable for JSON or YAML export. -func (m *Response) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - if m.Headers != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) - info.Content = append(info.Content, m.Headers.ToRawInfo()) - } - if m.Content != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("content")) - info.Content = append(info.Content, m.Content.ToRawInfo()) - } - if m.Links != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("links")) - info.Content = append(info.Content, m.Links.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ResponseOrReference suitable for JSON or YAML export. -func (m *ResponseOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // ResponseOrReference - // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetResponse() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of Responses suitable for JSON or YAML export. -func (m *Responses) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.ResponseOrReference != nil { - for _, item := range m.ResponseOrReference { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ResponsesOrReferences suitable for JSON or YAML export. 
-func (m *ResponsesOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Schema suitable for JSON or YAML export. -func (m *Schema) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Nullable != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("nullable")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Nullable)) - } - if m.Discriminator != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("discriminator")) - info.Content = append(info.Content, m.Discriminator.ToRawInfo()) - } - if m.ReadOnly != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) - } - if m.WriteOnly != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("writeOnly")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.WriteOnly)) - } - if m.Xml != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("xml")) - info.Content = append(info.Content, m.Xml.ToRawInfo()) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.Example != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) - info.Content = append(info.Content, m.Example.ToRawInfo()) - } - if m.Deprecated != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) - } - if m.Title != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) - } - if m.MultipleOf != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) - } - if m.Maximum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) - } - if m.ExclusiveMaximum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) - } - if m.Minimum != 0.0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) - } - if m.ExclusiveMinimum != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) - } - if m.MaxLength != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) - } - if m.MinLength != 0 { - info.Content = append(info.Content, 
compiler.NewScalarNodeForString("minLength")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) - } - if m.Pattern != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) - } - if m.MaxItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) - } - if m.MinItems != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) - } - if m.UniqueItems != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) - } - if m.MaxProperties != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("maxProperties")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxProperties)) - } - if m.MinProperties != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("minProperties")) - info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinProperties)) - } - if len(m.Required) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) - } - if len(m.Enum) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.Enum { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, items) - } - if m.Type != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - } - if len(m.AllOf) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.AllOf { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("allOf")) - info.Content = append(info.Content, items) - } - if len(m.OneOf) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.OneOf { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("oneOf")) - info.Content = append(info.Content, items) - } - if len(m.AnyOf) != 0 { - items := compiler.NewSequenceNode() - for _, item := range m.AnyOf { - items.Content = append(items.Content, item.ToRawInfo()) - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("anyOf")) - info.Content = append(info.Content, items) - } - if m.Not != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("not")) - info.Content = append(info.Content, m.Not.ToRawInfo()) - } - if m.Items != nil { - items := compiler.NewSequenceNode() - for _, item := range m.Items.SchemaOrReference { - items.Content = append(items.Content, item.ToRawInfo()) - } - if len(items.Content) == 1 { - items = items.Content[0] - } - info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) - info.Content = append(info.Content, items) - } - if m.Properties != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("properties")) - info.Content = append(info.Content, m.Properties.ToRawInfo()) - } - 
if m.AdditionalProperties != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("additionalProperties")) - info.Content = append(info.Content, m.AdditionalProperties.ToRawInfo()) - } - if m.Default != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, m.Default.ToRawInfo()) - } - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Format != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SchemaOrReference suitable for JSON or YAML export. -func (m *SchemaOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // SchemaOrReference - // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetSchema() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of SchemasOrReferences suitable for JSON or YAML export. -func (m *SchemasOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. -func (m *SecurityRequirement) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SecurityScheme suitable for JSON or YAML export. -func (m *SecurityScheme) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.In != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) - } - if m.Scheme != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("scheme")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Scheme)) - } - if m.BearerFormat != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("bearerFormat")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.BearerFormat)) - } - if m.Flows != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("flows")) - info.Content = append(info.Content, m.Flows.ToRawInfo()) - } - if m.OpenIdConnectUrl != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("openIdConnectUrl")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OpenIdConnectUrl)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SecuritySchemeOrReference suitable for JSON or YAML export. -func (m *SecuritySchemeOrReference) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // SecuritySchemeOrReference - // {Name:securityScheme Type:SecurityScheme StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v0 := m.GetSecurityScheme() - if v0 != nil { - return v0.ToRawInfo() - } - // {Name:reference Type:Reference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - v1 := m.GetReference() - if v1 != nil { - return v1.ToRawInfo() - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of SecuritySchemesOrReferences suitable for JSON or YAML export. -func (m *SecuritySchemesOrReferences) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Server suitable for JSON or YAML export. -func (m *Server) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. 
- info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.Variables != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("variables")) - info.Content = append(info.Content, m.Variables.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ServerVariable suitable for JSON or YAML export. -func (m *ServerVariable) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if len(m.Enum) != 0 { - info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) - info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Enum)) - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Default)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of ServerVariables suitable for JSON or YAML export. -func (m *ServerVariables) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of SpecificationExtension suitable for JSON or YAML export. -func (m *SpecificationExtension) ToRawInfo() *yaml.Node { - // ONE OF WRAPPER - // SpecificationExtension - // {Name:number Type:float StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v0, ok := m.GetOneof().(*SpecificationExtension_Number); ok { - return compiler.NewScalarNodeForFloat(v0.Number) - } - // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v1, ok := m.GetOneof().(*SpecificationExtension_Boolean); ok { - return compiler.NewScalarNodeForBool(v1.Boolean) - } - // {Name:string Type:string StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} - if v2, ok := m.GetOneof().(*SpecificationExtension_String_); ok { - return compiler.NewScalarNodeForString(v2.String_) - } - return compiler.NewNullNode() -} - -// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. -func (m *StringArray) ToRawInfo() *yaml.Node { - return compiler.NewSequenceNodeForStringArray(m.Value) -} - -// ToRawInfo returns a description of Strings suitable for JSON or YAML export. 
-func (m *Strings) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Value)) - } - } - return info -} - -// ToRawInfo returns a description of Tag suitable for JSON or YAML export. -func (m *Tag) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - // always include this required field. - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - if m.Description != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) - } - if m.ExternalDocs != nil { - info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) - info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - -// ToRawInfo returns a description of Xml suitable for JSON or YAML export. -func (m *Xml) ToRawInfo() *yaml.Node { - info := compiler.NewMappingNode() - if m == nil { - return info - } - if m.Name != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) - } - if m.Namespace != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("namespace")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Namespace)) - } - if m.Prefix != "" { - info.Content = append(info.Content, compiler.NewScalarNodeForString("prefix")) - info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Prefix)) - } - if m.Attribute != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("attribute")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Attribute)) - } - if m.Wrapped != false { - info.Content = append(info.Content, compiler.NewScalarNodeForString("wrapped")) - info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Wrapped)) - } - if m.SpecificationExtension != nil { - for _, item := range m.SpecificationExtension { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, item.Value.ToRawInfo()) - } - } - return info -} - var ( pattern0 = regexp.MustCompile("^") pattern1 = regexp.MustCompile("^x-") diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pbalias.go b/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pbalias.go new file mode 100644 index 000000000..51ad62bb0 --- /dev/null +++ b/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pbalias.go @@ -0,0 +1,128 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi_v3 + +import ( + openapiv3 "github.com/google/gnostic-models/openapiv3" +) + +type AdditionalPropertiesItem = openapiv3.AdditionalPropertiesItem +type AdditionalPropertiesItem_SchemaOrReference = openapiv3.AdditionalPropertiesItem_SchemaOrReference +type AdditionalPropertiesItem_Boolean = openapiv3.AdditionalPropertiesItem_Boolean +type Any = openapiv3.Any +type AnyOrExpression = openapiv3.AnyOrExpression +type AnyOrExpression_Any = openapiv3.AnyOrExpression_Any +type AnyOrExpression_Expression = openapiv3.AnyOrExpression_Expression +type Callback = openapiv3.Callback +type CallbackOrReference = openapiv3.CallbackOrReference +type CallbackOrReference_Callback = openapiv3.CallbackOrReference_Callback +type CallbackOrReference_Reference = openapiv3.CallbackOrReference_Reference +type CallbacksOrReferences = openapiv3.CallbacksOrReferences +type Components = openapiv3.Components +type Contact = openapiv3.Contact +type DefaultType = openapiv3.DefaultType +type DefaultType_Number = openapiv3.DefaultType_Number +type DefaultType_Boolean = openapiv3.DefaultType_Boolean +type DefaultType_String_ = openapiv3.DefaultType_String_ +type Discriminator = openapiv3.Discriminator +type Document = openapiv3.Document +type Encoding = openapiv3.Encoding +type Encodings = openapiv3.Encodings +type Example = openapiv3.Example +type ExampleOrReference = openapiv3.ExampleOrReference +type ExampleOrReference_Example = openapiv3.ExampleOrReference_Example +type ExampleOrReference_Reference = openapiv3.ExampleOrReference_Reference +type ExamplesOrReferences = openapiv3.ExamplesOrReferences +type Expression = openapiv3.Expression +type ExternalDocs = openapiv3.ExternalDocs +type Header = openapiv3.Header +type HeaderOrReference = openapiv3.HeaderOrReference +type HeaderOrReference_Header = openapiv3.HeaderOrReference_Header +type HeaderOrReference_Reference = openapiv3.HeaderOrReference_Reference +type HeadersOrReferences = openapiv3.HeadersOrReferences +type Info = openapiv3.Info +type ItemsItem = openapiv3.ItemsItem +type License = openapiv3.License +type Link = openapiv3.Link +type LinkOrReference = openapiv3.LinkOrReference +type LinkOrReference_Link = openapiv3.LinkOrReference_Link +type LinkOrReference_Reference = openapiv3.LinkOrReference_Reference +type LinksOrReferences = openapiv3.LinksOrReferences +type MediaType = openapiv3.MediaType +type MediaTypes = openapiv3.MediaTypes +type NamedAny = openapiv3.NamedAny +type NamedCallbackOrReference = openapiv3.NamedCallbackOrReference +type NamedEncoding = openapiv3.NamedEncoding +type NamedExampleOrReference = openapiv3.NamedExampleOrReference +type NamedHeaderOrReference = openapiv3.NamedHeaderOrReference +type NamedLinkOrReference = openapiv3.NamedLinkOrReference +type NamedMediaType = openapiv3.NamedMediaType +type NamedParameterOrReference = openapiv3.NamedParameterOrReference +type NamedPathItem = openapiv3.NamedPathItem +type NamedRequestBodyOrReference = openapiv3.NamedRequestBodyOrReference +type NamedResponseOrReference = openapiv3.NamedResponseOrReference +type NamedSchemaOrReference = openapiv3.NamedSchemaOrReference 
+type NamedSecuritySchemeOrReference = openapiv3.NamedSecuritySchemeOrReference +type NamedServerVariable = openapiv3.NamedServerVariable +type NamedString = openapiv3.NamedString +type NamedStringArray = openapiv3.NamedStringArray +type OauthFlow = openapiv3.OauthFlow +type OauthFlows = openapiv3.OauthFlows +type Object = openapiv3.Object +type Operation = openapiv3.Operation +type Parameter = openapiv3.Parameter +type ParameterOrReference = openapiv3.ParameterOrReference +type ParameterOrReference_Parameter = openapiv3.ParameterOrReference_Parameter +type ParameterOrReference_Reference = openapiv3.ParameterOrReference_Reference +type ParametersOrReferences = openapiv3.ParametersOrReferences +type PathItem = openapiv3.PathItem +type Paths = openapiv3.Paths +type Properties = openapiv3.Properties +type Reference = openapiv3.Reference +type RequestBodiesOrReferences = openapiv3.RequestBodiesOrReferences +type RequestBody = openapiv3.RequestBody +type RequestBodyOrReference = openapiv3.RequestBodyOrReference +type RequestBodyOrReference_RequestBody = openapiv3.RequestBodyOrReference_RequestBody +type RequestBodyOrReference_Reference = openapiv3.RequestBodyOrReference_Reference +type Response = openapiv3.Response +type ResponseOrReference = openapiv3.ResponseOrReference +type ResponseOrReference_Response = openapiv3.ResponseOrReference_Response +type ResponseOrReference_Reference = openapiv3.ResponseOrReference_Reference +type Responses = openapiv3.Responses +type ResponsesOrReferences = openapiv3.ResponsesOrReferences +type Schema = openapiv3.Schema +type SchemaOrReference = openapiv3.SchemaOrReference +type SchemaOrReference_Schema = openapiv3.SchemaOrReference_Schema +type SchemaOrReference_Reference = openapiv3.SchemaOrReference_Reference +type SchemasOrReferences = openapiv3.SchemasOrReferences +type SecurityRequirement = openapiv3.SecurityRequirement +type SecurityScheme = openapiv3.SecurityScheme +type SecuritySchemeOrReference = openapiv3.SecuritySchemeOrReference +type SecuritySchemeOrReference_SecurityScheme = openapiv3.SecuritySchemeOrReference_SecurityScheme +type SecuritySchemeOrReference_Reference = openapiv3.SecuritySchemeOrReference_Reference +type SecuritySchemesOrReferences = openapiv3.SecuritySchemesOrReferences +type Server = openapiv3.Server +type ServerVariable = openapiv3.ServerVariable +type ServerVariables = openapiv3.ServerVariables +type SpecificationExtension = openapiv3.SpecificationExtension +type SpecificationExtension_Number = openapiv3.SpecificationExtension_Number +type SpecificationExtension_Boolean = openapiv3.SpecificationExtension_Boolean +type SpecificationExtension_String_ = openapiv3.SpecificationExtension_String_ +type StringArray = openapiv3.StringArray +type Strings = openapiv3.Strings +type Tag = openapiv3.Tag +type Xml = openapiv3.Xml + +var File_openapiv3_OpenAPIv3_proto = openapiv3.File_openapiv3_OpenAPIv3_proto diff --git a/vendor/github.com/google/gnostic/openapiv3/README.md b/vendor/github.com/google/gnostic/openapiv3/README.md index 83603b82a..0b015bce6 100644 --- a/vendor/github.com/google/gnostic/openapiv3/README.md +++ b/vendor/github.com/google/gnostic/openapiv3/README.md @@ -22,4 +22,5 @@ openapi-3.1.json from the OpenAPI 3.1 specification document (Markdown). ### How to rebuild -`protoc -I=. -I=third_party --go_out=. 
--go_opt=paths=source_relative openapiv3/*.proto` \ No newline at end of file +Run: +`COMPILE-PROTOS.sh` diff --git a/vendor/github.com/google/gnostic/openapiv3/annotations.pbalias.go b/vendor/github.com/google/gnostic/openapiv3/annotations.pbalias.go new file mode 100644 index 000000000..3152b1437 --- /dev/null +++ b/vendor/github.com/google/gnostic/openapiv3/annotations.pbalias.go @@ -0,0 +1,28 @@ +// Copyright 2022 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi_v3 + +import ( + openapiv3 "github.com/google/gnostic-models/openapiv3" +) + +var ( + E_Document = openapiv3.E_Document + E_Operation = openapiv3.E_Operation + E_Schema = openapiv3.E_Schema + E_Property = openapiv3.E_Property +) + +var File_openapiv3_annotations_proto = openapiv3.File_openapiv3_annotations_proto diff --git a/vendor/github.com/google/gnostic/openapiv3/document.go b/vendor/github.com/google/gnostic/openapiv3/document.go index ef10d1d90..e5b305ec3 100644 --- a/vendor/github.com/google/gnostic/openapiv3/document.go +++ b/vendor/github.com/google/gnostic/openapiv3/document.go @@ -15,7 +15,7 @@ package openapi_v3 import ( - "gopkg.in/yaml.v3" + "errors" "github.com/google/gnostic/compiler" ) @@ -26,17 +26,11 @@ func ParseDocument(b []byte) (*Document, error) { if err != nil { return nil, err } - root := info.Content[0] - return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil)) -} -// YAMLValue produces a serialized YAML representation of the document. -func (d *Document) YAMLValue(comment string) ([]byte, error) { - rawInfo := d.ToRawInfo() - rawInfo = &yaml.Node{ - Kind: yaml.DocumentNode, - Content: []*yaml.Node{rawInfo}, - HeadComment: comment, + if len(info.Content) < 1 { + return nil, errors.New("document has no content") } - return yaml.Marshal(rawInfo) + + root := info.Content[0] + return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil)) } diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go index e54a76c7e..3d8d0cd3a 100644 --- a/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go +++ b/vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go @@ -7,6 +7,7 @@ package cmpopts import ( "errors" + "fmt" "math" "reflect" "time" @@ -16,10 +17,10 @@ import ( func equateAlways(_, _ interface{}) bool { return true } -// EquateEmpty returns a Comparer option that determines all maps and slices +// EquateEmpty returns a [cmp.Comparer] option that determines all maps and slices // with a length of zero to be equal, regardless of whether they are nil. // -// EquateEmpty can be used in conjunction with SortSlices and SortMaps. +// EquateEmpty can be used in conjunction with [SortSlices] and [SortMaps]. 
func EquateEmpty() cmp.Option { return cmp.FilterValues(isEmpty, cmp.Comparer(equateAlways)) } @@ -31,7 +32,7 @@ func isEmpty(x, y interface{}) bool { (vx.Len() == 0 && vy.Len() == 0) } -// EquateApprox returns a Comparer option that determines float32 or float64 +// EquateApprox returns a [cmp.Comparer] option that determines float32 or float64 // values to be equal if they are within a relative fraction or absolute margin. // This option is not used when either x or y is NaN or infinite. // @@ -45,7 +46,7 @@ func isEmpty(x, y interface{}) bool { // // |x-y| ≤ max(fraction*min(|x|, |y|), margin) // -// EquateApprox can be used in conjunction with EquateNaNs. +// EquateApprox can be used in conjunction with [EquateNaNs]. func EquateApprox(fraction, margin float64) cmp.Option { if margin < 0 || fraction < 0 || math.IsNaN(margin) || math.IsNaN(fraction) { panic("margin or fraction must be a non-negative number") @@ -73,10 +74,10 @@ func (a approximator) compareF32(x, y float32) bool { return a.compareF64(float64(x), float64(y)) } -// EquateNaNs returns a Comparer option that determines float32 and float64 +// EquateNaNs returns a [cmp.Comparer] option that determines float32 and float64 // NaN values to be equal. // -// EquateNaNs can be used in conjunction with EquateApprox. +// EquateNaNs can be used in conjunction with [EquateApprox]. func EquateNaNs() cmp.Option { return cmp.Options{ cmp.FilterValues(areNaNsF64s, cmp.Comparer(equateAlways)), @@ -91,8 +92,8 @@ func areNaNsF32s(x, y float32) bool { return areNaNsF64s(float64(x), float64(y)) } -// EquateApproxTime returns a Comparer option that determines two non-zero -// time.Time values to be equal if they are within some margin of one another. +// EquateApproxTime returns a [cmp.Comparer] option that determines two non-zero +// [time.Time] values to be equal if they are within some margin of one another. // If both times have a monotonic clock reading, then the monotonic time // difference will be used. The margin must be non-negative. func EquateApproxTime(margin time.Duration) cmp.Option { @@ -131,8 +132,8 @@ type anyError struct{} func (anyError) Error() string { return "any error" } func (anyError) Is(err error) bool { return err != nil } -// EquateErrors returns a Comparer option that determines errors to be equal -// if errors.Is reports them to match. The AnyError error can be used to +// EquateErrors returns a [cmp.Comparer] option that determines errors to be equal +// if [errors.Is] reports them to match. The [AnyError] error can be used to // match any non-nil error. func EquateErrors() cmp.Option { return cmp.FilterValues(areConcreteErrors, cmp.Comparer(compareErrors)) @@ -154,3 +155,31 @@ func compareErrors(x, y interface{}) bool { ye := y.(error) return errors.Is(xe, ye) || errors.Is(ye, xe) } + +// EquateComparable returns a [cmp.Option] that determines equality +// of comparable types by directly comparing them using the == operator in Go. +// The types to compare are specified by passing a value of that type. +// This option should only be used on types that are documented as being +// safe for direct == comparison. For example, [net/netip.Addr] is documented +// as being semantically safe to use with ==, while [time.Time] is documented +// to discourage the use of == on time values. 
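// Illustrative sketch, not part of the vendored diff: one way the new
// cmpopts.EquateComparable option introduced in this hunk might be used.
// net/netip.Addr is picked because its documentation declares it safe for ==;
// the addresses below are example values only. Assumes the cmp, cmpopts,
// fmt, and net/netip imports shown.
//
//	package main
//
//	import (
//		"fmt"
//		"net/netip"
//
//		"github.com/google/go-cmp/cmp"
//		"github.com/google/go-cmp/cmp/cmpopts"
//	)
//
//	func main() {
//		x := []netip.Addr{netip.MustParseAddr("10.0.0.1")}
//		y := []netip.Addr{netip.MustParseAddr("10.0.0.1")}
//		// Without the option cmp would panic on netip.Addr's unexported fields;
//		// with it, elements are compared directly with ==.
//		fmt.Println(cmp.Equal(x, y, cmpopts.EquateComparable(netip.Addr{})))
//	}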
+func EquateComparable(typs ...interface{}) cmp.Option { + types := make(typesFilter) + for _, typ := range typs { + switch t := reflect.TypeOf(typ); { + case !t.Comparable(): + panic(fmt.Sprintf("%T is not a comparable Go type", typ)) + case types[t]: + panic(fmt.Sprintf("%T is already specified", typ)) + default: + types[t] = true + } + } + return cmp.FilterPath(types.filter, cmp.Comparer(equateAny)) +} + +type typesFilter map[reflect.Type]bool + +func (tf typesFilter) filter(p cmp.Path) bool { return tf[p.Last().Type()] } + +func equateAny(x, y interface{}) bool { return x == y } diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go index 80c60617e..fb84d11d7 100644 --- a/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go +++ b/vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go @@ -14,7 +14,7 @@ import ( "github.com/google/go-cmp/cmp/internal/function" ) -// IgnoreFields returns an Option that ignores fields of the +// IgnoreFields returns an [cmp.Option] that ignores fields of the // given names on a single struct type. It respects the names of exported fields // that are forwarded due to struct embedding. // The struct type is specified by passing in a value of that type. @@ -26,7 +26,7 @@ func IgnoreFields(typ interface{}, names ...string) cmp.Option { return cmp.FilterPath(sf.filter, cmp.Ignore()) } -// IgnoreTypes returns an Option that ignores all values assignable to +// IgnoreTypes returns an [cmp.Option] that ignores all values assignable to // certain types, which are specified by passing in a value of each type. func IgnoreTypes(typs ...interface{}) cmp.Option { tf := newTypeFilter(typs...) @@ -59,10 +59,10 @@ func (tf typeFilter) filter(p cmp.Path) bool { return false } -// IgnoreInterfaces returns an Option that ignores all values or references of +// IgnoreInterfaces returns an [cmp.Option] that ignores all values or references of // values assignable to certain interface types. These interfaces are specified // by passing in an anonymous struct with the interface types embedded in it. -// For example, to ignore sync.Locker, pass in struct{sync.Locker}{}. +// For example, to ignore [sync.Locker], pass in struct{sync.Locker}{}. func IgnoreInterfaces(ifaces interface{}) cmp.Option { tf := newIfaceFilter(ifaces) return cmp.FilterPath(tf.filter, cmp.Ignore()) @@ -107,7 +107,7 @@ func (tf ifaceFilter) filter(p cmp.Path) bool { return false } -// IgnoreUnexported returns an Option that only ignores the immediate unexported +// IgnoreUnexported returns an [cmp.Option] that only ignores the immediate unexported // fields of a struct, including anonymous fields of unexported types. // In particular, unexported fields within the struct's exported fields // of struct types, including anonymous fields, will not be ignored unless the @@ -115,7 +115,7 @@ func (tf ifaceFilter) filter(p cmp.Path) bool { // // Avoid ignoring unexported fields of a type which you do not control (i.e. a // type from another repository), as changes to the implementation of such types -// may change how the comparison behaves. Prefer a custom Comparer instead. +// may change how the comparison behaves. Prefer a custom [cmp.Comparer] instead. func IgnoreUnexported(typs ...interface{}) cmp.Option { ux := newUnexportedFilter(typs...) return cmp.FilterPath(ux.filter, cmp.Ignore()) @@ -148,7 +148,7 @@ func isExported(id string) bool { return unicode.IsUpper(r) } -// IgnoreSliceElements returns an Option that ignores elements of []V. 
+// IgnoreSliceElements returns an [cmp.Option] that ignores elements of []V. // The discard function must be of the form "func(T) bool" which is used to // ignore slice elements of type V, where V is assignable to T. // Elements are ignored if the function reports true. @@ -176,7 +176,7 @@ func IgnoreSliceElements(discardFunc interface{}) cmp.Option { }, cmp.Ignore()) } -// IgnoreMapEntries returns an Option that ignores entries of map[K]V. +// IgnoreMapEntries returns an [cmp.Option] that ignores entries of map[K]V. // The discard function must be of the form "func(T, R) bool" which is used to // ignore map entries of type K and V, where K and V are assignable to T and R. // Entries are ignored if the function reports true. diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go index 0eb2a758c..c6d09dae4 100644 --- a/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go +++ b/vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go @@ -13,7 +13,7 @@ import ( "github.com/google/go-cmp/cmp/internal/function" ) -// SortSlices returns a Transformer option that sorts all []V. +// SortSlices returns a [cmp.Transformer] option that sorts all []V. // The less function must be of the form "func(T, T) bool" which is used to // sort any slice with element type V that is assignable to T. // @@ -25,7 +25,7 @@ import ( // The less function does not have to be "total". That is, if !less(x, y) and // !less(y, x) for two elements x and y, their relative order is maintained. // -// SortSlices can be used in conjunction with EquateEmpty. +// SortSlices can be used in conjunction with [EquateEmpty]. func SortSlices(lessFunc interface{}) cmp.Option { vf := reflect.ValueOf(lessFunc) if !function.IsType(vf.Type(), function.Less) || vf.IsNil() { @@ -82,13 +82,13 @@ func (ss sliceSorter) less(v reflect.Value, i, j int) bool { return ss.fnc.Call([]reflect.Value{vx, vy})[0].Bool() } -// SortMaps returns a Transformer option that flattens map[K]V types to be a +// SortMaps returns a [cmp.Transformer] option that flattens map[K]V types to be a // sorted []struct{K, V}. The less function must be of the form // "func(T, T) bool" which is used to sort any map with key K that is // assignable to T. // -// Flattening the map into a slice has the property that cmp.Equal is able to -// use Comparers on K or the K.Equal method if it exists. +// Flattening the map into a slice has the property that [cmp.Equal] is able to +// use [cmp.Comparer] options on K or the K.Equal method if it exists. // // The less function must be: // - Deterministic: less(x, y) == less(x, y) @@ -96,7 +96,7 @@ func (ss sliceSorter) less(v reflect.Value, i, j int) bool { // - Transitive: if !less(x, y) and !less(y, z), then !less(x, z) // - Total: if x != y, then either less(x, y) or less(y, x) // -// SortMaps can be used in conjunction with EquateEmpty. +// SortMaps can be used in conjunction with [EquateEmpty]. 
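// Illustrative sketch, not part of the vendored diff: combining the SortSlices
// and EquateEmpty helpers documented in this hunk to compare slices as
// unordered sets. The string data is an example only; assumes the cmp,
// cmpopts, and fmt imports.
//
//	opts := cmp.Options{
//		cmpopts.SortSlices(func(a, b string) bool { return a < b }),
//		cmpopts.EquateEmpty(), // treat nil and empty slices as equal
//	}
//	got := []string{"b", "a"}
//	want := []string{"a", "b"}
//	if diff := cmp.Diff(want, got, opts); diff != "" {
//		fmt.Printf("unexpected difference (-want +got):\n%s", diff)
//	}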
func SortMaps(lessFunc interface{}) cmp.Option { vf := reflect.ValueOf(lessFunc) if !function.IsType(vf.Type(), function.Less) || vf.IsNil() { diff --git a/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go b/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go index 8812443a2..25b4bd05b 100644 --- a/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go +++ b/vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go @@ -19,7 +19,7 @@ func (xf xformFilter) filter(p cmp.Path) bool { return true } -// AcyclicTransformer returns a Transformer with a filter applied that ensures +// AcyclicTransformer returns a [cmp.Transformer] with a filter applied that ensures // that the transformer cannot be recursively applied upon its own output. // // An example use case is a transformer that splits a string by lines: @@ -28,7 +28,7 @@ func (xf xformFilter) filter(p cmp.Path) bool { // return strings.Split(s, "\n") // }) // -// Had this been an unfiltered Transformer instead, this would result in an +// Had this been an unfiltered [cmp.Transformer] instead, this would result in an // infinite cycle converting a string to []string to [][]string and so on. func AcyclicTransformer(name string, xformFunc interface{}) cmp.Option { xf := xformFilter{cmp.Transformer(name, xformFunc)} diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go index 087320da7..0f5b8a48c 100644 --- a/vendor/github.com/google/go-cmp/cmp/compare.go +++ b/vendor/github.com/google/go-cmp/cmp/compare.go @@ -5,7 +5,7 @@ // Package cmp determines equality of values. // // This package is intended to be a more powerful and safer alternative to -// reflect.DeepEqual for comparing whether two values are semantically equal. +// [reflect.DeepEqual] for comparing whether two values are semantically equal. // It is intended to only be used in tests, as performance is not a goal and // it may panic if it cannot compare the values. Its propensity towards // panicking means that its unsuitable for production environments where a @@ -18,16 +18,17 @@ // For example, an equality function may report floats as equal so long as // they are within some tolerance of each other. // -// - Types with an Equal method may use that method to determine equality. -// This allows package authors to determine the equality operation -// for the types that they define. +// - Types with an Equal method (e.g., [time.Time.Equal]) may use that method +// to determine equality. This allows package authors to determine +// the equality operation for the types that they define. // // - If no custom equality functions are used and no Equal method is defined, // equality is determined by recursively comparing the primitive kinds on -// both values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, +// both values, much like [reflect.DeepEqual]. Unlike [reflect.DeepEqual], // unexported fields are not compared by default; they result in panics -// unless suppressed by using an Ignore option (see cmpopts.IgnoreUnexported) -// or explicitly compared using the Exporter option. +// unless suppressed by using an [Ignore] option +// (see [github.com/google/go-cmp/cmp/cmpopts.IgnoreUnexported]) +// or explicitly compared using the [Exporter] option. 
package cmp import ( @@ -45,14 +46,14 @@ import ( // Equal reports whether x and y are equal by recursively applying the // following rules in the given order to x and y and all of their sub-values: // -// - Let S be the set of all Ignore, Transformer, and Comparer options that +// - Let S be the set of all [Ignore], [Transformer], and [Comparer] options that // remain after applying all path filters, value filters, and type filters. -// If at least one Ignore exists in S, then the comparison is ignored. -// If the number of Transformer and Comparer options in S is non-zero, +// If at least one [Ignore] exists in S, then the comparison is ignored. +// If the number of [Transformer] and [Comparer] options in S is non-zero, // then Equal panics because it is ambiguous which option to use. -// If S contains a single Transformer, then use that to transform +// If S contains a single [Transformer], then use that to transform // the current values and recursively call Equal on the output values. -// If S contains a single Comparer, then use that to compare the current values. +// If S contains a single [Comparer], then use that to compare the current values. // Otherwise, evaluation proceeds to the next rule. // // - If the values have an Equal method of the form "(T) Equal(T) bool" or @@ -66,21 +67,22 @@ import ( // Functions are only equal if they are both nil, otherwise they are unequal. // // Structs are equal if recursively calling Equal on all fields report equal. -// If a struct contains unexported fields, Equal panics unless an Ignore option -// (e.g., cmpopts.IgnoreUnexported) ignores that field or the Exporter option -// explicitly permits comparing the unexported field. +// If a struct contains unexported fields, Equal panics unless an [Ignore] option +// (e.g., [github.com/google/go-cmp/cmp/cmpopts.IgnoreUnexported]) ignores that field +// or the [Exporter] option explicitly permits comparing the unexported field. // // Slices are equal if they are both nil or both non-nil, where recursively // calling Equal on all non-ignored slice or array elements report equal. // Empty non-nil slices and nil slices are not equal; to equate empty slices, -// consider using cmpopts.EquateEmpty. +// consider using [github.com/google/go-cmp/cmp/cmpopts.EquateEmpty]. // // Maps are equal if they are both nil or both non-nil, where recursively // calling Equal on all non-ignored map entries report equal. // Map keys are equal according to the == operator. -// To use custom comparisons for map keys, consider using cmpopts.SortMaps. +// To use custom comparisons for map keys, consider using +// [github.com/google/go-cmp/cmp/cmpopts.SortMaps]. // Empty non-nil maps and nil maps are not equal; to equate empty maps, -// consider using cmpopts.EquateEmpty. +// consider using [github.com/google/go-cmp/cmp/cmpopts.EquateEmpty]. // // Pointers and interfaces are equal if they are both nil or both non-nil, // where they have the same underlying concrete type and recursively diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export.go similarity index 94% rename from vendor/github.com/google/go-cmp/cmp/export_unsafe.go rename to vendor/github.com/google/go-cmp/cmp/export.go index e2c0f74e8..29f82fe6b 100644 --- a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/export.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-//go:build !purego -// +build !purego - package cmp import ( @@ -12,8 +9,6 @@ import ( "unsafe" ) -const supportExporters = true - // retrieveUnexportedField uses unsafe to forcibly retrieve any field from // a struct such that the value has read-write permissions. // diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go deleted file mode 100644 index ae851fe53..000000000 --- a/vendor/github.com/google/go-cmp/cmp/export_panic.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2017, The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build purego -// +build purego - -package cmp - -import "reflect" - -const supportExporters = false - -func retrieveUnexportedField(reflect.Value, reflect.StructField, bool) reflect.Value { - panic("no support for forcibly accessing unexported fields") -} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer.go similarity index 95% rename from vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go rename to vendor/github.com/google/go-cmp/cmp/internal/value/pointer.go index 16e6860af..e5dfff69a 100644 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build !purego -// +build !purego - package value import ( diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go deleted file mode 100644 index 1a71bfcbd..000000000 --- a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018, The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build purego -// +build purego - -package value - -import "reflect" - -// Pointer is an opaque typed pointer and is guaranteed to be comparable. -type Pointer struct { - p uintptr - t reflect.Type -} - -// PointerOf returns a Pointer from v, which must be a -// reflect.Ptr, reflect.Slice, or reflect.Map. -func PointerOf(v reflect.Value) Pointer { - // NOTE: Storing a pointer as an uintptr is technically incorrect as it - // assumes that the GC implementation does not use a moving collector. - return Pointer{v.Pointer(), v.Type()} -} - -// IsNil reports whether the pointer is nil. -func (p Pointer) IsNil() bool { - return p.p == 0 -} - -// Uintptr returns the pointer as a uintptr. -func (p Pointer) Uintptr() uintptr { - return p.p -} diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go index 1f9ca9c48..754496f3b 100644 --- a/vendor/github.com/google/go-cmp/cmp/options.go +++ b/vendor/github.com/google/go-cmp/cmp/options.go @@ -13,15 +13,15 @@ import ( "github.com/google/go-cmp/cmp/internal/function" ) -// Option configures for specific behavior of Equal and Diff. In particular, -// the fundamental Option functions (Ignore, Transformer, and Comparer), +// Option configures for specific behavior of [Equal] and [Diff]. In particular, +// the fundamental Option functions ([Ignore], [Transformer], and [Comparer]), // configure how equality is determined. 
// -// The fundamental options may be composed with filters (FilterPath and -// FilterValues) to control the scope over which they are applied. +// The fundamental options may be composed with filters ([FilterPath] and +// [FilterValues]) to control the scope over which they are applied. // -// The cmp/cmpopts package provides helper functions for creating options that -// may be used with Equal and Diff. +// The [github.com/google/go-cmp/cmp/cmpopts] package provides helper functions +// for creating options that may be used with [Equal] and [Diff]. type Option interface { // filter applies all filters and returns the option that remains. // Each option may only read s.curPath and call s.callTTBFunc. @@ -56,9 +56,9 @@ type core struct{} func (core) isCore() {} -// Options is a list of Option values that also satisfies the Option interface. +// Options is a list of [Option] values that also satisfies the [Option] interface. // Helper comparison packages may return an Options value when packing multiple -// Option values into a single Option. When this package processes an Options, +// [Option] values into a single [Option]. When this package processes an Options, // it will be implicitly expanded into a flat list. // // Applying a filter on an Options is equivalent to applying that same filter @@ -105,16 +105,16 @@ func (opts Options) String() string { return fmt.Sprintf("Options{%s}", strings.Join(ss, ", ")) } -// FilterPath returns a new Option where opt is only evaluated if filter f -// returns true for the current Path in the value tree. +// FilterPath returns a new [Option] where opt is only evaluated if filter f +// returns true for the current [Path] in the value tree. // // This filter is called even if a slice element or map entry is missing and // provides an opportunity to ignore such cases. The filter function must be // symmetric such that the filter result is identical regardless of whether the // missing value is from x or y. // -// The option passed in may be an Ignore, Transformer, Comparer, Options, or -// a previously filtered Option. +// The option passed in may be an [Ignore], [Transformer], [Comparer], [Options], or +// a previously filtered [Option]. func FilterPath(f func(Path) bool, opt Option) Option { if f == nil { panic("invalid path filter function") @@ -142,7 +142,7 @@ func (f pathFilter) String() string { return fmt.Sprintf("FilterPath(%s, %v)", function.NameOf(reflect.ValueOf(f.fnc)), f.opt) } -// FilterValues returns a new Option where opt is only evaluated if filter f, +// FilterValues returns a new [Option] where opt is only evaluated if filter f, // which is a function of the form "func(T, T) bool", returns true for the // current pair of values being compared. If either value is invalid or // the type of the values is not assignable to T, then this filter implicitly @@ -154,8 +154,8 @@ func (f pathFilter) String() string { // If T is an interface, it is possible that f is called with two values with // different concrete types that both implement T. // -// The option passed in may be an Ignore, Transformer, Comparer, Options, or -// a previously filtered Option. +// The option passed in may be an [Ignore], [Transformer], [Comparer], [Options], or +// a previously filtered [Option]. 
func FilterValues(f interface{}, opt Option) Option { v := reflect.ValueOf(f) if !function.IsType(v.Type(), function.ValueFilter) || v.IsNil() { @@ -192,9 +192,9 @@ func (f valuesFilter) String() string { return fmt.Sprintf("FilterValues(%s, %v)", function.NameOf(f.fnc), f.opt) } -// Ignore is an Option that causes all comparisons to be ignored. -// This value is intended to be combined with FilterPath or FilterValues. -// It is an error to pass an unfiltered Ignore option to Equal. +// Ignore is an [Option] that causes all comparisons to be ignored. +// This value is intended to be combined with [FilterPath] or [FilterValues]. +// It is an error to pass an unfiltered Ignore option to [Equal]. func Ignore() Option { return ignore{} } type ignore struct{ core } @@ -234,6 +234,8 @@ func (validator) apply(s *state, vx, vy reflect.Value) { name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType if _, ok := reflect.New(t).Interface().(error); ok { help = "consider using cmpopts.EquateErrors to compare error values" + } else if t.Comparable() { + help = "consider using cmpopts.EquateComparable to compare comparable Go types" } } else { // Unnamed type with unexported fields. Derive PkgPath from field. @@ -254,7 +256,7 @@ const identRx = `[_\p{L}][_\p{L}\p{N}]*` var identsRx = regexp.MustCompile(`^` + identRx + `(\.` + identRx + `)*$`) -// Transformer returns an Option that applies a transformation function that +// Transformer returns an [Option] that applies a transformation function that // converts values of a certain type into that of another. // // The transformer f must be a function "func(T) R" that converts values of @@ -265,13 +267,14 @@ var identsRx = regexp.MustCompile(`^` + identRx + `(\.` + identRx + `)*$`) // same transform to the output of itself (e.g., in the case where the // input and output types are the same), an implicit filter is added such that // a transformer is applicable only if that exact transformer is not already -// in the tail of the Path since the last non-Transform step. +// in the tail of the [Path] since the last non-[Transform] step. // For situations where the implicit filter is still insufficient, -// consider using cmpopts.AcyclicTransformer, which adds a filter -// to prevent the transformer from being recursively applied upon itself. +// consider using [github.com/google/go-cmp/cmp/cmpopts.AcyclicTransformer], +// which adds a filter to prevent the transformer from +// being recursively applied upon itself. // -// The name is a user provided label that is used as the Transform.Name in the -// transformation PathStep (and eventually shown in the Diff output). +// The name is a user provided label that is used as the [Transform.Name] in the +// transformation [PathStep] (and eventually shown in the [Diff] output). // The name must be a valid identifier or qualified identifier in Go syntax. // If empty, an arbitrary name is used. func Transformer(name string, f interface{}) Option { @@ -329,7 +332,7 @@ func (tr transformer) String() string { return fmt.Sprintf("Transformer(%s, %s)", tr.name, function.NameOf(tr.fnc)) } -// Comparer returns an Option that determines whether two values are equal +// Comparer returns an [Option] that determines whether two values are equal // to each other. 
// // The comparer f must be a function "func(T, T) bool" and is implicitly @@ -377,35 +380,32 @@ func (cm comparer) String() string { return fmt.Sprintf("Comparer(%s)", function.NameOf(cm.fnc)) } -// Exporter returns an Option that specifies whether Equal is allowed to +// Exporter returns an [Option] that specifies whether [Equal] is allowed to // introspect into the unexported fields of certain struct types. // // Users of this option must understand that comparing on unexported fields // from external packages is not safe since changes in the internal -// implementation of some external package may cause the result of Equal +// implementation of some external package may cause the result of [Equal] // to unexpectedly change. However, it may be valid to use this option on types // defined in an internal package where the semantic meaning of an unexported // field is in the control of the user. // -// In many cases, a custom Comparer should be used instead that defines +// In many cases, a custom [Comparer] should be used instead that defines // equality as a function of the public API of a type rather than the underlying // unexported implementation. // -// For example, the reflect.Type documentation defines equality to be determined +// For example, the [reflect.Type] documentation defines equality to be determined // by the == operator on the interface (essentially performing a shallow pointer -// comparison) and most attempts to compare *regexp.Regexp types are interested +// comparison) and most attempts to compare *[regexp.Regexp] types are interested // in only checking that the regular expression strings are equal. -// Both of these are accomplished using Comparers: +// Both of these are accomplished using [Comparer] options: // // Comparer(func(x, y reflect.Type) bool { return x == y }) // Comparer(func(x, y *regexp.Regexp) bool { return x.String() == y.String() }) // -// In other cases, the cmpopts.IgnoreUnexported option can be used to ignore -// all unexported fields on specified struct types. +// In other cases, the [github.com/google/go-cmp/cmp/cmpopts.IgnoreUnexported] +// option can be used to ignore all unexported fields on specified struct types. func Exporter(f func(reflect.Type) bool) Option { - if !supportExporters { - panic("Exporter is not supported on purego builds") - } return exporter(f) } @@ -415,10 +415,10 @@ func (exporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableO panic("not implemented") } -// AllowUnexported returns an Options that allows Equal to forcibly introspect +// AllowUnexported returns an [Option] that allows [Equal] to forcibly introspect // unexported fields of the specified struct types. // -// See Exporter for the proper use of this option. +// See [Exporter] for the proper use of this option. func AllowUnexported(types ...interface{}) Option { m := make(map[reflect.Type]bool) for _, typ := range types { @@ -432,7 +432,7 @@ func AllowUnexported(types ...interface{}) Option { } // Result represents the comparison result for a single node and -// is provided by cmp when calling Report (see Reporter). +// is provided by cmp when calling Report (see [Reporter]). type Result struct { _ [0]func() // Make Result incomparable flags resultFlags @@ -445,7 +445,7 @@ func (r Result) Equal() bool { } // ByIgnore reports whether the node is equal because it was ignored. -// This never reports true if Equal reports false. +// This never reports true if [Result.Equal] reports false. 
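// Illustrative sketch, not part of the vendored diff: using the Exporter
// option documented earlier in this hunk to let Equal inspect unexported
// fields of a type the caller controls. The myConfig type is hypothetical;
// assumes the cmp and reflect imports.
//
//	type myConfig struct {
//		name  string // deliberately unexported
//		Count int
//	}
//
//	allowMyConfig := cmp.Exporter(func(t reflect.Type) bool {
//		return t == reflect.TypeOf(myConfig{})
//	})
//	equal := cmp.Equal(
//		myConfig{name: "a", Count: 1},
//		myConfig{name: "a", Count: 1},
//		allowMyConfig,
//	)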
func (r Result) ByIgnore() bool { return r.flags&reportByIgnore != 0 } @@ -455,7 +455,7 @@ func (r Result) ByMethod() bool { return r.flags&reportByMethod != 0 } -// ByFunc reports whether a Comparer function determined equality. +// ByFunc reports whether a [Comparer] function determined equality. func (r Result) ByFunc() bool { return r.flags&reportByFunc != 0 } @@ -478,7 +478,7 @@ const ( reportByCycle ) -// Reporter is an Option that can be passed to Equal. When Equal traverses +// Reporter is an [Option] that can be passed to [Equal]. When [Equal] traverses // the value trees, it calls PushStep as it descends into each node in the // tree and PopStep as it ascend out of the node. The leaves of the tree are // either compared (determined to be equal or not equal) or ignored and reported diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go index a0a588502..c3c145642 100644 --- a/vendor/github.com/google/go-cmp/cmp/path.go +++ b/vendor/github.com/google/go-cmp/cmp/path.go @@ -14,9 +14,9 @@ import ( "github.com/google/go-cmp/cmp/internal/value" ) -// Path is a list of PathSteps describing the sequence of operations to get +// Path is a list of [PathStep] describing the sequence of operations to get // from some root type to the current position in the value tree. -// The first Path element is always an operation-less PathStep that exists +// The first Path element is always an operation-less [PathStep] that exists // simply to identify the initial type. // // When traversing structs with embedded structs, the embedded struct will @@ -29,8 +29,13 @@ type Path []PathStep // a value's tree structure. Users of this package never need to implement // these types as values of this type will be returned by this package. // -// Implementations of this interface are -// StructField, SliceIndex, MapIndex, Indirect, TypeAssertion, and Transform. +// Implementations of this interface: +// - [StructField] +// - [SliceIndex] +// - [MapIndex] +// - [Indirect] +// - [TypeAssertion] +// - [Transform] type PathStep interface { String() string @@ -70,8 +75,9 @@ func (pa *Path) pop() { *pa = (*pa)[:len(*pa)-1] } -// Last returns the last PathStep in the Path. -// If the path is empty, this returns a non-nil PathStep that reports a nil Type. +// Last returns the last [PathStep] in the Path. +// If the path is empty, this returns a non-nil [PathStep] +// that reports a nil [PathStep.Type]. func (pa Path) Last() PathStep { return pa.Index(-1) } @@ -79,7 +85,8 @@ func (pa Path) Last() PathStep { // Index returns the ith step in the Path and supports negative indexing. // A negative index starts counting from the tail of the Path such that -1 // refers to the last step, -2 refers to the second-to-last step, and so on. -// If index is invalid, this returns a non-nil PathStep that reports a nil Type. +// If index is invalid, this returns a non-nil [PathStep] +// that reports a nil [PathStep.Type]. func (pa Path) Index(i int) PathStep { if i < 0 { i = len(pa) + i @@ -168,7 +175,8 @@ func (ps pathStep) String() string { return fmt.Sprintf("{%s}", s) } -// StructField represents a struct field access on a field called Name. +// StructField is a [PathStep] that represents a struct field access +// on a field called [StructField.Name]. 
type StructField struct{ *structField } type structField struct { pathStep @@ -204,10 +212,11 @@ func (sf StructField) String() string { return fmt.Sprintf(".%s", sf.name) } func (sf StructField) Name() string { return sf.name } // Index is the index of the field in the parent struct type. -// See reflect.Type.Field. +// See [reflect.Type.Field]. func (sf StructField) Index() int { return sf.idx } -// SliceIndex is an index operation on a slice or array at some index Key. +// SliceIndex is a [PathStep] that represents an index operation on +// a slice or array at some index [SliceIndex.Key]. type SliceIndex struct{ *sliceIndex } type sliceIndex struct { pathStep @@ -247,12 +256,12 @@ func (si SliceIndex) Key() int { // all of the indexes to be shifted. If an index is -1, then that // indicates that the element does not exist in the associated slice. // -// Key is guaranteed to return -1 if and only if the indexes returned -// by SplitKeys are not the same. SplitKeys will never return -1 for +// [SliceIndex.Key] is guaranteed to return -1 if and only if the indexes +// returned by SplitKeys are not the same. SplitKeys will never return -1 for // both indexes. func (si SliceIndex) SplitKeys() (ix, iy int) { return si.xkey, si.ykey } -// MapIndex is an index operation on a map at some index Key. +// MapIndex is a [PathStep] that represents an index operation on a map at some index Key. type MapIndex struct{ *mapIndex } type mapIndex struct { pathStep @@ -266,7 +275,7 @@ func (mi MapIndex) String() string { return fmt.Sprintf("[%#v]", // Key is the value of the map key. func (mi MapIndex) Key() reflect.Value { return mi.key } -// Indirect represents pointer indirection on the parent type. +// Indirect is a [PathStep] that represents pointer indirection on the parent type. type Indirect struct{ *indirect } type indirect struct { pathStep @@ -276,7 +285,7 @@ func (in Indirect) Type() reflect.Type { return in.typ } func (in Indirect) Values() (vx, vy reflect.Value) { return in.vx, in.vy } func (in Indirect) String() string { return "*" } -// TypeAssertion represents a type assertion on an interface. +// TypeAssertion is a [PathStep] that represents a type assertion on an interface. type TypeAssertion struct{ *typeAssertion } type typeAssertion struct { pathStep @@ -286,7 +295,8 @@ func (ta TypeAssertion) Type() reflect.Type { return ta.typ } func (ta TypeAssertion) Values() (vx, vy reflect.Value) { return ta.vx, ta.vy } func (ta TypeAssertion) String() string { return fmt.Sprintf(".(%v)", value.TypeString(ta.typ, false)) } -// Transform is a transformation from the parent type to the current type. +// Transform is a [PathStep] that represents a transformation +// from the parent type to the current type. type Transform struct{ *transform } type transform struct { pathStep @@ -297,13 +307,13 @@ func (tf Transform) Type() reflect.Type { return tf.typ } func (tf Transform) Values() (vx, vy reflect.Value) { return tf.vx, tf.vy } func (tf Transform) String() string { return fmt.Sprintf("%s()", tf.trans.name) } -// Name is the name of the Transformer. +// Name is the name of the [Transformer]. func (tf Transform) Name() string { return tf.trans.name } // Func is the function pointer to the transformer function. func (tf Transform) Func() reflect.Value { return tf.trans.fnc } -// Option returns the originally constructed Transformer option. +// Option returns the originally constructed [Transformer] option. // The == operator can be used to detect the exact option used. 
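// Illustrative sketch, not part of the vendored diff: building a FilterPath
// option from the PathStep types documented in the path.go changes above.
// The "Internal" field name is an example only; assumes the cmp import.
//
//	func ignoreInternalField(x, y interface{}) string {
//		ignoreInternal := cmp.FilterPath(func(p cmp.Path) bool {
//			sf, ok := p.Last().(cmp.StructField)
//			return ok && sf.Name() == "Internal"
//		}, cmp.Ignore())
//		return cmp.Diff(x, y, ignoreInternal)
//	}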
func (tf Transform) Option() Option { return tf.trans } diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go index 2ab41fad3..e39f42284 100644 --- a/vendor/github.com/google/go-cmp/cmp/report_reflect.go +++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go @@ -199,7 +199,7 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, break } sf := t.Field(i) - if supportExporters && !isExported(sf.Name) { + if !isExported(sf.Name) { vv = retrieveUnexportedField(v, sf, true) } s := opts.WithTypeMode(autoType).FormatValue(vv, t.Kind(), ptrs) diff --git a/vendor/github.com/google/go-github/scrape/apps.go b/vendor/github.com/google/go-github/scrape/apps.go index 290390c9f..851c4e1d2 100644 --- a/vendor/github.com/google/go-github/scrape/apps.go +++ b/vendor/github.com/google/go-github/scrape/apps.go @@ -17,7 +17,7 @@ import ( "strings" "github.com/PuerkitoBio/goquery" - "github.com/google/go-github/v53/github" + "github.com/google/go-github/v56/github" ) // AppRestrictionsEnabled returns whether the specified organization has diff --git a/vendor/github.com/google/go-github/scrape/scrape.go b/vendor/github.com/google/go-github/scrape/scrape.go index c3be9af10..dc7aa88fa 100644 --- a/vendor/github.com/google/go-github/scrape/scrape.go +++ b/vendor/github.com/google/go-github/scrape/scrape.go @@ -97,15 +97,17 @@ func (c *Client) get(urlStr string, a ...interface{}) (*goquery.Document, error) if err != nil { return nil, fmt.Errorf("error parsing URL: %q: %v", urlStr, err) } + resp, err := c.Client.Get(u.String()) if err != nil { return nil, fmt.Errorf("error fetching url %q: %v", u, err) } + defer resp.Body.Close() + if resp.StatusCode == http.StatusNotFound { return nil, fmt.Errorf("received %v response fetching URL %q", resp.StatusCode, u) } - defer resp.Body.Close() doc, err := goquery.NewDocumentFromReader(resp.Body) if err != nil { return nil, fmt.Errorf("error parsing response: %v", err) diff --git a/vendor/github.com/google/go-github/v53/github/event.go b/vendor/github.com/google/go-github/v53/github/event.go deleted file mode 100644 index 4ee25603a..000000000 --- a/vendor/github.com/google/go-github/v53/github/event.go +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2018 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "encoding/json" -) - -// Event represents a GitHub event. -type Event struct { - Type *string `json:"type,omitempty"` - Public *bool `json:"public,omitempty"` - RawPayload *json.RawMessage `json:"payload,omitempty"` - Repo *Repository `json:"repo,omitempty"` - Actor *User `json:"actor,omitempty"` - Org *Organization `json:"org,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - ID *string `json:"id,omitempty"` -} - -func (e Event) String() string { - return Stringify(e) -} - -// ParsePayload parses the event payload. For recognized event types, -// a value of the corresponding struct type will be returned. 
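// Illustrative sketch, not part of the vendored diff: the vendored v53 copy of
// event.go is deleted below, while the scrape package above now imports
// github.com/google/go-github/v56, which still exposes Event.ParsePayload.
// The raw JSON is assumed to be a complete events-API item with its type and
// payload set; the names here are examples only.
//
//	import (
//		"encoding/json"
//		"log"
//
//		"github.com/google/go-github/v56/github"
//	)
//
//	func handle(raw []byte) {
//		var e github.Event
//		if err := json.Unmarshal(raw, &e); err != nil {
//			log.Fatal(err)
//		}
//		payload, err := e.ParsePayload()
//		if err != nil {
//			log.Fatal(err)
//		}
//		if push, ok := payload.(*github.PushEvent); ok {
//			log.Printf("push to %s", push.GetRepo().GetFullName())
//		}
//	}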
-func (e *Event) ParsePayload() (payload interface{}, err error) { - switch *e.Type { - case "BranchProtectionRuleEvent": - payload = &BranchProtectionRuleEvent{} - case "CheckRunEvent": - payload = &CheckRunEvent{} - case "CheckSuiteEvent": - payload = &CheckSuiteEvent{} - case "CodeScanningAlertEvent": - payload = &CodeScanningAlertEvent{} - case "CommitCommentEvent": - payload = &CommitCommentEvent{} - case "ContentReferenceEvent": - payload = &ContentReferenceEvent{} - case "CreateEvent": - payload = &CreateEvent{} - case "DeleteEvent": - payload = &DeleteEvent{} - case "DeployKeyEvent": - payload = &DeployKeyEvent{} - case "DeploymentEvent": - payload = &DeploymentEvent{} - case "DeploymentProtectionRuleEvent": - payload = &DeploymentProtectionRuleEvent{} - case "DeploymentStatusEvent": - payload = &DeploymentStatusEvent{} - case "DiscussionEvent": - payload = &DiscussionEvent{} - case "DiscussionCommentEvent": - payload = &DiscussionCommentEvent{} - case "ForkEvent": - payload = &ForkEvent{} - case "GitHubAppAuthorizationEvent": - payload = &GitHubAppAuthorizationEvent{} - case "GollumEvent": - payload = &GollumEvent{} - case "InstallationEvent": - payload = &InstallationEvent{} - case "InstallationRepositoriesEvent": - payload = &InstallationRepositoriesEvent{} - case "IssueCommentEvent": - payload = &IssueCommentEvent{} - case "IssuesEvent": - payload = &IssuesEvent{} - case "LabelEvent": - payload = &LabelEvent{} - case "MarketplacePurchaseEvent": - payload = &MarketplacePurchaseEvent{} - case "MemberEvent": - payload = &MemberEvent{} - case "MembershipEvent": - payload = &MembershipEvent{} - case "MergeGroupEvent": - payload = &MergeGroupEvent{} - case "MetaEvent": - payload = &MetaEvent{} - case "MilestoneEvent": - payload = &MilestoneEvent{} - case "OrganizationEvent": - payload = &OrganizationEvent{} - case "OrgBlockEvent": - payload = &OrgBlockEvent{} - case "PackageEvent": - payload = &PackageEvent{} - case "PageBuildEvent": - payload = &PageBuildEvent{} - case "PingEvent": - payload = &PingEvent{} - case "ProjectEvent": - payload = &ProjectEvent{} - case "ProjectCardEvent": - payload = &ProjectCardEvent{} - case "ProjectColumnEvent": - payload = &ProjectColumnEvent{} - case "PublicEvent": - payload = &PublicEvent{} - case "PullRequestEvent": - payload = &PullRequestEvent{} - case "PullRequestReviewEvent": - payload = &PullRequestReviewEvent{} - case "PullRequestReviewCommentEvent": - payload = &PullRequestReviewCommentEvent{} - case "PullRequestReviewThreadEvent": - payload = &PullRequestReviewThreadEvent{} - case "PullRequestTargetEvent": - payload = &PullRequestTargetEvent{} - case "PushEvent": - payload = &PushEvent{} - case "ReleaseEvent": - payload = &ReleaseEvent{} - case "RepositoryEvent": - payload = &RepositoryEvent{} - case "RepositoryDispatchEvent": - payload = &RepositoryDispatchEvent{} - case "RepositoryImportEvent": - payload = &RepositoryImportEvent{} - case "RepositoryVulnerabilityAlertEvent": - payload = &RepositoryVulnerabilityAlertEvent{} - case "SecretScanningAlertEvent": - payload = &SecretScanningAlertEvent{} - case "SecurityAdvisoryEvent": - payload = &SecurityAdvisoryEvent{} - case "StarEvent": - payload = &StarEvent{} - case "StatusEvent": - payload = &StatusEvent{} - case "TeamEvent": - payload = &TeamEvent{} - case "TeamAddEvent": - payload = &TeamAddEvent{} - case "UserEvent": - payload = &UserEvent{} - case "WatchEvent": - payload = &WatchEvent{} - case "WorkflowDispatchEvent": - payload = &WorkflowDispatchEvent{} - case "WorkflowJobEvent": - 
payload = &WorkflowJobEvent{} - case "WorkflowRunEvent": - payload = &WorkflowRunEvent{} - } - err = json.Unmarshal(*e.RawPayload, &payload) - return payload, err -} - -// Payload returns the parsed event payload. For recognized event types, -// a value of the corresponding struct type will be returned. -// -// Deprecated: Use ParsePayload instead, which returns an error -// rather than panics if JSON unmarshaling raw payload fails. -func (e *Event) Payload() (payload interface{}) { - var err error - payload, err = e.ParsePayload() - if err != nil { - panic(err) - } - return payload -} diff --git a/vendor/github.com/google/go-github/v53/AUTHORS b/vendor/github.com/google/go-github/v55/AUTHORS similarity index 89% rename from vendor/github.com/google/go-github/v53/AUTHORS rename to vendor/github.com/google/go-github/v55/AUTHORS index 5e40cd1f3..74a21dc60 100644 --- a/vendor/github.com/google/go-github/v53/AUTHORS +++ b/vendor/github.com/google/go-github/v55/AUTHORS @@ -11,13 +11,17 @@ 178inaba 2BFL 413x +6543 <6543@obermui.de> Abed Kibbe Abhinav Gupta Abhishek Veeramalla aboy +Adam Kohring adrienzieba afdesk +Ahmad Nurus S Ahmed Hagy +Aidan Aidan Steele Ainsley Chong ajz01 @@ -26,6 +30,7 @@ Akhil Mohan Alec Thomas Aleks Clark Alex Bramley +Alex Ellis Alex Orr Alex Su Alex Unger @@ -51,6 +56,7 @@ Anton Nguyen Anubha Kushwaha appilon aprp +apurwaj2 Aravind Arda Kuyumcu Arıl Bozoluk @@ -60,12 +66,15 @@ Austin Dizzy Azuka Okuleye Ben Batha Benjamen Keroack +Berkay Tacyildiz Beshr Kayali Beyang Liu Billy Keyes Billy Lynch +Bingtan Lu Bjorn Neergaard Björn Häuser +Bo Huang boljen Bracken Brad Harris @@ -74,6 +83,7 @@ Bradley Falzon Bradley McAllister Brandon Butler Brandon Cook +Brandon Stubbs Brett Kuhlman Brett Logan Brian Egizi @@ -96,6 +106,7 @@ Chris Schaefer chrisforrette Christian Bargmann Christian Muehlhaeuser +Christoph Jerolimov Christoph Sassenberg CI Monk Colin Misare @@ -125,12 +136,14 @@ Derek Jobst DeviousLab Dhi Aurrahman Diego Lapiduz +Diogo Vilela Dmitri Shuralyov dmnlk Don Petersen Doug Turner Drew Fradette Dustin Deus +Dustin Lish Eivind Eli Uriegas Elliott Beach @@ -141,10 +154,12 @@ eperm Erick Fejta Erik Nobel erwinvaneyk +Evan Anderson Evan Elias Fabian Holler Fabrice Fatema-Moaiyadi +Federico Di Pierro Felix Geisendörfer Filippo Valsorda Florian Forster @@ -155,6 +170,7 @@ Francisco Guimarães François de Metz Fredrik Jönsson Gabriel +Gal Ofri Garrett Squire George Kontridze Georgy Buranov @@ -163,6 +179,7 @@ Gnahz Google Inc. 
Grachev Mikhail griffin_stewie +guangwu Guillaume Jacquet Guz Alexander Guðmundur Bjarni Ólafsson @@ -178,6 +195,7 @@ huydx i2bskn Iain Steers Ikko Ashimine +Ilia Choly Ioannis Georgoulas Isao Jonas ishan upadhyay @@ -189,11 +207,15 @@ Jameel Haffejee James Bowes James Cockbain James Loh +James Maguire +James Turley Jamie West Jan Kosecki Jan Švábík +Jason Field Javier Campanini Jef LeCompte +Jeff Wenzbauer Jens Rantil Jeremy Morris Jesse Haka @@ -208,7 +230,9 @@ John Engelman John Jones John Liu Jordan Brockopp +Jordan Burandt Jordan Sussman +Jorge Gómez Reus Joshua Bezaleel Abednego João Cerqueira JP Phillips @@ -222,14 +246,17 @@ Justin Abrahms Justin Toh Jusung Lee jzhoucliqr +k0ral k1rnt kadern0 +Karthik Sundari Katrina Owen Kautilya Tripathi Keita Urashima Kevin Burke Kevin Wang Kevin Zhao +kgalli Kirill Konrad Malawski Kookheon Kwon @@ -239,19 +266,24 @@ Kshitij Saraogi Kumar Saurabh Kyle Kurz kyokomi +Lars Lehtonen Laurent Verdoïa leopoldwang Liam Galvin Lluis Campos Lovro Mažgon +Loïs Postula Luca Campese Lucas Alcantara +Lucas Martin-King Luis Davim Luke Evers +Luke Hinds Luke Kysow Luke Roberts Luke Young lynn [they] +Magnus Kulke Maksim Zhylinski Marc Binder Marcelo Carlos @@ -266,9 +298,11 @@ Matija Horvat Matin Rahmanian Matt Matt Brender +Matt Dainty Matt Gaunt Matt Landis Matt Moore +Matt Simons Maxime Bury Michael Meng Michael Spiegel @@ -277,26 +311,33 @@ Michał Glapa Michelangelo Morrillo Miguel Elias dos Santos Mike Chen +mohammad ali <2018cs92@student.uet.edu.pk> Mohammed AlDujaili Mukundan Senthil Munia Balayil Mustafa Abban Nadav Kaner +Naoki Kanatani Nathan VanBenschoten Navaneeth Suresh +Neal Caffery Neil O'Toole Nick Miyake Nick Platt Nick Spragg +Nicolas Chapurlat Nikhita Raghunath Nilesh Singh Noah Hanjun Lee Noah Zoschke ns-cweber +nxya Ole Orhagen Oleg Kovalov Ondřej Kupka Ori Talmor +Osama Faqhruldin +oslowalk Pablo Pérez Schröder Palash Nigam Panagiotis Moustafellos @@ -307,6 +348,7 @@ parkhyukjun89 Pat Alwell Patrick DeVivo Patrick Marabeas +Patrik Nordlén Pavel Dvoinos Pavel Shtanko Pete Wagner @@ -323,6 +365,7 @@ Quinn Slack Rackspace US, Inc. Radek Simko Radliński Ignacy +Rafael Aramizu Gomes Rajat Jindal Rajendra arora Rajkumar @@ -351,6 +394,7 @@ Ryo Nakao Saaarah Safwan Olaimat Sahil Dua +Sai Ravi Teja Chintakrindi saisi Sam Minnée Sandeep Sukhani @@ -377,6 +421,7 @@ Sho Okada Shrikrishna Singh Simon Davis sona-tar +soniachikh SoundCloud, Ltd. Sridhar Mocherla SriVignessh Pss @@ -387,6 +432,7 @@ Suhaib Mujahid sushmita wable Szymon Kodrebski Søren Hansen +T.J. 
Corrigan Takashi Yoneuchi Takayuki Watanabe Taketoshi Fujiwara @@ -398,6 +444,7 @@ Theofilos Petsios Thomas Aidan Curran Thomas Bruyelle Tim Rogers +Timothy O'Brien Timothée Peignier Tingluo Huang tkhandel @@ -410,11 +457,15 @@ Vaibhav Singh Varadarajan Aravamudhan Victor Castell Victor Vrantchan +Victory Osikwemhe vikkyomkar +Vivek Vlad Ungureanu Wasim Thabraze Weslei Juan Moser Pereira +Wheeler Law Will Maier +Will Norris Willem D'Haeseleer William Bailey William Cooke @@ -422,12 +473,15 @@ Xabi xibz Yann Malet Yannick Utard +Yarden Shoham Yicheng Qin Yosuke Akatsuka Yumikiyo Osanai +Yurii Soldak Yusef Mohamadi Yusuke Kuoka Zach Latta zhouhaibing089 六开箱 缘生 +蒋航 diff --git a/vendor/github.com/google/go-github/v53/LICENSE b/vendor/github.com/google/go-github/v55/LICENSE similarity index 100% rename from vendor/github.com/google/go-github/v53/LICENSE rename to vendor/github.com/google/go-github/v55/LICENSE diff --git a/vendor/github.com/google/go-github/v53/github/actions.go b/vendor/github.com/google/go-github/v55/github/actions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions.go rename to vendor/github.com/google/go-github/v55/github/actions.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_artifacts.go b/vendor/github.com/google/go-github/v55/github/actions_artifacts.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_artifacts.go rename to vendor/github.com/google/go-github/v55/github/actions_artifacts.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_cache.go b/vendor/github.com/google/go-github/v55/github/actions_cache.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_cache.go rename to vendor/github.com/google/go-github/v55/github/actions_cache.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_oidc.go b/vendor/github.com/google/go-github/v55/github/actions_oidc.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_oidc.go rename to vendor/github.com/google/go-github/v55/github/actions_oidc.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v55/github/actions_required_workflows.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_required_workflows.go rename to vendor/github.com/google/go-github/v55/github/actions_required_workflows.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v55/github/actions_runner_groups.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_runner_groups.go rename to vendor/github.com/google/go-github/v55/github/actions_runner_groups.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_runners.go b/vendor/github.com/google/go-github/v55/github/actions_runners.go similarity index 99% rename from vendor/github.com/google/go-github/v53/github/actions_runners.go rename to vendor/github.com/google/go-github/v55/github/actions_runners.go index 3990a5a90..7427451cc 100644 --- a/vendor/github.com/google/go-github/v53/github/actions_runners.go +++ b/vendor/github.com/google/go-github/v55/github/actions_runners.go @@ -58,6 +58,7 @@ type GenerateJITConfigRequest struct { // JITRunnerConfig represents encoded JIT configuration that can be used to bootstrap a self-hosted runner. 
type JITRunnerConfig struct { + Runner *Runner `json:"runner,omitempty"` EncodedJITConfig *string `json:"encoded_jit_config,omitempty"` } diff --git a/vendor/github.com/google/go-github/v53/github/actions_secrets.go b/vendor/github.com/google/go-github/v55/github/actions_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_secrets.go rename to vendor/github.com/google/go-github/v55/github/actions_secrets.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_variables.go b/vendor/github.com/google/go-github/v55/github/actions_variables.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_variables.go rename to vendor/github.com/google/go-github/v55/github/actions_variables.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v55/github/actions_workflow_jobs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_workflow_jobs.go rename to vendor/github.com/google/go-github/v55/github/actions_workflow_jobs.go diff --git a/vendor/github.com/google/go-github/v53/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v55/github/actions_workflow_runs.go similarity index 99% rename from vendor/github.com/google/go-github/v53/github/actions_workflow_runs.go rename to vendor/github.com/google/go-github/v55/github/actions_workflow_runs.go index 61f736be4..002210861 100644 --- a/vendor/github.com/google/go-github/v53/github/actions_workflow_runs.go +++ b/vendor/github.com/google/go-github/v55/github/actions_workflow_runs.go @@ -46,6 +46,7 @@ type WorkflowRun struct { Repository *Repository `json:"repository,omitempty"` HeadRepository *Repository `json:"head_repository,omitempty"` Actor *User `json:"actor,omitempty"` + TriggeringActor *User `json:"triggering_actor,omitempty"` } // WorkflowRuns represents a slice of repository action workflow run. 
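For context on the WorkflowRun.TriggeringActor field added just above, here is a minimal usage sketch (editorial illustration, not part of the vendored diff). It assumes the nil-safe accessor GetTriggeringActor that go-github generates for pointer fields; "owner", "repo", and the run ID are placeholders.

    package main

    import (
        "context"
        "fmt"

        "github.com/google/go-github/v55/github"
    )

    func main() {
        ctx := context.Background()
        client := github.NewClient(nil) // unauthenticated; fine for public repositories

        // "owner", "repo", and the run ID below are placeholders.
        run, _, err := client.Actions.GetWorkflowRunByID(ctx, "owner", "repo", 123456789)
        if err != nil {
            panic(err)
        }

        // GetTriggeringActor is the generated accessor for the new field and
        // distinguishes who re-ran the workflow from the original actor.
        fmt.Println("actor:", run.GetActor().GetLogin())
        fmt.Println("triggering actor:", run.GetTriggeringActor().GetLogin())
    }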
diff --git a/vendor/github.com/google/go-github/v53/github/actions_workflows.go b/vendor/github.com/google/go-github/v55/github/actions_workflows.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/actions_workflows.go rename to vendor/github.com/google/go-github/v55/github/actions_workflows.go diff --git a/vendor/github.com/google/go-github/v53/github/activity.go b/vendor/github.com/google/go-github/v55/github/activity.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/activity.go rename to vendor/github.com/google/go-github/v55/github/activity.go diff --git a/vendor/github.com/google/go-github/v53/github/activity_events.go b/vendor/github.com/google/go-github/v55/github/activity_events.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/activity_events.go rename to vendor/github.com/google/go-github/v55/github/activity_events.go diff --git a/vendor/github.com/google/go-github/v53/github/activity_notifications.go b/vendor/github.com/google/go-github/v55/github/activity_notifications.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/activity_notifications.go rename to vendor/github.com/google/go-github/v55/github/activity_notifications.go diff --git a/vendor/github.com/google/go-github/v53/github/activity_star.go b/vendor/github.com/google/go-github/v55/github/activity_star.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/activity_star.go rename to vendor/github.com/google/go-github/v55/github/activity_star.go diff --git a/vendor/github.com/google/go-github/v53/github/activity_watching.go b/vendor/github.com/google/go-github/v55/github/activity_watching.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/activity_watching.go rename to vendor/github.com/google/go-github/v55/github/activity_watching.go diff --git a/vendor/github.com/google/go-github/v53/github/admin.go b/vendor/github.com/google/go-github/v55/github/admin.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/admin.go rename to vendor/github.com/google/go-github/v55/github/admin.go diff --git a/vendor/github.com/google/go-github/v53/github/admin_orgs.go b/vendor/github.com/google/go-github/v55/github/admin_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/admin_orgs.go rename to vendor/github.com/google/go-github/v55/github/admin_orgs.go diff --git a/vendor/github.com/google/go-github/v53/github/admin_stats.go b/vendor/github.com/google/go-github/v55/github/admin_stats.go similarity index 99% rename from vendor/github.com/google/go-github/v53/github/admin_stats.go rename to vendor/github.com/google/go-github/v55/github/admin_stats.go index ef294f447..17e568f62 100644 --- a/vendor/github.com/google/go-github/v53/github/admin_stats.go +++ b/vendor/github.com/google/go-github/v55/github/admin_stats.go @@ -7,7 +7,6 @@ package github import ( "context" - "fmt" ) // AdminStats represents a variety of stats of a GitHub Enterprise @@ -155,7 +154,7 @@ func (s RepoStats) String() string { // // GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/admin_stats/ func (s *AdminService) GetAdminStats(ctx context.Context) (*AdminStats, *Response, error) { - u := fmt.Sprintf("enterprise/stats/all") + u := "enterprise/stats/all" req, err := s.client.NewRequest("GET", u, nil) if err != nil { return nil, nil, err diff --git 
a/vendor/github.com/google/go-github/v53/github/admin_users.go b/vendor/github.com/google/go-github/v55/github/admin_users.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/admin_users.go rename to vendor/github.com/google/go-github/v55/github/admin_users.go diff --git a/vendor/github.com/google/go-github/v53/github/apps.go b/vendor/github.com/google/go-github/v55/github/apps.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps.go rename to vendor/github.com/google/go-github/v55/github/apps.go diff --git a/vendor/github.com/google/go-github/v53/github/apps_hooks.go b/vendor/github.com/google/go-github/v55/github/apps_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps_hooks.go rename to vendor/github.com/google/go-github/v55/github/apps_hooks.go diff --git a/vendor/github.com/google/go-github/v53/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v55/github/apps_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps_hooks_deliveries.go rename to vendor/github.com/google/go-github/v55/github/apps_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v53/github/apps_installation.go b/vendor/github.com/google/go-github/v55/github/apps_installation.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps_installation.go rename to vendor/github.com/google/go-github/v55/github/apps_installation.go diff --git a/vendor/github.com/google/go-github/v53/github/apps_manifest.go b/vendor/github.com/google/go-github/v55/github/apps_manifest.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps_manifest.go rename to vendor/github.com/google/go-github/v55/github/apps_manifest.go diff --git a/vendor/github.com/google/go-github/v53/github/apps_marketplace.go b/vendor/github.com/google/go-github/v55/github/apps_marketplace.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/apps_marketplace.go rename to vendor/github.com/google/go-github/v55/github/apps_marketplace.go diff --git a/vendor/github.com/google/go-github/v53/github/authorizations.go b/vendor/github.com/google/go-github/v55/github/authorizations.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/authorizations.go rename to vendor/github.com/google/go-github/v55/github/authorizations.go diff --git a/vendor/github.com/google/go-github/v53/github/billing.go b/vendor/github.com/google/go-github/v55/github/billing.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/billing.go rename to vendor/github.com/google/go-github/v55/github/billing.go diff --git a/vendor/github.com/google/go-github/v53/github/checks.go b/vendor/github.com/google/go-github/v55/github/checks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/checks.go rename to vendor/github.com/google/go-github/v55/github/checks.go diff --git a/vendor/github.com/google/go-github/v53/github/code-scanning.go b/vendor/github.com/google/go-github/v55/github/code-scanning.go similarity index 69% rename from vendor/github.com/google/go-github/v53/github/code-scanning.go rename to vendor/github.com/google/go-github/v55/github/code-scanning.go index e4a6abeba..0ae269be6 100644 --- a/vendor/github.com/google/go-github/v53/github/code-scanning.go +++ b/vendor/github.com/google/go-github/v55/github/code-scanning.go @@ -48,11 +48,13 
@@ type Message struct { type MostRecentInstance struct { Ref *string `json:"ref,omitempty"` AnalysisKey *string `json:"analysis_key,omitempty"` + Category *string `json:"category,omitempty"` Environment *string `json:"environment,omitempty"` State *string `json:"state,omitempty"` CommitSHA *string `json:"commit_sha,omitempty"` Message *Message `json:"message,omitempty"` Location *Location `json:"location,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` Classifications []string `json:"classifications,omitempty"` } @@ -113,15 +115,30 @@ func (a *Alert) ID() int64 { return id } -// AlertListOptions specifies optional parameters to the CodeScanningService.ListAlerts -// method. +// AlertInstancesListOptions specifies optional parameters to the CodeScanningService.ListAlertInstances method. +type AlertInstancesListOptions struct { + // Return code scanning alert instances for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge + Ref string `url:"ref,omitempty"` + + ListOptions +} + +// AlertListOptions specifies optional parameters to the CodeScanningService.ListAlerts method. type AlertListOptions struct { // State of the code scanning alerts to list. Set to closed to list only closed code scanning alerts. Default: open State string `url:"state,omitempty"` - // Return code scanning alerts for a specific branch reference. The ref must be formatted as heads/. + // Return code scanning alerts for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge Ref string `url:"ref,omitempty"` + // If specified, only code scanning alerts with this severity will be returned. Possible values are: critical, high, medium, low, warning, note, error. + Severity string `url:"severity,omitempty"` + + // The name of a code scanning tool. Only results by this tool will be listed. + ToolName string `url:"tool_name,omitempty"` + ListCursorOptions // Add ListOptions so offset pagination with integer type "page" query parameter is accepted @@ -134,12 +151,28 @@ type AnalysesListOptions struct { // Return code scanning analyses belonging to the same SARIF upload. SarifID *string `url:"sarif_id,omitempty"` - // Return code scanning analyses for a specific branch reference. The ref can be formatted as refs/heads/ or simply . + // Return code scanning analyses for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge Ref *string `url:"ref,omitempty"` ListOptions } +// CodeQLDatabase represents a metadata about the CodeQL database. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type CodeQLDatabase struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Language *string `json:"language,omitempty"` + Uploader *User `json:"uploader,omitempty"` + ContentType *string `json:"content_type,omitempty"` + Size *int64 `json:"size,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` +} + // ScanningAnalysis represents an individual GitHub Code Scanning ScanningAnalysis on a single repository. // // GitHub API docs: https://docs.github.com/en/rest/code-scanning @@ -202,7 +235,7 @@ type SarifID struct { // You must use an access token with the security_events scope to use this endpoint. 
GitHub Apps must have the security_events // read permission to use this endpoint. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#list-code-scanning-alerts-for-an-organization +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-an-organization func (s *CodeScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *AlertListOptions) ([]*Alert, *Response, error) { u := fmt.Sprintf("orgs/%v/code-scanning/alerts", org) u, err := addOptions(u, opts) @@ -230,7 +263,7 @@ func (s *CodeScanningService) ListAlertsForOrg(ctx context.Context, org string, // You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events // read permission to use this endpoint. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#list-code-scanning-alerts-for-a-repository +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-a-repository func (s *CodeScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *AlertListOptions) ([]*Alert, *Response, error) { u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts", owner, repo) u, err := addOptions(u, opts) @@ -259,7 +292,7 @@ func (s *CodeScanningService) ListAlertsForRepo(ctx context.Context, owner, repo // // The security alert_id is the number at the end of the security alert's URL. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#get-a-code-scanning-alert +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-alert func (s *CodeScanningService) GetAlert(ctx context.Context, owner, repo string, id int64) (*Alert, *Response, error) { u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) @@ -302,13 +335,40 @@ func (s *CodeScanningService) UpdateAlert(ctx context.Context, owner, repo strin return a, resp, nil } +// ListAlertInstances lists instances of a code scanning alert. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-instances-of-a-code-scanning-alert +func (s *CodeScanningService) ListAlertInstances(ctx context.Context, owner, repo string, id int64, opts *AlertInstancesListOptions) ([]*MostRecentInstance, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v/instances", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alertInstances []*MostRecentInstance + resp, err := s.client.Do(ctx, req, &alertInstances) + if err != nil { + return nil, resp, err + } + + return alertInstances, resp, nil +} + // UploadSarif uploads the result of code scanning job to GitHub. // // For the parameter sarif, you must first compress your SARIF file using gzip and then translate the contents of the file into a Base64 encoding string. // You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events // write permission to use this endpoint. 
// -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#upload-an-analysis-as-sarif-data +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#upload-an-analysis-as-sarif-data func (s *CodeScanningService) UploadSarif(ctx context.Context, owner, repo string, sarif *SarifAnalysis) (*SarifID, *Response, error) { u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs", owner, repo) @@ -326,13 +386,45 @@ func (s *CodeScanningService) UploadSarif(ctx context.Context, owner, repo strin return sarifID, resp, nil } +// SARIFUpload represents information about a SARIF upload. +type SARIFUpload struct { + // `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. + // `failed` files have either not been processed at all, or could only be partially processed. + ProcessingStatus *string `json:"processing_status,omitempty"` + // The REST API URL for getting the analyses associated with the upload. + AnalysesURL *string `json:"analyses_url,omitempty"` +} + +// GetSARIF gets information about a SARIF upload. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-information-about-a-sarif-upload +func (s *CodeScanningService) GetSARIF(ctx context.Context, owner, repo, sarifID string) (*SARIFUpload, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs/%v", owner, repo, sarifID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + sarifUpload := new(SARIFUpload) + resp, err := s.client.Do(ctx, req, sarifUpload) + if err != nil { + return nil, resp, err + } + + return sarifUpload, resp, nil +} + // ListAnalysesForRepo lists code scanning analyses for a repository. // // Lists the details of all code scanning analyses for a repository, starting with the most recent. // You must use an access token with the security_events scope to use this endpoint. // GitHub Apps must have the security_events read permission to use this endpoint. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#list-code-scanning-analyses-for-a-repository +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-analyses-for-a-repository func (s *CodeScanningService) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *AnalysesListOptions) ([]*ScanningAnalysis, *Response, error) { u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses", owner, repo) u, err := addOptions(u, opts) @@ -361,7 +453,7 @@ func (s *CodeScanningService) ListAnalysesForRepo(ctx context.Context, owner, re // // The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. 
// -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#get-a-code-scanning-analysis-for-a-repository +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-analysis-for-a-repository func (s *CodeScanningService) GetAnalysis(ctx context.Context, owner, repo string, id int64) (*ScanningAnalysis, *Response, error) { u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) @@ -379,6 +471,85 @@ func (s *CodeScanningService) GetAnalysis(ctx context.Context, owner, repo strin return analysis, resp, nil } +// DeleteAnalysis represents a successful deletion of a code scanning analysis. +type DeleteAnalysis struct { + // Next deletable analysis in chain, without last analysis deletion confirmation + NextAnalysisURL *string `json:"next_analysis_url,omitempty"` + // Next deletable analysis in chain, with last analysis deletion confirmation + ConfirmDeleteURL *string `json:"confirm_delete_url,omitempty"` +} + +// DeleteAnalysis deletes a single code scanning analysis from a repository. +// +// You must use an access token with the repo scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#delete-a-code-scanning-analysis-from-a-repository +func (s *CodeScanningService) DeleteAnalysis(ctx context.Context, owner, repo string, id int64) (*DeleteAnalysis, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, nil, err + } + + deleteAnalysis := new(DeleteAnalysis) + resp, err := s.client.Do(ctx, req, deleteAnalysis) + if err != nil { + return nil, resp, err + } + + return deleteAnalysis, resp, nil +} + +// ListCodeQLDatabases lists the CodeQL databases that are available in a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the contents read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-codeql-databases-for-a-repository +func (s *CodeScanningService) ListCodeQLDatabases(ctx context.Context, owner, repo string) ([]*CodeQLDatabase, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var codeqlDatabases []*CodeQLDatabase + resp, err := s.client.Do(ctx, req, &codeqlDatabases) + if err != nil { + return nil, resp, err + } + + return codeqlDatabases, resp, nil +} + +// GetCodeQLDatabase gets a CodeQL database for a language in a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the contents read permission to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-codeql-database-for-a-repository +func (s *CodeScanningService) GetCodeQLDatabase(ctx context.Context, owner, repo, language string) (*CodeQLDatabase, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases/%v", owner, repo, language) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + codeqlDatabase := new(CodeQLDatabase) + resp, err := s.client.Do(ctx, req, codeqlDatabase) + if err != nil { + return nil, resp, err + } + + return codeqlDatabase, resp, nil +} + // DefaultSetupConfiguration represents a code scanning default setup configuration. type DefaultSetupConfiguration struct { State *string `json:"state,omitempty"` @@ -393,7 +564,7 @@ type DefaultSetupConfiguration struct { // endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write // permission to use this endpoint. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#get-a-code-scanning-default-setup-configuration +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-default-setup-configuration func (s *CodeScanningService) GetDefaultSetupConfiguration(ctx context.Context, owner, repo string) (*DefaultSetupConfiguration, *Response, error) { u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) @@ -434,7 +605,7 @@ type UpdateDefaultSetupConfigurationResponse struct { // This method might return an AcceptedError and a status code of 202. This is because this is the status that GitHub // returns to signify that it has now scheduled the update of the pull request branch in a background task. // -// GitHub API docs: https://docs.github.com/en/rest/code-scanning#update-a-code-scanning-default-setup-configuration +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#update-a-code-scanning-default-setup-configuration func (s *CodeScanningService) UpdateDefaultSetupConfiguration(ctx context.Context, owner, repo string, options *UpdateDefaultSetupConfigurationOptions) (*UpdateDefaultSetupConfigurationResponse, *Response, error) { u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) diff --git a/vendor/github.com/google/go-github/v53/github/codespaces.go b/vendor/github.com/google/go-github/v55/github/codespaces.go similarity index 99% rename from vendor/github.com/google/go-github/v53/github/codespaces.go rename to vendor/github.com/google/go-github/v55/github/codespaces.go index a260c227d..cd8b0e5be 100644 --- a/vendor/github.com/google/go-github/v53/github/codespaces.go +++ b/vendor/github.com/google/go-github/v55/github/codespaces.go @@ -126,7 +126,7 @@ type ListCodespacesOptions struct { // // GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#list-codespaces-for-the-authenticated-user func (s *CodespacesService) List(ctx context.Context, opts *ListCodespacesOptions) (*ListCodespaces, *Response, error) { - u := fmt.Sprint("user/codespaces") + u := "user/codespaces" u, err := addOptions(u, opts) if err != nil { return nil, nil, err diff --git a/vendor/github.com/google/go-github/v53/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v55/github/codespaces_secrets.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/codespaces_secrets.go rename to 
vendor/github.com/google/go-github/v55/github/codespaces_secrets.go diff --git a/vendor/github.com/google/go-github/v53/github/dependabot.go b/vendor/github.com/google/go-github/v55/github/dependabot.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/dependabot.go rename to vendor/github.com/google/go-github/v55/github/dependabot.go diff --git a/vendor/github.com/google/go-github/v53/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v55/github/dependabot_alerts.go similarity index 93% rename from vendor/github.com/google/go-github/v53/github/dependabot_alerts.go rename to vendor/github.com/google/go-github/v55/github/dependabot_alerts.go index 7b5d53b39..177316b1d 100644 --- a/vendor/github.com/google/go-github/v53/github/dependabot_alerts.go +++ b/vendor/github.com/google/go-github/v55/github/dependabot_alerts.go @@ -17,8 +17,8 @@ type Dependency struct { Scope *string `json:"scope,omitempty"` } -// AdvisoryCVSs represents the advisory pertaining to the Common Vulnerability Scoring System. -type AdvisoryCVSs struct { +// AdvisoryCVSS represents the advisory pertaining to the Common Vulnerability Scoring System. +type AdvisoryCVSS struct { Score *float64 `json:"score,omitempty"` VectorString *string `json:"vector_string,omitempty"` } @@ -37,7 +37,7 @@ type DependabotSecurityAdvisory struct { Description *string `json:"description,omitempty"` Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` Severity *string `json:"severity,omitempty"` - CVSs *AdvisoryCVSs `json:"cvss,omitempty"` + CVSS *AdvisoryCVSS `json:"cvss,omitempty"` CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` References []*AdvisoryReference `json:"references,omitempty"` @@ -62,7 +62,9 @@ type DependabotAlert struct { DismissedReason *string `json:"dismissed_reason,omitempty"` DismissedComment *string `json:"dismissed_comment,omitempty"` FixedAt *Timestamp `json:"fixed_at,omitempty"` - Repository *Repository `json:"repository,omitempty"` + AutoDismissedAt *Timestamp `json:"auto_dismissed_at,omitempty"` + // The repository is always empty for events + Repository *Repository `json:"repository,omitempty"` } // ListAlertsOptions specifies the optional parameters to the DependabotService.ListRepoAlerts @@ -76,6 +78,7 @@ type ListAlertsOptions struct { Sort *string `url:"sort,omitempty"` Direction *string `url:"direction,omitempty"` + ListOptions ListCursorOptions } diff --git a/vendor/github.com/google/go-github/v53/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v55/github/dependabot_secrets.go similarity index 95% rename from vendor/github.com/google/go-github/v53/github/dependabot_secrets.go rename to vendor/github.com/google/go-github/v55/github/dependabot_secrets.go index f87ab42c3..685f7bea8 100644 --- a/vendor/github.com/google/go-github/v53/github/dependabot_secrets.go +++ b/vendor/github.com/google/go-github/v55/github/dependabot_secrets.go @@ -144,8 +144,25 @@ func (s *DependabotService) CreateOrUpdateRepoSecret(ctx context.Context, owner, // // GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#create-or-update-an-organization-secret func (s *DependabotService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *DependabotEncryptedSecret) (*Response, error) { + repoIDs := make([]string, len(eSecret.SelectedRepositoryIDs)) + for i, secret := range eSecret.SelectedRepositoryIDs { + repoIDs[i] = fmt.Sprintf("%v", secret) + } + params := struct { 
+ *DependabotEncryptedSecret + SelectedRepositoryIDs []string `json:"selected_repository_ids,omitempty"` + }{ + DependabotEncryptedSecret: eSecret, + SelectedRepositoryIDs: repoIDs, + } + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, eSecret.Name) - return s.putSecret(ctx, url, eSecret) + req, err := s.client.NewRequest("PUT", url, params) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) } func (s *DependabotService) deleteSecret(ctx context.Context, url string) (*Response, error) { diff --git a/vendor/github.com/google/go-github/v55/github/dependency_graph.go b/vendor/github.com/google/go-github/v55/github/dependency_graph.go new file mode 100644 index 000000000..e578965cc --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/dependency_graph.go @@ -0,0 +1,80 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +type DependencyGraphService service + +// SBOM represents a software bill of materials, which describes the +// packages/libraries that a repository depends on. +type SBOM struct { + SBOM *SBOMInfo `json:"sbom,omitempty"` +} + +// CreationInfo represents when the SBOM was created and who created it. +type CreationInfo struct { + Created *Timestamp `json:"created,omitempty"` + Creators []string `json:"creators,omitempty"` +} + +// RepoDependencies represents the dependencies of a repo. +type RepoDependencies struct { + SPDXID *string `json:"SPDXID,omitempty"` + // Package name + Name *string `json:"name,omitempty"` + VersionInfo *string `json:"versionInfo,omitempty"` + DownloadLocation *string `json:"downloadLocation,omitempty"` + FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` + LicenseConcluded *string `json:"licenseConcluded,omitempty"` + LicenseDeclared *string `json:"licenseDeclared,omitempty"` +} + +// SBOMInfo represents a software bill of materials (SBOM) using SPDX. +// SPDX is an open standard for SBOMs that +// identifies and catalogs components, licenses, copyrights, security +// references, and other metadata relating to software. +type SBOMInfo struct { + SPDXID *string `json:"SPDXID,omitempty"` + SPDXVersion *string `json:"spdxVersion,omitempty"` + CreationInfo *CreationInfo `json:"creationInfo,omitempty"` + + // Repo name + Name *string `json:"name,omitempty"` + DataLicense *string `json:"dataLicense,omitempty"` + DocumentDescribes []string `json:"documentDescribes,omitempty"` + DocumentNamespace *string `json:"documentNamespace,omitempty"` + + // List of packages dependencies + Packages []*RepoDependencies `json:"packages,omitempty"` +} + +func (s SBOM) String() string { + return Stringify(s) +} + +// GetSBOM fetches the software bill of materials for a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/dependency-graph/sboms +func (s *DependencyGraphService) GetSBOM(ctx context.Context, owner, repo string) (*SBOM, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/dependency-graph/sbom", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var sbom *SBOM + resp, err := s.client.Do(ctx, req, &sbom) + if err != nil { + return nil, resp, err + } + + return sbom, resp, nil +} diff --git a/vendor/github.com/google/go-github/v53/github/doc.go b/vendor/github.com/google/go-github/v55/github/doc.go similarity index 87% rename from vendor/github.com/google/go-github/v53/github/doc.go rename to vendor/github.com/google/go-github/v55/github/doc.go index af2847111..bace6fb3c 100644 --- a/vendor/github.com/google/go-github/v53/github/doc.go +++ b/vendor/github.com/google/go-github/v55/github/doc.go @@ -8,7 +8,7 @@ Package github provides a client for using the GitHub API. Usage: - import "github.com/google/go-github/v53/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) + import "github.com/google/go-github/v55/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) import "github.com/google/go-github/github" // with go modules disabled Construct a new GitHub client, then use the various services on the client to @@ -40,34 +40,16 @@ For more sample code snippets, head over to the https://github.com/google/go-git # Authentication -The go-github library does not directly handle authentication. Instead, when -creating a new client, pass an http.Client that can handle authentication for -you. The easiest and recommended way to do this is using the golang.org/x/oauth2 -library, but you can always use any other library that provides an http.Client. -If you have an OAuth2 access token (for example, a personal API token), you can -use it with the oauth2 library using: +Use Client.WithAuthToken to configure your client to authenticate using an Oauth token +(for example, a personal access token). This is what is needed for a majority of use cases +aside from GitHub Apps. - import "golang.org/x/oauth2" - - func main() { - ctx := context.Background() - ts := oauth2.StaticTokenSource( - &oauth2.Token{AccessToken: "... your access token ..."}, - ) - tc := oauth2.NewClient(ctx, ts) - - client := github.NewClient(tc) - - // list all repositories for the authenticated user - repos, _, err := client.Repositories.List(ctx, "", nil) - } + client := github.NewClient(nil).WithAuthToken("... your access token ...") Note that when using an authenticated Client, all calls made by the client will include the specified OAuth token. Therefore, authenticated clients should almost never be shared between different users. -See the oauth2 docs for complete instructions on using that library. - For API methods that require HTTP Basic Authentication, use the BasicAuthTransport. 
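The rewritten doc.go above recommends Client.WithAuthToken over the old oauth2 boilerplate, and the new dependency_graph.go adds DependencyGraphService.GetSBOM. A hedged sketch combining the two follows; the DependencyGraph field name on Client and the GITHUB_TOKEN environment variable are assumptions for illustration, not taken from this diff.

    package main

    import (
        "context"
        "fmt"
        "os"

        "github.com/google/go-github/v55/github"
    )

    func main() {
        ctx := context.Background()

        // WithAuthToken replaces the oauth2.StaticTokenSource setup that the
        // old doc.go described.
        client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

        // "owner" and "repo" are placeholders; GetSBOM returns the SPDX
        // software bill of materials for the repository.
        sbom, _, err := client.DependencyGraph.GetSBOM(ctx, "owner", "repo")
        if err != nil {
            panic(err)
        }
        fmt.Println("SPDX version:", sbom.GetSBOM().GetSPDXVersion())
    }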
diff --git a/vendor/github.com/google/go-github/v53/github/enterprise.go b/vendor/github.com/google/go-github/v55/github/enterprise.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/enterprise.go rename to vendor/github.com/google/go-github/v55/github/enterprise.go diff --git a/vendor/github.com/google/go-github/v53/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v55/github/enterprise_actions_runners.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/enterprise_actions_runners.go rename to vendor/github.com/google/go-github/v55/github/enterprise_actions_runners.go diff --git a/vendor/github.com/google/go-github/v53/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v55/github/enterprise_audit_log.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/enterprise_audit_log.go rename to vendor/github.com/google/go-github/v55/github/enterprise_audit_log.go diff --git a/vendor/github.com/google/go-github/v53/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v55/github/enterprise_code_security_and_analysis.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/enterprise_code_security_and_analysis.go rename to vendor/github.com/google/go-github/v55/github/enterprise_code_security_and_analysis.go diff --git a/vendor/github.com/google/go-github/v55/github/event.go b/vendor/github.com/google/go-github/v55/github/event.go new file mode 100644 index 000000000..e98606bce --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/event.go @@ -0,0 +1,54 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "encoding/json" +) + +// Event represents a GitHub event. +type Event struct { + Type *string `json:"type,omitempty"` + Public *bool `json:"public,omitempty"` + RawPayload *json.RawMessage `json:"payload,omitempty"` + Repo *Repository `json:"repo,omitempty"` + Actor *User `json:"actor,omitempty"` + Org *Organization `json:"org,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ID *string `json:"id,omitempty"` +} + +func (e Event) String() string { + return Stringify(e) +} + +// ParsePayload parses the event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +func (e *Event) ParsePayload() (interface{}, error) { + // It would be nice if e.Type were the snake_case name of the event, + // but the existing interface uses the struct name instead. + payload := EventForType(typeToMessageMapping[e.GetType()]) + + if err := json.Unmarshal(e.GetRawPayload(), &payload); err != nil { + return nil, err + } + + return payload, nil +} + +// Payload returns the parsed event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +// +// Deprecated: Use ParsePayload instead, which returns an error +// rather than panics if JSON unmarshaling raw payload fails. 
+func (e *Event) Payload() (payload interface{}) { + var err error + payload, err = e.ParsePayload() + if err != nil { + panic(err) + } + return payload +} diff --git a/vendor/github.com/google/go-github/v53/github/event_types.go b/vendor/github.com/google/go-github/v55/github/event_types.go similarity index 85% rename from vendor/github.com/google/go-github/v53/github/event_types.go rename to vendor/github.com/google/go-github/v55/github/event_types.go index 6a13b286b..1a403da9b 100644 --- a/vendor/github.com/google/go-github/v53/github/event_types.go +++ b/vendor/github.com/google/go-github/v55/github/event_types.go @@ -134,6 +134,25 @@ type DeleteEvent struct { Installation *Installation `json:"installation,omitempty"` } +// DependabotAlertEvent is triggered when there is activity relating to Dependabot alerts. +// The Webhook event name is "dependabot_alert". +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert +type DependabotAlertEvent struct { + Action *string `json:"action,omitempty"` + Alert *DependabotAlert `json:"alert,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` +} + // DeployKeyEvent is triggered when a deploy key is added or removed from a repository. // The Webhook event name is "deploy_key". // @@ -165,8 +184,10 @@ type DeployKeyEvent struct { // // GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#deployment type DeploymentEvent struct { - Deployment *Deployment `json:"deployment,omitempty"` - Repo *Repository `json:"repository,omitempty"` + Deployment *Deployment `json:"deployment,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Workflow *Workflow `json:"workflow,omitempty"` + WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` // The following fields are only populated by Webhook events. Sender *User `json:"sender,omitempty"` @@ -505,6 +526,38 @@ type InstallationRepositoriesEvent struct { Installation *Installation `json:"installation,omitempty"` } +// InstallationLoginChange represents a change in login on an installation. +type InstallationLoginChange struct { + From *string `json:"from,omitempty"` +} + +// InstallationSlugChange represents a change in slug on an installation. +type InstallationSlugChange struct { + From *string `json:"from,omitempty"` +} + +// InstallationChanges represents a change in slug or login on an installation. +type InstallationChanges struct { + Login *InstallationLoginChange `json:"login,omitempty"` + Slug *InstallationSlugChange `json:"slug,omitempty"` +} + +// InstallationTargetEvent is triggered when there is activity on an installation from a user or organization account. +// The Webhook event name is "installation_target". 
+// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_target +type InstallationTargetEvent struct { + Account *User `json:"account,omitempty"` + Action *string `json:"action,omitempty"` + Changes *InstallationChanges `json:"changes,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + TargetType *string `json:"target_type,omitempty"` +} + // IssueCommentEvent is triggered when an issue comment is created on an issue // or pull request. // The Webhook event name is "issue_comment". @@ -774,6 +827,74 @@ type PageBuildEvent struct { Installation *Installation `json:"installation,omitempty"` } +// PersonalAccessTokenRequestEvent occurs when there is activity relating to a +// request for a fine-grained personal access token to access resources that +// belong to a resource owner that requires approval for token access. +// The webhook event name is "personal_access_token_request". +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#personal_access_token_request +type PersonalAccessTokenRequestEvent struct { + // Action is the action that was performed. Possible values are: + // "approved", "cancelled", "created" or "denied" + Action *string `json:"action,omitempty"` + PersonalAccessTokenRequest *PersonalAccessTokenRequest `json:"personal_access_token_request,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// PersonalAccessTokenRequest contains the details of a PersonalAccessTokenRequestEvent. +type PersonalAccessTokenRequest struct { + // Unique identifier of the request for access via fine-grained personal + // access token. Used as the pat_request_id parameter in the list and review + // API calls. + ID *int64 `json:"id,omitempty"` + Owner *User `json:"owner,omitempty"` + + // New requested permissions, categorized by type of permission. + PermissionsAdded *PersonalAccessTokenPermissions `json:"permissions_added,omitempty"` + + // Requested permissions that elevate access for a previously approved + // request for access, categorized by type of permission. + PermissionsUpgraded *PersonalAccessTokenPermissions `json:"permissions_upgraded,omitempty"` + + // Permissions requested, categorized by type of permission. + // This field incorporates permissions_added and permissions_upgraded. + PermissionsResult *PersonalAccessTokenPermissions `json:"permissions_result,omitempty"` + + // Type of repository selection requested. Possible values are: + // "none", "all" or "subset" + RepositorySelection *string `json:"repository_selection,omitempty"` + + // The number of repositories the token is requesting access to. + // This field is only populated when repository_selection is subset. + RepositoryCount *int64 `json:"repository_count,omitempty"` + + // An array of repository objects the token is requesting access to. + // This field is only populated when repository_selection is subset. + Repositories []*Repository `json:"repositories,omitempty"` + + // Date and time when the request for access was created. 
+ CreatedAt *Timestamp `json:"created_at,omitempty"` + + // Whether the associated fine-grained personal access token has expired. + TokenExpired *bool `json:"token_expired,omitempty"` + + // Date and time when the associated fine-grained personal access token expires. + TokenExpiresAt *Timestamp `json:"token_expires_at,omitempty"` + + // Date and time when the associated fine-grained personal access token was last used for authentication. + TokenLastUsedAt *Timestamp `json:"token_last_used_at,omitempty"` +} + +// PersonalAccessTokenPermissions represents the original or newly requested +// scope of permissions for a fine-grained personal access token within a PersonalAccessTokenRequest. +type PersonalAccessTokenPermissions struct { + Org map[string]string `json:"organization,omitempty"` + Repo map[string]string `json:"repository,omitempty"` + Other map[string]string `json:"other,omitempty"` +} + // PingEvent is triggered when a Webhook is added to GitHub. // // GitHub API docs: https://developer.github.com/webhooks/#ping-event @@ -842,6 +963,77 @@ type ProjectColumnEvent struct { Installation *Installation `json:"installation,omitempty"` } +// ProjectV2Event is triggered when there is activity relating to an organization-level project. +// The Webhook event name is "projects_v2". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2 +type ProjectV2Event struct { + Action *string `json:"action,omitempty"` + ProjectsV2 *ProjectsV2 `json:"projects_v2,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// ProjectsV2 represents a projects v2 project. +type ProjectsV2 struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Creator *User `json:"creator,omitempty"` + Title *string `json:"title,omitempty"` + Description *string `json:"description,omitempty"` + Public *bool `json:"public,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + DeletedAt *Timestamp `json:"deleted_at,omitempty"` + Number *int `json:"number,omitempty"` + ShortDescription *string `json:"short_description,omitempty"` + DeletedBy *User `json:"deleted_by,omitempty"` +} + +// ProjectV2ItemEvent is triggered when there is activity relating to an item on an organization-level project. +// The Webhook event name is "projects_v2_item". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item +type ProjectV2ItemEvent struct { + Action *string `json:"action,omitempty"` + Changes *ProjectV2ItemChange `json:"changes,omitempty"` + ProjectV2Item *ProjectV2Item `json:"projects_v2_item,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// ProjectV2ItemChange represents a project v2 item change. +type ProjectV2ItemChange struct { + ArchivedAt *ArchivedAt `json:"archived_at,omitempty"` +} + +// ArchivedAt represents an archiving date change. 
+type ArchivedAt struct { + From *Timestamp `json:"from,omitempty"` + To *Timestamp `json:"to,omitempty"` +} + +// ProjectsV2 represents an item belonging to a project. +type ProjectV2Item struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + ProjectNodeID *string `json:"project_node_id,omitempty"` + ContentNodeID *string `json:"content_node_id,omitempty"` + ContentType *string `json:"content_type,omitempty"` + Creator *User `json:"creator,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ArchivedAt *Timestamp `json:"archived_at,omitempty"` +} + // PublicEvent is triggered when a private repository is open sourced. // According to GitHub: "Without a doubt: the best GitHub event." // The Webhook event name is "public". @@ -1225,6 +1417,31 @@ type SecretScanningAlertEvent struct { Installation *Installation `json:"installation,omitempty"` } +// SecurityAndAnalysisEvent is triggered when code security and analysis features +// are enabled or disabled for a repository. +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#security_and_analysis +type SecurityAndAnalysisEvent struct { + Changes *SecurityAndAnalysisChange `json:"changes,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// SecurityAndAnalysisChange represents the changes when security and analysis +// features are enabled or disabled for a repository. +type SecurityAndAnalysisChange struct { + From *SecurityAndAnalysisChangeFrom `json:"from,omitempty"` +} + +// SecurityAndAnalysisChangeFrom represents which change was made when security +// and analysis features are enabled or disabled for a repository. +type SecurityAndAnalysisChangeFrom struct { + SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` +} + // StarEvent is triggered when a star is added or removed from a repository. // The Webhook event name is "star". // @@ -1393,6 +1610,8 @@ type WorkflowRunEvent struct { // // GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory type SecurityAdvisory struct { + CVSS *AdvisoryCVSS `json:"cvss,omitempty"` + CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` GHSAID *string `json:"ghsa_id,omitempty"` Summary *string `json:"summary,omitempty"` Description *string `json:"description,omitempty"` @@ -1441,6 +1660,13 @@ type FirstPatchedVersion struct { type SecurityAdvisoryEvent struct { Action *string `json:"action,omitempty"` SecurityAdvisory *SecurityAdvisory `json:"security_advisory,omitempty"` + + // The following fields are only populated by Webhook events. + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` } // CodeScanningAlertEvent is triggered when a code scanning finds a potential vulnerability or error in your code. 
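The event_types.go hunk above adds several webhook payload types (DependabotAlertEvent, InstallationTargetEvent, PersonalAccessTokenRequestEvent, ProjectV2Event/ProjectV2ItemEvent, SecurityAndAnalysisEvent). A short sketch of how a webhook handler might receive some of them with the v55 helpers follows; the secret, route, and port are placeholders, and ParseWebHook's recognition of these event names is assumed from the upstream event mapping rather than shown in this diff.

    package main

    import (
        "log"
        "net/http"

        "github.com/google/go-github/v55/github"
    )

    func handler(w http.ResponseWriter, r *http.Request) {
        // Verify the X-Hub-Signature header against the shared secret.
        payload, err := github.ValidatePayload(r, []byte("webhook-secret"))
        if err != nil {
            http.Error(w, "invalid signature", http.StatusForbidden)
            return
        }

        // ParseWebHook maps the X-GitHub-Event name to the matching struct.
        event, err := github.ParseWebHook(github.WebHookType(r), payload)
        if err != nil {
            http.Error(w, "unparseable payload", http.StatusBadRequest)
            return
        }

        switch e := event.(type) {
        case *github.DependabotAlertEvent:
            log.Printf("dependabot alert: %s", e.GetAction())
        case *github.ProjectV2ItemEvent:
            log.Printf("project v2 item: %s", e.GetAction())
        case *github.SecurityAndAnalysisEvent:
            log.Printf("security settings changed in %s", e.GetRepository().GetFullName())
        default:
            log.Printf("ignoring event type %T", e)
        }
    }

    func main() {
        http.HandleFunc("/webhook", handler)
        log.Fatal(http.ListenAndServe(":8080", nil))
    }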
diff --git a/vendor/github.com/google/go-github/v53/github/gists.go b/vendor/github.com/google/go-github/v55/github/gists.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/gists.go rename to vendor/github.com/google/go-github/v55/github/gists.go diff --git a/vendor/github.com/google/go-github/v53/github/gists_comments.go b/vendor/github.com/google/go-github/v55/github/gists_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/gists_comments.go rename to vendor/github.com/google/go-github/v55/github/gists_comments.go diff --git a/vendor/github.com/google/go-github/v53/github/git.go b/vendor/github.com/google/go-github/v55/github/git.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git.go rename to vendor/github.com/google/go-github/v55/github/git.go diff --git a/vendor/github.com/google/go-github/v53/github/git_blobs.go b/vendor/github.com/google/go-github/v55/github/git_blobs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git_blobs.go rename to vendor/github.com/google/go-github/v55/github/git_blobs.go diff --git a/vendor/github.com/google/go-github/v53/github/git_commits.go b/vendor/github.com/google/go-github/v55/github/git_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git_commits.go rename to vendor/github.com/google/go-github/v55/github/git_commits.go diff --git a/vendor/github.com/google/go-github/v53/github/git_refs.go b/vendor/github.com/google/go-github/v55/github/git_refs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git_refs.go rename to vendor/github.com/google/go-github/v55/github/git_refs.go diff --git a/vendor/github.com/google/go-github/v53/github/git_tags.go b/vendor/github.com/google/go-github/v55/github/git_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git_tags.go rename to vendor/github.com/google/go-github/v55/github/git_tags.go diff --git a/vendor/github.com/google/go-github/v53/github/git_trees.go b/vendor/github.com/google/go-github/v55/github/git_trees.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/git_trees.go rename to vendor/github.com/google/go-github/v55/github/git_trees.go diff --git a/vendor/github.com/google/go-github/v53/github/github-accessors.go b/vendor/github.com/google/go-github/v55/github/github-accessors.go similarity index 94% rename from vendor/github.com/google/go-github/v53/github/github-accessors.go rename to vendor/github.com/google/go-github/v55/github/github-accessors.go index 8acb72b04..560e0892e 100644 --- a/vendor/github.com/google/go-github/v53/github/github-accessors.go +++ b/vendor/github.com/google/go-github/v55/github/github-accessors.go @@ -343,7 +343,7 @@ func (a *AdvancedSecurityCommittersBreakdown) GetUserLogin() string { } // GetScore returns the Score field. -func (a *AdvisoryCVSs) GetScore() *float64 { +func (a *AdvisoryCVSS) GetScore() *float64 { if a == nil { return nil } @@ -351,7 +351,7 @@ func (a *AdvisoryCVSs) GetScore() *float64 { } // GetVectorString returns the VectorString field if it's non-nil, zero value otherwise. 
-func (a *AdvisoryCVSs) GetVectorString() string { +func (a *AdvisoryCVSS) GetVectorString() string { if a == nil || a.VectorString == nil { return "" } @@ -854,6 +854,22 @@ func (a *AppConfig) GetWebhookSecret() string { return *a.WebhookSecret } +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *ArchivedAt) GetFrom() Timestamp { + if a == nil || a.From == nil { + return Timestamp{} + } + return *a.From +} + +// GetTo returns the To field if it's non-nil, zero value otherwise. +func (a *ArchivedAt) GetTo() Timestamp { + if a == nil || a.To == nil { + return Timestamp{} + } + return *a.To +} + // GetArchiveDownloadURL returns the ArchiveDownloadURL field if it's non-nil, zero value otherwise. func (a *Artifact) GetArchiveDownloadURL() string { if a == nil || a.ArchiveDownloadURL == nil { @@ -1134,6 +1150,14 @@ func (a *AuditEntry) GetCreatedAt() Timestamp { return *a.CreatedAt } +// GetData returns the Data field. +func (a *AuditEntry) GetData() *AuditEntryData { + if a == nil { + return nil + } + return a.Data +} + // GetDeployKeyFingerprint returns the DeployKeyFingerprint field if it's non-nil, zero value otherwise. func (a *AuditEntry) GetDeployKeyFingerprint() string { if a == nil || a.DeployKeyFingerprint == nil { @@ -1590,6 +1614,22 @@ func (a *AuditEntry) GetWorkflowRunID() int64 { return *a.WorkflowRunID } +// GetOldLogin returns the OldLogin field if it's non-nil, zero value otherwise. +func (a *AuditEntryData) GetOldLogin() string { + if a == nil || a.OldLogin == nil { + return "" + } + return *a.OldLogin +} + +// GetOldName returns the OldName field if it's non-nil, zero value otherwise. +func (a *AuditEntryData) GetOldName() string { + if a == nil || a.OldName == nil { + return "" + } + return *a.OldName +} + // GetApp returns the App field. func (a *Authorization) GetApp() *AuthorizationApp { if a == nil { @@ -1846,6 +1886,22 @@ func (a *AutolinkOptions) GetURLTemplate() string { return *a.URLTemplate } +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (a *AutomatedSecurityFixes) GetEnabled() bool { + if a == nil || a.Enabled == nil { + return false + } + return *a.Enabled +} + +// GetPaused returns the Paused field if it's non-nil, zero value otherwise. +func (a *AutomatedSecurityFixes) GetPaused() bool { + if a == nil || a.Paused == nil { + return false + } + return *a.Paused +} + // GetAppID returns the AppID field if it's non-nil, zero value otherwise. func (a *AutoTriggerCheck) GetAppID() int64 { if a == nil || a.AppID == nil { @@ -2238,6 +2294,14 @@ func (b *BypassActor) GetActorType() string { return *b.ActorType } +// GetBypassMode returns the BypassMode field if it's non-nil, zero value otherwise. +func (b *BypassActor) GetBypassMode() string { + if b == nil || b.BypassMode == nil { + return "" + } + return *b.BypassMode +} + // GetApp returns the App field. func (c *CheckRun) GetApp() *App { if c == nil { @@ -2766,6 +2830,78 @@ func (c *CodeownersError) GetSuggestion() string { return *c.Suggestion } +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetContentType() string { + if c == nil || c.ContentType == nil { + return "" + } + return *c.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
+func (c *CodeQLDatabase) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetLanguage() string { + if c == nil || c.Language == nil { + return "" + } + return *c.Language +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetSize() int64 { + if c == nil || c.Size == nil { + return 0 + } + return *c.Size +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetUploader returns the Uploader field. +func (c *CodeQLDatabase) GetUploader() *User { + if c == nil { + return nil + } + return c.Uploader +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + // GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. func (c *CodeResult) GetHTMLURL() string { if c == nil || c.HTMLURL == nil { @@ -4662,6 +4798,102 @@ func (c *CreateUserProjectOptions) GetBody() string { return *c.Body } +// GetCreated returns the Created field if it's non-nil, zero value otherwise. +func (c *CreationInfo) GetCreated() Timestamp { + if c == nil || c.Created == nil { + return Timestamp{} + } + return *c.Created +} + +// GetAuthorizedCredentialExpiresAt returns the AuthorizedCredentialExpiresAt field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialExpiresAt() Timestamp { + if c == nil || c.AuthorizedCredentialExpiresAt == nil { + return Timestamp{} + } + return *c.AuthorizedCredentialExpiresAt +} + +// GetAuthorizedCredentialID returns the AuthorizedCredentialID field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialID() int64 { + if c == nil || c.AuthorizedCredentialID == nil { + return 0 + } + return *c.AuthorizedCredentialID +} + +// GetAuthorizedCredentialNote returns the AuthorizedCredentialNote field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialNote() string { + if c == nil || c.AuthorizedCredentialNote == nil { + return "" + } + return *c.AuthorizedCredentialNote +} + +// GetAuthorizedCredentialTitle returns the AuthorizedCredentialTitle field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialTitle() string { + if c == nil || c.AuthorizedCredentialTitle == nil { + return "" + } + return *c.AuthorizedCredentialTitle +} + +// GetCredentialAccessedAt returns the CredentialAccessedAt field if it's non-nil, zero value otherwise. 
+func (c *CredentialAuthorization) GetCredentialAccessedAt() Timestamp { + if c == nil || c.CredentialAccessedAt == nil { + return Timestamp{} + } + return *c.CredentialAccessedAt +} + +// GetCredentialAuthorizedAt returns the CredentialAuthorizedAt field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialAuthorizedAt() Timestamp { + if c == nil || c.CredentialAuthorizedAt == nil { + return Timestamp{} + } + return *c.CredentialAuthorizedAt +} + +// GetCredentialID returns the CredentialID field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialID() int64 { + if c == nil || c.CredentialID == nil { + return 0 + } + return *c.CredentialID +} + +// GetCredentialType returns the CredentialType field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialType() string { + if c == nil || c.CredentialType == nil { + return "" + } + return *c.CredentialType +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetFingerprint() string { + if c == nil || c.Fingerprint == nil { + return "" + } + return *c.Fingerprint +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetLogin() string { + if c == nil || c.Login == nil { + return "" + } + return *c.Login +} + +// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetTokenLastEight() string { + if c == nil || c.TokenLastEight == nil { + return "" + } + return *c.TokenLastEight +} + // GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. func (c *CustomRepoRoles) GetBaseRole() string { if c == nil || c.BaseRole == nil { @@ -4718,6 +4950,22 @@ func (d *DefaultSetupConfiguration) GetUpdatedAt() Timestamp { return *d.UpdatedAt } +// GetConfirmDeleteURL returns the ConfirmDeleteURL field if it's non-nil, zero value otherwise. +func (d *DeleteAnalysis) GetConfirmDeleteURL() string { + if d == nil || d.ConfirmDeleteURL == nil { + return "" + } + return *d.ConfirmDeleteURL +} + +// GetNextAnalysisURL returns the NextAnalysisURL field if it's non-nil, zero value otherwise. +func (d *DeleteAnalysis) GetNextAnalysisURL() string { + if d == nil || d.NextAnalysisURL == nil { + return "" + } + return *d.NextAnalysisURL +} + // GetInstallation returns the Installation field. func (d *DeleteEvent) GetInstallation() *Installation { if d == nil { @@ -4766,6 +5014,14 @@ func (d *DeleteEvent) GetSender() *User { return d.Sender } +// GetAutoDismissedAt returns the AutoDismissedAt field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetAutoDismissedAt() Timestamp { + if d == nil || d.AutoDismissedAt == nil { + return Timestamp{} + } + return *d.AutoDismissedAt +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (d *DependabotAlert) GetCreatedAt() Timestamp { if d == nil || d.CreatedAt == nil { @@ -4886,6 +5142,62 @@ func (d *DependabotAlert) GetURL() string { return *d.URL } +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (d *DependabotAlertEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetAlert returns the Alert field. +func (d *DependabotAlertEvent) GetAlert() *DependabotAlert { + if d == nil { + return nil + } + return d.Alert +} + +// GetEnterprise returns the Enterprise field. 
+func (d *DependabotAlertEvent) GetEnterprise() *Enterprise { + if d == nil { + return nil + } + return d.Enterprise +} + +// GetInstallation returns the Installation field. +func (d *DependabotAlertEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrganization returns the Organization field. +func (d *DependabotAlertEvent) GetOrganization() *Organization { + if d == nil { + return nil + } + return d.Organization +} + +// GetRepo returns the Repo field. +func (d *DependabotAlertEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DependabotAlertEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + // GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. func (d *DependabotSecurityAdvisory) GetCVEID() string { if d == nil || d.CVEID == nil { @@ -4894,12 +5206,12 @@ func (d *DependabotSecurityAdvisory) GetCVEID() string { return *d.CVEID } -// GetCVSs returns the CVSs field. -func (d *DependabotSecurityAdvisory) GetCVSs() *AdvisoryCVSs { +// GetCVSS returns the CVSS field. +func (d *DependabotSecurityAdvisory) GetCVSS() *AdvisoryCVSS { if d == nil { return nil } - return d.CVSs + return d.CVSS } // GetDescription returns the Description field if it's non-nil, zero value otherwise. @@ -4958,6 +5270,14 @@ func (d *DependabotSecurityAdvisory) GetWithdrawnAt() Timestamp { return *d.WithdrawnAt } +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityUpdates) GetStatus() string { + if d == nil || d.Status == nil { + return "" + } + return *d.Status +} + // GetManifestPath returns the ManifestPath field if it's non-nil, zero value otherwise. func (d *Dependency) GetManifestPath() string { if d == nil || d.ManifestPath == nil { @@ -5206,6 +5526,22 @@ func (d *DeploymentEvent) GetSender() *User { return d.Sender } +// GetWorkflow returns the Workflow field. +func (d *DeploymentEvent) GetWorkflow() *Workflow { + if d == nil { + return nil + } + return d.Workflow +} + +// GetWorkflowRun returns the WorkflowRun field. +func (d *DeploymentEvent) GetWorkflowRun() *WorkflowRun { + if d == nil { + return nil + } + return d.WorkflowRun +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (d *DeploymentProtectionRuleEvent) GetAction() string { if d == nil || d.Action == nil { @@ -8030,6 +8366,22 @@ func (i *Installation) GetUpdatedAt() Timestamp { return *i.UpdatedAt } +// GetLogin returns the Login field. +func (i *InstallationChanges) GetLogin() *InstallationLoginChange { + if i == nil { + return nil + } + return i.Login +} + +// GetSlug returns the Slug field. +func (i *InstallationChanges) GetSlug() *InstallationSlugChange { + if i == nil { + return nil + } + return i.Slug +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (i *InstallationEvent) GetAction() string { if i == nil || i.Action == nil { @@ -8062,6 +8414,14 @@ func (i *InstallationEvent) GetSender() *User { return i.Sender } +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (i *InstallationLoginChange) GetFrom() string { + if i == nil || i.From == nil { + return "" + } + return *i.From +} + // GetActions returns the Actions field if it's non-nil, zero value otherwise. 
func (i *InstallationPermissions) GetActions() string { if i == nil || i.Actions == nil { @@ -8390,24 +8750,104 @@ func (i *InstallationRepositoriesEvent) GetSender() *User { return i.Sender } -// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetExpiresAt() Timestamp { - if i == nil || i.ExpiresAt == nil { - return Timestamp{} +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (i *InstallationSlugChange) GetFrom() string { + if i == nil || i.From == nil { + return "" } - return *i.ExpiresAt + return *i.From } -// GetPermissions returns the Permissions field. -func (i *InstallationToken) GetPermissions() *InstallationPermissions { +// GetAccount returns the Account field. +func (i *InstallationTargetEvent) GetAccount() *User { if i == nil { return nil } - return i.Permissions + return i.Account } -// GetToken returns the Token field if it's non-nil, zero value otherwise. -func (i *InstallationToken) GetToken() string { +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationTargetEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetChanges returns the Changes field. +func (i *InstallationTargetEvent) GetChanges() *InstallationChanges { + if i == nil { + return nil + } + return i.Changes +} + +// GetEnterprise returns the Enterprise field. +func (i *InstallationTargetEvent) GetEnterprise() *Enterprise { + if i == nil { + return nil + } + return i.Enterprise +} + +// GetInstallation returns the Installation field. +func (i *InstallationTargetEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetOrganization returns the Organization field. +func (i *InstallationTargetEvent) GetOrganization() *Organization { + if i == nil { + return nil + } + return i.Organization +} + +// GetRepository returns the Repository field. +func (i *InstallationTargetEvent) GetRepository() *Repository { + if i == nil { + return nil + } + return i.Repository +} + +// GetSender returns the Sender field. +func (i *InstallationTargetEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. +func (i *InstallationTargetEvent) GetTargetType() string { + if i == nil || i.TargetType == nil { + return "" + } + return *i.TargetType +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetExpiresAt() Timestamp { + if i == nil || i.ExpiresAt == nil { + return Timestamp{} + } + return *i.ExpiresAt +} + +// GetPermissions returns the Permissions field. +func (i *InstallationToken) GetPermissions() *InstallationPermissions { + if i == nil { + return nil + } + return i.Permissions +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetToken() string { if i == nil || i.Token == nil { return "" } @@ -9406,6 +9846,14 @@ func (j *JITRunnerConfig) GetEncodedJITConfig() string { return *j.EncodedJITConfig } +// GetRunner returns the Runner field. +func (j *JITRunnerConfig) GetRunner() *Runner { + if j == nil { + return nil + } + return j.Runner +} + // GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. 
func (j *Jobs) GetTotalCount() int { if j == nil || j.TotalCount == nil { @@ -11022,6 +11470,14 @@ func (m *MostRecentInstance) GetAnalysisKey() string { return *m.AnalysisKey } +// GetCategory returns the Category field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetCategory() string { + if m == nil || m.Category == nil { + return "" + } + return *m.Category +} + // GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. func (m *MostRecentInstance) GetCommitSHA() string { if m == nil || m.CommitSHA == nil { @@ -11038,6 +11494,14 @@ func (m *MostRecentInstance) GetEnvironment() string { return *m.Environment } +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { + return "" + } + return *m.HTMLURL +} + // GetLocation returns the Location field. func (m *MostRecentInstance) GetLocation() *Location { if m == nil { @@ -11102,6 +11566,14 @@ func (n *NewPullRequest) GetHead() string { return *n.Head } +// GetHeadRepo returns the HeadRepo field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetHeadRepo() string { + if n == nil || n.HeadRepo == nil { + return "" + } + return *n.HeadRepo +} + // GetIssue returns the Issue field if it's non-nil, zero value otherwise. func (n *NewPullRequest) GetIssue() int { if n == nil || n.Issue == nil { @@ -13126,6 +13598,158 @@ func (p *PagesUpdate) GetSource() *PagesSource { return p.Source } +// GetOrg returns the Org map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetOrg() map[string]string { + if p == nil || p.Org == nil { + return map[string]string{} + } + return p.Org +} + +// GetOther returns the Other map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetOther() map[string]string { + if p == nil || p.Other == nil { + return map[string]string{} + } + return p.Other +} + +// GetRepo returns the Repo map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetRepo() map[string]string { + if p == nil || p.Repo == nil { + return map[string]string{} + } + return p.Repo +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetOwner returns the Owner field. +func (p *PersonalAccessTokenRequest) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPermissionsAdded returns the PermissionsAdded field. +func (p *PersonalAccessTokenRequest) GetPermissionsAdded() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsAdded +} + +// GetPermissionsResult returns the PermissionsResult field. +func (p *PersonalAccessTokenRequest) GetPermissionsResult() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsResult +} + +// GetPermissionsUpgraded returns the PermissionsUpgraded field. 
+func (p *PersonalAccessTokenRequest) GetPermissionsUpgraded() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsUpgraded +} + +// GetRepositoryCount returns the RepositoryCount field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetRepositoryCount() int64 { + if p == nil || p.RepositoryCount == nil { + return 0 + } + return *p.RepositoryCount +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetRepositorySelection() string { + if p == nil || p.RepositorySelection == nil { + return "" + } + return *p.RepositorySelection +} + +// GetTokenExpired returns the TokenExpired field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenExpired() bool { + if p == nil || p.TokenExpired == nil { + return false + } + return *p.TokenExpired +} + +// GetTokenExpiresAt returns the TokenExpiresAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenExpiresAt() Timestamp { + if p == nil || p.TokenExpiresAt == nil { + return Timestamp{} + } + return *p.TokenExpiresAt +} + +// GetTokenLastUsedAt returns the TokenLastUsedAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenLastUsedAt() Timestamp { + if p == nil || p.TokenLastUsedAt == nil { + return Timestamp{} + } + return *p.TokenLastUsedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequestEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *PersonalAccessTokenRequestEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PersonalAccessTokenRequestEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetPersonalAccessTokenRequest returns the PersonalAccessTokenRequest field. +func (p *PersonalAccessTokenRequestEvent) GetPersonalAccessTokenRequest() *PersonalAccessTokenRequest { + if p == nil { + return nil + } + return p.PersonalAccessTokenRequest +} + +// GetSender returns the Sender field. +func (p *PersonalAccessTokenRequestEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + // GetHook returns the Hook field. func (p *PingEvent) GetHook() *Hook { if p == nil { @@ -13947,39 +14571,319 @@ func (p *ProjectOptions) GetOrganizationPermission() string { if p == nil || p.OrganizationPermission == nil { return "" } - return *p.OrganizationPermission + return *p.OrganizationPermission +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetPrivate() bool { + if p == nil || p.Private == nil { + return false + } + return *p.Private +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (p *ProjectPermissionLevel) GetPermission() string { + if p == nil || p.Permission == nil { + return "" + } + return *p.Permission +} + +// GetUser returns the User field. 
+func (p *ProjectPermissionLevel) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetClosedAt() Timestamp { + if p == nil || p.ClosedAt == nil { + return Timestamp{} + } + return *p.ClosedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *ProjectsV2) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetDeletedAt returns the DeletedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetDeletedAt() Timestamp { + if p == nil || p.DeletedAt == nil { + return Timestamp{} + } + return *p.DeletedAt +} + +// GetDeletedBy returns the DeletedBy field. +func (p *ProjectsV2) GetDeletedBy() *User { + if p == nil { + return nil + } + return p.DeletedBy +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetDescription() string { + if p == nil || p.Description == nil { + return "" + } + return *p.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetOwner returns the Owner field. +func (p *ProjectsV2) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetPublic() bool { + if p == nil || p.Public == nil { + return false + } + return *p.Public +} + +// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetShortDescription() string { + if p == nil || p.ShortDescription == nil { + return "" + } + return *p.ShortDescription +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetTitle() string { + if p == nil || p.Title == nil { + return "" + } + return *p.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectV2Event) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *ProjectV2Event) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectV2Event) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProjectsV2 returns the ProjectsV2 field. +func (p *ProjectV2Event) GetProjectsV2() *ProjectsV2 { + if p == nil { + return nil + } + return p.ProjectsV2 +} + +// GetSender returns the Sender field. 
+func (p *ProjectV2Event) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetArchivedAt returns the ArchivedAt field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetArchivedAt() Timestamp { + if p == nil || p.ArchivedAt == nil { + return Timestamp{} + } + return *p.ArchivedAt +} + +// GetContentNodeID returns the ContentNodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetContentNodeID() string { + if p == nil || p.ContentNodeID == nil { + return "" + } + return *p.ContentNodeID +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetContentType() string { + if p == nil || p.ContentType == nil { + return "" + } + return *p.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *ProjectV2Item) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetProjectNodeID returns the ProjectNodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetProjectNodeID() string { + if p == nil || p.ProjectNodeID == nil { + return "" + } + return *p.ProjectNodeID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetArchivedAt returns the ArchivedAt field. +func (p *ProjectV2ItemChange) GetArchivedAt() *ArchivedAt { + if p == nil { + return nil + } + return p.ArchivedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectV2ItemEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetChanges returns the Changes field. +func (p *ProjectV2ItemEvent) GetChanges() *ProjectV2ItemChange { + if p == nil { + return nil + } + return p.Changes } -// GetPrivate returns the Private field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetPrivate() bool { - if p == nil || p.Private == nil { - return false +// GetInstallation returns the Installation field. +func (p *ProjectV2ItemEvent) GetInstallation() *Installation { + if p == nil { + return nil } - return *p.Private + return p.Installation } -// GetState returns the State field if it's non-nil, zero value otherwise. -func (p *ProjectOptions) GetState() string { - if p == nil || p.State == nil { - return "" +// GetOrg returns the Org field. +func (p *ProjectV2ItemEvent) GetOrg() *Organization { + if p == nil { + return nil } - return *p.State + return p.Org } -// GetPermission returns the Permission field if it's non-nil, zero value otherwise. -func (p *ProjectPermissionLevel) GetPermission() string { - if p == nil || p.Permission == nil { - return "" +// GetProjectV2Item returns the ProjectV2Item field. 
+func (p *ProjectV2ItemEvent) GetProjectV2Item() *ProjectV2Item { + if p == nil { + return nil } - return *p.Permission + return p.ProjectV2Item } -// GetUser returns the User field. -func (p *ProjectPermissionLevel) GetUser() *User { +// GetSender returns the Sender field. +func (p *ProjectV2ItemEvent) GetSender() *User { if p == nil { return nil } - return p.User + return p.Sender } // GetAllowDeletions returns the AllowDeletions field. @@ -14982,6 +15886,14 @@ func (p *PullRequestComment) GetStartSide() string { return *p.StartSide } +// GetSubjectType returns the SubjectType field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetSubjectType() string { + if p == nil || p.SubjectType == nil { + return "" + } + return *p.SubjectType +} + // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. func (p *PullRequestComment) GetUpdatedAt() Timestamp { if p == nil || p.UpdatedAt == nil { @@ -16606,6 +17518,62 @@ func (r *RenameOrgResponse) GetURL() string { return *r.URL } +// GetDownloadLocation returns the DownloadLocation field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetDownloadLocation() string { + if r == nil || r.DownloadLocation == nil { + return "" + } + return *r.DownloadLocation +} + +// GetFilesAnalyzed returns the FilesAnalyzed field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetFilesAnalyzed() bool { + if r == nil || r.FilesAnalyzed == nil { + return false + } + return *r.FilesAnalyzed +} + +// GetLicenseConcluded returns the LicenseConcluded field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetLicenseConcluded() string { + if r == nil || r.LicenseConcluded == nil { + return "" + } + return *r.LicenseConcluded +} + +// GetLicenseDeclared returns the LicenseDeclared field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetLicenseDeclared() string { + if r == nil || r.LicenseDeclared == nil { + return "" + } + return *r.LicenseDeclared +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetSPDXID() string { + if r == nil || r.SPDXID == nil { + return "" + } + return *r.SPDXID +} + +// GetVersionInfo returns the VersionInfo field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetVersionInfo() string { + if r == nil || r.VersionInfo == nil { + return "" + } + return *r.VersionInfo +} + // GetBranch returns the Branch field if it's non-nil, zero value otherwise. func (r *RepoMergeUpstreamRequest) GetBranch() string { if r == nil || r.Branch == nil { @@ -17838,6 +18806,14 @@ func (r *RepositoryContent) GetSize() int { return *r.Size } +// GetSubmoduleGitURL returns the SubmoduleGitURL field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetSubmoduleGitURL() string { + if r == nil || r.SubmoduleGitURL == nil { + return "" + } + return *r.SubmoduleGitURL +} + // GetTarget returns the Target field if it's non-nil, zero value otherwise. func (r *RepositoryContent) GetTarget() string { if r == nil || r.Target == nil { @@ -18798,6 +19774,14 @@ func (r *ReviewersRequest) GetNodeID() string { return *r.NodeID } +// GetReason returns the Reason field if it's non-nil, zero value otherwise. 
+func (r *ReviewPersonalAccessTokenRequestOptions) GetReason() string { + if r == nil || r.Reason == nil { + return "" + } + return *r.Reason +} + // GetDescription returns the Description field if it's non-nil, zero value otherwise. func (r *Rule) GetDescription() string { if r == nil || r.Description == nil { @@ -18878,14 +19862,6 @@ func (r *RuleRequiredStatusChecks) GetIntegrationID() int64 { return *r.IntegrationID } -// GetBypassMode returns the BypassMode field if it's non-nil, zero value otherwise. -func (r *Ruleset) GetBypassMode() string { - if r == nil || r.BypassMode == nil { - return "" - } - return *r.BypassMode -} - // GetConditions returns the Conditions field. func (r *Ruleset) GetConditions() *RulesetConditions { if r == nil { @@ -18894,6 +19870,14 @@ func (r *Ruleset) GetConditions() *RulesetConditions { return r.Conditions } +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Ruleset) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + // GetLinks returns the Links field. func (r *Ruleset) GetLinks() *RulesetLinks { if r == nil { @@ -18934,8 +19918,16 @@ func (r *RulesetConditions) GetRefName() *RulesetRefConditionParameters { return r.RefName } +// GetRepositoryID returns the RepositoryID field. +func (r *RulesetConditions) GetRepositoryID() *RulesetRepositoryIDsConditionParameters { + if r == nil { + return nil + } + return r.RepositoryID +} + // GetRepositoryName returns the RepositoryName field. -func (r *RulesetConditions) GetRepositoryName() *RulesetRepositoryConditionParameters { +func (r *RulesetConditions) GetRepositoryName() *RulesetRepositoryNamesConditionParameters { if r == nil { return nil } @@ -18959,7 +19951,7 @@ func (r *RulesetLinks) GetSelf() *RulesetLink { } // GetProtected returns the Protected field if it's non-nil, zero value otherwise. -func (r *RulesetRepositoryConditionParameters) GetProtected() bool { +func (r *RulesetRepositoryNamesConditionParameters) GetProtected() bool { if r == nil || r.Protected == nil { return false } @@ -19222,6 +20214,78 @@ func (s *SarifID) GetURL() string { return *s.URL } +// GetAnalysesURL returns the AnalysesURL field if it's non-nil, zero value otherwise. +func (s *SARIFUpload) GetAnalysesURL() string { + if s == nil || s.AnalysesURL == nil { + return "" + } + return *s.AnalysesURL +} + +// GetProcessingStatus returns the ProcessingStatus field if it's non-nil, zero value otherwise. +func (s *SARIFUpload) GetProcessingStatus() string { + if s == nil || s.ProcessingStatus == nil { + return "" + } + return *s.ProcessingStatus +} + +// GetSBOM returns the SBOM field. +func (s *SBOM) GetSBOM() *SBOMInfo { + if s == nil { + return nil + } + return s.SBOM +} + +// GetCreationInfo returns the CreationInfo field. +func (s *SBOMInfo) GetCreationInfo() *CreationInfo { + if s == nil { + return nil + } + return s.CreationInfo +} + +// GetDataLicense returns the DataLicense field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetDataLicense() string { + if s == nil || s.DataLicense == nil { + return "" + } + return *s.DataLicense +} + +// GetDocumentNamespace returns the DocumentNamespace field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetDocumentNamespace() string { + if s == nil || s.DocumentNamespace == nil { + return "" + } + return *s.DocumentNamespace +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. 
+func (s *SBOMInfo) GetName() string { + if s == nil || s.Name == nil { + return "" + } + return *s.Name +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetSPDXID() string { + if s == nil || s.SPDXID == nil { + return "" + } + return *s.SPDXID +} + +// GetSPDXVersion returns the SPDXVersion field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetSPDXVersion() string { + if s == nil || s.SPDXVersion == nil { + return "" + } + return *s.SPDXVersion +} + // GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. func (s *ScanningAnalysis) GetAnalysisKey() string { if s == nil || s.AnalysisKey == nil { @@ -19502,6 +20566,14 @@ func (s *SecretScanningAlert) GetNumber() int { return *s.Number } +// GetRepository returns the Repository field. +func (s *SecretScanningAlert) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + // GetResolution returns the Resolution field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetResolution() string { if s == nil || s.Resolution == nil { @@ -19542,6 +20614,14 @@ func (s *SecretScanningAlert) GetSecretType() string { return *s.SecretType } +// GetSecretTypeDisplayName returns the SecretTypeDisplayName field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetSecretTypeDisplayName() string { + if s == nil || s.SecretTypeDisplayName == nil { + return "" + } + return *s.SecretTypeDisplayName +} + // GetState returns the State field if it's non-nil, zero value otherwise. func (s *SecretScanningAlert) GetState() string { if s == nil || s.State == nil { @@ -19734,6 +20814,14 @@ func (s *SecretScanningPushProtection) GetStatus() string { return *s.Status } +// GetCVSS returns the CVSS field. +func (s *SecurityAdvisory) GetCVSS() *AdvisoryCVSS { + if s == nil { + return nil + } + return s.CVSS +} + // GetDescription returns the Description field if it's non-nil, zero value otherwise. func (s *SecurityAdvisory) GetDescription() string { if s == nil || s.Description == nil { @@ -19798,6 +20886,38 @@ func (s *SecurityAdvisoryEvent) GetAction() string { return *s.Action } +// GetEnterprise returns the Enterprise field. +func (s *SecurityAdvisoryEvent) GetEnterprise() *Enterprise { + if s == nil { + return nil + } + return s.Enterprise +} + +// GetInstallation returns the Installation field. +func (s *SecurityAdvisoryEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrganization returns the Organization field. +func (s *SecurityAdvisoryEvent) GetOrganization() *Organization { + if s == nil { + return nil + } + return s.Organization +} + +// GetRepository returns the Repository field. +func (s *SecurityAdvisoryEvent) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + // GetSecurityAdvisory returns the SecurityAdvisory field. func (s *SecurityAdvisoryEvent) GetSecurityAdvisory() *SecurityAdvisory { if s == nil { @@ -19806,6 +20926,14 @@ func (s *SecurityAdvisoryEvent) GetSecurityAdvisory() *SecurityAdvisory { return s.SecurityAdvisory } +// GetSender returns the Sender field. +func (s *SecurityAdvisoryEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + // GetAdvancedSecurity returns the AdvancedSecurity field. 
func (s *SecurityAndAnalysis) GetAdvancedSecurity() *AdvancedSecurity { if s == nil { @@ -19814,6 +20942,14 @@ func (s *SecurityAndAnalysis) GetAdvancedSecurity() *AdvancedSecurity { return s.AdvancedSecurity } +// GetDependabotSecurityUpdates returns the DependabotSecurityUpdates field. +func (s *SecurityAndAnalysis) GetDependabotSecurityUpdates() *DependabotSecurityUpdates { + if s == nil { + return nil + } + return s.DependabotSecurityUpdates +} + // GetSecretScanning returns the SecretScanning field. func (s *SecurityAndAnalysis) GetSecretScanning() *SecretScanning { if s == nil { @@ -19830,6 +20966,70 @@ func (s *SecurityAndAnalysis) GetSecretScanningPushProtection() *SecretScanningP return s.SecretScanningPushProtection } +// GetFrom returns the From field. +func (s *SecurityAndAnalysisChange) GetFrom() *SecurityAndAnalysisChangeFrom { + if s == nil { + return nil + } + return s.From +} + +// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. +func (s *SecurityAndAnalysisChangeFrom) GetSecurityAndAnalysis() *SecurityAndAnalysis { + if s == nil { + return nil + } + return s.SecurityAndAnalysis +} + +// GetChanges returns the Changes field. +func (s *SecurityAndAnalysisEvent) GetChanges() *SecurityAndAnalysisChange { + if s == nil { + return nil + } + return s.Changes +} + +// GetEnterprise returns the Enterprise field. +func (s *SecurityAndAnalysisEvent) GetEnterprise() *Enterprise { + if s == nil { + return nil + } + return s.Enterprise +} + +// GetInstallation returns the Installation field. +func (s *SecurityAndAnalysisEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrganization returns the Organization field. +func (s *SecurityAndAnalysisEvent) GetOrganization() *Organization { + if s == nil { + return nil + } + return s.Organization +} + +// GetRepository returns the Repository field. +func (s *SecurityAndAnalysisEvent) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + +// GetSender returns the Sender field. +func (s *SecurityAndAnalysisEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + // GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. func (s *SelectedReposList) GetTotalCount() int { if s == nil || s.TotalCount == nil { @@ -23086,6 +24286,14 @@ func (w *WorkflowRun) GetStatus() string { return *w.Status } +// GetTriggeringActor returns the TriggeringActor field. +func (w *WorkflowRun) GetTriggeringActor() *User { + if w == nil { + return nil + } + return w.TriggeringActor +} + // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
func (w *WorkflowRun) GetUpdatedAt() Timestamp { if w == nil || w.UpdatedAt == nil { diff --git a/vendor/github.com/google/go-github/v53/github/github.go b/vendor/github.com/google/go-github/v55/github/github.go similarity index 91% rename from vendor/github.com/google/go-github/v53/github/github.go rename to vendor/github.com/google/go-github/v55/github/github.go index 34a27282f..df0114446 100644 --- a/vendor/github.com/google/go-github/v53/github/github.go +++ b/vendor/github.com/google/go-github/v55/github/github.go @@ -24,11 +24,10 @@ import ( "time" "github.com/google/go-querystring/query" - "golang.org/x/oauth2" ) const ( - Version = "v53.2.0" + Version = "v55.0.0" defaultAPIVersion = "2022-11-28" defaultBaseURL = "https://api.github.com/" @@ -126,9 +125,6 @@ const ( // https://developer.github.com/changes/2019-04-24-vulnerability-alerts/ mediaTypeRequiredVulnerabilityAlertsPreview = "application/vnd.github.dorian-preview+json" - // https://developer.github.com/changes/2019-06-04-automated-security-fixes/ - mediaTypeRequiredAutomatedSecurityFixesPreview = "application/vnd.github.london-preview+json" - // https://developer.github.com/changes/2019-05-29-update-branch-api/ mediaTypeUpdatePullRequestBranchPreview = "application/vnd.github.lydian-preview+json" @@ -179,36 +175,38 @@ type Client struct { common service // Reuse a single struct instead of allocating one for each service on the heap. // Services used for talking to different parts of the GitHub API. - Actions *ActionsService - Activity *ActivityService - Admin *AdminService - Apps *AppsService - Authorizations *AuthorizationsService - Billing *BillingService - Checks *ChecksService - CodeScanning *CodeScanningService - Codespaces *CodespacesService - Dependabot *DependabotService - Enterprise *EnterpriseService - Gists *GistsService - Git *GitService - Gitignores *GitignoresService - Interactions *InteractionsService - IssueImport *IssueImportService - Issues *IssuesService - Licenses *LicensesService - Marketplace *MarketplaceService - Migrations *MigrationService - Organizations *OrganizationsService - Projects *ProjectsService - PullRequests *PullRequestsService - Reactions *ReactionsService - Repositories *RepositoriesService - SCIM *SCIMService - Search *SearchService - SecretScanning *SecretScanningService - Teams *TeamsService - Users *UsersService + Actions *ActionsService + Activity *ActivityService + Admin *AdminService + Apps *AppsService + Authorizations *AuthorizationsService + Billing *BillingService + Checks *ChecksService + CodeScanning *CodeScanningService + Codespaces *CodespacesService + Dependabot *DependabotService + DependencyGraph *DependencyGraphService + Enterprise *EnterpriseService + Gists *GistsService + Git *GitService + Gitignores *GitignoresService + Interactions *InteractionsService + IssueImport *IssueImportService + Issues *IssuesService + Licenses *LicensesService + Marketplace *MarketplaceService + Migrations *MigrationService + Organizations *OrganizationsService + Projects *ProjectsService + PullRequests *PullRequestsService + Reactions *ReactionsService + Repositories *RepositoriesService + SCIM *SCIMService + Search *SearchService + SecretScanning *SecretScanningService + SecurityAdvisories *SecurityAdvisoriesService + Teams *TeamsService + Users *UsersService } type service struct { @@ -307,16 +305,92 @@ func addOptions(s string, opts interface{}) (string, error) { // NewClient returns a new GitHub API client. If a nil httpClient is // provided, a new http.Client will be used. 
To use API methods which require -// authentication, provide an http.Client that will perform the authentication -// for you (such as that provided by the golang.org/x/oauth2 library). +// authentication, either use Client.WithAuthToken or provide NewClient with +// an http.Client that will perform the authentication for you (such as that +// provided by the golang.org/x/oauth2 library). func NewClient(httpClient *http.Client) *Client { - if httpClient == nil { - httpClient = &http.Client{} + c := &Client{client: httpClient} + c.initialize() + return c +} + +// WithAuthToken returns a copy of the client configured to use the provided token for the Authorization header. +func (c *Client) WithAuthToken(token string) *Client { + c2 := c.copy() + defer c2.initialize() + transport := c2.client.Transport + if transport == nil { + transport = http.DefaultTransport + } + c2.client.Transport = roundTripperFunc( + func(req *http.Request) (*http.Response, error) { + req = req.Clone(req.Context()) + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) + return transport.RoundTrip(req) + }, + ) + return c2 +} + +// WithEnterpriseURLs returns a copy of the client configured to use the provided base and +// upload URLs. If the base URL does not have the suffix "/api/v3/", it will be added +// automatically. If the upload URL does not have the suffix "/api/uploads", it will be +// added automatically. +// +// Note that WithEnterpriseURLs is a convenience helper only; +// its behavior is equivalent to setting the BaseURL and UploadURL fields. +// +// Another important thing is that by default, the GitHub Enterprise URL format +// should be http(s)://[hostname]/api/v3/ or you will always receive the 406 status code. +// The upload URL format should be http(s)://[hostname]/api/uploads/. +func (c *Client) WithEnterpriseURLs(baseURL, uploadURL string) (*Client, error) { + c2 := c.copy() + defer c2.initialize() + var err error + c2.BaseURL, err = url.Parse(baseURL) + if err != nil { + return nil, err + } + + if !strings.HasSuffix(c2.BaseURL.Path, "/") { + c2.BaseURL.Path += "/" + } + if !strings.HasSuffix(c2.BaseURL.Path, "/api/v3/") && + !strings.HasPrefix(c2.BaseURL.Host, "api.") && + !strings.Contains(c2.BaseURL.Host, ".api.") { + c2.BaseURL.Path += "api/v3/" + } + + c2.UploadURL, err = url.Parse(uploadURL) + if err != nil { + return nil, err + } + + if !strings.HasSuffix(c2.UploadURL.Path, "/") { + c2.UploadURL.Path += "/" + } + if !strings.HasSuffix(c2.UploadURL.Path, "/api/uploads/") && + !strings.HasPrefix(c2.UploadURL.Host, "api.") && + !strings.Contains(c2.UploadURL.Host, ".api.") { + c2.UploadURL.Path += "api/uploads/" } - baseURL, _ := url.Parse(defaultBaseURL) - uploadURL, _ := url.Parse(uploadBaseURL) + return c2, nil +} - c := &Client{client: httpClient, BaseURL: baseURL, UserAgent: defaultUserAgent, UploadURL: uploadURL} +// initialize sets default values and initializes services. 
+func (c *Client) initialize() { + if c.client == nil { + c.client = &http.Client{} + } + if c.BaseURL == nil { + c.BaseURL, _ = url.Parse(defaultBaseURL) + } + if c.UploadURL == nil { + c.UploadURL, _ = url.Parse(uploadBaseURL) + } + if c.UserAgent == "" { + c.UserAgent = defaultUserAgent + } c.common.client = c c.Actions = (*ActionsService)(&c.common) c.Activity = (*ActivityService)(&c.common) @@ -328,6 +402,7 @@ func NewClient(httpClient *http.Client) *Client { c.CodeScanning = (*CodeScanningService)(&c.common) c.Codespaces = (*CodespacesService)(&c.common) c.Dependabot = (*DependabotService)(&c.common) + c.DependencyGraph = (*DependencyGraphService)(&c.common) c.Enterprise = (*EnterpriseService)(&c.common) c.Gists = (*GistsService)(&c.common) c.Git = (*GitService)(&c.common) @@ -346,9 +421,30 @@ func NewClient(httpClient *http.Client) *Client { c.SCIM = (*SCIMService)(&c.common) c.Search = (*SearchService)(&c.common) c.SecretScanning = (*SecretScanningService)(&c.common) + c.SecurityAdvisories = (*SecurityAdvisoriesService)(&c.common) c.Teams = (*TeamsService)(&c.common) c.Users = (*UsersService)(&c.common) - return c +} + +// copy returns a copy of the current client. It must be initialized before use. +func (c *Client) copy() *Client { + c.clientMu.Lock() + // can't use *c here because that would copy mutexes by value. + clone := Client{ + client: c.client, + UserAgent: c.UserAgent, + BaseURL: c.BaseURL, + UploadURL: c.UploadURL, + secondaryRateLimitReset: c.secondaryRateLimitReset, + } + c.clientMu.Unlock() + if clone.client == nil { + clone.client = &http.Client{} + } + c.rateMu.Lock() + copy(clone.rateLimits[:], c.rateLimits[:]) + c.rateMu.Unlock() + return &clone } // NewClientWithEnvProxy enhances NewClient with the HttpProxy env. @@ -357,56 +453,18 @@ func NewClientWithEnvProxy() *Client { } // NewTokenClient returns a new GitHub API client authenticated with the provided token. -func NewTokenClient(ctx context.Context, token string) *Client { - return NewClient(oauth2.NewClient(ctx, oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token}))) +// Deprecated: Use NewClient(nil).WithAuthToken(token) instead. +func NewTokenClient(_ context.Context, token string) *Client { + // This always returns a nil error. + return NewClient(nil).WithAuthToken(token) } // NewEnterpriseClient returns a new GitHub API client with provided // base URL and upload URL (often is your GitHub Enterprise hostname). -// If the base URL does not have the suffix "/api/v3/", it will be added automatically. -// If the upload URL does not have the suffix "/api/uploads", it will be added automatically. -// If a nil httpClient is provided, a new http.Client will be used. // -// Note that NewEnterpriseClient is a convenience helper only; -// its behavior is equivalent to using NewClient, followed by setting -// the BaseURL and UploadURL fields. -// -// Another important thing is that by default, the GitHub Enterprise URL format -// should be http(s)://[hostname]/api/v3/ or you will always receive the 406 status code. -// The upload URL format should be http(s)://[hostname]/api/uploads/. +// Deprecated: Use NewClient(httpClient).WithOptions(WithEnterpriseURLs(baseURL, uploadURL)) instead. 
func NewEnterpriseClient(baseURL, uploadURL string, httpClient *http.Client) (*Client, error) { - baseEndpoint, err := url.Parse(baseURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(baseEndpoint.Path, "/") { - baseEndpoint.Path += "/" - } - if !strings.HasSuffix(baseEndpoint.Path, "/api/v3/") && - !strings.HasPrefix(baseEndpoint.Host, "api.") && - !strings.Contains(baseEndpoint.Host, ".api.") { - baseEndpoint.Path += "api/v3/" - } - - uploadEndpoint, err := url.Parse(uploadURL) - if err != nil { - return nil, err - } - - if !strings.HasSuffix(uploadEndpoint.Path, "/") { - uploadEndpoint.Path += "/" - } - if !strings.HasSuffix(uploadEndpoint.Path, "/api/uploads/") && - !strings.HasPrefix(uploadEndpoint.Host, "api.") && - !strings.Contains(uploadEndpoint.Host, ".api.") { - uploadEndpoint.Path += "api/uploads/" - } - - c := NewClient(httpClient) - c.BaseURL = baseEndpoint - c.UploadURL = uploadEndpoint - return c, nil + return NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) } // RequestOption represents an option that can modify an http.Request. @@ -1052,7 +1110,7 @@ func (ae *AcceptedError) Is(target error) bool { if !ok { return false } - return bytes.Compare(ae.Raw, v.Raw) == 0 + return bytes.Equal(ae.Raw, v.Raw) } // AbuseRateLimitError occurs when GitHub returns 403 Forbidden response with the @@ -1538,3 +1596,10 @@ func Int64(v int64) *int64 { return &v } // String is a helper routine that allocates a new string value // to store v and returns a pointer to it. func String(v string) *string { return &v } + +// roundTripperFunc creates a RoundTripper (transport) +type roundTripperFunc func(*http.Request) (*http.Response, error) + +func (fn roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { + return fn(r) +} diff --git a/vendor/github.com/google/go-github/v53/github/gitignore.go b/vendor/github.com/google/go-github/v55/github/gitignore.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/gitignore.go rename to vendor/github.com/google/go-github/v55/github/gitignore.go diff --git a/vendor/github.com/google/go-github/v53/github/interactions.go b/vendor/github.com/google/go-github/v55/github/interactions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/interactions.go rename to vendor/github.com/google/go-github/v55/github/interactions.go diff --git a/vendor/github.com/google/go-github/v53/github/interactions_orgs.go b/vendor/github.com/google/go-github/v55/github/interactions_orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/interactions_orgs.go rename to vendor/github.com/google/go-github/v55/github/interactions_orgs.go diff --git a/vendor/github.com/google/go-github/v53/github/interactions_repos.go b/vendor/github.com/google/go-github/v55/github/interactions_repos.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/interactions_repos.go rename to vendor/github.com/google/go-github/v55/github/interactions_repos.go diff --git a/vendor/github.com/google/go-github/v53/github/issue_import.go b/vendor/github.com/google/go-github/v55/github/issue_import.go similarity index 97% rename from vendor/github.com/google/go-github/v53/github/issue_import.go rename to vendor/github.com/google/go-github/v55/github/issue_import.go index 4bc8d5f1d..04899772b 100644 --- a/vendor/github.com/google/go-github/v53/github/issue_import.go +++ b/vendor/github.com/google/go-github/v55/github/issue_import.go @@ -86,14 +86,11 @@ func 
(s *IssueImportService) Create(ctx context.Context, owner, repo string, iss if err != nil { aerr, ok := err.(*AcceptedError) if ok { - decErr := json.Unmarshal(aerr.Raw, i) - if decErr != nil { - err = decErr + if err := json.Unmarshal(aerr.Raw, i); err != nil { + return i, resp, err } - - return i, resp, nil + return i, resp, err } - return nil, resp, err } diff --git a/vendor/github.com/google/go-github/v53/github/issues.go b/vendor/github.com/google/go-github/v55/github/issues.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues.go rename to vendor/github.com/google/go-github/v55/github/issues.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_assignees.go b/vendor/github.com/google/go-github/v55/github/issues_assignees.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_assignees.go rename to vendor/github.com/google/go-github/v55/github/issues_assignees.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_comments.go b/vendor/github.com/google/go-github/v55/github/issues_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_comments.go rename to vendor/github.com/google/go-github/v55/github/issues_comments.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_events.go b/vendor/github.com/google/go-github/v55/github/issues_events.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_events.go rename to vendor/github.com/google/go-github/v55/github/issues_events.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_labels.go b/vendor/github.com/google/go-github/v55/github/issues_labels.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_labels.go rename to vendor/github.com/google/go-github/v55/github/issues_labels.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_milestones.go b/vendor/github.com/google/go-github/v55/github/issues_milestones.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_milestones.go rename to vendor/github.com/google/go-github/v55/github/issues_milestones.go diff --git a/vendor/github.com/google/go-github/v53/github/issues_timeline.go b/vendor/github.com/google/go-github/v55/github/issues_timeline.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/issues_timeline.go rename to vendor/github.com/google/go-github/v55/github/issues_timeline.go diff --git a/vendor/github.com/google/go-github/v53/github/licenses.go b/vendor/github.com/google/go-github/v55/github/licenses.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/licenses.go rename to vendor/github.com/google/go-github/v55/github/licenses.go diff --git a/vendor/github.com/google/go-github/v53/github/messages.go b/vendor/github.com/google/go-github/v55/github/messages.go similarity index 61% rename from vendor/github.com/google/go-github/v53/github/messages.go rename to vendor/github.com/google/go-github/v55/github/messages.go index bb5ae3f38..c16b60152 100644 --- a/vendor/github.com/google/go-github/v53/github/messages.go +++ b/vendor/github.com/google/go-github/v55/github/messages.go @@ -22,6 +22,8 @@ import ( "mime" "net/http" "net/url" + "reflect" + "sort" "strings" ) @@ -43,69 +45,87 @@ const ( var ( // eventTypeMapping maps webhooks types to their corresponding go-github struct types. 
- eventTypeMapping = map[string]string{ - "branch_protection_rule": "BranchProtectionRuleEvent", - "check_run": "CheckRunEvent", - "check_suite": "CheckSuiteEvent", - "code_scanning_alert": "CodeScanningAlertEvent", - "commit_comment": "CommitCommentEvent", - "content_reference": "ContentReferenceEvent", - "create": "CreateEvent", - "delete": "DeleteEvent", - "deploy_key": "DeployKeyEvent", - "deployment": "DeploymentEvent", - "deployment_status": "DeploymentStatusEvent", - "deployment_protection_rule": "DeploymentProtectionRuleEvent", - "discussion": "DiscussionEvent", - "discussion_comment": "DiscussionCommentEvent", - "fork": "ForkEvent", - "github_app_authorization": "GitHubAppAuthorizationEvent", - "gollum": "GollumEvent", - "installation": "InstallationEvent", - "installation_repositories": "InstallationRepositoriesEvent", - "issue_comment": "IssueCommentEvent", - "issues": "IssuesEvent", - "label": "LabelEvent", - "marketplace_purchase": "MarketplacePurchaseEvent", - "member": "MemberEvent", - "membership": "MembershipEvent", - "merge_group": "MergeGroupEvent", - "meta": "MetaEvent", - "milestone": "MilestoneEvent", - "organization": "OrganizationEvent", - "org_block": "OrgBlockEvent", - "package": "PackageEvent", - "page_build": "PageBuildEvent", - "ping": "PingEvent", - "project": "ProjectEvent", - "project_card": "ProjectCardEvent", - "project_column": "ProjectColumnEvent", - "public": "PublicEvent", - "pull_request": "PullRequestEvent", - "pull_request_review": "PullRequestReviewEvent", - "pull_request_review_comment": "PullRequestReviewCommentEvent", - "pull_request_review_thread": "PullRequestReviewThreadEvent", - "pull_request_target": "PullRequestTargetEvent", - "push": "PushEvent", - "repository": "RepositoryEvent", - "repository_dispatch": "RepositoryDispatchEvent", - "repository_import": "RepositoryImportEvent", - "repository_vulnerability_alert": "RepositoryVulnerabilityAlertEvent", - "release": "ReleaseEvent", - "secret_scanning_alert": "SecretScanningAlertEvent", - "security_advisory": "SecurityAdvisoryEvent", - "star": "StarEvent", - "status": "StatusEvent", - "team": "TeamEvent", - "team_add": "TeamAddEvent", - "user": "UserEvent", - "watch": "WatchEvent", - "workflow_dispatch": "WorkflowDispatchEvent", - "workflow_job": "WorkflowJobEvent", - "workflow_run": "WorkflowRunEvent", + eventTypeMapping = map[string]interface{}{ + "branch_protection_rule": &BranchProtectionRuleEvent{}, + "check_run": &CheckRunEvent{}, + "check_suite": &CheckSuiteEvent{}, + "code_scanning_alert": &CodeScanningAlertEvent{}, + "commit_comment": &CommitCommentEvent{}, + "content_reference": &ContentReferenceEvent{}, + "create": &CreateEvent{}, + "delete": &DeleteEvent{}, + "dependabot_alert": &DependabotAlertEvent{}, + "deploy_key": &DeployKeyEvent{}, + "deployment": &DeploymentEvent{}, + "deployment_status": &DeploymentStatusEvent{}, + "deployment_protection_rule": &DeploymentProtectionRuleEvent{}, + "discussion": &DiscussionEvent{}, + "discussion_comment": &DiscussionCommentEvent{}, + "fork": &ForkEvent{}, + "github_app_authorization": &GitHubAppAuthorizationEvent{}, + "gollum": &GollumEvent{}, + "installation": &InstallationEvent{}, + "installation_repositories": &InstallationRepositoriesEvent{}, + "installation_target": &InstallationTargetEvent{}, + "issue_comment": &IssueCommentEvent{}, + "issues": &IssuesEvent{}, + "label": &LabelEvent{}, + "marketplace_purchase": &MarketplacePurchaseEvent{}, + "member": &MemberEvent{}, + "membership": &MembershipEvent{}, + "merge_group": 
&MergeGroupEvent{}, + "meta": &MetaEvent{}, + "milestone": &MilestoneEvent{}, + "organization": &OrganizationEvent{}, + "org_block": &OrgBlockEvent{}, + "package": &PackageEvent{}, + "page_build": &PageBuildEvent{}, + "personal_access_token_request": &PersonalAccessTokenRequestEvent{}, + "ping": &PingEvent{}, + "project": &ProjectEvent{}, + "project_card": &ProjectCardEvent{}, + "project_column": &ProjectColumnEvent{}, + "projects_v2": &ProjectV2Event{}, + "projects_v2_item": &ProjectV2ItemEvent{}, + "public": &PublicEvent{}, + "pull_request": &PullRequestEvent{}, + "pull_request_review": &PullRequestReviewEvent{}, + "pull_request_review_comment": &PullRequestReviewCommentEvent{}, + "pull_request_review_thread": &PullRequestReviewThreadEvent{}, + "pull_request_target": &PullRequestTargetEvent{}, + "push": &PushEvent{}, + "repository": &RepositoryEvent{}, + "repository_dispatch": &RepositoryDispatchEvent{}, + "repository_import": &RepositoryImportEvent{}, + "repository_vulnerability_alert": &RepositoryVulnerabilityAlertEvent{}, + "release": &ReleaseEvent{}, + "secret_scanning_alert": &SecretScanningAlertEvent{}, + "security_advisory": &SecurityAdvisoryEvent{}, + "security_and_analysis": &SecurityAndAnalysisEvent{}, + "star": &StarEvent{}, + "status": &StatusEvent{}, + "team": &TeamEvent{}, + "team_add": &TeamAddEvent{}, + "user": &UserEvent{}, + "watch": &WatchEvent{}, + "workflow_dispatch": &WorkflowDispatchEvent{}, + "workflow_job": &WorkflowJobEvent{}, + "workflow_run": &WorkflowRunEvent{}, } + // forward mapping of event types to the string names of the structs + messageToTypeName = make(map[string]string, len(eventTypeMapping)) + // Inverse map of the above + typeToMessageMapping = make(map[string]string, len(eventTypeMapping)) ) +func init() { + for k, v := range eventTypeMapping { + typename := reflect.TypeOf(v).Elem().Name() + messageToTypeName[k] = typename + typeToMessageMapping[typename] = k + } +} + // genMAC generates the HMAC signature for a message provided the secret key // and hashFunc. func genMAC(message, key []byte, hashFunc func() hash.Hash) []byte { @@ -294,7 +314,7 @@ func DeliveryID(r *http.Request) string { // } // } func ParseWebHook(messageType string, payload []byte) (interface{}, error) { - eventType, ok := eventTypeMapping[messageType] + eventType, ok := messageToTypeName[messageType] if !ok { return nil, fmt.Errorf("unknown X-Github-Event in message: %v", messageType) } @@ -305,3 +325,28 @@ func ParseWebHook(messageType string, payload []byte) (interface{}, error) { } return event.ParsePayload() } + +// WebhookTypes returns a sorted list of all the known GitHub event type strings +// supported by go-github. +func MessageTypes() []string { + types := make([]string, 0, len(eventTypeMapping)) + for t := range eventTypeMapping { + types = append(types, t) + } + sort.Strings(types) + return types +} + +// EventForType returns an empty struct matching the specified GitHub event type. +// If messageType does not match any known event types, it returns nil. +func EventForType(messageType string) interface{} { + prototype := eventTypeMapping[messageType] + if prototype == nil { + return nil + } + // return a _copy_ of the pointed-to-object. Unfortunately, for this we + // need to use reflection. If we store the actual objects in the map, + // we still need to use reflection to convert from `any` to the actual + // type, so this was deemed the lesser of two evils. 
(#2865) + return reflect.New(reflect.TypeOf(prototype).Elem()).Interface() +} diff --git a/vendor/github.com/google/go-github/v53/github/migrations.go b/vendor/github.com/google/go-github/v55/github/migrations.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/migrations.go rename to vendor/github.com/google/go-github/v55/github/migrations.go diff --git a/vendor/github.com/google/go-github/v53/github/migrations_source_import.go b/vendor/github.com/google/go-github/v55/github/migrations_source_import.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/migrations_source_import.go rename to vendor/github.com/google/go-github/v55/github/migrations_source_import.go diff --git a/vendor/github.com/google/go-github/v53/github/migrations_user.go b/vendor/github.com/google/go-github/v55/github/migrations_user.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/migrations_user.go rename to vendor/github.com/google/go-github/v55/github/migrations_user.go diff --git a/vendor/github.com/google/go-github/v53/github/misc.go b/vendor/github.com/google/go-github/v55/github/misc.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/misc.go rename to vendor/github.com/google/go-github/v55/github/misc.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs.go b/vendor/github.com/google/go-github/v55/github/orgs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs.go rename to vendor/github.com/google/go-github/v55/github/orgs.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v55/github/orgs_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_actions_allowed.go rename to vendor/github.com/google/go-github/v55/github/orgs_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v55/github/orgs_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_actions_permissions.go rename to vendor/github.com/google/go-github/v55/github/orgs_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v55/github/orgs_audit_log.go similarity index 96% rename from vendor/github.com/google/go-github/v53/github/orgs_audit_log.go rename to vendor/github.com/google/go-github/v55/github/orgs_audit_log.go index e2e4692e5..4c34445fa 100644 --- a/vendor/github.com/google/go-github/v53/github/orgs_audit_log.go +++ b/vendor/github.com/google/go-github/v55/github/orgs_audit_log.go @@ -121,6 +121,14 @@ type AuditEntry struct { Visibility *string `json:"visibility,omitempty"` // The repository visibility, for example `public` or `private`. WorkflowID *int64 `json:"workflow_id,omitempty"` WorkflowRunID *int64 `json:"workflow_run_id,omitempty"` + + Data *AuditEntryData `json:"data,omitempty"` +} + +// AuditEntryData represents additional information stuffed into a `data` field. +type AuditEntryData struct { + OldName *string `json:"old_name,omitempty"` // The previous name of the repository, for a name change + OldLogin *string `json:"old_login,omitempty"` // The previous name of the organization, for a name change } // GetAuditLog gets the audit-log entries for an organization. 
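The messages.go rework above swaps the string-valued eventTypeMapping for struct prototypes and exports two new helpers, MessageTypes and EventForType. Below is a minimal consumer sketch, assuming the v55 module path; the handler route, port, and event cases are illustrative and not part of this patch.
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"

	"github.com/google/go-github/v55/github"
)

func handleWebhook(w http.ResponseWriter, r *http.Request) {
	// Production code would validate the payload signature first (e.g. with
	// github.ValidatePayload); reading the raw body keeps this sketch short.
	payload, err := io.ReadAll(r.Body)
	if err != nil {
		http.Error(w, "cannot read body", http.StatusBadRequest)
		return
	}
	// WebHookType reads the X-GitHub-Event header; ParseWebHook resolves the
	// concrete event struct through the rebuilt eventTypeMapping shown above.
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	switch e := event.(type) {
	case *github.PushEvent:
		fmt.Fprintf(w, "push to %s\n", e.GetRepo().GetFullName())
	case *github.PullRequestEvent:
		fmt.Fprintf(w, "pull request %s\n", e.GetAction())
	default:
		fmt.Fprintf(w, "unhandled event %T\n", e)
	}
}

func main() {
	// The two new exported helpers: MessageTypes lists every registered webhook
	// type; EventForType returns a fresh, empty event struct for one of them.
	log.Printf("known webhook types: %d", len(github.MessageTypes()))
	log.Printf("prototype for %q: %T", "issues", github.EventForType("issues"))

	http.HandleFunc("/webhook", handleWebhook)
	log.Fatal(http.ListenAndServe(":8080", nil))
}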
diff --git a/vendor/github.com/google/go-github/v55/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v55/github/orgs_credential_authorizations.go new file mode 100644 index 000000000..6d56fe8f7 --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/orgs_credential_authorizations.go @@ -0,0 +1,95 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" +) + +// CredentialAuthorization represents a credential authorized through SAML SSO. +type CredentialAuthorization struct { + // User login that owns the underlying credential. + Login *string `json:"login,omitempty"` + + // Unique identifier for the credential. + CredentialID *int64 `json:"credential_id,omitempty"` + + // Human-readable description of the credential type. + CredentialType *string `json:"credential_type,omitempty"` + + // Last eight characters of the credential. + // Only included in responses with credential_type of personal access token. + TokenLastEight *string `json:"token_last_eight,omitempty"` + + // Date when the credential was authorized for use. + CredentialAuthorizedAt *Timestamp `json:"credential_authorized_at,omitempty"` + + // Date when the credential was last accessed. + // May be null if it was never accessed. + CredentialAccessedAt *Timestamp `json:"credential_accessed_at,omitempty"` + + // List of oauth scopes the token has been granted. + Scopes []string `json:"scopes,omitempty"` + + // Unique string to distinguish the credential. + // Only included in responses with credential_type of SSH Key. + Fingerprint *string `json:"fingerprint,omitempty"` + + AuthorizedCredentialID *int64 `json:"authorized_credential_id,omitempty"` + + // The title given to the ssh key. + // This will only be present when the credential is an ssh key. + AuthorizedCredentialTitle *string `json:"authorized_credential_title,omitempty"` + + // The note given to the token. + // This will only be present when the credential is a token. + AuthorizedCredentialNote *string `json:"authorized_credential_note,omitempty"` + + // The expiry for the token. + // This will only be present when the credential is a token. + AuthorizedCredentialExpiresAt *Timestamp `json:"authorized_credential_expires_at,omitempty"` +} + +// ListCredentialAuthorizations lists credentials authorized through SAML SSO +// for a given organization. Only available with GitHub Enterprise Cloud. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/orgs/orgs?apiVersion=2022-11-28#list-saml-sso-authorizations-for-an-organization +func (s *OrganizationsService) ListCredentialAuthorizations(ctx context.Context, org string, opts *ListOptions) ([]*CredentialAuthorization, *Response, error) { + u := fmt.Sprintf("orgs/%v/credential-authorizations", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest(http.MethodGet, u, nil) + if err != nil { + return nil, nil, err + } + + var creds []*CredentialAuthorization + resp, err := s.client.Do(ctx, req, &creds) + if err != nil { + return nil, resp, err + } + + return creds, resp, nil +} + +// RemoveCredentialAuthorization revokes the SAML SSO authorization for a given +// credential within an organization. Only available with GitHub Enterprise Cloud. 
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/orgs/orgs?apiVersion=2022-11-28#remove-a-saml-sso-authorization-for-an-organization +func (s *OrganizationsService) RemoveCredentialAuthorization(ctx context.Context, org string, credentialID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/credential-authorizations/%v", org, credentialID) + req, err := s.client.NewRequest(http.MethodDelete, u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v53/github/orgs_custom_roles.go b/vendor/github.com/google/go-github/v55/github/orgs_custom_roles.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_custom_roles.go rename to vendor/github.com/google/go-github/v55/github/orgs_custom_roles.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_hooks.go b/vendor/github.com/google/go-github/v55/github/orgs_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_hooks.go rename to vendor/github.com/google/go-github/v55/github/orgs_hooks.go diff --git a/vendor/github.com/google/go-github/v55/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v55/github/orgs_hooks_configuration.go new file mode 100644 index 000000000..953a2329c --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/orgs_hooks_configuration.go @@ -0,0 +1,49 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetHookConfiguration returns the configuration for the specified organization webhook. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks?apiVersion=2022-11-28#get-a-webhook-configuration-for-an-organization +func (s *OrganizationsService) GetHookConfiguration(ctx context.Context, org string, id int64) (*HookConfig, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + config := new(HookConfig) + resp, err := s.client.Do(ctx, req, config) + if err != nil { + return nil, resp, err + } + + return config, resp, nil +} + +// EditHookConfiguration updates the configuration for the specified organization webhook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks?apiVersion=2022-11-28#update-a-webhook-configuration-for-an-organization +func (s *OrganizationsService) EditHookConfiguration(ctx context.Context, org string, id int64, config *HookConfig) (*HookConfig, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) + req, err := s.client.NewRequest("PATCH", u, config) + if err != nil { + return nil, nil, err + } + + c := new(HookConfig) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} diff --git a/vendor/github.com/google/go-github/v53/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v55/github/orgs_hooks_deliveries.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_hooks_deliveries.go rename to vendor/github.com/google/go-github/v55/github/orgs_hooks_deliveries.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_members.go b/vendor/github.com/google/go-github/v55/github/orgs_members.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_members.go rename to vendor/github.com/google/go-github/v55/github/orgs_members.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v55/github/orgs_outside_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_outside_collaborators.go rename to vendor/github.com/google/go-github/v55/github/orgs_outside_collaborators.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_packages.go b/vendor/github.com/google/go-github/v55/github/orgs_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_packages.go rename to vendor/github.com/google/go-github/v55/github/orgs_packages.go diff --git a/vendor/github.com/google/go-github/v55/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v55/github/orgs_personal_access_tokens.go new file mode 100644 index 000000000..c30ff2843 --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/orgs_personal_access_tokens.go @@ -0,0 +1,34 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" +) + +// ReviewPersonalAccessTokenRequestOptions specifies the parameters to the ReviewPersonalAccessTokenRequest method. +type ReviewPersonalAccessTokenRequestOptions struct { + Action string `json:"action"` + Reason *string `json:"reason,omitempty"` +} + +// ReviewPersonalAccessTokenRequest approves or denies a pending request to access organization resources via a fine-grained personal access token. +// Only GitHub Apps can call this API, using the `organization_personal_access_token_requests: write` permission. +// `action` can be one of `approve` or `deny`. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/personal-access-tokens?apiVersion=2022-11-28#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token +func (s *OrganizationsService) ReviewPersonalAccessTokenRequest(ctx context.Context, org string, requestID int64, opts ReviewPersonalAccessTokenRequestOptions) (*Response, error) { + u := fmt.Sprintf("orgs/%v/personal-access-token-requests/%v", org, requestID) + + req, err := s.client.NewRequest(http.MethodPost, u, &opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v53/github/orgs_projects.go b/vendor/github.com/google/go-github/v55/github/orgs_projects.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_projects.go rename to vendor/github.com/google/go-github/v55/github/orgs_projects.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_rules.go b/vendor/github.com/google/go-github/v55/github/orgs_rules.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_rules.go rename to vendor/github.com/google/go-github/v55/github/orgs_rules.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v55/github/orgs_security_managers.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_security_managers.go rename to vendor/github.com/google/go-github/v55/github/orgs_security_managers.go diff --git a/vendor/github.com/google/go-github/v53/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v55/github/orgs_users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/orgs_users_blocking.go rename to vendor/github.com/google/go-github/v55/github/orgs_users_blocking.go diff --git a/vendor/github.com/google/go-github/v53/github/packages.go b/vendor/github.com/google/go-github/v55/github/packages.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/packages.go rename to vendor/github.com/google/go-github/v55/github/packages.go diff --git a/vendor/github.com/google/go-github/v53/github/projects.go b/vendor/github.com/google/go-github/v55/github/projects.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/projects.go rename to vendor/github.com/google/go-github/v55/github/projects.go diff --git a/vendor/github.com/google/go-github/v53/github/pulls.go b/vendor/github.com/google/go-github/v55/github/pulls.go similarity index 99% rename from vendor/github.com/google/go-github/v53/github/pulls.go rename to vendor/github.com/google/go-github/v55/github/pulls.go index 6e49eba2f..29549e24b 100644 --- a/vendor/github.com/google/go-github/v53/github/pulls.go +++ b/vendor/github.com/google/go-github/v55/github/pulls.go @@ -170,7 +170,7 @@ func (s *PullRequestsService) List(ctx context.Context, owner string, repo strin // By default, the PullRequestListOptions State filters for "open". 
// // GitHub API docs: https://docs.github.com/en/rest/commits/commits#list-pull-requests-associated-with-a-commit -func (s *PullRequestsService) ListPullRequestsWithCommit(ctx context.Context, owner, repo, sha string, opts *PullRequestListOptions) ([]*PullRequest, *Response, error) { +func (s *PullRequestsService) ListPullRequestsWithCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*PullRequest, *Response, error) { u := fmt.Sprintf("repos/%v/%v/commits/%v/pulls", owner, repo, sha) u, err := addOptions(u, opts) if err != nil { @@ -244,6 +244,7 @@ func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo str type NewPullRequest struct { Title *string `json:"title,omitempty"` Head *string `json:"head,omitempty"` + HeadRepo *string `json:"head_repo,omitempty"` Base *string `json:"base,omitempty"` Body *string `json:"body,omitempty"` Issue *int `json:"issue,omitempty"` diff --git a/vendor/github.com/google/go-github/v53/github/pulls_comments.go b/vendor/github.com/google/go-github/v55/github/pulls_comments.go similarity index 97% rename from vendor/github.com/google/go-github/v53/github/pulls_comments.go rename to vendor/github.com/google/go-github/v55/github/pulls_comments.go index 1f6b726d8..ea1cf45d1 100644 --- a/vendor/github.com/google/go-github/v53/github/pulls_comments.go +++ b/vendor/github.com/google/go-github/v55/github/pulls_comments.go @@ -41,6 +41,8 @@ type PullRequestComment struct { URL *string `json:"url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` PullRequestURL *string `json:"pull_request_url,omitempty"` + // Can be one of: LINE, FILE from https://docs.github.com/en/rest/pulls/comments?apiVersion=2022-11-28#create-a-review-comment-for-a-pull-request + SubjectType *string `json:"subject_type,omitempty"` } func (p PullRequestComment) String() string { diff --git a/vendor/github.com/google/go-github/v53/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v55/github/pulls_reviewers.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/pulls_reviewers.go rename to vendor/github.com/google/go-github/v55/github/pulls_reviewers.go diff --git a/vendor/github.com/google/go-github/v53/github/pulls_reviews.go b/vendor/github.com/google/go-github/v55/github/pulls_reviews.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/pulls_reviews.go rename to vendor/github.com/google/go-github/v55/github/pulls_reviews.go diff --git a/vendor/github.com/google/go-github/v53/github/pulls_threads.go b/vendor/github.com/google/go-github/v55/github/pulls_threads.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/pulls_threads.go rename to vendor/github.com/google/go-github/v55/github/pulls_threads.go diff --git a/vendor/github.com/google/go-github/v53/github/reactions.go b/vendor/github.com/google/go-github/v55/github/reactions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/reactions.go rename to vendor/github.com/google/go-github/v55/github/reactions.go diff --git a/vendor/github.com/google/go-github/v53/github/repos.go b/vendor/github.com/google/go-github/v55/github/repos.go similarity index 97% rename from vendor/github.com/google/go-github/v53/github/repos.go rename to vendor/github.com/google/go-github/v55/github/repos.go index 5ffad6dd3..83fc3f8e7 100644 --- a/vendor/github.com/google/go-github/v53/github/repos.go +++ b/vendor/github.com/google/go-github/v55/github/repos.go @@ -210,6 +210,7 @@ type 
SecurityAndAnalysis struct { AdvancedSecurity *AdvancedSecurity `json:"advanced_security,omitempty"` SecretScanning *SecretScanning `json:"secret_scanning,omitempty"` SecretScanningPushProtection *SecretScanningPushProtection `json:"secret_scanning_push_protection,omitempty"` + DependabotSecurityUpdates *DependabotSecurityUpdates `json:"dependabot_security_updates,omitempty"` } func (s SecurityAndAnalysis) String() string { @@ -245,6 +246,21 @@ type SecretScanningPushProtection struct { Status *string `json:"status,omitempty"` } +func (s SecretScanningPushProtection) String() string { + return Stringify(s) +} + +// DependabotSecurityUpdates specifies the state of Dependabot security updates on a repository. +// +// GitHub API docs: https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates +type DependabotSecurityUpdates struct { + Status *string `json:"status,omitempty"` +} + +func (d DependabotSecurityUpdates) String() string { + return Stringify(d) +} + // List the repositories for a user. Passing the empty string will list // repositories for the authenticated user. // @@ -687,6 +703,25 @@ func (s *RepositoriesService) DisableVulnerabilityAlerts(ctx context.Context, ow return s.client.Do(ctx, req, nil) } +// GetAutomatedSecurityFixes checks if the automated security fixes for a repository are enabled. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos?apiVersion=2022-11-28#check-if-automated-security-fixes-are-enabled-for-a-repository +func (s *RepositoriesService) GetAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*AutomatedSecurityFixes, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + p := new(AutomatedSecurityFixes) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + return p, resp, nil +} + // EnableAutomatedSecurityFixes enables the automated security fixes for a repository. // // GitHub API docs: https://docs.github.com/en/rest/repos/repos#enable-automated-security-fixes @@ -698,9 +733,6 @@ func (s *RepositoriesService) EnableAutomatedSecurityFixes(ctx context.Context, return nil, err } - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredAutomatedSecurityFixesPreview) - return s.client.Do(ctx, req, nil) } @@ -715,9 +747,6 @@ func (s *RepositoriesService) DisableAutomatedSecurityFixes(ctx context.Context, return nil, err } - // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeRequiredAutomatedSecurityFixesPreview) - return s.client.Do(ctx, req, nil) } @@ -1212,6 +1241,12 @@ type SignaturesProtectedBranch struct { Enabled *bool `json:"enabled,omitempty"` } +// AutomatedSecurityFixes represents their status. +type AutomatedSecurityFixes struct { + Enabled *bool `json:"enabled"` + Paused *bool `json:"paused"` +} + // ListBranches lists branches for the specified repository. // // GitHub API docs: https://docs.github.com/en/rest/branches/branches#list-branches @@ -2034,3 +2069,43 @@ func isBranchNotProtected(err error) bool { errorResponse, ok := err.(*ErrorResponse) return ok && errorResponse.Message == githubBranchNotProtected } + +// EnablePrivateReporting enables private reporting of vulnerabilities for a +// repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#enable-private-vulnerability-reporting-for-a-repository +func (s *RepositoriesService) EnablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// DisablePrivateReporting disables private reporting of vulnerabilities for a +// repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#disable-private-vulnerability-reporting-for-a-repository +func (s *RepositoriesService) DisablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v53/github/repos_actions_access.go b/vendor/github.com/google/go-github/v55/github/repos_actions_access.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_actions_access.go rename to vendor/github.com/google/go-github/v55/github/repos_actions_access.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v55/github/repos_actions_allowed.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_actions_allowed.go rename to vendor/github.com/google/go-github/v55/github/repos_actions_allowed.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v55/github/repos_actions_permissions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_actions_permissions.go rename to vendor/github.com/google/go-github/v55/github/repos_actions_permissions.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_autolinks.go b/vendor/github.com/google/go-github/v55/github/repos_autolinks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_autolinks.go rename to vendor/github.com/google/go-github/v55/github/repos_autolinks.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_codeowners.go b/vendor/github.com/google/go-github/v55/github/repos_codeowners.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_codeowners.go rename to vendor/github.com/google/go-github/v55/github/repos_codeowners.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_collaborators.go b/vendor/github.com/google/go-github/v55/github/repos_collaborators.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_collaborators.go rename to vendor/github.com/google/go-github/v55/github/repos_collaborators.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_comments.go b/vendor/github.com/google/go-github/v55/github/repos_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_comments.go rename to vendor/github.com/google/go-github/v55/github/repos_comments.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_commits.go 
b/vendor/github.com/google/go-github/v55/github/repos_commits.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_commits.go rename to vendor/github.com/google/go-github/v55/github/repos_commits.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_community_health.go b/vendor/github.com/google/go-github/v55/github/repos_community_health.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_community_health.go rename to vendor/github.com/google/go-github/v55/github/repos_community_health.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_contents.go b/vendor/github.com/google/go-github/v55/github/repos_contents.go similarity index 95% rename from vendor/github.com/google/go-github/v53/github/repos_contents.go rename to vendor/github.com/google/go-github/v55/github/repos_contents.go index 874a32772..de7a0d5b8 100644 --- a/vendor/github.com/google/go-github/v53/github/repos_contents.go +++ b/vendor/github.com/google/go-github/v55/github/repos_contents.go @@ -21,6 +21,8 @@ import ( "strings" ) +var ErrPathForbidden = errors.New("path must not contain '..' due to auth vulnerability issue") + // RepositoryContent represents a file or directory in a github repository. type RepositoryContent struct { Type *string `json:"type,omitempty"` @@ -34,12 +36,13 @@ type RepositoryContent struct { // Content contains the actual file content, which may be encoded. // Callers should call GetContent which will decode the content if // necessary. - Content *string `json:"content,omitempty"` - SHA *string `json:"sha,omitempty"` - URL *string `json:"url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - DownloadURL *string `json:"download_url,omitempty"` + Content *string `json:"content,omitempty"` + SHA *string `json:"sha,omitempty"` + URL *string `json:"url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + DownloadURL *string `json:"download_url,omitempty"` + SubmoduleGitURL *string `json:"submodule_git_url,omitempty"` } // RepositoryContentResponse holds the parsed response from CreateFile, UpdateFile, and DeleteFile. @@ -198,7 +201,7 @@ func (s *RepositoriesService) DownloadContentsWithMeta(ctx context.Context, owne // GitHub API docs: https://docs.github.com/en/rest/repos/contents#get-repository-content func (s *RepositoriesService) GetContents(ctx context.Context, owner, repo, path string, opts *RepositoryContentGetOptions) (fileContent *RepositoryContent, directoryContent []*RepositoryContent, resp *Response, err error) { if strings.Contains(path, "..") { - return nil, nil, nil, errors.New("path must not contain '..' 
due to auth vulnerability issue") + return nil, nil, nil, ErrPathForbidden } escapedPath := (&url.URL{Path: strings.TrimSuffix(path, "/")}).String() diff --git a/vendor/github.com/google/go-github/v53/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v55/github/repos_deployment_branch_policies.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_deployment_branch_policies.go rename to vendor/github.com/google/go-github/v55/github/repos_deployment_branch_policies.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_deployments.go b/vendor/github.com/google/go-github/v55/github/repos_deployments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_deployments.go rename to vendor/github.com/google/go-github/v55/github/repos_deployments.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_environments.go b/vendor/github.com/google/go-github/v55/github/repos_environments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_environments.go rename to vendor/github.com/google/go-github/v55/github/repos_environments.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_forks.go b/vendor/github.com/google/go-github/v55/github/repos_forks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_forks.go rename to vendor/github.com/google/go-github/v55/github/repos_forks.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_hooks.go b/vendor/github.com/google/go-github/v55/github/repos_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_hooks.go rename to vendor/github.com/google/go-github/v55/github/repos_hooks.go diff --git a/vendor/github.com/google/go-github/v55/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v55/github/repos_hooks_configuration.go new file mode 100644 index 000000000..5aadfb645 --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/repos_hooks_configuration.go @@ -0,0 +1,49 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetHookConfiguration returns the configuration for the specified repository webhook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-config?apiVersion=2022-11-28#get-a-webhook-configuration-for-a-repository +func (s *RepositoriesService) GetHookConfiguration(ctx context.Context, owner, repo string, id int64) (*HookConfig, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + config := new(HookConfig) + resp, err := s.client.Do(ctx, req, config) + if err != nil { + return nil, resp, err + } + + return config, resp, nil +} + +// EditHookConfiguration updates the configuration for the specified repository webhook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-config?apiVersion=2022-11-28#update-a-webhook-configuration-for-a-repository +func (s *RepositoriesService) EditHookConfiguration(ctx context.Context, owner, repo string, id int64, config *HookConfig) (*HookConfig, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, config) + if err != nil { + return nil, nil, err + } + + c := new(HookConfig) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} diff --git a/vendor/github.com/google/go-github/v53/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v55/github/repos_hooks_deliveries.go similarity index 97% rename from vendor/github.com/google/go-github/v53/github/repos_hooks_deliveries.go rename to vendor/github.com/google/go-github/v55/github/repos_hooks_deliveries.go index cbd2d3819..5f2f8807e 100644 --- a/vendor/github.com/google/go-github/v53/github/repos_hooks_deliveries.go +++ b/vendor/github.com/google/go-github/v55/github/repos_hooks_deliveries.go @@ -126,9 +126,9 @@ func (s *RepositoriesService) RedeliverHookDelivery(ctx context.Context, owner, // ParseRequestPayload parses the request payload. For recognized event types, // a value of the corresponding struct type will be returned. func (d *HookDelivery) ParseRequestPayload() (interface{}, error) { - eType, ok := eventTypeMapping[*d.Event] + eType, ok := messageToTypeName[d.GetEvent()] if !ok { - return nil, fmt.Errorf("unsupported event type %q", *d.Event) + return nil, fmt.Errorf("unsupported event type %q", d.GetEvent()) } e := &Event{Type: &eType, RawPayload: d.Request.RawPayload} diff --git a/vendor/github.com/google/go-github/v53/github/repos_invitations.go b/vendor/github.com/google/go-github/v55/github/repos_invitations.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_invitations.go rename to vendor/github.com/google/go-github/v55/github/repos_invitations.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_keys.go b/vendor/github.com/google/go-github/v55/github/repos_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_keys.go rename to vendor/github.com/google/go-github/v55/github/repos_keys.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_lfs.go b/vendor/github.com/google/go-github/v55/github/repos_lfs.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_lfs.go rename to vendor/github.com/google/go-github/v55/github/repos_lfs.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_merging.go b/vendor/github.com/google/go-github/v55/github/repos_merging.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_merging.go rename to vendor/github.com/google/go-github/v55/github/repos_merging.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_pages.go b/vendor/github.com/google/go-github/v55/github/repos_pages.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_pages.go rename to vendor/github.com/google/go-github/v55/github/repos_pages.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v55/github/repos_prereceive_hooks.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_prereceive_hooks.go rename 
to vendor/github.com/google/go-github/v55/github/repos_prereceive_hooks.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_projects.go b/vendor/github.com/google/go-github/v55/github/repos_projects.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_projects.go rename to vendor/github.com/google/go-github/v55/github/repos_projects.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_releases.go b/vendor/github.com/google/go-github/v55/github/repos_releases.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_releases.go rename to vendor/github.com/google/go-github/v55/github/repos_releases.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_rules.go b/vendor/github.com/google/go-github/v55/github/repos_rules.go similarity index 91% rename from vendor/github.com/google/go-github/v53/github/repos_rules.go rename to vendor/github.com/google/go-github/v55/github/repos_rules.go index 9299d3e7f..7f964fe66 100644 --- a/vendor/github.com/google/go-github/v53/github/repos_rules.go +++ b/vendor/github.com/google/go-github/v55/github/repos_rules.go @@ -14,8 +14,10 @@ import ( // BypassActor represents the bypass actors from a ruleset. type BypassActor struct { ActorID *int64 `json:"actor_id,omitempty"` - // Possible values for ActorType are: Team, Integration + // Possible values for ActorType are: RepositoryRole, Team, Integration, OrganizationAdmin ActorType *string `json:"actor_type,omitempty"` + // Possible values for BypassMode are: always, pull_request + BypassMode *string `json:"bypass_mode,omitempty"` } // RulesetLink represents a single link object from GitHub ruleset request _links. @@ -34,17 +36,24 @@ type RulesetRefConditionParameters struct { Exclude []string `json:"exclude"` } -// RulesetRepositoryConditionParameters represents the conditions object for repository_names. -type RulesetRepositoryConditionParameters struct { - Include []string `json:"include,omitempty"` - Exclude []string `json:"exclude,omitempty"` +// RulesetRepositoryNamesConditionParameters represents the conditions object for repository_names. +type RulesetRepositoryNamesConditionParameters struct { + Include []string `json:"include"` + Exclude []string `json:"exclude"` Protected *bool `json:"protected,omitempty"` } +// RulesetRepositoryIDsConditionParameters represents the conditions object for repository_ids. +type RulesetRepositoryIDsConditionParameters struct { + RepositoryIDs []int64 `json:"repository_ids,omitempty"` +} + // RulesetCondition represents the conditions object in a ruleset. +// Set either RepositoryName or RepositoryID, not both. type RulesetConditions struct { - RefName *RulesetRefConditionParameters `json:"ref_name,omitempty"` - RepositoryName *RulesetRepositoryConditionParameters `json:"repository_name,omitempty"` + RefName *RulesetRefConditionParameters `json:"ref_name,omitempty"` + RepositoryName *RulesetRepositoryNamesConditionParameters `json:"repository_name,omitempty"` + RepositoryID *RulesetRepositoryIDsConditionParameters `json:"repository_id,omitempty"` } // RulePatternParameters represents the rule pattern parameters. 
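The repos_rules.go hunk above splits the ruleset condition parameters into separate repository_name and repository_id types. A hedged sketch of building the new conditions value follows, assuming the v55 module path; the "~DEFAULT_BRANCH" and "~ALL" include patterns are GitHub ruleset conventions used here for illustration only.
package main

import (
	"fmt"

	"github.com/google/go-github/v55/github"
)

func main() {
	protected := true
	// Scope the ruleset to the default branch of repositories matched by name.
	// Per the comment in the hunk above, set RepositoryName or RepositoryID on
	// RulesetConditions, never both.
	conds := &github.RulesetConditions{
		RefName: &github.RulesetRefConditionParameters{
			Include: []string{"~DEFAULT_BRANCH"},
			Exclude: []string{},
		},
		RepositoryName: &github.RulesetRepositoryNamesConditionParameters{
			Include:   []string{"~ALL"},
			Exclude:   []string{},
			Protected: &protected,
		},
	}
	fmt.Println(github.Stringify(conds))
}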
@@ -109,6 +118,10 @@ func (r *RepositoryRule) UnmarshalJSON(data []byte) error { case "creation", "deletion", "required_linear_history", "required_signatures", "non_fast_forward": r.Parameters = nil case "update": + if RepositoryRule.Parameters == nil { + r.Parameters = nil + return nil + } params := UpdateAllowsFetchAndMergeRuleParameters{} if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil { return err @@ -118,6 +131,7 @@ func (r *RepositoryRule) UnmarshalJSON(data []byte) error { rawParams := json.RawMessage(bytes) r.Parameters = &rawParams + case "required_deployments": params := RequiredDeploymentEnvironmentsRuleParameters{} if err := json.Unmarshal(*RepositoryRule.Parameters, &params); err != nil { @@ -176,13 +190,18 @@ func NewCreationRule() (rule *RepositoryRule) { // NewUpdateRule creates a rule to only allow users with bypass permission to update matching refs. func NewUpdateRule(params *UpdateAllowsFetchAndMergeRuleParameters) (rule *RepositoryRule) { - bytes, _ := json.Marshal(params) + if params != nil { + bytes, _ := json.Marshal(params) - rawParams := json.RawMessage(bytes) + rawParams := json.RawMessage(bytes) + return &RepositoryRule{ + Type: "update", + Parameters: &rawParams, + } + } return &RepositoryRule{ - Type: "update", - Parameters: &rawParams, + Type: "update", } } @@ -312,7 +331,7 @@ func NewTagNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) // Ruleset represents a GitHub ruleset object. type Ruleset struct { - ID int64 `json:"id"` + ID *int64 `json:"id,omitempty"` Name string `json:"name"` // Possible values for Target are branch, tag Target *string `json:"target,omitempty"` @@ -320,9 +339,7 @@ type Ruleset struct { SourceType *string `json:"source_type,omitempty"` Source string `json:"source"` // Possible values for Enforcement are: disabled, active, evaluate - Enforcement string `json:"enforcement"` - // Possible values for BypassMode are: none, repository, organization - BypassMode *string `json:"bypass_mode,omitempty"` + Enforcement string `json:"enforcement"` BypassActors []*BypassActor `json:"bypass_actors,omitempty"` NodeID *string `json:"node_id,omitempty"` Links *RulesetLinks `json:"_links,omitempty"` diff --git a/vendor/github.com/google/go-github/v53/github/repos_stats.go b/vendor/github.com/google/go-github/v55/github/repos_stats.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_stats.go rename to vendor/github.com/google/go-github/v55/github/repos_stats.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_statuses.go b/vendor/github.com/google/go-github/v55/github/repos_statuses.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_statuses.go rename to vendor/github.com/google/go-github/v55/github/repos_statuses.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_tags.go b/vendor/github.com/google/go-github/v55/github/repos_tags.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_tags.go rename to vendor/github.com/google/go-github/v55/github/repos_tags.go diff --git a/vendor/github.com/google/go-github/v53/github/repos_traffic.go b/vendor/github.com/google/go-github/v55/github/repos_traffic.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/repos_traffic.go rename to vendor/github.com/google/go-github/v55/github/repos_traffic.go diff --git a/vendor/github.com/google/go-github/v53/github/scim.go 
b/vendor/github.com/google/go-github/v55/github/scim.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/scim.go rename to vendor/github.com/google/go-github/v55/github/scim.go diff --git a/vendor/github.com/google/go-github/v53/github/search.go b/vendor/github.com/google/go-github/v55/github/search.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/search.go rename to vendor/github.com/google/go-github/v55/github/search.go diff --git a/vendor/github.com/google/go-github/v53/github/secret_scanning.go b/vendor/github.com/google/go-github/v55/github/secret_scanning.go similarity index 91% rename from vendor/github.com/google/go-github/v53/github/secret_scanning.go rename to vendor/github.com/google/go-github/v55/github/secret_scanning.go index d512560d9..b8295cbf7 100644 --- a/vendor/github.com/google/go-github/v53/github/secret_scanning.go +++ b/vendor/github.com/google/go-github/v55/github/secret_scanning.go @@ -16,17 +16,19 @@ type SecretScanningService service // SecretScanningAlert represents a GitHub secret scanning alert. type SecretScanningAlert struct { - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - URL *string `json:"url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - LocationsURL *string `json:"locations_url,omitempty"` - State *string `json:"state,omitempty"` - Resolution *string `json:"resolution,omitempty"` - ResolvedAt *Timestamp `json:"resolved_at,omitempty"` - ResolvedBy *User `json:"resolved_by,omitempty"` - SecretType *string `json:"secret_type,omitempty"` - Secret *string `json:"secret,omitempty"` + Number *int `json:"number,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + LocationsURL *string `json:"locations_url,omitempty"` + State *string `json:"state,omitempty"` + Resolution *string `json:"resolution,omitempty"` + ResolvedAt *Timestamp `json:"resolved_at,omitempty"` + ResolvedBy *User `json:"resolved_by,omitempty"` + SecretType *string `json:"secret_type,omitempty"` + SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` + Secret *string `json:"secret,omitempty"` + Repository *Repository `json:"repository,omitempty"` } // SecretScanningAlertLocation represents the location for a secret scanning alert. diff --git a/vendor/github.com/google/go-github/v55/github/security_advisories.go b/vendor/github.com/google/go-github/v55/github/security_advisories.go new file mode 100644 index 000000000..a75fce54d --- /dev/null +++ b/vendor/github.com/google/go-github/v55/github/security_advisories.go @@ -0,0 +1,37 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +type SecurityAdvisoriesService service + +// RequestCVE requests a Common Vulnerabilities and Exposures (CVE) for a repository security advisory. +// The ghsaID is the GitHub Security Advisory identifier of the advisory. 
+// +// GitHub API docs: https://docs.github.com/en/rest/security-advisories/repository-advisories#request-a-cve-for-a-repository-security-advisory +func (s *SecurityAdvisoriesService) RequestCVE(ctx context.Context, owner, repo, ghsaID string) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/security-advisories/%v/cve", owner, repo, ghsaID) + + req, err := s.client.NewRequest("POST", url, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + if _, ok := err.(*AcceptedError); ok { + return resp, nil + } + + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v53/github/strings.go b/vendor/github.com/google/go-github/v55/github/strings.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/strings.go rename to vendor/github.com/google/go-github/v55/github/strings.go diff --git a/vendor/github.com/google/go-github/v53/github/teams.go b/vendor/github.com/google/go-github/v55/github/teams.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/teams.go rename to vendor/github.com/google/go-github/v55/github/teams.go diff --git a/vendor/github.com/google/go-github/v53/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v55/github/teams_discussion_comments.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/teams_discussion_comments.go rename to vendor/github.com/google/go-github/v55/github/teams_discussion_comments.go diff --git a/vendor/github.com/google/go-github/v53/github/teams_discussions.go b/vendor/github.com/google/go-github/v55/github/teams_discussions.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/teams_discussions.go rename to vendor/github.com/google/go-github/v55/github/teams_discussions.go diff --git a/vendor/github.com/google/go-github/v53/github/teams_members.go b/vendor/github.com/google/go-github/v55/github/teams_members.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/teams_members.go rename to vendor/github.com/google/go-github/v55/github/teams_members.go diff --git a/vendor/github.com/google/go-github/v53/github/timestamp.go b/vendor/github.com/google/go-github/v55/github/timestamp.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/timestamp.go rename to vendor/github.com/google/go-github/v55/github/timestamp.go diff --git a/vendor/github.com/google/go-github/v53/github/users.go b/vendor/github.com/google/go-github/v55/github/users.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users.go rename to vendor/github.com/google/go-github/v55/github/users.go diff --git a/vendor/github.com/google/go-github/v53/github/users_administration.go b/vendor/github.com/google/go-github/v55/github/users_administration.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_administration.go rename to vendor/github.com/google/go-github/v55/github/users_administration.go diff --git a/vendor/github.com/google/go-github/v53/github/users_blocking.go b/vendor/github.com/google/go-github/v55/github/users_blocking.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_blocking.go rename to vendor/github.com/google/go-github/v55/github/users_blocking.go diff --git a/vendor/github.com/google/go-github/v53/github/users_emails.go b/vendor/github.com/google/go-github/v55/github/users_emails.go 
similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_emails.go rename to vendor/github.com/google/go-github/v55/github/users_emails.go diff --git a/vendor/github.com/google/go-github/v53/github/users_followers.go b/vendor/github.com/google/go-github/v55/github/users_followers.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_followers.go rename to vendor/github.com/google/go-github/v55/github/users_followers.go diff --git a/vendor/github.com/google/go-github/v53/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v55/github/users_gpg_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_gpg_keys.go rename to vendor/github.com/google/go-github/v55/github/users_gpg_keys.go diff --git a/vendor/github.com/google/go-github/v53/github/users_keys.go b/vendor/github.com/google/go-github/v55/github/users_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_keys.go rename to vendor/github.com/google/go-github/v55/github/users_keys.go diff --git a/vendor/github.com/google/go-github/v53/github/users_packages.go b/vendor/github.com/google/go-github/v55/github/users_packages.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_packages.go rename to vendor/github.com/google/go-github/v55/github/users_packages.go diff --git a/vendor/github.com/google/go-github/v53/github/users_projects.go b/vendor/github.com/google/go-github/v55/github/users_projects.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_projects.go rename to vendor/github.com/google/go-github/v55/github/users_projects.go diff --git a/vendor/github.com/google/go-github/v53/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v55/github/users_ssh_signing_keys.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/users_ssh_signing_keys.go rename to vendor/github.com/google/go-github/v55/github/users_ssh_signing_keys.go diff --git a/vendor/github.com/google/go-github/v53/github/with_appengine.go b/vendor/github.com/google/go-github/v55/github/with_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/with_appengine.go rename to vendor/github.com/google/go-github/v55/github/with_appengine.go diff --git a/vendor/github.com/google/go-github/v53/github/without_appengine.go b/vendor/github.com/google/go-github/v55/github/without_appengine.go similarity index 100% rename from vendor/github.com/google/go-github/v53/github/without_appengine.go rename to vendor/github.com/google/go-github/v55/github/without_appengine.go diff --git a/vendor/github.com/google/go-github/v56/AUTHORS b/vendor/github.com/google/go-github/v56/AUTHORS new file mode 100644 index 000000000..74a21dc60 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/AUTHORS @@ -0,0 +1,487 @@ +# This is the official list of go-github authors for copyright purposes. +# +# This does not necessarily list everyone who has contributed code, since in +# some cases, their employer may be the copyright holder. To see the full list +# of contributors, see the revision history in source control or +# https://github.com/google/go-github/graphs/contributors. +# +# Authors who wish to be recognized in this file should add themselves (or +# their employer, as appropriate). 
+ +178inaba +2BFL +413x +6543 <6543@obermui.de> +Abed Kibbe +Abhinav Gupta +Abhishek Veeramalla +aboy +Adam Kohring +adrienzieba +afdesk +Ahmad Nurus S +Ahmed Hagy +Aidan +Aidan Steele +Ainsley Chong +ajz01 +Akeda Bagus +Akhil Mohan +Alec Thomas +Aleks Clark +Alex Bramley +Alex Ellis +Alex Orr +Alex Su +Alex Unger +Alexander Harkness +Alexis Gauthiez +Ali Farooq +Allan Guwatudde +Allen Sun +Amey Sakhadeo +Anders Janmyr +Andreas Garnæs +Andrew Ryabchun +Andrew Svoboda +Andy Grunwald +Andy Hume +Andy Lindeman +angie pinilla +anjanashenoy +Anshuman Bhartiya +Antoine +Antoine Pelisse +Anton Nguyen +Anubha Kushwaha +appilon +aprp +apurwaj2 +Aravind +Arda Kuyumcu +Arıl Bozoluk +Asier Marruedo +Austin Burdine +Austin Dizzy +Azuka Okuleye +Ben Batha +Benjamen Keroack +Berkay Tacyildiz +Beshr Kayali +Beyang Liu +Billy Keyes +Billy Lynch +Bingtan Lu +Bjorn Neergaard +Björn Häuser +Bo Huang +boljen +Bracken +Brad Harris +Brad Moylan +Bradley Falzon +Bradley McAllister +Brandon Butler +Brandon Cook +Brandon Stubbs +Brett Kuhlman +Brett Logan +Brian Egizi +Bryan Boreham +Bryan Peterson +Cami Diez +Carl Johnson +Carlos Alexandro Becker +Carlos Tadeu Panato Junior +ChandanChainani +chandresh-pancholi +Charles Fenwick Elliott +Charlie Yan +Chmouel Boudjnah +Chris King +Chris Mc +Chris Raborg +Chris Roche +Chris Schaefer +chrisforrette +Christian Bargmann +Christian Muehlhaeuser +Christoph Jerolimov +Christoph Sassenberg +CI Monk +Colin Misare +Craig Gumbley +Craig Peterson +Cristian Maglie +Cyb3r Jak3 +Daehyeok Mun +Dalton Hubble +Daniel Lanner +Daniel Leavitt +Daniel Nilsson +Daoq +Dave Du Cros +Dave Henderson +Dave Perrett +Dave Protasowski +David Deng +David Gamba +David J. M. Karlsen +David Jannotta +David Ji +David Lopez Reyes +Davide Zipeto +Dennis Webb +Derek Jobst +DeviousLab +Dhi Aurrahman +Diego Lapiduz +Diogo Vilela +Dmitri Shuralyov +dmnlk +Don Petersen +Doug Turner +Drew Fradette +Dustin Deus +Dustin Lish +Eivind +Eli Uriegas +Elliott Beach +Emerson Wood +Emil V +Eng Zer Jun +eperm +Erick Fejta +Erik Nobel +erwinvaneyk +Evan Anderson +Evan Elias +Fabian Holler +Fabrice +Fatema-Moaiyadi +Federico Di Pierro +Felix Geisendörfer +Filippo Valsorda +Florian Forster +Florian Wagner +Francesc Gil +Francis +Francisco Guimarães +François de Metz +Fredrik Jönsson +Gabriel +Gal Ofri +Garrett Squire +George Kontridze +Georgy Buranov +Glen Mailer +Gnahz +Google Inc. 
+Grachev Mikhail +griffin_stewie +guangwu +Guillaume Jacquet +Guz Alexander +Guðmundur Bjarni Ólafsson +Hanno Hecker +Hari haran +Harikesh00 +haya14busa +haya14busa +Hiroki Ito +Hubot Jr +Huy Tr +huydx +i2bskn +Iain Steers +Ikko Ashimine +Ilia Choly +Ioannis Georgoulas +Isao Jonas +ishan upadhyay +isqua +Jacob Valdemar +Jake Krammer +Jake White +Jameel Haffejee +James Bowes +James Cockbain +James Loh +James Maguire +James Turley +Jamie West +Jan Kosecki +Jan Švábík +Jason Field +Javier Campanini +Jef LeCompte +Jeff Wenzbauer +Jens Rantil +Jeremy Morris +Jesse Haka +Jesse Newland +Jihoon Chung +Jille Timmermans +Jimmi Dyson +Joan Saum +Joe Tsai +John Barton +John Engelman +John Jones +John Liu +Jordan Brockopp +Jordan Burandt +Jordan Sussman +Jorge Gómez Reus +Joshua Bezaleel Abednego +João Cerqueira +JP Phillips +jpbelanger-mtl +Juan +Juan Basso +Julien Garcia Gonzalez +Julien Rostand +Junya Kono +Justin Abrahms +Justin Toh +Jusung Lee +jzhoucliqr +k0ral +k1rnt +kadern0 +Karthik Sundari +Katrina Owen +Kautilya Tripathi +Keita Urashima +Kevin Burke +Kevin Wang +Kevin Zhao +kgalli +Kirill +Konrad Malawski +Kookheon Kwon +Krishna Indani +Krzysztof Kowalczyk +Kshitij Saraogi +Kumar Saurabh +Kyle Kurz +kyokomi +Lars Lehtonen +Laurent Verdoïa +leopoldwang +Liam Galvin +Lluis Campos +Lovro Mažgon +Loïs Postula +Luca Campese +Lucas Alcantara +Lucas Martin-King +Luis Davim +Luke Evers +Luke Hinds +Luke Kysow +Luke Roberts +Luke Young +lynn [they] +Magnus Kulke +Maksim Zhylinski +Marc Binder +Marcelo Carlos +Mark Tareshawty +Martin Holman +Martin-Louis Bright +Martins Sipenko +Marwan Sulaiman +Masayuki Izumi +Mat Geist +Matija Horvat +Matin Rahmanian +Matt +Matt Brender +Matt Dainty +Matt Gaunt +Matt Landis +Matt Moore +Matt Simons +Maxime Bury +Michael Meng +Michael Spiegel +Michael Tiller +Michał Glapa +Michelangelo Morrillo +Miguel Elias dos Santos +Mike Chen +mohammad ali <2018cs92@student.uet.edu.pk> +Mohammed AlDujaili +Mukundan Senthil +Munia Balayil +Mustafa Abban +Nadav Kaner +Naoki Kanatani +Nathan VanBenschoten +Navaneeth Suresh +Neal Caffery +Neil O'Toole +Nick Miyake +Nick Platt +Nick Spragg +Nicolas Chapurlat +Nikhita Raghunath +Nilesh Singh +Noah Hanjun Lee +Noah Zoschke +ns-cweber +nxya +Ole Orhagen +Oleg Kovalov +Ondřej Kupka +Ori Talmor +Osama Faqhruldin +oslowalk +Pablo Pérez Schröder +Palash Nigam +Panagiotis Moustafellos +Parham Alvani +pari-27 +Parker Moore +parkhyukjun89 +Pat Alwell +Patrick DeVivo +Patrick Marabeas +Patrik Nordlén +Pavel Dvoinos +Pavel Shtanko +Pete Wagner +Petr Shevtsov +Pierce McEntagart +Pierre Carrier +Piotr Zurek +Piyush Chugh +Pratik Mallya +Qais Patankar +Quang Le Hong +Quentin Leffray +Quinn Slack +Rackspace US, Inc. +Radek Simko +Radliński Ignacy +Rafael Aramizu Gomes +Rajat Jindal +Rajendra arora +Rajkumar +Ranbir Singh +Ravi Shekhar Jethani +RaviTeja Pothana +rc1140 +Red Hat, Inc. 
+Reetuparna Mukherjee +reeves122 +Reinier Timmer +Renjith R +Ricco Førgaard +Richard de Vries +Rob Figueiredo +Rohit Upadhyay +Rojan Dinc +Ronak Jain +Ronan Pelliard +Ross Gustafson +Ruben Vereecken +Russell Boley +Ryan Leung +Ryan Lower +Ryo Nakao +Saaarah +Safwan Olaimat +Sahil Dua +Sai Ravi Teja Chintakrindi +saisi +Sam Minnée +Sandeep Sukhani +Sander Knape +Sander van Harmelen +Sanket Payghan +Sarah Funkhouser +Sarasa Kisaragi +Sasha Melentyev +Sean Wang +Sebastian Mandrean +Sebastian Mæland Pedersen +Sergei Popinevskii +Sergey Romanov +Sergio Garcia +Seth Vargo +Sevki +Shagun Khemka +shakeelrao +Shawn Catanzarite +Shawn Smith +Shibasis Patel +Sho Okada +Shrikrishna Singh +Simon Davis +sona-tar +soniachikh +SoundCloud, Ltd. +Sridhar Mocherla +SriVignessh Pss +Stefan Sedich +Steve Teuber +Stian Eikeland +Suhaib Mujahid +sushmita wable +Szymon Kodrebski +Søren Hansen +T.J. Corrigan +Takashi Yoneuchi +Takayuki Watanabe +Taketoshi Fujiwara +Taketoshi Fujiwara +Takuma Kajikawa +Tasya Aditya Rukmana +Theo Henson +Theofilos Petsios +Thomas Aidan Curran +Thomas Bruyelle +Tim Rogers +Timothy O'Brien +Timothée Peignier +Tingluo Huang +tkhandel +Tobias Gesellchen +Tom Payne +Trey Tacon +tsbkw +ttacon +Vaibhav Singh +Varadarajan Aravamudhan +Victor Castell +Victor Vrantchan +Victory Osikwemhe +vikkyomkar +Vivek +Vlad Ungureanu +Wasim Thabraze +Weslei Juan Moser Pereira +Wheeler Law +Will Maier +Will Norris +Willem D'Haeseleer +William Bailey +William Cooke +Xabi +xibz +Yann Malet +Yannick Utard +Yarden Shoham +Yicheng Qin +Yosuke Akatsuka +Yumikiyo Osanai +Yurii Soldak +Yusef Mohamadi +Yusuke Kuoka +Zach Latta +zhouhaibing089 +六开箱 +缘生 +蒋航 diff --git a/vendor/github.com/google/go-github/v56/LICENSE b/vendor/github.com/google/go-github/v56/LICENSE new file mode 100644 index 000000000..28b6486f0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013 The go-github AUTHORS. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/google/go-github/v56/github/actions.go b/vendor/github.com/google/go-github/v56/github/actions.go new file mode 100644 index 000000000..8d552f2d0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions.go @@ -0,0 +1,12 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// ActionsService handles communication with the actions related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/ +type ActionsService service diff --git a/vendor/github.com/google/go-github/v56/github/actions_artifacts.go b/vendor/github.com/google/go-github/v56/github/actions_artifacts.go new file mode 100644 index 000000000..6389268b4 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_artifacts.go @@ -0,0 +1,157 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "net/url" +) + +// ArtifactWorkflowRun represents a GitHub artifact's workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts +type ArtifactWorkflowRun struct { + ID *int64 `json:"id,omitempty"` + RepositoryID *int64 `json:"repository_id,omitempty"` + HeadRepositoryID *int64 `json:"head_repository_id,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` +} + +// Artifact represents a GitHub artifact. Artifacts allow sharing +// data between jobs in a workflow and provide storage for data +// once a workflow is complete. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts +type Artifact struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + SizeInBytes *int64 `json:"size_in_bytes,omitempty"` + URL *string `json:"url,omitempty"` + ArchiveDownloadURL *string `json:"archive_download_url,omitempty"` + Expired *bool `json:"expired,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ExpiresAt *Timestamp `json:"expires_at,omitempty"` + WorkflowRun *ArtifactWorkflowRun `json:"workflow_run,omitempty"` +} + +// ArtifactList represents a list of GitHub artifacts. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#artifacts +type ArtifactList struct { + TotalCount *int64 `json:"total_count,omitempty"` + Artifacts []*Artifact `json:"artifacts,omitempty"` +} + +// ListArtifacts lists all artifacts that belong to a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#list-artifacts-for-a-repository +func (s *ActionsService) ListArtifacts(ctx context.Context, owner, repo string, opts *ListOptions) (*ArtifactList, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/artifacts", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + artifactList := new(ArtifactList) + resp, err := s.client.Do(ctx, req, artifactList) + if err != nil { + return nil, resp, err + } + + return artifactList, resp, nil +} + +// ListWorkflowRunArtifacts lists all artifacts that belong to a workflow run. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#list-workflow-run-artifacts +func (s *ActionsService) ListWorkflowRunArtifacts(ctx context.Context, owner, repo string, runID int64, opts *ListOptions) (*ArtifactList, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/artifacts", owner, repo, runID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + artifactList := new(ArtifactList) + resp, err := s.client.Do(ctx, req, artifactList) + if err != nil { + return nil, resp, err + } + + return artifactList, resp, nil +} + +// GetArtifact gets a specific artifact for a workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#get-an-artifact +func (s *ActionsService) GetArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Artifact, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + artifact := new(Artifact) + resp, err := s.client.Do(ctx, req, artifact) + if err != nil { + return nil, resp, err + } + + return artifact, resp, nil +} + +// DownloadArtifact gets a redirect URL to download an archive for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#download-an-artifact +func (s *ActionsService) DownloadArtifact(ctx context.Context, owner, repo string, artifactID int64, maxRedirects int) (*url.URL, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v/zip", owner, repo, artifactID) + + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusFound { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + parsedURL, err := url.Parse(resp.Header.Get("Location")) + if err != nil { + return nil, newResponse(resp), err + } + + return parsedURL, newResponse(resp), nil +} + +// DeleteArtifact deletes a workflow run artifact. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/artifacts#delete-an-artifact +func (s *ActionsService) DeleteArtifact(ctx context.Context, owner, repo string, artifactID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/artifacts/%v", owner, repo, artifactID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_cache.go b/vendor/github.com/google/go-github/v56/github/actions_cache.go new file mode 100644 index 000000000..9592d9ab6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_cache.go @@ -0,0 +1,235 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ActionsCache represents a GitHub action cache. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#about-the-cache-api +type ActionsCache struct { + ID *int64 `json:"id,omitempty" url:"-"` + Ref *string `json:"ref,omitempty" url:"ref"` + Key *string `json:"key,omitempty" url:"key"` + Version *string `json:"version,omitempty" url:"-"` + LastAccessedAt *Timestamp `json:"last_accessed_at,omitempty" url:"-"` + CreatedAt *Timestamp `json:"created_at,omitempty" url:"-"` + SizeInBytes *int64 `json:"size_in_bytes,omitempty" url:"-"` +} + +// ActionsCacheList represents a list of GitHub actions Cache. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#list-github-actions-caches-for-a-repository +type ActionsCacheList struct { + TotalCount int `json:"total_count"` + ActionsCaches []*ActionsCache `json:"actions_caches,omitempty"` +} + +// ActionsCacheUsage represents a GitHub Actions Cache Usage object. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-a-repository +type ActionsCacheUsage struct { + FullName string `json:"full_name"` + ActiveCachesSizeInBytes int64 `json:"active_caches_size_in_bytes"` + ActiveCachesCount int `json:"active_caches_count"` +} + +// ActionsCacheUsageList represents a list of repositories with GitHub Actions cache usage for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-a-repository +type ActionsCacheUsageList struct { + TotalCount int `json:"total_count"` + RepoCacheUsage []*ActionsCacheUsage `json:"repository_cache_usages,omitempty"` +} + +// TotalCacheUsage represents total GitHub actions cache usage of an organization or enterprise. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-an-enterprise +type TotalCacheUsage struct { + TotalActiveCachesUsageSizeInBytes int64 `json:"total_active_caches_size_in_bytes"` + TotalActiveCachesCount int `json:"total_active_caches_count"` +} + +// ActionsCacheListOptions represents a list of all possible optional Query parameters for ListCaches method. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#list-github-actions-caches-for-a-repository +type ActionsCacheListOptions struct { + ListOptions + // The Git reference for the results you want to list. + // The ref for a branch can be formatted either as refs/heads/ + // or simply . To reference a pull request use refs/pull//merge + Ref *string `url:"ref,omitempty"` + Key *string `url:"key,omitempty"` + // Can be one of: "created_at", "last_accessed_at", "size_in_bytes". Default: "last_accessed_at" + Sort *string `url:"sort,omitempty"` + // Can be one of: "asc", "desc" Default: desc + Direction *string `url:"direction,omitempty"` +} + +// ListCaches lists the GitHub Actions caches for a repository. +// You must authenticate using an access token with the repo scope to use this endpoint. +// +// Permissions: must have the actions:read permission to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#list-github-actions-caches-for-a-repository +func (s *ActionsService) ListCaches(ctx context.Context, owner, repo string, opts *ActionsCacheListOptions) (*ActionsCacheList, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionCacheList := new(ActionsCacheList) + resp, err := s.client.Do(ctx, req, actionCacheList) + if err != nil { + return nil, resp, err + } + + return actionCacheList, resp, nil +} + +// DeleteCachesByKey deletes one or more GitHub Actions caches for a repository, using a complete cache key. +// By default, all caches that match the provided key are deleted, but you can optionally provide +// a Git ref to restrict deletions to caches that match both the provided key and the Git ref. +// The ref for a branch can be formatted either as "refs/heads/" or simply "". +// To reference a pull request use "refs/pull//merge". If you don't want to use ref just pass nil in parameter. +// +// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-github-actions-caches-for-a-repository-using-a-cache-key +func (s *ActionsService) DeleteCachesByKey(ctx context.Context, owner, repo, key string, ref *string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/caches", owner, repo) + u, err := addOptions(u, ActionsCache{Key: &key, Ref: ref}) + if err != nil { + return nil, err + } + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteCachesByID deletes a GitHub Actions cache for a repository, using a cache ID. +// +// Permissions: You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have the actions:write permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id +func (s *ActionsService) DeleteCachesByID(ctx context.Context, owner, repo string, cacheID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/caches/%v", owner, repo, cacheID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetCacheUsageForRepo gets GitHub Actions cache usage for a repository. The data fetched using this API is refreshed approximately every 5 minutes, +// so values returned from this endpoint may take at least 5 minutes to get updated. +// +// Permissions: Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an +// access token with the repo scope. GitHub Apps must have the actions:read permission to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-a-repository +func (s *ActionsService) GetCacheUsageForRepo(ctx context.Context, owner, repo string) (*ActionsCacheUsage, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/cache/usage", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + cacheUsage := new(ActionsCacheUsage) + res, err := s.client.Do(ctx, req, cacheUsage) + if err != nil { + return nil, res, err + } + + return cacheUsage, res, err +} + +// ListCacheUsageByRepoForOrg lists repositories and their GitHub Actions cache usage for an organization. The data fetched using this API is +// refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. +// +// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. +// GitHub Apps must have the organization_admistration:read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#list-repositories-with-github-actions-cache-usage-for-an-organization +func (s *ActionsService) ListCacheUsageByRepoForOrg(ctx context.Context, org string, opts *ListOptions) (*ActionsCacheUsageList, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/cache/usage-by-repository", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + cacheUsage := new(ActionsCacheUsageList) + res, err := s.client.Do(ctx, req, cacheUsage) + if err != nil { + return nil, res, err + } + + return cacheUsage, res, err +} + +// GetTotalCacheUsageForOrg gets the total GitHub Actions cache usage for an organization. The data fetched using this API is refreshed approximately every +// 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. +// +// Permissions: You must authenticate using an access token with the read:org scope to use this endpoint. +// GitHub Apps must have the organization_admistration:read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-an-organization +func (s *ActionsService) GetTotalCacheUsageForOrg(ctx context.Context, org string) (*TotalCacheUsage, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/cache/usage", org) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + cacheUsage := new(TotalCacheUsage) + res, err := s.client.Do(ctx, req, cacheUsage) + if err != nil { + return nil, res, err + } + + return cacheUsage, res, err +} + +// GetTotalCacheUsageForEnterprise gets the total GitHub Actions cache usage for an enterprise. The data fetched using this API is refreshed approximately every 5 minutes, +// so values returned from this endpoint may take at least 5 minutes to get updated. +// +// Permissions: You must authenticate using an access token with the "admin:enterprise" scope to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#get-github-actions-cache-usage-for-an-enterprise +func (s *ActionsService) GetTotalCacheUsageForEnterprise(ctx context.Context, enterprise string) (*TotalCacheUsage, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/cache/usage", enterprise) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + cacheUsage := new(TotalCacheUsage) + res, err := s.client.Do(ctx, req, cacheUsage) + if err != nil { + return nil, res, err + } + + return cacheUsage, res, err +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_oidc.go b/vendor/github.com/google/go-github/v56/github/actions_oidc.go new file mode 100644 index 000000000..b7f2d26ae --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_oidc.go @@ -0,0 +1,73 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// OIDCSubjectClaimCustomTemplate represents an OIDC subject claim customization template. +type OIDCSubjectClaimCustomTemplate struct { + UseDefault *bool `json:"use_default,omitempty"` + IncludeClaimKeys []string `json:"include_claim_keys,omitempty"` +} + +// GetOrgOIDCSubjectClaimCustomTemplate gets the subject claim customization template for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization +func (s *ActionsService) GetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) + return s.getOIDCSubjectClaimCustomTemplate(ctx, u) +} + +// GetRepoOIDCSubjectClaimCustomTemplate gets the subject claim customization template for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-a-repository +func (s *ActionsService) GetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) + return s.getOIDCSubjectClaimCustomTemplate(ctx, u) +} + +func (s *ActionsService) getOIDCSubjectClaimCustomTemplate(ctx context.Context, url string) (*OIDCSubjectClaimCustomTemplate, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + tmpl := new(OIDCSubjectClaimCustomTemplate) + resp, err := s.client.Do(ctx, req, tmpl) + if err != nil { + return nil, resp, err + } + + return tmpl, resp, nil +} + +// SetOrgOIDCSubjectClaimCustomTemplate sets the subject claim customization for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization +func (s *ActionsService) SetOrgOIDCSubjectClaimCustomTemplate(ctx context.Context, org string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/oidc/customization/sub", org) + return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) +} + +// SetRepoOIDCSubjectClaimCustomTemplate sets the subject claim customization for a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-a-repository +func (s *ActionsService) SetRepoOIDCSubjectClaimCustomTemplate(ctx context.Context, owner, repo string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/oidc/customization/sub", owner, repo) + return s.setOIDCSubjectClaimCustomTemplate(ctx, u, template) +} + +func (s *ActionsService) setOIDCSubjectClaimCustomTemplate(ctx context.Context, url string, template *OIDCSubjectClaimCustomTemplate) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, template) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_permissions_enterprise.go b/vendor/github.com/google/go-github/v56/github/actions_permissions_enterprise.go new file mode 100644 index 000000000..8311e7294 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_permissions_enterprise.go @@ -0,0 +1,191 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ActionsEnabledOnEnterpriseOrgs represents all the repositories in an enterprise for which Actions is enabled. +type ActionsEnabledOnEnterpriseRepos struct { + TotalCount int `json:"total_count"` + Organizations []*Organization `json:"organizations"` +} + +// ActionsPermissionsEnterprise represents a policy for allowed actions in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions +type ActionsPermissionsEnterprise struct { + EnabledOrganizations *string `json:"enabled_organizations,omitempty"` + AllowedActions *string `json:"allowed_actions,omitempty"` + SelectedActionsURL *string `json:"selected_actions_url,omitempty"` +} + +func (a ActionsPermissionsEnterprise) String() string { + return Stringify(a) +} + +// GetActionsPermissionsInEnterprise gets the GitHub Actions permissions policy for an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-github-actions-permissions-for-an-enterprise +func (s *ActionsService) GetActionsPermissionsInEnterprise(ctx context.Context, enterprise string) (*ActionsPermissionsEnterprise, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + permissions := new(ActionsPermissionsEnterprise) + resp, err := s.client.Do(ctx, req, permissions) + if err != nil { + return nil, resp, err + } + + return permissions, resp, nil +} + +// EditActionsPermissionsInEnterprise sets the permissions policy in an enterprise. 
+// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-github-actions-permissions-for-an-enterprise +func (s *ActionsService) EditActionsPermissionsInEnterprise(ctx context.Context, enterprise string, actionsPermissionsEnterprise ActionsPermissionsEnterprise) (*ActionsPermissionsEnterprise, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions", enterprise) + req, err := s.client.NewRequest("PUT", u, actionsPermissionsEnterprise) + if err != nil { + return nil, nil, err + } + + p := new(ActionsPermissionsEnterprise) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// ListEnabledOrgsInEnterprise lists the selected organizations that are enabled for GitHub Actions in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise +func (s *ActionsService) ListEnabledOrgsInEnterprise(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnEnterpriseRepos, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + orgs := &ActionsEnabledOnEnterpriseRepos{} + resp, err := s.client.Do(ctx, req, orgs) + if err != nil { + return nil, resp, err + } + + return orgs, resp, nil +} + +// SetEnabledOrgsInEnterprise replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-selected-organizations-enabled-for-github-actions-in-an-enterprise +func (s *ActionsService) SetEnabledOrgsInEnterprise(ctx context.Context, owner string, organizationIDs []int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations", owner) + + req, err := s.client.NewRequest("PUT", u, struct { + IDs []int64 `json:"selected_organization_ids"` + }{IDs: organizationIDs}) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// AddEnabledOrgInEnterprise adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#enable-a-selected-organization-for-github-actions-in-an-enterprise +func (s *ActionsService) AddEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// RemoveEnabledOrgInEnterprise removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. 
+// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#disable-a-selected-organization-for-github-actions-in-an-enterprise +func (s *ActionsService) RemoveEnabledOrgInEnterprise(ctx context.Context, owner string, organizationID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/organizations/%v", owner, organizationID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// GetActionsAllowedInEnterprise gets the actions that are allowed in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-enterprise +func (s *ActionsService) GetActionsAllowedInEnterprise(ctx context.Context, enterprise string) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionsAllowed := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, actionsAllowed) + if err != nil { + return nil, resp, err + } + + return actionsAllowed, resp, nil +} + +// EditActionsAllowedInEnterprise sets the actions that are allowed in an enterprise. +// +// GitHub API docs: https://docs.github.com/enterprise-cloud@latest/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-enterprise +func (s *ActionsService) EditActionsAllowedInEnterprise(ctx context.Context, enterprise string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/permissions/selected-actions", enterprise) + req, err := s.client.NewRequest("PUT", u, actionsAllowed) + if err != nil { + return nil, nil, err + } + + p := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_permissions_orgs.go b/vendor/github.com/google/go-github/v56/github/actions_permissions_orgs.go new file mode 100644 index 000000000..801f1d974 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_permissions_orgs.go @@ -0,0 +1,204 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ActionsPermissions represents a policy for repositories and allowed actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions +type ActionsPermissions struct { + EnabledRepositories *string `json:"enabled_repositories,omitempty"` + AllowedActions *string `json:"allowed_actions,omitempty"` + SelectedActionsURL *string `json:"selected_actions_url,omitempty"` +} + +func (a ActionsPermissions) String() string { + return Stringify(a) +} + +// ActionsEnabledOnOrgRepos represents all the repositories in an organization for which Actions is enabled. +type ActionsEnabledOnOrgRepos struct { + TotalCount int `json:"total_count"` + Repositories []*Repository `json:"repositories"` +} + +// ActionsAllowed represents selected actions that are allowed. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions +type ActionsAllowed struct { + GithubOwnedAllowed *bool `json:"github_owned_allowed,omitempty"` + VerifiedAllowed *bool `json:"verified_allowed,omitempty"` + PatternsAllowed []string `json:"patterns_allowed,omitempty"` +} + +func (a ActionsAllowed) String() string { + return Stringify(a) +} + +// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-github-actions-permissions-for-an-organization +func (s *ActionsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions", org) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + permissions := new(ActionsPermissions) + resp, err := s.client.Do(ctx, req, permissions) + if err != nil { + return nil, resp, err + } + + return permissions, resp, nil +} + +// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-github-actions-permissions-for-an-organization +func (s *ActionsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions", org) + req, err := s.client.NewRequest("PUT", u, actionsPermissions) + if err != nil { + return nil, nil, err + } + + p := new(ActionsPermissions) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// ListEnabledReposInOrg lists the selected repositories that are enabled for GitHub Actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#list-selected-repositories-enabled-for-github-actions-in-an-organization +func (s *ActionsService) ListEnabledReposInOrg(ctx context.Context, owner string, opts *ListOptions) (*ActionsEnabledOnOrgRepos, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + repos := &ActionsEnabledOnOrgRepos{} + resp, err := s.client.Do(ctx, req, repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// SetEnabledReposInOrg replaces the list of selected repositories that are enabled for GitHub Actions in an organization.. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-selected-repositories-enabled-for-github-actions-in-an-organization +func (s *ActionsService) SetEnabledReposInOrg(ctx context.Context, owner string, repositoryIDs []int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/repositories", owner) + + req, err := s.client.NewRequest("PUT", u, struct { + IDs []int64 `json:"selected_repository_ids"` + }{IDs: repositoryIDs}) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// AddEnabledReposInOrg adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#enable-a-selected-repository-for-github-actions-in-an-organization +func (s *ActionsService) AddEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// RemoveEnabledRepoInOrg removes a single repository from the list of enabled repos for GitHub Actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#disable-a-selected-repository-for-github-actions-in-an-organization +func (s *ActionsService) RemoveEnabledReposInOrg(ctx context.Context, owner string, repositoryID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/repositories/%v", owner, repositoryID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// GetActionsAllowed gets the actions that are allowed in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization +func (s *ActionsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionsAllowed := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, actionsAllowed) + if err != nil { + return nil, resp, err + } + + return actionsAllowed, resp, nil +} + +// EditActionsAllowed sets the actions that are allowed in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization +func (s *ActionsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/permissions/selected-actions", org) + req, err := s.client.NewRequest("PUT", u, actionsAllowed) + if err != nil { + return nil, nil, err + } + + p := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_required_workflows.go b/vendor/github.com/google/go-github/v56/github/actions_required_workflows.go new file mode 100644 index 000000000..3566eb9d2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_required_workflows.go @@ -0,0 +1,247 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// OrgRequiredWorkflow represents a required workflow object at the org level. 
+type OrgRequiredWorkflow struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + Scope *string `json:"scope,omitempty"` + Ref *string `json:"ref,omitempty"` + State *string `json:"state,omitempty"` + SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Repository *Repository `json:"repository,omitempty"` +} + +// OrgRequiredWorkflows represents the required workflows for the org. +type OrgRequiredWorkflows struct { + TotalCount *int `json:"total_count,omitempty"` + RequiredWorkflows []*OrgRequiredWorkflow `json:"required_workflows,omitempty"` +} + +// CreateUpdateRequiredWorkflowOptions represents the input object used to create or update required workflows. +type CreateUpdateRequiredWorkflowOptions struct { + WorkflowFilePath *string `json:"workflow_file_path,omitempty"` + RepositoryID *int64 `json:"repository_id,omitempty"` + Scope *string `json:"scope,omitempty"` + SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` +} + +// RequiredWorkflowSelectedRepos represents the repos that a required workflow is applied to. +type RequiredWorkflowSelectedRepos struct { + TotalCount *int `json:"total_count,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +// RepoRequiredWorkflow represents a required workflow object at the repo level. +type RepoRequiredWorkflow struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + State *string `json:"state,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + BadgeURL *string `json:"badge_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + SourceRepository *Repository `json:"source_repository,omitempty"` +} + +// RepoRequiredWorkflows represents the required workflows for a repo. +type RepoRequiredWorkflows struct { + TotalCount *int `json:"total_count,omitempty"` + RequiredWorkflows []*RepoRequiredWorkflow `json:"required_workflows,omitempty"` +} + +// ListOrgRequiredWorkflows lists the RequiredWorkflows for an org. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#list-required-workflows +func (s *ActionsService) ListOrgRequiredWorkflows(ctx context.Context, org string, opts *ListOptions) (*OrgRequiredWorkflows, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + requiredWorkflows := new(OrgRequiredWorkflows) + resp, err := s.client.Do(ctx, req, &requiredWorkflows) + if err != nil { + return nil, resp, err + } + + return requiredWorkflows, resp, nil +} + +// CreateRequiredWorkflow creates the required workflow in an org. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#create-a-required-workflow +func (s *ActionsService) CreateRequiredWorkflow(ctx context.Context, org string, createRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows", org) + req, err := s.client.NewRequest("POST", url, createRequiredWorkflowOptions) + if err != nil { + return nil, nil, err + } + + orgRequiredWorkflow := new(OrgRequiredWorkflow) + resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) + if err != nil { + return nil, resp, err + } + + return orgRequiredWorkflow, resp, nil +} + +// GetRequiredWorkflowByID get the RequiredWorkflows for an org by its ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#list-required-workflows +func (s *ActionsService) GetRequiredWorkflowByID(ctx context.Context, owner string, requiredWorkflowID int64) (*OrgRequiredWorkflow, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", owner, requiredWorkflowID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + requiredWorkflow := new(OrgRequiredWorkflow) + resp, err := s.client.Do(ctx, req, &requiredWorkflow) + if err != nil { + return nil, resp, err + } + + return requiredWorkflow, resp, nil +} + +// UpdateRequiredWorkflow updates a required workflow in an org. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#update-a-required-workflow +func (s *ActionsService) UpdateRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64, updateRequiredWorkflowOptions *CreateUpdateRequiredWorkflowOptions) (*OrgRequiredWorkflow, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) + req, err := s.client.NewRequest("PATCH", url, updateRequiredWorkflowOptions) + if err != nil { + return nil, nil, err + } + + orgRequiredWorkflow := new(OrgRequiredWorkflow) + resp, err := s.client.Do(ctx, req, orgRequiredWorkflow) + if err != nil { + return nil, resp, err + } + + return orgRequiredWorkflow, resp, nil +} + +// DeleteRequiredWorkflow deletes a required workflow in an org. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#update-a-required-workflow +func (s *ActionsService) DeleteRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID int64) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v", org, requiredWorkflowID) + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// ListRequiredWorkflowSelectedRepos lists the Repositories selected for a workflow. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#list-selected-repositories-for-a-required-workflow +func (s *ActionsService) ListRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, opts *ListOptions) (*RequiredWorkflowSelectedRepos, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + requiredWorkflowRepos := new(RequiredWorkflowSelectedRepos) + resp, err := s.client.Do(ctx, req, &requiredWorkflowRepos) + if err != nil { + return nil, resp, err + } + + return requiredWorkflowRepos, resp, nil +} + +// SetRequiredWorkflowSelectedRepos sets the Repositories selected for a workflow. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#sets-repositories-for-a-required-workflow +func (s *ActionsService) SetRequiredWorkflowSelectedRepos(ctx context.Context, org string, requiredWorkflowID int64, ids SelectedRepoIDs) (*Response, error) { + type repoIDs struct { + SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` + } + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories", org, requiredWorkflowID) + req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddRepoToRequiredWorkflow adds the Repository to a required workflow. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#add-a-repository-to-a-required-workflow +func (s *ActionsService) AddRepoToRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// RemoveRepoFromRequiredWorkflow removes the Repository from a required workflow. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#add-a-repository-to-a-required-workflow +func (s *ActionsService) RemoveRepoFromRequiredWorkflow(ctx context.Context, org string, requiredWorkflowID, repoID int64) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/required_workflows/%v/repositories/%v", org, requiredWorkflowID, repoID) + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// ListRepoRequiredWorkflows lists the RequiredWorkflows for a repo. 
+// +// Github API docs:https://docs.github.com/en/rest/actions/required-workflows?apiVersion=2022-11-28#list-repository-required-workflows +func (s *ActionsService) ListRepoRequiredWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*RepoRequiredWorkflows, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/required_workflows", owner, repo) + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + requiredWorkflows := new(RepoRequiredWorkflows) + resp, err := s.client.Do(ctx, req, &requiredWorkflows) + if err != nil { + return nil, resp, err + } + + return requiredWorkflows, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_runner_groups.go b/vendor/github.com/google/go-github/v56/github/actions_runner_groups.go new file mode 100644 index 000000000..00b9b6ce0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_runner_groups.go @@ -0,0 +1,311 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RunnerGroup represents a self-hosted runner group configured in an organization. +type RunnerGroup struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + Default *bool `json:"default,omitempty"` + SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` + RunnersURL *string `json:"runners_url,omitempty"` + Inherited *bool `json:"inherited,omitempty"` + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + SelectedWorkflows []string `json:"selected_workflows,omitempty"` + WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` +} + +// RunnerGroups represents a collection of self-hosted runner groups configured for an organization. +type RunnerGroups struct { + TotalCount int `json:"total_count"` + RunnerGroups []*RunnerGroup `json:"runner_groups"` +} + +// CreateRunnerGroupRequest represents a request to create a Runner group for an organization. +type CreateRunnerGroupRequest struct { + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + // List of repository IDs that can access the runner group. + SelectedRepositoryIDs []int64 `json:"selected_repository_ids,omitempty"` + // Runners represent a list of runner IDs to add to the runner group. + Runners []int64 `json:"runners,omitempty"` + // If set to True, public repos can use this runner group + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. + SelectedWorkflows []string `json:"selected_workflows,omitempty"` +} + +// UpdateRunnerGroupRequest represents a request to update a Runner group for an organization. 
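As a rough illustration of how the required-workflows endpoints vendored above might be consumed (a sketch, not part of the patch itself; the organization name, workflow ID, repository IDs, and token handling are all placeholders):

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	// Placeholder auth: assumes a suitable token is exported as GITHUB_TOKEN.
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// Look up a required workflow by its numeric ID (42 is a placeholder).
	rw, _, err := client.Actions.GetRequiredWorkflowByID(ctx, "example-org", 42)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("required workflow: %+v\n", rw)

	// Restrict that required workflow to an explicit set of repository IDs.
	if _, err := client.Actions.SetRequiredWorkflowSelectedRepos(ctx, "example-org", 42, github.SelectedRepoIDs{1296269, 1296270}); err != nil {
		log.Fatal(err)
	}
}

The later sketches below assume an authenticated *github.Client constructed the same way.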
+type UpdateRunnerGroupRequest struct { + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + SelectedWorkflows []string `json:"selected_workflows,omitempty"` +} + +// SetRepoAccessRunnerGroupRequest represents a request to replace the list of repositories +// that can access a self-hosted runner group configured in an organization. +type SetRepoAccessRunnerGroupRequest struct { + // Updated list of repository IDs that should be given access to the runner group. + SelectedRepositoryIDs []int64 `json:"selected_repository_ids"` +} + +// SetRunnerGroupRunnersRequest represents a request to replace the list of +// self-hosted runners that are part of an organization runner group. +type SetRunnerGroupRunnersRequest struct { + // Updated list of runner IDs that should be given access to the runner group. + Runners []int64 `json:"runners"` +} + +// ListOrgRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToRepository. +type ListOrgRunnerGroupOptions struct { + ListOptions + + // Only return runner groups that are allowed to be used by this repository. + VisibleToRepository string `url:"visible_to_repository,omitempty"` +} + +// ListOrganizationRunnerGroups lists all self-hosted runner groups configured in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#list-self-hosted-runner-groups-for-an-organization +func (s *ActionsService) ListOrganizationRunnerGroups(ctx context.Context, org string, opts *ListOrgRunnerGroupOptions) (*RunnerGroups, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + groups := &RunnerGroups{} + resp, err := s.client.Do(ctx, req, &groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// GetOrganizationRunnerGroup gets a specific self-hosted runner group for an organization using its RunnerGroup ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#get-a-self-hosted-runner-group-for-an-organization +func (s *ActionsService) GetOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*RunnerGroup, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(RunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// DeleteOrganizationRunnerGroup deletes a self-hosted runner group from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#delete-a-self-hosted-runner-group-from-an-organization +func (s *ActionsService) DeleteOrganizationRunnerGroup(ctx context.Context, org string, groupID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CreateOrganizationRunnerGroup creates a new self-hosted runner group for an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#create-a-self-hosted-runner-group-for-an-organization +func (s *ActionsService) CreateOrganizationRunnerGroup(ctx context.Context, org string, createReq CreateRunnerGroupRequest) (*RunnerGroup, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups", org) + req, err := s.client.NewRequest("POST", u, createReq) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(RunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// UpdateOrganizationRunnerGroup updates a self-hosted runner group for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#update-a-self-hosted-runner-group-for-an-organization +func (s *ActionsService) UpdateOrganizationRunnerGroup(ctx context.Context, org string, groupID int64, updateReq UpdateRunnerGroupRequest) (*RunnerGroup, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v", org, groupID) + req, err := s.client.NewRequest("PATCH", u, updateReq) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(RunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// ListRepositoryAccessRunnerGroup lists the repositories with access to a self-hosted runner group configured in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#list-repository-access-to-a-self-hosted-runner-group-in-an-organization +func (s *ActionsService) ListRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, opts *ListOptions) (*ListRepositories, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + repos := &ListRepositories{} + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// SetRepositoryAccessRunnerGroup replaces the list of repositories that have access to a self-hosted runner group configured in an organization +// with a new List of repositories. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#set-repository-access-for-a-self-hosted-runner-group-in-an-organization +func (s *ActionsService) SetRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID int64, ids SetRepoAccessRunnerGroupRequest) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories", org, groupID) + + req, err := s.client.NewRequest("PUT", u, ids) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddRepositoryAccessRunnerGroup adds a repository to the list of selected repositories that can access a self-hosted runner group. +// The runner group must have visibility set to 'selected'. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#add-repository-access-to-a-self-hosted-runner-group-in-an-organization +func (s *ActionsService) AddRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveRepositoryAccessRunnerGroup removes a repository from the list of selected repositories that can access a self-hosted runner group. +// The runner group must have visibility set to 'selected'. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#remove-repository-access-to-a-self-hosted-runner-group-in-an-organization +func (s *ActionsService) RemoveRepositoryAccessRunnerGroup(ctx context.Context, org string, groupID, repoID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/repositories/%v", org, groupID, repoID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListRunnerGroupRunners lists self-hosted runners that are in a specific organization group. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#list-self-hosted-runners-in-a-group-for-an-organization +func (s *ActionsService) ListRunnerGroupRunners(ctx context.Context, org string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runners := &Runners{} + resp, err := s.client.Do(ctx, req, &runners) + if err != nil { + return nil, resp, err + } + + return runners, resp, nil +} + +// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an organization runner group +// with a new list of runners. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#set-self-hosted-runners-in-a-group-for-an-organization +func (s *ActionsService) SetRunnerGroupRunners(ctx context.Context, org string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners", org, groupID) + + req, err := s.client.NewRequest("PUT", u, ids) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#add-a-self-hosted-runner-to-a-group-for-an-organization +func (s *ActionsService) AddRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an organization. +// The runner is then returned to the default group. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runner-groups#remove-a-self-hosted-runner-from-a-group-for-an-organization +func (s *ActionsService) RemoveRunnerGroupRunners(ctx context.Context, org string, groupID, runnerID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runner-groups/%v/runners/%v", org, groupID, runnerID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_runners.go b/vendor/github.com/google/go-github/v56/github/actions_runners.go new file mode 100644 index 000000000..d54d6d41c --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_runners.go @@ -0,0 +1,343 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RunnerApplicationDownload represents a binary for the self-hosted runner application that can be downloaded. +type RunnerApplicationDownload struct { + OS *string `json:"os,omitempty"` + Architecture *string `json:"architecture,omitempty"` + DownloadURL *string `json:"download_url,omitempty"` + Filename *string `json:"filename,omitempty"` + TempDownloadToken *string `json:"temp_download_token,omitempty"` + SHA256Checksum *string `json:"sha256_checksum,omitempty"` +} + +// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-runner-applications-for-a-repository +func (s *ActionsService) ListRunnerApplicationDownloads(ctx context.Context, owner, repo string) ([]*RunnerApplicationDownload, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/downloads", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rads []*RunnerApplicationDownload + resp, err := s.client.Do(ctx, req, &rads) + if err != nil { + return nil, resp, err + } + + return rads, resp, nil +} + +// GenerateJITConfigRequest specifies body parameters to GenerateRepoJITConfig. +type GenerateJITConfigRequest struct { + Name string `json:"name"` + RunnerGroupID int64 `json:"runner_group_id"` + WorkFolder *string `json:"work_folder,omitempty"` + + // Labels represents the names of the custom labels to add to the runner. + // Minimum items: 1. Maximum items: 100. + Labels []string `json:"labels"` +} + +// JITRunnerConfig represents encoded JIT configuration that can be used to bootstrap a self-hosted runner. +type JITRunnerConfig struct { + Runner *Runner `json:"runner,omitempty"` + EncodedJITConfig *string `json:"encoded_jit_config,omitempty"` +} + +// GenerateOrgJITConfig generate a just-in-time configuration for an organization. 
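A minimal sketch of driving the runner-group endpoints above with an authenticated client; the organization name, repository ID, and runner ID are placeholders:

package actionsexamples

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// createSelectedRunnerGroup creates a runner group visible only to selected
// repositories and then attaches one already-registered runner to it.
func createSelectedRunnerGroup(ctx context.Context, client *github.Client) error {
	group, _, err := client.Actions.CreateOrganizationRunnerGroup(ctx, "example-org", github.CreateRunnerGroupRequest{
		Name:                     github.String("build-pool"),
		Visibility:               github.String("selected"),
		SelectedRepositoryIDs:    []int64{1296269},
		AllowsPublicRepositories: github.Bool(false),
	})
	if err != nil {
		return err
	}

	// Add an existing self-hosted runner (ID 7 is a placeholder) to the new group.
	_, err = client.Actions.AddRunnerGroupRunners(ctx, "example-org", group.GetID(), 7)
	return err
}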
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners?apiVersion=2022-11-28#create-configuration-for-a-just-in-time-runner-for-an-organization +func (s *ActionsService) GenerateOrgJITConfig(ctx context.Context, owner string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/generate-jitconfig", owner) + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + jitConfig := new(JITRunnerConfig) + resp, err := s.client.Do(ctx, req, jitConfig) + if err != nil { + return nil, resp, err + } + + return jitConfig, resp, nil +} + +// GenerateRepoJITConfig generates a just-in-time configuration for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners?apiVersion=2022-11-28#create-configuration-for-a-just-in-time-runner-for-a-repository +func (s *ActionsService) GenerateRepoJITConfig(ctx context.Context, owner, repo string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/generate-jitconfig", owner, repo) + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + jitConfig := new(JITRunnerConfig) + resp, err := s.client.Do(ctx, req, jitConfig) + if err != nil { + return nil, resp, err + } + + return jitConfig, resp, nil +} + +// RegistrationToken represents a token that can be used to add a self-hosted runner to a repository. +type RegistrationToken struct { + Token *string `json:"token,omitempty"` + ExpiresAt *Timestamp `json:"expires_at,omitempty"` +} + +// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#create-a-registration-token-for-a-repository +func (s *ActionsService) CreateRegistrationToken(ctx context.Context, owner, repo string) (*RegistrationToken, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/registration-token", owner, repo) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + registrationToken := new(RegistrationToken) + resp, err := s.client.Do(ctx, req, registrationToken) + if err != nil { + return nil, resp, err + } + + return registrationToken, resp, nil +} + +// Runner represents a self-hosted runner registered with a repository. +type Runner struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + OS *string `json:"os,omitempty"` + Status *string `json:"status,omitempty"` + Busy *bool `json:"busy,omitempty"` + Labels []*RunnerLabels `json:"labels,omitempty"` +} + +// RunnerLabels represents a collection of labels attached to each runner. +type RunnerLabels struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` +} + +// Runners represents a collection of self-hosted runners for a repository. +type Runners struct { + TotalCount int `json:"total_count"` + Runners []*Runner `json:"runners"` +} + +// ListRunners lists all the self-hosted runners for a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-self-hosted-runners-for-a-repository +func (s *ActionsService) ListRunners(ctx context.Context, owner, repo string, opts *ListOptions) (*Runners, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runners := &Runners{} + resp, err := s.client.Do(ctx, req, &runners) + if err != nil { + return nil, resp, err + } + + return runners, resp, nil +} + +// GetRunner gets a specific self-hosted runner for a repository using its runner ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-a-repository +func (s *ActionsService) GetRunner(ctx context.Context, owner, repo string, runnerID int64) (*Runner, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runner := new(Runner) + resp, err := s.client.Do(ctx, req, runner) + if err != nil { + return nil, resp, err + } + + return runner, resp, nil +} + +// RemoveToken represents a token that can be used to remove a self-hosted runner from a repository. +type RemoveToken struct { + Token *string `json:"token,omitempty"` + ExpiresAt *Timestamp `json:"expires_at,omitempty"` +} + +// CreateRemoveToken creates a token that can be used to remove a self-hosted runner from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#create-a-remove-token-for-a-repository +func (s *ActionsService) CreateRemoveToken(ctx context.Context, owner, repo string) (*RemoveToken, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/remove-token", owner, repo) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + removeToken := new(RemoveToken) + resp, err := s.client.Do(ctx, req, removeToken) + if err != nil { + return nil, resp, err + } + + return removeToken, resp, nil +} + +// RemoveRunner forces the removal of a self-hosted runner in a repository using the runner id. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-a-repository +func (s *ActionsService) RemoveRunner(ctx context.Context, owner, repo string, runnerID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runners/%v", owner, repo, runnerID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListOrganizationRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-runner-applications-for-an-organization +func (s *ActionsService) ListOrganizationRunnerApplicationDownloads(ctx context.Context, owner string) ([]*RunnerApplicationDownload, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/downloads", owner) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rads []*RunnerApplicationDownload + resp, err := s.client.Do(ctx, req, &rads) + if err != nil { + return nil, resp, err + } + + return rads, resp, nil +} + +// CreateOrganizationRegistrationToken creates a token that can be used to add a self-hosted runner to an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#create-a-registration-token-for-an-organization +func (s *ActionsService) CreateOrganizationRegistrationToken(ctx context.Context, owner string) (*RegistrationToken, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/registration-token", owner) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + registrationToken := new(RegistrationToken) + resp, err := s.client.Do(ctx, req, registrationToken) + if err != nil { + return nil, resp, err + } + + return registrationToken, resp, nil +} + +// ListOrganizationRunners lists all the self-hosted runners for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-organization +func (s *ActionsService) ListOrganizationRunners(ctx context.Context, owner string, opts *ListOptions) (*Runners, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners", owner) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runners := &Runners{} + resp, err := s.client.Do(ctx, req, &runners) + if err != nil { + return nil, resp, err + } + + return runners, resp, nil +} + +// GetOrganizationRunner gets a specific self-hosted runner for an organization using its runner ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#get-a-self-hosted-runner-for-an-organization +func (s *ActionsService) GetOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Runner, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runner := new(Runner) + resp, err := s.client.Do(ctx, req, runner) + if err != nil { + return nil, resp, err + } + + return runner, resp, nil +} + +// CreateOrganizationRemoveToken creates a token that can be used to remove a self-hosted runner from an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#create-a-remove-token-for-an-organization +func (s *ActionsService) CreateOrganizationRemoveToken(ctx context.Context, owner string) (*RemoveToken, *Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/remove-token", owner) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + removeToken := new(RemoveToken) + resp, err := s.client.Do(ctx, req, removeToken) + if err != nil { + return nil, resp, err + } + + return removeToken, resp, nil +} + +// RemoveOrganizationRunner forces the removal of a self-hosted runner from an organization using the runner id. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-organization +func (s *ActionsService) RemoveOrganizationRunner(ctx context.Context, owner string, runnerID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/actions/runners/%v", owner, runnerID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_secrets.go b/vendor/github.com/google/go-github/v56/github/actions_secrets.go new file mode 100644 index 000000000..49985e12a --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_secrets.go @@ -0,0 +1,358 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" + "strconv" +) + +// PublicKey represents the public key that should be used to encrypt secrets. +type PublicKey struct { + KeyID *string `json:"key_id"` + Key *string `json:"key"` +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// This ensures GitHub Enterprise versions which return a numeric key id +// do not error out when unmarshaling. +func (p *PublicKey) UnmarshalJSON(data []byte) error { + var pk struct { + KeyID interface{} `json:"key_id"` + Key *string `json:"key"` + } + + if err := json.Unmarshal(data, &pk); err != nil { + return err + } + + p.Key = pk.Key + + switch v := pk.KeyID.(type) { + case nil: + return nil + case string: + p.KeyID = &v + case float64: + p.KeyID = String(strconv.FormatFloat(v, 'f', -1, 64)) + default: + return fmt.Errorf("unable to unmarshal %T as a string", v) + } + + return nil +} + +func (s *ActionsService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + pubKey := new(PublicKey) + resp, err := s.client.Do(ctx, req, pubKey) + if err != nil { + return nil, resp, err + } + + return pubKey, resp, nil +} + +// GetRepoPublicKey gets a public key that should be used for secret encryption. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-a-repository-public-key +func (s *ActionsService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/secrets/public-key", owner, repo) + return s.getPublicKey(ctx, url) +} + +// GetOrgPublicKey gets a public key that should be used for secret encryption. 
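The just-in-time runner endpoints above might be used along these lines; the organization name, runner group ID, and labels are placeholders, and the encoded configuration is what an ephemeral runner consumes at startup:

package actionsexamples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// bootstrapJITRunner requests a just-in-time runner configuration for an
// organization and prints the encoded blob to hand to the runner process.
func bootstrapJITRunner(ctx context.Context, client *github.Client) error {
	jit, _, err := client.Actions.GenerateOrgJITConfig(ctx, "example-org", &github.GenerateJITConfigRequest{
		Name:          "ephemeral-runner-1",
		RunnerGroupID: 1,
		Labels:        []string{"self-hosted", "linux", "x64"},
	})
	if err != nil {
		return err
	}
	fmt.Println(jit.GetEncodedJITConfig())
	return nil
}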
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-an-organization-public-key +func (s *ActionsService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/public-key", org) + return s.getPublicKey(ctx, url) +} + +// GetEnvPublicKey gets a public key that should be used for secret encryption. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-an-environment-public-key +func (s *ActionsService) GetEnvPublicKey(ctx context.Context, repoID int, env string) (*PublicKey, *Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/secrets/public-key", repoID, env) + return s.getPublicKey(ctx, url) +} + +// Secret represents a repository action secret. +type Secret struct { + Name string `json:"name"` + CreatedAt Timestamp `json:"created_at"` + UpdatedAt Timestamp `json:"updated_at"` + Visibility string `json:"visibility,omitempty"` + SelectedRepositoriesURL string `json:"selected_repositories_url,omitempty"` +} + +// Secrets represents one item from the ListSecrets response. +type Secrets struct { + TotalCount int `json:"total_count"` + Secrets []*Secret `json:"secrets"` +} + +func (s *ActionsService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + secrets := new(Secrets) + resp, err := s.client.Do(ctx, req, &secrets) + if err != nil { + return nil, resp, err + } + + return secrets, resp, nil +} + +// ListRepoSecrets lists all secrets available in a repository +// without revealing their encrypted values. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#list-repository-secrets +func (s *ActionsService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/secrets", owner, repo) + return s.listSecrets(ctx, url, opts) +} + +// ListOrgSecrets lists all secrets available in an organization +// without revealing their encrypted values. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#list-organization-secrets +func (s *ActionsService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets", org) + return s.listSecrets(ctx, url, opts) +} + +// ListEnvSecrets lists all secrets available in an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#list-environment-secrets +func (s *ActionsService) ListEnvSecrets(ctx context.Context, repoID int, env string, opts *ListOptions) (*Secrets, *Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/secrets", repoID, env) + return s.listSecrets(ctx, url, opts) +} + +func (s *ActionsService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + secret := new(Secret) + resp, err := s.client.Do(ctx, req, secret) + if err != nil { + return nil, resp, err + } + + return secret, resp, nil +} + +// GetRepoSecret gets a single repository secret without revealing its encrypted value. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-a-repository-secret +func (s *ActionsService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) + return s.getSecret(ctx, url) +} + +// GetOrgSecret gets a single organization secret without revealing its encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-an-organization-secret +func (s *ActionsService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) + return s.getSecret(ctx, url) +} + +// GetEnvSecret gets a single environment secret without revealing its encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#get-an-environment-secret +func (s *ActionsService) GetEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Secret, *Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) + return s.getSecret(ctx, url) +} + +// SelectedRepoIDs are the repository IDs that have access to the actions secrets. +type SelectedRepoIDs []int64 + +// EncryptedSecret represents a secret that is encrypted using a public key. +// +// The value of EncryptedValue must be your secret, encrypted with +// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) +// using the public key retrieved using the GetPublicKey method. +type EncryptedSecret struct { + Name string `json:"-"` + KeyID string `json:"key_id"` + EncryptedValue string `json:"encrypted_value"` + Visibility string `json:"visibility,omitempty"` + SelectedRepositoryIDs SelectedRepoIDs `json:"selected_repository_ids,omitempty"` +} + +func (s *ActionsService) putSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, eSecret) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CreateOrUpdateRepoSecret creates or updates a repository secret with an encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#create-or-update-a-repository-secret +func (s *ActionsService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, eSecret.Name) + return s.putSecret(ctx, url, eSecret) +} + +// CreateOrUpdateOrgSecret creates or updates an organization secret with an encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#create-or-update-an-organization-secret +func (s *ActionsService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, eSecret.Name) + return s.putSecret(ctx, url, eSecret) +} + +// CreateOrUpdateEnvSecret creates or updates a single environment secret with an encrypted value. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#create-or-update-an-environment-secret +func (s *ActionsService) CreateOrUpdateEnvSecret(ctx context.Context, repoID int, env string, eSecret *EncryptedSecret) (*Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, eSecret.Name) + return s.putSecret(ctx, url, eSecret) +} + +func (s *ActionsService) deleteSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteRepoSecret deletes a secret in a repository using the secret name. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#delete-a-repository-secret +func (s *ActionsService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/secrets/%v", owner, repo, name) + return s.deleteSecret(ctx, url) +} + +// DeleteOrgSecret deletes a secret in an organization using the secret name. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#delete-an-organization-secret +func (s *ActionsService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v", org, name) + return s.deleteSecret(ctx, url) +} + +// DeleteEnvSecret deletes a secret in an environment using the secret name. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#delete-an-environment-secret +func (s *ActionsService) DeleteEnvSecret(ctx context.Context, repoID int, env, secretName string) (*Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/secrets/%v", repoID, env, secretName) + return s.deleteSecret(ctx, url) +} + +// SelectedReposList represents the list of repositories selected for an organization secret. +type SelectedReposList struct { + TotalCount *int `json:"total_count,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +func (s *ActionsService) listSelectedReposForSecret(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + result := new(SelectedReposList) + resp, err := s.client.Do(ctx, req, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// ListSelectedReposForOrgSecret lists all repositories that have access to a secret. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#list-selected-repositories-for-an-organization-secret +func (s *ActionsService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) + return s.listSelectedReposForSecret(ctx, url, opts) +} + +func (s *ActionsService) setSelectedReposForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { + type repoIDs struct { + SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` + } + + req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// SetSelectedReposForOrgSecret sets the repositories that have access to a secret. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#set-selected-repositories-for-an-organization-secret +func (s *ActionsService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories", org, name) + return s.setSelectedReposForSecret(ctx, url, ids) +} + +func (s *ActionsService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddSelectedRepoToOrgSecret adds a repository to an organization secret. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#add-selected-repository-to-an-organization-secret +func (s *ActionsService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) + return s.addSelectedRepoToSecret(ctx, url) +} + +func (s *ActionsService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveSelectedRepoFromOrgSecret removes a repository from an organization secret. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/secrets#remove-selected-repository-from-an-organization-secret +func (s *ActionsService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/secrets/%v/repositories/%v", org, name, *repo.ID) + return s.removeSelectedRepoFromSecret(ctx, url) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_variables.go b/vendor/github.com/google/go-github/v56/github/actions_variables.go new file mode 100644 index 000000000..29445edd0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_variables.go @@ -0,0 +1,293 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ActionsVariable represents a repository action variable. +type ActionsVariable struct { + Name string `json:"name"` + Value string `json:"value"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Visibility *string `json:"visibility,omitempty"` + // Used by ListOrgVariables and GetOrgVariables + SelectedRepositoriesURL *string `json:"selected_repositories_url,omitempty"` + // Used by UpdateOrgVariable and CreateOrgVariable + SelectedRepositoryIDs *SelectedRepoIDs `json:"selected_repository_ids,omitempty"` +} + +// ActionsVariables represents one item from the ListVariables response. 
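Creating a secret requires sealing the value with the public key first (a libsodium sealed box, per the EncryptedSecret comment above). A sketch using golang.org/x/crypto/nacl/box, with owner, repo, and the secret name as placeholders:

package actionsexamples

import (
	"context"
	"crypto/rand"
	"encoding/base64"

	"github.com/google/go-github/v56/github"
	"golang.org/x/crypto/nacl/box"
)

// putRepoSecret encrypts plaintext against the repository public key and
// upserts it as an Actions secret.
func putRepoSecret(ctx context.Context, client *github.Client, plaintext string) error {
	pubKey, _, err := client.Actions.GetRepoPublicKey(ctx, "example-org", "example-repo")
	if err != nil {
		return err
	}

	// The API returns the 32-byte public key base64-encoded.
	raw, err := base64.StdEncoding.DecodeString(pubKey.GetKey())
	if err != nil {
		return err
	}
	var recipient [32]byte
	copy(recipient[:], raw)

	// Sealed-box encryption; only GitHub can decrypt the result.
	sealed, err := box.SealAnonymous(nil, []byte(plaintext), &recipient, rand.Reader)
	if err != nil {
		return err
	}

	_, err = client.Actions.CreateOrUpdateRepoSecret(ctx, "example-org", "example-repo", &github.EncryptedSecret{
		Name:           "MY_SECRET",
		KeyID:          pubKey.GetKeyID(),
		EncryptedValue: base64.StdEncoding.EncodeToString(sealed),
	})
	return err
}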
+type ActionsVariables struct { + TotalCount int `json:"total_count"` + Variables []*ActionsVariable `json:"variables"` +} + +func (s *ActionsService) listVariables(ctx context.Context, url string, opts *ListOptions) (*ActionsVariables, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + variables := new(ActionsVariables) + resp, err := s.client.Do(ctx, req, &variables) + if err != nil { + return nil, resp, err + } + + return variables, resp, nil +} + +// ListRepoVariables lists all variables available in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#list-repository-variables +func (s *ActionsService) ListRepoVariables(ctx context.Context, owner, repo string, opts *ListOptions) (*ActionsVariables, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) + return s.listVariables(ctx, url, opts) +} + +// ListOrgVariables lists all variables available in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#list-organization-variables +func (s *ActionsService) ListOrgVariables(ctx context.Context, org string, opts *ListOptions) (*ActionsVariables, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables", org) + return s.listVariables(ctx, url, opts) +} + +// ListEnvVariables lists all variables available in an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#list-environment-variables +func (s *ActionsService) ListEnvVariables(ctx context.Context, repoID int, env string, opts *ListOptions) (*ActionsVariables, *Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) + return s.listVariables(ctx, url, opts) +} + +func (s *ActionsService) getVariable(ctx context.Context, url string) (*ActionsVariable, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + variable := new(ActionsVariable) + resp, err := s.client.Do(ctx, req, variable) + if err != nil { + return nil, resp, err + } + + return variable, resp, nil +} + +// GetRepoVariable gets a single repository variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#get-a-repository-variable +func (s *ActionsService) GetRepoVariable(ctx context.Context, owner, repo, name string) (*ActionsVariable, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) + return s.getVariable(ctx, url) +} + +// GetOrgVariable gets a single organization variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#get-an-organization-variable +func (s *ActionsService) GetOrgVariable(ctx context.Context, org, name string) (*ActionsVariable, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) + return s.getVariable(ctx, url) +} + +// GetEnvVariable gets a single environment variable. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#get-an-environment-variable +func (s *ActionsService) GetEnvVariable(ctx context.Context, repoID int, env, variableName string) (*ActionsVariable, *Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) + return s.getVariable(ctx, url) +} + +func (s *ActionsService) postVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { + req, err := s.client.NewRequest("POST", url, variable) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// CreateRepoVariable creates a repository variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#create-a-repository-variable +func (s *ActionsService) CreateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/variables", owner, repo) + return s.postVariable(ctx, url, variable) +} + +// CreateOrgVariable creates an organization variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#create-an-organization-variable +func (s *ActionsService) CreateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables", org) + return s.postVariable(ctx, url, variable) +} + +// CreateEnvVariable creates an environment variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#create-an-environment-variable +func (s *ActionsService) CreateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/variables", repoID, env) + return s.postVariable(ctx, url, variable) +} + +func (s *ActionsService) patchVariable(ctx context.Context, url string, variable *ActionsVariable) (*Response, error) { + req, err := s.client.NewRequest("PATCH", url, variable) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// UpdateRepoVariable updates a repository variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#update-a-repository-variable +func (s *ActionsService) UpdateRepoVariable(ctx context.Context, owner, repo string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, variable.Name) + return s.patchVariable(ctx, url, variable) +} + +// UpdateOrgVariable updates an organization variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#update-an-organization-variable +func (s *ActionsService) UpdateOrgVariable(ctx context.Context, org string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, variable.Name) + return s.patchVariable(ctx, url, variable) +} + +// UpdateEnvVariable updates an environment variable. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#create-an-environment-variable +func (s *ActionsService) UpdateEnvVariable(ctx context.Context, repoID int, env string, variable *ActionsVariable) (*Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variable.Name) + return s.patchVariable(ctx, url, variable) +} + +func (s *ActionsService) deleteVariable(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteRepoVariable deletes a variable in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#delete-a-repository-variable +func (s *ActionsService) DeleteRepoVariable(ctx context.Context, owner, repo, name string) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/actions/variables/%v", owner, repo, name) + return s.deleteVariable(ctx, url) +} + +// DeleteOrgVariable deletes a variable in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#delete-an-organization-variable +func (s *ActionsService) DeleteOrgVariable(ctx context.Context, org, name string) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v", org, name) + return s.deleteVariable(ctx, url) +} + +// DeleteEnvVariable deletes a variable in an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#delete-an-environment-variable +func (s *ActionsService) DeleteEnvVariable(ctx context.Context, repoID int, env, variableName string) (*Response, error) { + url := fmt.Sprintf("repositories/%v/environments/%v/variables/%v", repoID, env, variableName) + return s.deleteVariable(ctx, url) +} + +func (s *ActionsService) listSelectedReposForVariable(ctx context.Context, url string, opts *ListOptions) (*SelectedReposList, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + result := new(SelectedReposList) + resp, err := s.client.Do(ctx, req, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// ListSelectedReposForOrgVariable lists all repositories that have access to a variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#list-selected-repositories-for-an-organization-variable +func (s *ActionsService) ListSelectedReposForOrgVariable(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) + return s.listSelectedReposForVariable(ctx, url, opts) +} + +func (s *ActionsService) setSelectedReposForVariable(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) { + type repoIDs struct { + SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"` + } + + req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// SetSelectedReposForOrgVariable sets the repositories that have access to a variable. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#set-selected-repositories-for-an-organization-variable +func (s *ActionsService) SetSelectedReposForOrgVariable(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories", org, name) + return s.setSelectedReposForVariable(ctx, url, ids) +} + +func (s *ActionsService) addSelectedRepoToVariable(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddSelectedRepoToOrgVariable adds a repository to an organization variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#add-selected-repository-to-an-organization-variable +func (s *ActionsService) AddSelectedRepoToOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) + return s.addSelectedRepoToVariable(ctx, url) +} + +func (s *ActionsService) removeSelectedRepoFromVariable(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveSelectedRepoFromOrgVariable removes a repository from an organization variable. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/variables#remove-selected-repository-from-an-organization-variable +func (s *ActionsService) RemoveSelectedRepoFromOrgVariable(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/actions/variables/%v/repositories/%v", org, name, *repo.ID) + return s.removeSelectedRepoFromVariable(ctx, url) +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_workflow_jobs.go b/vendor/github.com/google/go-github/v56/github/actions_workflow_jobs.go new file mode 100644 index 000000000..87d117ba9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_workflow_jobs.go @@ -0,0 +1,133 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "net/url" +) + +// TaskStep represents a single task step from a sequence of tasks of a job. +type TaskStep struct { + Name *string `json:"name,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + Number *int64 `json:"number,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + CompletedAt *Timestamp `json:"completed_at,omitempty"` +} + +// WorkflowJob represents a repository action workflow job. 
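Variables, unlike secrets, are plain key/value pairs, so no encryption step is needed. A small sketch with placeholder owner, repo, and values:

package actionsexamples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// syncRepoVariable creates an Actions variable and then lists everything the
// repository exposes.
func syncRepoVariable(ctx context.Context, client *github.Client) error {
	_, err := client.Actions.CreateRepoVariable(ctx, "example-org", "example-repo", &github.ActionsVariable{
		Name:  "DEPLOY_ENV",
		Value: "staging",
	})
	if err != nil {
		return err
	}

	vars, _, err := client.Actions.ListRepoVariables(ctx, "example-org", "example-repo", &github.ListOptions{PerPage: 30})
	if err != nil {
		return err
	}
	for _, v := range vars.Variables {
		fmt.Printf("%s=%s\n", v.Name, v.Value)
	}
	return nil
}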
+type WorkflowJob struct { + ID *int64 `json:"id,omitempty"` + RunID *int64 `json:"run_id,omitempty"` + RunURL *string `json:"run_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + CompletedAt *Timestamp `json:"completed_at,omitempty"` + Name *string `json:"name,omitempty"` + Steps []*TaskStep `json:"steps,omitempty"` + CheckRunURL *string `json:"check_run_url,omitempty"` + // Labels represents runner labels from the `runs-on:` key from a GitHub Actions workflow. + Labels []string `json:"labels,omitempty"` + RunnerID *int64 `json:"runner_id,omitempty"` + RunnerName *string `json:"runner_name,omitempty"` + RunnerGroupID *int64 `json:"runner_group_id,omitempty"` + RunnerGroupName *string `json:"runner_group_name,omitempty"` + RunAttempt *int64 `json:"run_attempt,omitempty"` + WorkflowName *string `json:"workflow_name,omitempty"` +} + +// Jobs represents a slice of repository action workflow job. +type Jobs struct { + TotalCount *int `json:"total_count,omitempty"` + Jobs []*WorkflowJob `json:"jobs,omitempty"` +} + +// ListWorkflowJobsOptions specifies optional parameters to ListWorkflowJobs. +type ListWorkflowJobsOptions struct { + // Filter specifies how jobs should be filtered by their completed_at timestamp. + // Possible values are: + // latest - Returns jobs from the most recent execution of the workflow run + // all - Returns all jobs for a workflow run, including from old executions of the workflow run + // + // Default value is "latest". + Filter string `url:"filter,omitempty"` + ListOptions +} + +// ListWorkflowJobs lists all jobs for a workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-jobs#list-jobs-for-a-workflow-run +func (s *ActionsService) ListWorkflowJobs(ctx context.Context, owner, repo string, runID int64, opts *ListWorkflowJobsOptions) (*Jobs, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/actions/runs/%v/jobs", owner, repo, runID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + jobs := new(Jobs) + resp, err := s.client.Do(ctx, req, &jobs) + if err != nil { + return nil, resp, err + } + + return jobs, resp, nil +} + +// GetWorkflowJobByID gets a specific job in a workflow run by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-jobs#get-a-job-for-a-workflow-run +func (s *ActionsService) GetWorkflowJobByID(ctx context.Context, owner, repo string, jobID int64) (*WorkflowJob, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v", owner, repo, jobID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + job := new(WorkflowJob) + resp, err := s.client.Do(ctx, req, job) + if err != nil { + return nil, resp, err + } + + return job, resp, nil +} + +// GetWorkflowJobLogs gets a redirect URL to download a plain text file of logs for a workflow job. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-jobs#download-job-logs-for-a-workflow-run +func (s *ActionsService) GetWorkflowJobLogs(ctx context.Context, owner, repo string, jobID int64, maxRedirects int) (*url.URL, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/logs", owner, repo, jobID) + + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusFound { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + parsedURL, err := url.Parse(resp.Header.Get("Location")) + return parsedURL, newResponse(resp), err +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_workflow_runs.go b/vendor/github.com/google/go-github/v56/github/actions_workflow_runs.go new file mode 100644 index 000000000..390c26af9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_workflow_runs.go @@ -0,0 +1,373 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "net/url" +) + +// WorkflowRun represents a repository action workflow run. +type WorkflowRun struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + RunNumber *int `json:"run_number,omitempty"` + RunAttempt *int `json:"run_attempt,omitempty"` + Event *string `json:"event,omitempty"` + DisplayTitle *string `json:"display_title,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + WorkflowID *int64 `json:"workflow_id,omitempty"` + CheckSuiteID *int64 `json:"check_suite_id,omitempty"` + CheckSuiteNodeID *string `json:"check_suite_node_id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + RunStartedAt *Timestamp `json:"run_started_at,omitempty"` + JobsURL *string `json:"jobs_url,omitempty"` + LogsURL *string `json:"logs_url,omitempty"` + CheckSuiteURL *string `json:"check_suite_url,omitempty"` + ArtifactsURL *string `json:"artifacts_url,omitempty"` + CancelURL *string `json:"cancel_url,omitempty"` + RerunURL *string `json:"rerun_url,omitempty"` + PreviousAttemptURL *string `json:"previous_attempt_url,omitempty"` + HeadCommit *HeadCommit `json:"head_commit,omitempty"` + WorkflowURL *string `json:"workflow_url,omitempty"` + Repository *Repository `json:"repository,omitempty"` + HeadRepository *Repository `json:"head_repository,omitempty"` + Actor *User `json:"actor,omitempty"` + TriggeringActor *User `json:"triggering_actor,omitempty"` +} + +// WorkflowRuns represents a slice of repository action workflow run. +type WorkflowRuns struct { + TotalCount *int `json:"total_count,omitempty"` + WorkflowRuns []*WorkflowRun `json:"workflow_runs,omitempty"` +} + +// ListWorkflowRunsOptions specifies optional parameters to ListWorkflowRuns. 
+type ListWorkflowRunsOptions struct { + Actor string `url:"actor,omitempty"` + Branch string `url:"branch,omitempty"` + Event string `url:"event,omitempty"` + Status string `url:"status,omitempty"` + Created string `url:"created,omitempty"` + HeadSHA string `url:"head_sha,omitempty"` + ExcludePullRequests bool `url:"exclude_pull_requests,omitempty"` + CheckSuiteID int64 `url:"check_suite_id,omitempty"` + ListOptions +} + +// WorkflowRunUsage represents a usage of a specific workflow run. +type WorkflowRunUsage struct { + Billable *WorkflowRunBillMap `json:"billable,omitempty"` + RunDurationMS *int64 `json:"run_duration_ms,omitempty"` +} + +// WorkflowRunBillMap represents different runner environments available for a workflow run. +// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. +type WorkflowRunBillMap map[string]*WorkflowRunBill + +// WorkflowRunBill specifies billable time for a specific environment in a workflow run. +type WorkflowRunBill struct { + TotalMS *int64 `json:"total_ms,omitempty"` + Jobs *int `json:"jobs,omitempty"` + JobRuns []*WorkflowRunJobRun `json:"job_runs,omitempty"` +} + +// WorkflowRunJobRun represents a usage of individual jobs of a specific workflow run. +type WorkflowRunJobRun struct { + JobID *int `json:"job_id,omitempty"` + DurationMS *int64 `json:"duration_ms,omitempty"` +} + +// WorkflowRunAttemptOptions specifies optional parameters to GetWorkflowRunAttempt. +type WorkflowRunAttemptOptions struct { + ExcludePullRequests *bool `url:"exclude_pull_requests,omitempty"` +} + +// PendingDeploymentsRequest specifies body parameters to PendingDeployments. +type PendingDeploymentsRequest struct { + EnvironmentIDs []int64 `json:"environment_ids"` + // State can be one of: "approved", "rejected". + State string `json:"state"` + Comment string `json:"comment"` +} + +func (s *ActionsService) listWorkflowRuns(ctx context.Context, endpoint string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { + u, err := addOptions(endpoint, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runs := new(WorkflowRuns) + resp, err := s.client.Do(ctx, req, &runs) + if err != nil { + return nil, resp, err + } + + return runs, resp, nil +} + +// ListWorkflowRunsByID lists all workflow runs by workflow ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs +func (s *ActionsService) ListWorkflowRunsByID(ctx context.Context, owner, repo string, workflowID int64, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowID) + return s.listWorkflowRuns(ctx, u, opts) +} + +// ListWorkflowRunsByFileName lists all workflow runs by workflow file name. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs +func (s *ActionsService) ListWorkflowRunsByFileName(ctx context.Context, owner, repo, workflowFileName string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/actions/workflows/%v/runs", owner, repo, workflowFileName) + return s.listWorkflowRuns(ctx, u, opts) +} + +// ListRepositoryWorkflowRuns lists all workflow runs for a repository. 
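To show the run-listing filters above in use, a sketch that fetches completed push-triggered runs of an assumed ci.yaml workflow on main (client, owner, and repo are placeholders).

package examples

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// completedPushRuns returns the first page of completed runs of ci.yaml that
// were triggered by pushes to the main branch.
func completedPushRuns(ctx context.Context, client *github.Client, owner, repo string) ([]*github.WorkflowRun, error) {
	opts := &github.ListWorkflowRunsOptions{
		Branch:      "main",
		Event:       "push",
		Status:      "completed",
		ListOptions: github.ListOptions{PerPage: 50},
	}
	runs, _, err := client.Actions.ListWorkflowRunsByFileName(ctx, owner, repo, "ci.yaml", opts)
	if err != nil {
		return nil, err
	}
	return runs.WorkflowRuns, nil
}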
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs-for-a-repository +func (s *ActionsService) ListRepositoryWorkflowRuns(ctx context.Context, owner, repo string, opts *ListWorkflowRunsOptions) (*WorkflowRuns, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/actions/runs", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runs := new(WorkflowRuns) + resp, err := s.client.Do(ctx, req, &runs) + if err != nil { + return nil, resp, err + } + + return runs, resp, nil +} + +// GetWorkflowRunByID gets a specific workflow run by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run +func (s *ActionsService) GetWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + run := new(WorkflowRun) + resp, err := s.client.Do(ctx, req, run) + if err != nil { + return nil, resp, err + } + + return run, resp, nil +} + +// GetWorkflowRunAttempt gets a specific workflow run attempt. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run-attempt +func (s *ActionsService) GetWorkflowRunAttempt(ctx context.Context, owner, repo string, runID int64, attemptNumber int, opts *WorkflowRunAttemptOptions) (*WorkflowRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v", owner, repo, runID, attemptNumber) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + run := new(WorkflowRun) + resp, err := s.client.Do(ctx, req, run) + if err != nil { + return nil, resp, err + } + + return run, resp, nil +} + +// GetWorkflowRunAttemptLogs gets a redirect URL to download a plain text file of logs for a workflow run for attempt number. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#download-workflow-run-attempt-logs +func (s *ActionsService) GetWorkflowRunAttemptLogs(ctx context.Context, owner, repo string, runID int64, attemptNumber int, maxRedirects int) (*url.URL, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/attempts/%v/logs", owner, repo, runID, attemptNumber) + + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusFound { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + parsedURL, err := url.Parse(resp.Header.Get("Location")) + return parsedURL, newResponse(resp), err +} + +// RerunWorkflowByID re-runs a workflow by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#re-run-a-workflow +func (s *ActionsService) RerunWorkflowByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun", owner, repo, runID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RerunFailedJobsByID re-runs all of the failed jobs and their dependent jobs in a workflow run by ID. 
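A sketch combining GetWorkflowRunByID and RerunWorkflowByID from above: re-run a workflow run only when its recorded conclusion is a failure (the client and IDs are placeholders; GetConclusion comes from the generated accessors).

package examples

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// rerunIfFailed re-queues a workflow run when it finished with a failure.
func rerunIfFailed(ctx context.Context, client *github.Client, owner, repo string, runID int64) error {
	run, _, err := client.Actions.GetWorkflowRunByID(ctx, owner, repo, runID)
	if err != nil {
		return err
	}
	// Conclusion is only populated once the run has completed.
	if run.GetConclusion() == "failure" {
		_, err = client.Actions.RerunWorkflowByID(ctx, owner, repo, runID)
	}
	return err
}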
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#re-run-failed-jobs-from-a-workflow-run +func (s *ActionsService) RerunFailedJobsByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/rerun-failed-jobs", owner, repo, runID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RerunJobByID re-runs a job and its dependent jobs in a workflow run by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#re-run-a-job-from-a-workflow-run +func (s *ActionsService) RerunJobByID(ctx context.Context, owner, repo string, jobID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/jobs/%v/rerun", owner, repo, jobID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CancelWorkflowRunByID cancels a workflow run by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#cancel-a-workflow-run +func (s *ActionsService) CancelWorkflowRunByID(ctx context.Context, owner, repo string, runID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/cancel", owner, repo, runID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetWorkflowRunLogs gets a redirect URL to download a plain text file of logs for a workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#download-workflow-run-logs +func (s *ActionsService) GetWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64, maxRedirects int) (*url.URL, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) + + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusFound { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + parsedURL, err := url.Parse(resp.Header.Get("Location")) + return parsedURL, newResponse(resp), err +} + +// DeleteWorkflowRun deletes a workflow run by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#delete-a-workflow-run +func (s *ActionsService) DeleteWorkflowRun(ctx context.Context, owner, repo string, runID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v", owner, repo, runID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteWorkflowRunLogs deletes all logs for a workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#delete-workflow-run-logs +func (s *ActionsService) DeleteWorkflowRunLogs(ctx context.Context, owner, repo string, runID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/logs", owner, repo, runID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetWorkflowRunUsageByID gets a specific workflow usage run by run ID in the unit of billable milliseconds. 
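A sketch of the log and cleanup endpoints above. It passes 0 for maxRedirects on the assumption, based on the redirect handling shown here, that the 302 Location is then returned rather than followed; the logs and the run are deleted afterwards.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// archiveAndDeleteRun prints the log-archive URL for a run, then removes the
// logs and the run itself.
func archiveAndDeleteRun(ctx context.Context, client *github.Client, owner, repo string, runID int64) error {
	// maxRedirects = 0: surface the 302 so only the Location URL is returned.
	logsURL, _, err := client.Actions.GetWorkflowRunLogs(ctx, owner, repo, runID, 0)
	if err != nil {
		return err
	}
	fmt.Println("fetch the log archive from:", logsURL.String())

	if _, err := client.Actions.DeleteWorkflowRunLogs(ctx, owner, repo, runID); err != nil {
		return err
	}
	_, err = client.Actions.DeleteWorkflowRun(ctx, owner, repo, runID)
	return err
}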
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#get-workflow-run-usage +func (s *ActionsService) GetWorkflowRunUsageByID(ctx context.Context, owner, repo string, runID int64) (*WorkflowRunUsage, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/timing", owner, repo, runID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + workflowRunUsage := new(WorkflowRunUsage) + resp, err := s.client.Do(ctx, req, workflowRunUsage) + if err != nil { + return nil, resp, err + } + + return workflowRunUsage, resp, nil +} + +// PendingDeployments approve or reject pending deployments that are waiting on approval by a required reviewer. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run +func (s *ActionsService) PendingDeployments(ctx context.Context, owner, repo string, runID int64, request *PendingDeploymentsRequest) ([]*Deployment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/runs/%v/pending_deployments", owner, repo, runID) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + var deployments []*Deployment + resp, err := s.client.Do(ctx, req, &deployments) + if err != nil { + return nil, resp, err + } + + return deployments, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/actions_workflows.go b/vendor/github.com/google/go-github/v56/github/actions_workflows.go new file mode 100644 index 000000000..c9b47ed4b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/actions_workflows.go @@ -0,0 +1,215 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Workflow represents a repository action workflow. +type Workflow struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + State *string `json:"state,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + BadgeURL *string `json:"badge_url,omitempty"` +} + +// Workflows represents a slice of repository action workflows. +type Workflows struct { + TotalCount *int `json:"total_count,omitempty"` + Workflows []*Workflow `json:"workflows,omitempty"` +} + +// WorkflowUsage represents a usage of a specific workflow. +type WorkflowUsage struct { + Billable *WorkflowBillMap `json:"billable,omitempty"` +} + +// WorkflowBillMap represents different runner environments available for a workflow. +// Its key is the name of its environment, e.g. "UBUNTU", "MACOS", "WINDOWS", etc. +type WorkflowBillMap map[string]*WorkflowBill + +// WorkflowBill specifies billable time for a specific environment in a workflow. +type WorkflowBill struct { + TotalMS *int64 `json:"total_ms,omitempty"` +} + +// CreateWorkflowDispatchEventRequest represents a request to create a workflow dispatch event. +type CreateWorkflowDispatchEventRequest struct { + // Ref represents the reference of the workflow run. + // The reference can be a branch or a tag. + // Ref is required when creating a workflow dispatch event. 
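To illustrate PendingDeployments above: approve a run that is waiting on a protected environment. The environment ID and comment are placeholders.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// approvePendingDeployments approves the given environment for a waiting run.
func approvePendingDeployments(ctx context.Context, client *github.Client, owner, repo string, runID, envID int64) error {
	req := &github.PendingDeploymentsRequest{
		EnvironmentIDs: []int64{envID},
		State:          "approved",
		Comment:        "approved by release automation",
	}
	deployments, _, err := client.Actions.PendingDeployments(ctx, owner, repo, runID, req)
	if err != nil {
		return err
	}
	fmt.Printf("approved %d pending deployment(s)\n", len(deployments))
	return nil
}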
+ Ref string `json:"ref"` + // Inputs represents input keys and values configured in the workflow file. + // The maximum number of properties is 10. + // Default: Any default properties configured in the workflow file will be used when `inputs` are omitted. + Inputs map[string]interface{} `json:"inputs,omitempty"` +} + +// ListWorkflows lists all workflows in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#list-repository-workflows +func (s *ActionsService) ListWorkflows(ctx context.Context, owner, repo string, opts *ListOptions) (*Workflows, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/actions/workflows", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + workflows := new(Workflows) + resp, err := s.client.Do(ctx, req, &workflows) + if err != nil { + return nil, resp, err + } + + return workflows, resp, nil +} + +// GetWorkflowByID gets a specific workflow by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#get-a-workflow +func (s *ActionsService) GetWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Workflow, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowID) + + return s.getWorkflow(ctx, u) +} + +// GetWorkflowByFileName gets a specific workflow by file name. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#get-a-workflow +func (s *ActionsService) GetWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Workflow, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v", owner, repo, workflowFileName) + + return s.getWorkflow(ctx, u) +} + +func (s *ActionsService) getWorkflow(ctx context.Context, url string) (*Workflow, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + workflow := new(Workflow) + resp, err := s.client.Do(ctx, req, workflow) + if err != nil { + return nil, resp, err + } + + return workflow, resp, nil +} + +// GetWorkflowUsageByID gets a specific workflow usage by ID in the unit of billable milliseconds. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#get-workflow-usage +func (s *ActionsService) GetWorkflowUsageByID(ctx context.Context, owner, repo string, workflowID int64) (*WorkflowUsage, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowID) + + return s.getWorkflowUsage(ctx, u) +} + +// GetWorkflowUsageByFileName gets a specific workflow usage by file name in the unit of billable milliseconds. 
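A sketch of ListWorkflows above with standard pagination; the Workflow accessors come from the package's generated accessor file, and the client is assumed to be authenticated.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// printWorkflows lists every workflow in a repository with its path and state.
func printWorkflows(ctx context.Context, client *github.Client, owner, repo string) error {
	opts := &github.ListOptions{PerPage: 100}
	for {
		workflows, resp, err := client.Actions.ListWorkflows(ctx, owner, repo, opts)
		if err != nil {
			return err
		}
		for _, wf := range workflows.Workflows {
			fmt.Printf("%s (%s): %s\n", wf.GetName(), wf.GetPath(), wf.GetState())
		}
		if resp.NextPage == 0 {
			return nil
		}
		opts.Page = resp.NextPage
	}
}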
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#get-workflow-usage +func (s *ActionsService) GetWorkflowUsageByFileName(ctx context.Context, owner, repo, workflowFileName string) (*WorkflowUsage, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/timing", owner, repo, workflowFileName) + + return s.getWorkflowUsage(ctx, u) +} + +func (s *ActionsService) getWorkflowUsage(ctx context.Context, url string) (*WorkflowUsage, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + workflowUsage := new(WorkflowUsage) + resp, err := s.client.Do(ctx, req, workflowUsage) + if err != nil { + return nil, resp, err + } + + return workflowUsage, resp, nil +} + +// CreateWorkflowDispatchEventByID manually triggers a GitHub Actions workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#create-a-workflow-dispatch-event +func (s *ActionsService) CreateWorkflowDispatchEventByID(ctx context.Context, owner, repo string, workflowID int64, event CreateWorkflowDispatchEventRequest) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowID) + + return s.createWorkflowDispatchEvent(ctx, u, &event) +} + +// CreateWorkflowDispatchEventByFileName manually triggers a GitHub Actions workflow run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#create-a-workflow-dispatch-event +func (s *ActionsService) CreateWorkflowDispatchEventByFileName(ctx context.Context, owner, repo, workflowFileName string, event CreateWorkflowDispatchEventRequest) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/dispatches", owner, repo, workflowFileName) + + return s.createWorkflowDispatchEvent(ctx, u, &event) +} + +func (s *ActionsService) createWorkflowDispatchEvent(ctx context.Context, url string, event *CreateWorkflowDispatchEventRequest) (*Response, error) { + req, err := s.client.NewRequest("POST", url, event) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// EnableWorkflowByID enables a workflow and sets the state of the workflow to "active". +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#enable-a-workflow +func (s *ActionsService) EnableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowID) + return s.doNewPutRequest(ctx, u) +} + +// EnableWorkflowByFileName enables a workflow and sets the state of the workflow to "active". +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#enable-a-workflow +func (s *ActionsService) EnableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/enable", owner, repo, workflowFileName) + return s.doNewPutRequest(ctx, u) +} + +// DisableWorkflowByID disables a workflow and sets the state of the workflow to "disabled_manually". 
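A sketch tying together EnableWorkflowByFileName and CreateWorkflowDispatchEventByFileName from above; release.yaml, the ref, and the inputs are placeholder values.

package examples

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// dispatchRelease makes sure release.yaml is active and then triggers it on main.
func dispatchRelease(ctx context.Context, client *github.Client, owner, repo string) error {
	if _, err := client.Actions.EnableWorkflowByFileName(ctx, owner, repo, "release.yaml"); err != nil {
		return err
	}
	event := github.CreateWorkflowDispatchEventRequest{
		Ref: "main", // branch or tag the run is created against
		Inputs: map[string]interface{}{
			"version": "v1.2.3",
		},
	}
	_, err := client.Actions.CreateWorkflowDispatchEventByFileName(ctx, owner, repo, "release.yaml", event)
	return err
}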
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#disable-a-workflow +func (s *ActionsService) DisableWorkflowByID(ctx context.Context, owner, repo string, workflowID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowID) + return s.doNewPutRequest(ctx, u) +} + +// DisableWorkflowByFileName disables a workflow and sets the state of the workflow to "disabled_manually". +// +// GitHub API docs: https://docs.github.com/en/rest/actions/workflows#disable-a-workflow +func (s *ActionsService) DisableWorkflowByFileName(ctx context.Context, owner, repo, workflowFileName string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/workflows/%v/disable", owner, repo, workflowFileName) + return s.doNewPutRequest(ctx, u) +} + +func (s *ActionsService) doNewPutRequest(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/activity.go b/vendor/github.com/google/go-github/v56/github/activity.go new file mode 100644 index 000000000..9cd9f9b71 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/activity.go @@ -0,0 +1,73 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import "context" + +// ActivityService handles communication with the activity related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/ +type ActivityService service + +// FeedLink represents a link to a related resource. +type FeedLink struct { + HRef *string `json:"href,omitempty"` + Type *string `json:"type,omitempty"` +} + +// Feeds represents timeline resources in Atom format. +type Feeds struct { + TimelineURL *string `json:"timeline_url,omitempty"` + UserURL *string `json:"user_url,omitempty"` + CurrentUserPublicURL *string `json:"current_user_public_url,omitempty"` + CurrentUserURL *string `json:"current_user_url,omitempty"` + CurrentUserActorURL *string `json:"current_user_actor_url,omitempty"` + CurrentUserOrganizationURL *string `json:"current_user_organization_url,omitempty"` + CurrentUserOrganizationURLs []string `json:"current_user_organization_urls,omitempty"` + Links *FeedLinks `json:"_links,omitempty"` +} + +// FeedLinks represents the links in a Feed. +type FeedLinks struct { + Timeline *FeedLink `json:"timeline,omitempty"` + User *FeedLink `json:"user,omitempty"` + CurrentUserPublic *FeedLink `json:"current_user_public,omitempty"` + CurrentUser *FeedLink `json:"current_user,omitempty"` + CurrentUserActor *FeedLink `json:"current_user_actor,omitempty"` + CurrentUserOrganization *FeedLink `json:"current_user_organization,omitempty"` + CurrentUserOrganizations []*FeedLink `json:"current_user_organizations,omitempty"` +} + +// ListFeeds lists all the feeds available to the authenticated user. 
+// +// GitHub provides several timeline resources in Atom format: +// +// Timeline: The GitHub global public timeline +// User: The public timeline for any user, using URI template +// Current user public: The public timeline for the authenticated user +// Current user: The private timeline for the authenticated user +// Current user actor: The private timeline for activity created by the +// authenticated user +// Current user organizations: The private timeline for the organizations +// the authenticated user is a member of. +// +// Note: Private feeds are only returned when authenticating via Basic Auth +// since current feed URIs use the older, non revocable auth tokens. +func (s *ActivityService) ListFeeds(ctx context.Context) (*Feeds, *Response, error) { + req, err := s.client.NewRequest("GET", "feeds", nil) + if err != nil { + return nil, nil, err + } + + f := &Feeds{} + resp, err := s.client.Do(ctx, req, f) + if err != nil { + return nil, resp, err + } + + return f, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/activity_events.go b/vendor/github.com/google/go-github/v56/github/activity_events.go new file mode 100644 index 000000000..d6f0f043b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/activity_events.go @@ -0,0 +1,217 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListEvents drinks from the firehose of all public events across GitHub. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-public-events +func (s *ActivityService) ListEvents(ctx context.Context, opts *ListOptions) ([]*Event, *Response, error) { + u, err := addOptions("events", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListRepositoryEvents lists events for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-repository-events +func (s *ActivityService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/events", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListIssueEventsForRepository lists issue events for a repository. 
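A sketch of ListFeeds and ListRepositoryEvents above; the Event type and the Get* accessors live in other vendored files, so their use here is an assumption about the rest of the package.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// recentRepoEvents prints the global timeline URL and the latest events of a repository.
func recentRepoEvents(ctx context.Context, client *github.Client, owner, repo string) error {
	feeds, _, err := client.Activity.ListFeeds(ctx)
	if err != nil {
		return err
	}
	fmt.Println("global timeline:", feeds.GetTimelineURL())

	events, _, err := client.Activity.ListRepositoryEvents(ctx, owner, repo, &github.ListOptions{PerPage: 30})
	if err != nil {
		return err
	}
	for _, ev := range events {
		fmt.Printf("%s by %s\n", ev.GetType(), ev.GetActor().GetLogin())
	}
	return nil
}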
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/events#list-issue-events-for-a-repository +func (s *ActivityService) ListIssueEventsForRepository(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*IssueEvent + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListEventsForRepoNetwork lists public events for a network of repositories. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-public-events-for-a-network-of-repositories +func (s *ActivityService) ListEventsForRepoNetwork(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Event, *Response, error) { + u := fmt.Sprintf("networks/%v/%v/events", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListEventsForOrganization lists public events for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-public-organization-events +func (s *ActivityService) ListEventsForOrganization(ctx context.Context, org string, opts *ListOptions) ([]*Event, *Response, error) { + u := fmt.Sprintf("orgs/%v/events", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListEventsPerformedByUser lists the events performed by a user. If publicOnly is +// true, only public events will be returned. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-events-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-public-events-for-a-user +func (s *ActivityService) ListEventsPerformedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { + var u string + if publicOnly { + u = fmt.Sprintf("users/%v/events/public", user) + } else { + u = fmt.Sprintf("users/%v/events", user) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListEventsReceivedByUser lists the events received by a user. If publicOnly is +// true, only public events will be returned. 
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-events-received-by-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-public-events-received-by-a-user +func (s *ActivityService) ListEventsReceivedByUser(ctx context.Context, user string, publicOnly bool, opts *ListOptions) ([]*Event, *Response, error) { + var u string + if publicOnly { + u = fmt.Sprintf("users/%v/received_events/public", user) + } else { + u = fmt.Sprintf("users/%v/received_events", user) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListUserEventsForOrganization provides the user’s organization dashboard. You +// must be authenticated as the user to view this. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/events#list-organization-events-for-the-authenticated-user +func (s *ActivityService) ListUserEventsForOrganization(ctx context.Context, org, user string, opts *ListOptions) ([]*Event, *Response, error) { + u := fmt.Sprintf("users/%v/events/orgs/%v", user, org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*Event + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/activity_notifications.go b/vendor/github.com/google/go-github/v56/github/activity_notifications.go new file mode 100644 index 000000000..03476c2e2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/activity_notifications.go @@ -0,0 +1,223 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "time" +) + +// Notification identifies a GitHub notification for a user. +type Notification struct { + ID *string `json:"id,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Subject *NotificationSubject `json:"subject,omitempty"` + + // Reason identifies the event that triggered the notification. + // + // GitHub API docs: https://docs.github.com/en/rest/activity#notification-reasons + Reason *string `json:"reason,omitempty"` + + Unread *bool `json:"unread,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + LastReadAt *Timestamp `json:"last_read_at,omitempty"` + URL *string `json:"url,omitempty"` +} + +// NotificationSubject identifies the subject of a notification. +type NotificationSubject struct { + Title *string `json:"title,omitempty"` + URL *string `json:"url,omitempty"` + LatestCommentURL *string `json:"latest_comment_url,omitempty"` + Type *string `json:"type,omitempty"` +} + +// NotificationListOptions specifies the optional parameters to the +// ActivityService.ListNotifications method. +type NotificationListOptions struct { + All bool `url:"all,omitempty"` + Participating bool `url:"participating,omitempty"` + Since time.Time `url:"since,omitempty"` + Before time.Time `url:"before,omitempty"` + + ListOptions +} + +// ListNotifications lists all notifications for the authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#list-notifications-for-the-authenticated-user +func (s *ActivityService) ListNotifications(ctx context.Context, opts *NotificationListOptions) ([]*Notification, *Response, error) { + u := "notifications" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var notifications []*Notification + resp, err := s.client.Do(ctx, req, ¬ifications) + if err != nil { + return nil, resp, err + } + + return notifications, resp, nil +} + +// ListRepositoryNotifications lists all notifications in a given repository +// for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#list-repository-notifications-for-the-authenticated-user +func (s *ActivityService) ListRepositoryNotifications(ctx context.Context, owner, repo string, opts *NotificationListOptions) ([]*Notification, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var notifications []*Notification + resp, err := s.client.Do(ctx, req, ¬ifications) + if err != nil { + return nil, resp, err + } + + return notifications, resp, nil +} + +type markReadOptions struct { + LastReadAt Timestamp `json:"last_read_at,omitempty"` +} + +// MarkNotificationsRead marks all notifications up to lastRead as read. +// +// GitHub API docs: https://docs.github.com/en/rest/activity#mark-as-read +func (s *ActivityService) MarkNotificationsRead(ctx context.Context, lastRead Timestamp) (*Response, error) { + opts := &markReadOptions{ + LastReadAt: lastRead, + } + req, err := s.client.NewRequest("PUT", "notifications", opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// MarkRepositoryNotificationsRead marks all notifications up to lastRead in +// the specified repository as read. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#mark-repository-notifications-as-read +func (s *ActivityService) MarkRepositoryNotificationsRead(ctx context.Context, owner, repo string, lastRead Timestamp) (*Response, error) { + opts := &markReadOptions{ + LastReadAt: lastRead, + } + u := fmt.Sprintf("repos/%v/%v/notifications", owner, repo) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetThread gets the specified notification thread. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#get-a-thread +func (s *ActivityService) GetThread(ctx context.Context, id string) (*Notification, *Response, error) { + u := fmt.Sprintf("notifications/threads/%v", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + notification := new(Notification) + resp, err := s.client.Do(ctx, req, notification) + if err != nil { + return nil, resp, err + } + + return notification, resp, nil +} + +// MarkThreadRead marks the specified thread as read. 
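A sketch of the notification methods above: list everything from the last 24 hours, then mark notifications read up to the current time (Timestamp is assumed to wrap time.Time as elsewhere in the package).

package examples

import (
	"context"
	"fmt"
	"time"

	"github.com/google/go-github/v56/github"
)

// catchUpOnNotifications prints recent notifications and marks them as read.
func catchUpOnNotifications(ctx context.Context, client *github.Client) error {
	opts := &github.NotificationListOptions{
		All:   true,
		Since: time.Now().Add(-24 * time.Hour),
	}
	notifications, _, err := client.Activity.ListNotifications(ctx, opts)
	if err != nil {
		return err
	}
	for _, n := range notifications {
		fmt.Printf("[%s] %s\n", n.GetReason(), n.GetSubject().GetTitle())
	}
	// Everything up to the current instant is considered read.
	_, err = client.Activity.MarkNotificationsRead(ctx, github.Timestamp{Time: time.Now()})
	return err
}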
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#mark-a-thread-as-read +func (s *ActivityService) MarkThreadRead(ctx context.Context, id string) (*Response, error) { + u := fmt.Sprintf("notifications/threads/%v", id) + + req, err := s.client.NewRequest("PATCH", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetThreadSubscription checks to see if the authenticated user is subscribed +// to a thread. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#get-a-thread-subscription-for-the-authenticated-user +func (s *ActivityService) GetThreadSubscription(ctx context.Context, id string) (*Subscription, *Response, error) { + u := fmt.Sprintf("notifications/threads/%v/subscription", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + sub := new(Subscription) + resp, err := s.client.Do(ctx, req, sub) + if err != nil { + return nil, resp, err + } + + return sub, resp, nil +} + +// SetThreadSubscription sets the subscription for the specified thread for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#set-a-thread-subscription +func (s *ActivityService) SetThreadSubscription(ctx context.Context, id string, subscription *Subscription) (*Subscription, *Response, error) { + u := fmt.Sprintf("notifications/threads/%v/subscription", id) + + req, err := s.client.NewRequest("PUT", u, subscription) + if err != nil { + return nil, nil, err + } + + sub := new(Subscription) + resp, err := s.client.Do(ctx, req, sub) + if err != nil { + return nil, resp, err + } + + return sub, resp, nil +} + +// DeleteThreadSubscription deletes the subscription for the specified thread +// for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/notifications#delete-a-thread-subscription +func (s *ActivityService) DeleteThreadSubscription(ctx context.Context, id string) (*Response, error) { + u := fmt.Sprintf("notifications/threads/%v/subscription", id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/activity_star.go b/vendor/github.com/google/go-github/v56/github/activity_star.go new file mode 100644 index 000000000..65a316f53 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/activity_star.go @@ -0,0 +1,141 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strings" +) + +// StarredRepository is returned by ListStarred. +type StarredRepository struct { + StarredAt *Timestamp `json:"starred_at,omitempty"` + Repository *Repository `json:"repo,omitempty"` +} + +// Stargazer represents a user that has starred a repository. +type Stargazer struct { + StarredAt *Timestamp `json:"starred_at,omitempty"` + User *User `json:"user,omitempty"` +} + +// ListStargazers lists people who have starred the specified repo. 
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#list-stargazers +func (s *ActivityService) ListStargazers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Stargazer, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/stargazers", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeStarringPreview) + + var stargazers []*Stargazer + resp, err := s.client.Do(ctx, req, &stargazers) + if err != nil { + return nil, resp, err + } + + return stargazers, resp, nil +} + +// ActivityListStarredOptions specifies the optional parameters to the +// ActivityService.ListStarred method. +type ActivityListStarredOptions struct { + // How to sort the repository list. Possible values are: created, updated, + // pushed, full_name. Default is "full_name". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort repositories. Possible values are: asc, desc. + // Default is "asc" when sort is "full_name", otherwise default is "desc". + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// ListStarred lists all the repos starred by a user. Passing the empty string +// will list the starred repositories for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#list-repositories-starred-by-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#list-repositories-starred-by-a-user +func (s *ActivityService) ListStarred(ctx context.Context, user string, opts *ActivityListStarredOptions) ([]*StarredRepository, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/starred", user) + } else { + u = "user/starred" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when APIs fully launch + acceptHeaders := []string{mediaTypeStarringPreview, mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var repos []*StarredRepository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// IsStarred checks if a repository is starred by authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#check-if-a-repository-is-starred-by-the-authenticated-user +func (s *ActivityService) IsStarred(ctx context.Context, owner, repo string) (bool, *Response, error) { + u := fmt.Sprintf("user/starred/%v/%v", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + starred, err := parseBoolResponse(err) + return starred, resp, err +} + +// Star a repository as the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#star-a-repository-for-the-authenticated-user +func (s *ActivityService) Star(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("user/starred/%v/%v", owner, repo) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unstar a repository as the authenticated user. 
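A sketch of ListStarred above, sorted by most recently starred; the accessors come from the generated accessor file, and the user login is a placeholder (an empty string would target the authenticated user, per the docs).

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// recentlyStarred prints the ten repositories a user starred most recently.
func recentlyStarred(ctx context.Context, client *github.Client, user string) error {
	opts := &github.ActivityListStarredOptions{
		Sort:        "created",
		Direction:   "desc",
		ListOptions: github.ListOptions{PerPage: 10},
	}
	starred, _, err := client.Activity.ListStarred(ctx, user, opts)
	if err != nil {
		return err
	}
	for _, sr := range starred {
		fmt.Printf("%s starred at %v\n", sr.GetRepository().GetFullName(), sr.GetStarredAt())
	}
	return nil
}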
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/starring#unstar-a-repository-for-the-authenticated-user +func (s *ActivityService) Unstar(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("user/starred/%v/%v", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/activity_watching.go b/vendor/github.com/google/go-github/v56/github/activity_watching.go new file mode 100644 index 000000000..2d6fafcc7 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/activity_watching.go @@ -0,0 +1,147 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Subscription identifies a repository or thread subscription. +type Subscription struct { + Subscribed *bool `json:"subscribed,omitempty"` + Ignored *bool `json:"ignored,omitempty"` + Reason *string `json:"reason,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + + // only populated for repository subscriptions + RepositoryURL *string `json:"repository_url,omitempty"` + + // only populated for thread subscriptions + ThreadURL *string `json:"thread_url,omitempty"` +} + +// ListWatchers lists watchers of a particular repo. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#list-watchers +func (s *ActivityService) ListWatchers(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/subscribers", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var watchers []*User + resp, err := s.client.Do(ctx, req, &watchers) + if err != nil { + return nil, resp, err + } + + return watchers, resp, nil +} + +// ListWatched lists the repositories the specified user is watching. Passing +// the empty string will fetch watched repos for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#list-repositories-watched-by-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#list-repositories-watched-by-a-user +func (s *ActivityService) ListWatched(ctx context.Context, user string, opts *ListOptions) ([]*Repository, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/subscriptions", user) + } else { + u = "user/subscriptions" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var watched []*Repository + resp, err := s.client.Do(ctx, req, &watched) + if err != nil { + return nil, resp, err + } + + return watched, resp, nil +} + +// GetRepositorySubscription returns the subscription for the specified +// repository for the authenticated user. If the authenticated user is not +// watching the repository, a nil Subscription is returned. 
+// +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#get-a-repository-subscription +func (s *ActivityService) GetRepositorySubscription(ctx context.Context, owner, repo string) (*Subscription, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + sub := new(Subscription) + resp, err := s.client.Do(ctx, req, sub) + if err != nil { + // if it's just a 404, don't return that as an error + _, err = parseBoolResponse(err) + return nil, resp, err + } + + return sub, resp, nil +} + +// SetRepositorySubscription sets the subscription for the specified repository +// for the authenticated user. +// +// To watch a repository, set subscription.Subscribed to true. +// To ignore notifications made within a repository, set subscription.Ignored to true. +// To stop watching a repository, use DeleteRepositorySubscription. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#set-a-repository-subscription +func (s *ActivityService) SetRepositorySubscription(ctx context.Context, owner, repo string, subscription *Subscription) (*Subscription, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) + + req, err := s.client.NewRequest("PUT", u, subscription) + if err != nil { + return nil, nil, err + } + + sub := new(Subscription) + resp, err := s.client.Do(ctx, req, sub) + if err != nil { + return nil, resp, err + } + + return sub, resp, nil +} + +// DeleteRepositorySubscription deletes the subscription for the specified +// repository for the authenticated user. +// +// This is used to stop watching a repository. To control whether or not to +// receive notifications from a repository, use SetRepositorySubscription. +// +// GitHub API docs: https://docs.github.com/en/rest/activity/watching#delete-a-repository-subscription +func (s *ActivityService) DeleteRepositorySubscription(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%s/%s/subscription", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/admin.go b/vendor/github.com/google/go-github/v56/github/admin.go new file mode 100644 index 000000000..1b28ef64c --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/admin.go @@ -0,0 +1,119 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// AdminService handles communication with the admin related methods of the +// GitHub API. These API routes are normally only accessible for GitHub +// Enterprise installations. +// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin +type AdminService service + +// TeamLDAPMapping represents the mapping between a GitHub team and an LDAP group. 
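A sketch of the repository subscription methods above: start watching a repository only if the authenticated user has no subscription yet (a nil Subscription, as documented above, means the repository is not being watched).

package examples

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// watchRepo subscribes the authenticated user to a repository if needed.
func watchRepo(ctx context.Context, client *github.Client, owner, repo string) error {
	sub, _, err := client.Activity.GetRepositorySubscription(ctx, owner, repo)
	if err != nil {
		return err
	}
	if sub != nil && sub.GetSubscribed() {
		return nil // already watching
	}
	_, _, err = client.Activity.SetRepositorySubscription(ctx, owner, repo, &github.Subscription{
		Subscribed: github.Bool(true),
	})
	return err
}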
+type TeamLDAPMapping struct { + ID *int64 `json:"id,omitempty"` + LDAPDN *string `json:"ldap_dn,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Slug *string `json:"slug,omitempty"` + Description *string `json:"description,omitempty"` + Privacy *string `json:"privacy,omitempty"` + Permission *string `json:"permission,omitempty"` + + MembersURL *string `json:"members_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` +} + +func (m TeamLDAPMapping) String() string { + return Stringify(m) +} + +// UserLDAPMapping represents the mapping between a GitHub user and an LDAP user. +type UserLDAPMapping struct { + ID *int64 `json:"id,omitempty"` + LDAPDN *string `json:"ldap_dn,omitempty"` + Login *string `json:"login,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + GravatarID *string `json:"gravatar_id,omitempty"` + Type *string `json:"type,omitempty"` + SiteAdmin *bool `json:"site_admin,omitempty"` + + URL *string `json:"url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + FollowingURL *string `json:"following_url,omitempty"` + FollowersURL *string `json:"followers_url,omitempty"` + GistsURL *string `json:"gists_url,omitempty"` + OrganizationsURL *string `json:"organizations_url,omitempty"` + ReceivedEventsURL *string `json:"received_events_url,omitempty"` + ReposURL *string `json:"repos_url,omitempty"` + StarredURL *string `json:"starred_url,omitempty"` + SubscriptionsURL *string `json:"subscriptions_url,omitempty"` +} + +func (m UserLDAPMapping) String() string { + return Stringify(m) +} + +// Enterprise represents the GitHub enterprise profile. +type Enterprise struct { + ID *int `json:"id,omitempty"` + Slug *string `json:"slug,omitempty"` + Name *string `json:"name,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + Description *string `json:"description,omitempty"` + WebsiteURL *string `json:"website_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +func (m Enterprise) String() string { + return Stringify(m) +} + +// UpdateUserLDAPMapping updates the mapping between a GitHub user and an LDAP user. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server/rest/enterprise-admin/ldap#update-ldap-mapping-for-a-user +func (s *AdminService) UpdateUserLDAPMapping(ctx context.Context, user string, mapping *UserLDAPMapping) (*UserLDAPMapping, *Response, error) { + u := fmt.Sprintf("admin/ldap/users/%v/mapping", user) + req, err := s.client.NewRequest("PATCH", u, mapping) + if err != nil { + return nil, nil, err + } + + m := new(UserLDAPMapping) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// UpdateTeamLDAPMapping updates the mapping between a GitHub team and an LDAP group. 
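A sketch of UpdateUserLDAPMapping above; per the AdminService docs these endpoints are normally GitHub Enterprise only, and the login and DN values are placeholders.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// mapUserToLDAP sets the LDAP distinguished name for a GitHub Enterprise user.
func mapUserToLDAP(ctx context.Context, client *github.Client, login, dn string) error {
	mapping := &github.UserLDAPMapping{
		LDAPDN: github.String(dn),
	}
	updated, _, err := client.Admin.UpdateUserLDAPMapping(ctx, login, mapping)
	if err != nil {
		return err
	}
	fmt.Printf("%s is now mapped to %s\n", updated.GetLogin(), updated.GetLDAPDN())
	return nil
}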
+// +// GitHub API docs: https://docs.github.com/en/rest/enterprise/ldap/#update-ldap-mapping-for-a-team +func (s *AdminService) UpdateTeamLDAPMapping(ctx context.Context, team int64, mapping *TeamLDAPMapping) (*TeamLDAPMapping, *Response, error) { + u := fmt.Sprintf("admin/ldap/teams/%v/mapping", team) + req, err := s.client.NewRequest("PATCH", u, mapping) + if err != nil { + return nil, nil, err + } + + m := new(TeamLDAPMapping) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/admin_orgs.go b/vendor/github.com/google/go-github/v56/github/admin_orgs.go new file mode 100644 index 000000000..448e51f63 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/admin_orgs.go @@ -0,0 +1,89 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// createOrgRequest is a subset of Organization and is used internally +// by CreateOrg to pass only the known fields for the endpoint. +type createOrgRequest struct { + Login *string `json:"login,omitempty"` + Admin *string `json:"admin,omitempty"` +} + +// CreateOrg creates a new organization in GitHub Enterprise. +// +// Note that only a subset of the org fields are used and org must +// not be nil. +// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/orgs/#create-an-organization +func (s *AdminService) CreateOrg(ctx context.Context, org *Organization, admin string) (*Organization, *Response, error) { + u := "admin/organizations" + + orgReq := &createOrgRequest{ + Login: org.Login, + Admin: &admin, + } + + req, err := s.client.NewRequest("POST", u, orgReq) + if err != nil { + return nil, nil, err + } + + o := new(Organization) + resp, err := s.client.Do(ctx, req, o) + if err != nil { + return nil, resp, err + } + + return o, resp, nil +} + +// renameOrgRequest is a subset of Organization and is used internally +// by RenameOrg and RenameOrgByName to pass only the known fields for the endpoint. +type renameOrgRequest struct { + Login *string `json:"login,omitempty"` +} + +// RenameOrgResponse is the response given when renaming an Organization. +type RenameOrgResponse struct { + Message *string `json:"message,omitempty"` + URL *string `json:"url,omitempty"` +} + +// RenameOrg renames an organization in GitHub Enterprise. +// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/orgs/#rename-an-organization +func (s *AdminService) RenameOrg(ctx context.Context, org *Organization, newName string) (*RenameOrgResponse, *Response, error) { + return s.RenameOrgByName(ctx, *org.Login, newName) +} + +// RenameOrgByName renames an organization in GitHub Enterprise using its current name. 
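A sketch of the Enterprise-only CreateOrg and RenameOrg methods above; the org login and site-admin login are placeholders, and the Organization type is assumed from elsewhere in the package.

package examples

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// createAndRenameOrg creates an organization on a GitHub Enterprise instance
// and then renames it.
func createAndRenameOrg(ctx context.Context, client *github.Client) error {
	org := &github.Organization{Login: github.String("acme-tools")}
	created, _, err := client.Admin.CreateOrg(ctx, org, "ghe-site-admin")
	if err != nil {
		return err
	}
	fmt.Println("created org:", created.GetLogin())

	renamed, _, err := client.Admin.RenameOrg(ctx, org, "acme-platform")
	if err != nil {
		return err
	}
	fmt.Println(renamed.GetMessage())
	return nil
}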
+// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/orgs/#rename-an-organization +func (s *AdminService) RenameOrgByName(ctx context.Context, org, newName string) (*RenameOrgResponse, *Response, error) { + u := fmt.Sprintf("admin/organizations/%v", org) + + orgReq := &renameOrgRequest{ + Login: &newName, + } + + req, err := s.client.NewRequest("PATCH", u, orgReq) + if err != nil { + return nil, nil, err + } + + o := new(RenameOrgResponse) + resp, err := s.client.Do(ctx, req, o) + if err != nil { + return nil, resp, err + } + + return o, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/admin_stats.go b/vendor/github.com/google/go-github/v56/github/admin_stats.go new file mode 100644 index 000000000..17e568f62 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/admin_stats.go @@ -0,0 +1,170 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" +) + +// AdminStats represents a variety of stats of a GitHub Enterprise +// installation. +type AdminStats struct { + Issues *IssueStats `json:"issues,omitempty"` + Hooks *HookStats `json:"hooks,omitempty"` + Milestones *MilestoneStats `json:"milestones,omitempty"` + Orgs *OrgStats `json:"orgs,omitempty"` + Comments *CommentStats `json:"comments,omitempty"` + Pages *PageStats `json:"pages,omitempty"` + Users *UserStats `json:"users,omitempty"` + Gists *GistStats `json:"gists,omitempty"` + Pulls *PullStats `json:"pulls,omitempty"` + Repos *RepoStats `json:"repos,omitempty"` +} + +func (s AdminStats) String() string { + return Stringify(s) +} + +// IssueStats represents the number of total, open and closed issues. +type IssueStats struct { + TotalIssues *int `json:"total_issues,omitempty"` + OpenIssues *int `json:"open_issues,omitempty"` + ClosedIssues *int `json:"closed_issues,omitempty"` +} + +func (s IssueStats) String() string { + return Stringify(s) +} + +// HookStats represents the number of total, active and inactive hooks. +type HookStats struct { + TotalHooks *int `json:"total_hooks,omitempty"` + ActiveHooks *int `json:"active_hooks,omitempty"` + InactiveHooks *int `json:"inactive_hooks,omitempty"` +} + +func (s HookStats) String() string { + return Stringify(s) +} + +// MilestoneStats represents the number of total, open and close milestones. +type MilestoneStats struct { + TotalMilestones *int `json:"total_milestones,omitempty"` + OpenMilestones *int `json:"open_milestones,omitempty"` + ClosedMilestones *int `json:"closed_milestones,omitempty"` +} + +func (s MilestoneStats) String() string { + return Stringify(s) +} + +// OrgStats represents the number of total, disabled organizations and the team +// and team member count. +type OrgStats struct { + TotalOrgs *int `json:"total_orgs,omitempty"` + DisabledOrgs *int `json:"disabled_orgs,omitempty"` + TotalTeams *int `json:"total_teams,omitempty"` + TotalTeamMembers *int `json:"total_team_members,omitempty"` +} + +func (s OrgStats) String() string { + return Stringify(s) +} + +// CommentStats represents the number of total comments on commits, gists, issues +// and pull requests. 
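A minimal sketch of how the enterprise organization-admin helpers vendored above (AdminService.CreateOrg and RenameOrgByName) might be called. The GHES URLs, token env var, admin login, and org names are placeholders; WithAuthToken and WithEnterpriseURLs are the standard client constructors from the same go-github package.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()

	// Hypothetical GitHub Enterprise Server endpoints and site-admin token.
	client, err := github.NewClient(nil).
		WithAuthToken(os.Getenv("GHES_ADMIN_TOKEN")).
		WithEnterpriseURLs("https://ghe.example.com/api/v3/", "https://ghe.example.com/api/uploads/")
	if err != nil {
		log.Fatal(err)
	}

	// Create an organization owned by a (hypothetical) site-admin account.
	org, _, err := client.Admin.CreateOrg(ctx,
		&github.Organization{Login: github.String("acme")}, "ghe-admin")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created org:", org.GetLogin())

	// Rename it by its current login; RenameOrgResponse carries a confirmation message.
	renamed, _, err := client.Admin.RenameOrgByName(ctx, "acme", "acme-inc")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(renamed.GetMessage())
}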
+type CommentStats struct { + TotalCommitComments *int `json:"total_commit_comments,omitempty"` + TotalGistComments *int `json:"total_gist_comments,omitempty"` + TotalIssueComments *int `json:"total_issue_comments,omitempty"` + TotalPullRequestComments *int `json:"total_pull_request_comments,omitempty"` +} + +func (s CommentStats) String() string { + return Stringify(s) +} + +// PageStats represents the total number of github pages. +type PageStats struct { + TotalPages *int `json:"total_pages,omitempty"` +} + +func (s PageStats) String() string { + return Stringify(s) +} + +// UserStats represents the number of total, admin and suspended users. +type UserStats struct { + TotalUsers *int `json:"total_users,omitempty"` + AdminUsers *int `json:"admin_users,omitempty"` + SuspendedUsers *int `json:"suspended_users,omitempty"` +} + +func (s UserStats) String() string { + return Stringify(s) +} + +// GistStats represents the number of total, private and public gists. +type GistStats struct { + TotalGists *int `json:"total_gists,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` +} + +func (s GistStats) String() string { + return Stringify(s) +} + +// PullStats represents the number of total, merged, mergable and unmergeable +// pull-requests. +type PullStats struct { + TotalPulls *int `json:"total_pulls,omitempty"` + MergedPulls *int `json:"merged_pulls,omitempty"` + MergablePulls *int `json:"mergeable_pulls,omitempty"` + UnmergablePulls *int `json:"unmergeable_pulls,omitempty"` +} + +func (s PullStats) String() string { + return Stringify(s) +} + +// RepoStats represents the number of total, root, fork, organization repositories +// together with the total number of pushes and wikis. +type RepoStats struct { + TotalRepos *int `json:"total_repos,omitempty"` + RootRepos *int `json:"root_repos,omitempty"` + ForkRepos *int `json:"fork_repos,omitempty"` + OrgRepos *int `json:"org_repos,omitempty"` + TotalPushes *int `json:"total_pushes,omitempty"` + TotalWikis *int `json:"total_wikis,omitempty"` +} + +func (s RepoStats) String() string { + return Stringify(s) +} + +// GetAdminStats returns a variety of metrics about a GitHub Enterprise +// installation. +// +// Please note that this is only available to site administrators, +// otherwise it will error with a 404 not found (instead of 401 or 403). +// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/admin_stats/ +func (s *AdminService) GetAdminStats(ctx context.Context) (*AdminStats, *Response, error) { + u := "enterprise/stats/all" + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + m := new(AdminStats) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/admin_users.go b/vendor/github.com/google/go-github/v56/github/admin_users.go new file mode 100644 index 000000000..d756a77e2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/admin_users.go @@ -0,0 +1,133 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// createUserRequest is a subset of User and is used internally +// by CreateUser to pass only the known fields for the endpoint. 
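A sketch of reading the instance-wide counters returned by AdminService.GetAdminStats. The GHES endpoints and the admin-token env var are hypothetical; as the doc comment notes, a non-admin caller gets a 404.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()

	// Hypothetical GHES endpoints; the caller must be a site administrator.
	client, err := github.NewClient(nil).
		WithAuthToken(os.Getenv("GHES_ADMIN_TOKEN")).
		WithEnterpriseURLs("https://ghe.example.com/api/v3/", "https://ghe.example.com/api/uploads/")
	if err != nil {
		log.Fatal(err)
	}

	stats, _, err := client.Admin.GetAdminStats(ctx)
	if err != nil {
		log.Fatal(err) // a 404 here usually means the token is not a site admin
	}

	// All stat fields are pointers, so use the generated Get* accessors.
	fmt.Printf("repos=%d users=%d open issues=%d\n",
		stats.GetRepos().GetTotalRepos(),
		stats.GetUsers().GetTotalUsers(),
		stats.GetIssues().GetOpenIssues(),
	)
}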
+type createUserRequest struct { + Login *string `json:"login,omitempty"` + Email *string `json:"email,omitempty"` +} + +// CreateUser creates a new user in GitHub Enterprise. +// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#create-a-new-user +func (s *AdminService) CreateUser(ctx context.Context, login, email string) (*User, *Response, error) { + u := "admin/users" + + userReq := &createUserRequest{ + Login: &login, + Email: &email, + } + + req, err := s.client.NewRequest("POST", u, userReq) + if err != nil { + return nil, nil, err + } + + var user User + resp, err := s.client.Do(ctx, req, &user) + if err != nil { + return nil, resp, err + } + + return &user, resp, nil +} + +// DeleteUser deletes a user in GitHub Enterprise. +// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#delete-a-user +func (s *AdminService) DeleteUser(ctx context.Context, username string) (*Response, error) { + u := "admin/users/" + username + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// ImpersonateUserOptions represents the scoping for the OAuth token. +type ImpersonateUserOptions struct { + Scopes []string `json:"scopes,omitempty"` +} + +// OAuthAPP represents the GitHub Site Administrator OAuth app. +type OAuthAPP struct { + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + ClientID *string `json:"client_id,omitempty"` +} + +func (s OAuthAPP) String() string { + return Stringify(s) +} + +// UserAuthorization represents the impersonation response. +type UserAuthorization struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Scopes []string `json:"scopes,omitempty"` + Token *string `json:"token,omitempty"` + TokenLastEight *string `json:"token_last_eight,omitempty"` + HashedToken *string `json:"hashed_token,omitempty"` + App *OAuthAPP `json:"app,omitempty"` + Note *string `json:"note,omitempty"` + NoteURL *string `json:"note_url,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + Fingerprint *string `json:"fingerprint,omitempty"` +} + +// CreateUserImpersonation creates an impersonation OAuth token. +// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#create-an-impersonation-oauth-token +func (s *AdminService) CreateUserImpersonation(ctx context.Context, username string, opts *ImpersonateUserOptions) (*UserAuthorization, *Response, error) { + u := fmt.Sprintf("admin/users/%s/authorizations", username) + + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + a := new(UserAuthorization) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} + +// DeleteUserImpersonation deletes an impersonation OAuth token. 
+// +// GitHub Enterprise API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#delete-an-impersonation-oauth-token +func (s *AdminService) DeleteUserImpersonation(ctx context.Context, username string) (*Response, error) { + u := fmt.Sprintf("admin/users/%s/authorizations", username) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/apps.go b/vendor/github.com/google/go-github/v56/github/apps.go new file mode 100644 index 000000000..8965e6681 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps.go @@ -0,0 +1,391 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// AppsService provides access to the installation related functions +// in the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/ +type AppsService service + +// App represents a GitHub App. +type App struct { + ID *int64 `json:"id,omitempty"` + Slug *string `json:"slug,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + ExternalURL *string `json:"external_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Permissions *InstallationPermissions `json:"permissions,omitempty"` + Events []string `json:"events,omitempty"` + InstallationsCount *int `json:"installations_count,omitempty"` +} + +// InstallationToken represents an installation token. +type InstallationToken struct { + Token *string `json:"token,omitempty"` + ExpiresAt *Timestamp `json:"expires_at,omitempty"` + Permissions *InstallationPermissions `json:"permissions,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +// InstallationTokenOptions allow restricting a token's access to specific repositories. +type InstallationTokenOptions struct { + // The IDs of the repositories that the installation token can access. + // Providing repository IDs restricts the access of an installation token to specific repositories. + RepositoryIDs []int64 `json:"repository_ids,omitempty"` + + // The names of the repositories that the installation token can access. + // Providing repository names restricts the access of an installation token to specific repositories. + Repositories []string `json:"repositories,omitempty"` + + // The permissions granted to the access token. + // The permissions object includes the permission names and their access type. + Permissions *InstallationPermissions `json:"permissions,omitempty"` +} + +// InstallationPermissions lists the repository and organization permissions for an installation. 
+// +// Permission names taken from: +// +// https://docs.github.com/en/enterprise-server@3.0/rest/apps#create-an-installation-access-token-for-an-app +// https://docs.github.com/en/rest/apps#create-an-installation-access-token-for-an-app +type InstallationPermissions struct { + Actions *string `json:"actions,omitempty"` + Administration *string `json:"administration,omitempty"` + Blocking *string `json:"blocking,omitempty"` + Checks *string `json:"checks,omitempty"` + Contents *string `json:"contents,omitempty"` + ContentReferences *string `json:"content_references,omitempty"` + Deployments *string `json:"deployments,omitempty"` + Emails *string `json:"emails,omitempty"` + Environments *string `json:"environments,omitempty"` + Followers *string `json:"followers,omitempty"` + Issues *string `json:"issues,omitempty"` + Metadata *string `json:"metadata,omitempty"` + Members *string `json:"members,omitempty"` + OrganizationAdministration *string `json:"organization_administration,omitempty"` + OrganizationCustomRoles *string `json:"organization_custom_roles,omitempty"` + OrganizationHooks *string `json:"organization_hooks,omitempty"` + OrganizationPackages *string `json:"organization_packages,omitempty"` + OrganizationPlan *string `json:"organization_plan,omitempty"` + OrganizationPreReceiveHooks *string `json:"organization_pre_receive_hooks,omitempty"` + OrganizationProjects *string `json:"organization_projects,omitempty"` + OrganizationSecrets *string `json:"organization_secrets,omitempty"` + OrganizationSelfHostedRunners *string `json:"organization_self_hosted_runners,omitempty"` + OrganizationUserBlocking *string `json:"organization_user_blocking,omitempty"` + Packages *string `json:"packages,omitempty"` + Pages *string `json:"pages,omitempty"` + PullRequests *string `json:"pull_requests,omitempty"` + RepositoryHooks *string `json:"repository_hooks,omitempty"` + RepositoryProjects *string `json:"repository_projects,omitempty"` + RepositoryPreReceiveHooks *string `json:"repository_pre_receive_hooks,omitempty"` + Secrets *string `json:"secrets,omitempty"` + SecretScanningAlerts *string `json:"secret_scanning_alerts,omitempty"` + SecurityEvents *string `json:"security_events,omitempty"` + SingleFile *string `json:"single_file,omitempty"` + Statuses *string `json:"statuses,omitempty"` + TeamDiscussions *string `json:"team_discussions,omitempty"` + VulnerabilityAlerts *string `json:"vulnerability_alerts,omitempty"` + Workflows *string `json:"workflows,omitempty"` +} + +// InstallationRequest represents a pending GitHub App installation request. +type InstallationRequest struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Account *User `json:"account,omitempty"` + Requester *User `json:"requester,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` +} + +// Installation represents a GitHub Apps installation. 
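An illustrative sketch of scoping an installation token with the InstallationTokenOptions and InstallationPermissions types above. The repository name and permission levels are made up, and the app-authenticated client (App JWT transport) is assumed rather than constructed; the request itself goes through AppsService.CreateInstallationToken, defined further down in this file.

package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// createScopedToken requests an installation token limited to one repository
// with read-only contents and metadata access. appClient must already be
// authenticated as the App itself; instID is the target installation ID.
func createScopedToken(ctx context.Context, appClient *github.Client, instID int64) (*github.InstallationToken, error) {
	opts := &github.InstallationTokenOptions{
		Repositories: []string{"pipelines-as-code"}, // hypothetical repository name
		Permissions: &github.InstallationPermissions{
			Contents: github.String("read"),
			Metadata: github.String("read"),
		},
	}

	tok, _, err := appClient.Apps.CreateInstallationToken(ctx, instID, opts)
	if err != nil {
		return nil, err
	}
	fmt.Println("token expires at:", tok.GetExpiresAt())
	return tok, nil
}

func main() {
	// Building the app-authenticated client is environment-specific and out of
	// scope here; createScopedToken only shows the shape of the request.
}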
+type Installation struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AppID *int64 `json:"app_id,omitempty"` + AppSlug *string `json:"app_slug,omitempty"` + TargetID *int64 `json:"target_id,omitempty"` + Account *User `json:"account,omitempty"` + AccessTokensURL *string `json:"access_tokens_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + TargetType *string `json:"target_type,omitempty"` + SingleFileName *string `json:"single_file_name,omitempty"` + RepositorySelection *string `json:"repository_selection,omitempty"` + Events []string `json:"events,omitempty"` + SingleFilePaths []string `json:"single_file_paths,omitempty"` + Permissions *InstallationPermissions `json:"permissions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + HasMultipleSingleFiles *bool `json:"has_multiple_single_files,omitempty"` + SuspendedBy *User `json:"suspended_by,omitempty"` + SuspendedAt *Timestamp `json:"suspended_at,omitempty"` +} + +// Attachment represents a GitHub Apps attachment. +type Attachment struct { + ID *int64 `json:"id,omitempty"` + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` +} + +// ContentReference represents a reference to a URL in an issue or pull request. +type ContentReference struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Reference *string `json:"reference,omitempty"` +} + +func (i Installation) String() string { + return Stringify(i) +} + +// Get a single GitHub App. Passing the empty string will get +// the authenticated GitHub App. +// +// Note: appSlug is just the URL-friendly name of your GitHub App. +// You can find this on the settings page for your GitHub App +// (e.g., https://github.com/settings/apps/:app_slug). +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-the-authenticated-app +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-an-app +func (s *AppsService) Get(ctx context.Context, appSlug string) (*App, *Response, error) { + var u string + if appSlug != "" { + u = fmt.Sprintf("apps/%v", appSlug) + } else { + u = "app" + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + app := new(App) + resp, err := s.client.Do(ctx, req, app) + if err != nil { + return nil, resp, err + } + + return app, resp, nil +} + +// ListInstallationRequests lists the pending installation requests that the current GitHub App has. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#list-installation-requests-for-the-authenticated-app +func (s *AppsService) ListInstallationRequests(ctx context.Context, opts *ListOptions) ([]*InstallationRequest, *Response, error) { + u, err := addOptions("app/installation-requests", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var i []*InstallationRequest + resp, err := s.client.Do(ctx, req, &i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// ListInstallations lists the installations that the current GitHub App has. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#list-installations-for-the-authenticated-app +func (s *AppsService) ListInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { + u, err := addOptions("app/installations", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var i []*Installation + resp, err := s.client.Do(ctx, req, &i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// GetInstallation returns the specified installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-an-installation-for-the-authenticated-app +func (s *AppsService) GetInstallation(ctx context.Context, id int64) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("app/installations/%v", id)) +} + +// ListUserInstallations lists installations that are accessible to the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#list-app-installations-accessible-to-the-user-access-token +func (s *AppsService) ListUserInstallations(ctx context.Context, opts *ListOptions) ([]*Installation, *Response, error) { + u, err := addOptions("user/installations", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var i struct { + Installations []*Installation `json:"installations"` + } + resp, err := s.client.Do(ctx, req, &i) + if err != nil { + return nil, resp, err + } + + return i.Installations, resp, nil +} + +// SuspendInstallation suspends the specified installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#suspend-an-app-installation +func (s *AppsService) SuspendInstallation(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("app/installations/%v/suspended", id) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// UnsuspendInstallation unsuspends the specified installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#unsuspend-an-app-installation +func (s *AppsService) UnsuspendInstallation(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("app/installations/%v/suspended", id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteInstallation deletes the specified installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#delete-an-installation-for-the-authenticated-app +func (s *AppsService) DeleteInstallation(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("app/installations/%v", id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CreateInstallationToken creates a new installation token. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#create-an-installation-access-token-for-an-app +func (s *AppsService) CreateInstallationToken(ctx context.Context, id int64, opts *InstallationTokenOptions) (*InstallationToken, *Response, error) { + u := fmt.Sprintf("app/installations/%v/access_tokens", id) + + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + t := new(InstallationToken) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// CreateAttachment creates a new attachment on user comment containing a url. +// +// TODO: Find GitHub API docs. +func (s *AppsService) CreateAttachment(ctx context.Context, contentReferenceID int64, title, body string) (*Attachment, *Response, error) { + u := fmt.Sprintf("content_references/%v/attachments", contentReferenceID) + payload := &Attachment{Title: String(title), Body: String(body)} + req, err := s.client.NewRequest("POST", u, payload) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeContentAttachmentsPreview) + + m := &Attachment{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// FindOrganizationInstallation finds the organization's installation information. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-an-organization-installation-for-the-authenticated-app +func (s *AppsService) FindOrganizationInstallation(ctx context.Context, org string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("orgs/%v/installation", org)) +} + +// FindRepositoryInstallation finds the repository's installation information. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-a-repository-installation-for-the-authenticated-app +func (s *AppsService) FindRepositoryInstallation(ctx context.Context, owner, repo string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("repos/%v/%v/installation", owner, repo)) +} + +// FindRepositoryInstallationByID finds the repository's installation information. +// +// Note: FindRepositoryInstallationByID uses the undocumented GitHub API endpoint /repositories/:id/installation. +func (s *AppsService) FindRepositoryInstallationByID(ctx context.Context, id int64) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("repositories/%d/installation", id)) +} + +// FindUserInstallation finds the user's installation information. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#get-a-user-installation-for-the-authenticated-app +func (s *AppsService) FindUserInstallation(ctx context.Context, user string) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("users/%v/installation", user)) +} + +func (s *AppsService) getInstallation(ctx context.Context, url string) (*Installation, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + i := new(Installation) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/apps_hooks.go b/vendor/github.com/google/go-github/v56/github/apps_hooks.go new file mode 100644 index 000000000..e3bd2afc0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps_hooks.go @@ -0,0 +1,48 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" +) + +// GetHookConfig returns the webhook configuration for a GitHub App. +// The underlying transport must be authenticated as an app. +// +// GitHub API docs: https://docs.github.com/en/rest/apps#get-a-webhook-configuration-for-an-app +func (s *AppsService) GetHookConfig(ctx context.Context) (*HookConfig, *Response, error) { + req, err := s.client.NewRequest("GET", "app/hook/config", nil) + if err != nil { + return nil, nil, err + } + + config := new(HookConfig) + resp, err := s.client.Do(ctx, req, &config) + if err != nil { + return nil, resp, err + } + + return config, resp, nil +} + +// UpdateHookConfig updates the webhook configuration for a GitHub App. +// The underlying transport must be authenticated as an app. +// +// GitHub API docs: https://docs.github.com/en/rest/apps#update-a-webhook-configuration-for-an-app +func (s *AppsService) UpdateHookConfig(ctx context.Context, config *HookConfig) (*HookConfig, *Response, error) { + req, err := s.client.NewRequest("PATCH", "app/hook/config", config) + if err != nil { + return nil, nil, err + } + + c := new(HookConfig) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/apps_hooks_deliveries.go b/vendor/github.com/google/go-github/v56/github/apps_hooks_deliveries.go new file mode 100644 index 000000000..33102f36d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps_hooks_deliveries.go @@ -0,0 +1,72 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListHookDeliveries lists deliveries of an App webhook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps/webhooks#list-deliveries-for-an-app-webhook +func (s *AppsService) ListHookDeliveries(ctx context.Context, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { + u, err := addOptions("app/hook/deliveries", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + deliveries := []*HookDelivery{} + resp, err := s.client.Do(ctx, req, &deliveries) + if err != nil { + return nil, resp, err + } + + return deliveries, resp, nil +} + +// GetHookDelivery returns the App webhook delivery with the specified ID. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/webhooks#get-a-delivery-for-an-app-webhook +func (s *AppsService) GetHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("app/hook/deliveries/%v", deliveryID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// RedeliverHookDelivery redelivers a delivery for an App webhook. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/webhooks#redeliver-a-delivery-for-an-app-webhook +func (s *AppsService) RedeliverHookDelivery(ctx context.Context, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("app/hook/deliveries/%v/attempts", deliveryID) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/apps_installation.go b/vendor/github.com/google/go-github/v56/github/apps_installation.go new file mode 100644 index 000000000..b61908071 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps_installation.go @@ -0,0 +1,128 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strings" +) + +// ListRepositories represents the response from the list repos endpoints. +type ListRepositories struct { + TotalCount *int `json:"total_count,omitempty"` + Repositories []*Repository `json:"repositories"` +} + +// ListRepos lists the repositories that are accessible to the authenticated installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#list-repositories-accessible-to-the-app-installation +func (s *AppsService) ListRepos(ctx context.Context, opts *ListOptions) (*ListRepositories, *Response, error) { + u, err := addOptions("installation/repositories", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. 
+ acceptHeaders := []string{ + mediaTypeTopicsPreview, + mediaTypeRepositoryVisibilityPreview, + mediaTypeRepositoryTemplatePreview, + } + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var r *ListRepositories + + resp, err := s.client.Do(ctx, req, &r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// ListUserRepos lists repositories that are accessible +// to the authenticated user for an installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#list-repositories-accessible-to-the-user-access-token +func (s *AppsService) ListUserRepos(ctx context.Context, id int64, opts *ListOptions) (*ListRepositories, *Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories", id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{ + mediaTypeTopicsPreview, + mediaTypeRepositoryVisibilityPreview, + mediaTypeRepositoryTemplatePreview, + } + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var r *ListRepositories + resp, err := s.client.Do(ctx, req, &r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// AddRepository adds a single repository to an installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#add-a-repository-to-an-app-installation +func (s *AppsService) AddRepository(ctx context.Context, instID, repoID int64) (*Repository, *Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, nil, err + } + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// RemoveRepository removes a single repository from an installation. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#remove-a-repository-from-an-app-installation +func (s *AppsService) RemoveRepository(ctx context.Context, instID, repoID int64) (*Response, error) { + u := fmt.Sprintf("user/installations/%v/repositories/%v", instID, repoID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RevokeInstallationToken revokes an installation token. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/installations#revoke-an-installation-access-token +func (s *AppsService) RevokeInstallationToken(ctx context.Context) (*Response, error) { + u := "installation/token" + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/apps_manifest.go b/vendor/github.com/google/go-github/v56/github/apps_manifest.go new file mode 100644 index 000000000..fa4c85379 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps_manifest.go @@ -0,0 +1,49 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// AppConfig describes the configuration of a GitHub App. 
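A sketch combining ListHookDeliveries and RedeliverHookDelivery from apps_hooks_deliveries.go to retry App webhook deliveries that did not succeed. The page size is arbitrary and the app-authenticated client is assumed to exist.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

// redeliverFailed walks one page of recent App webhook deliveries and asks
// GitHub to redeliver anything that did not return a 2xx status.
func redeliverFailed(ctx context.Context, appClient *github.Client) error {
	deliveries, _, err := appClient.Apps.ListHookDeliveries(ctx, &github.ListCursorOptions{PerPage: 50})
	if err != nil {
		return err
	}

	for _, d := range deliveries {
		if d.GetStatusCode() >= 200 && d.GetStatusCode() < 300 {
			continue
		}
		if _, _, err := appClient.Apps.RedeliverHookDelivery(ctx, d.GetID()); err != nil {
			return fmt.Errorf("redeliver %d: %w", d.GetID(), err)
		}
		log.Printf("redelivered %s delivery %d", d.GetEvent(), d.GetID())
	}
	return nil
}

func main() {
	// Client construction (App JWT auth) is omitted; redeliverFailed shows the API shape.
}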
+type AppConfig struct { + ID *int64 `json:"id,omitempty"` + Slug *string `json:"slug,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + ExternalURL *string `json:"external_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ClientID *string `json:"client_id,omitempty"` + ClientSecret *string `json:"client_secret,omitempty"` + WebhookSecret *string `json:"webhook_secret,omitempty"` + PEM *string `json:"pem,omitempty"` +} + +// CompleteAppManifest completes the App manifest handshake flow for the given +// code. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/apps#create-a-github-app-from-a-manifest +func (s *AppsService) CompleteAppManifest(ctx context.Context, code string) (*AppConfig, *Response, error) { + u := fmt.Sprintf("app-manifests/%s/conversions", code) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + cfg := new(AppConfig) + resp, err := s.client.Do(ctx, req, cfg) + if err != nil { + return nil, resp, err + } + + return cfg, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/apps_marketplace.go b/vendor/github.com/google/go-github/v56/github/apps_marketplace.go new file mode 100644 index 000000000..32889abd2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/apps_marketplace.go @@ -0,0 +1,192 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// MarketplaceService handles communication with the marketplace related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/apps#marketplace +type MarketplaceService struct { + client *Client + // Stubbed controls whether endpoints that return stubbed data are used + // instead of production endpoints. Stubbed data is fake data that's useful + // for testing your GitHub Apps. Stubbed data is hard-coded and will not + // change based on actual subscriptions. + // + // GitHub API docs: https://docs.github.com/en/rest/apps#testing-with-stubbed-endpoints + Stubbed bool +} + +// MarketplacePlan represents a GitHub Apps Marketplace Listing Plan. +type MarketplacePlan struct { + URL *string `json:"url,omitempty"` + AccountsURL *string `json:"accounts_url,omitempty"` + ID *int64 `json:"id,omitempty"` + Number *int `json:"number,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + MonthlyPriceInCents *int `json:"monthly_price_in_cents,omitempty"` + YearlyPriceInCents *int `json:"yearly_price_in_cents,omitempty"` + // The pricing model for this listing. Can be one of "flat-rate", "per-unit", or "free". + PriceModel *string `json:"price_model,omitempty"` + UnitName *string `json:"unit_name,omitempty"` + Bullets *[]string `json:"bullets,omitempty"` + // State can be one of the values "draft" or "published". + State *string `json:"state,omitempty"` + HasFreeTrial *bool `json:"has_free_trial,omitempty"` +} + +// MarketplacePurchase represents a GitHub Apps Marketplace Purchase. 
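A sketch of the manifest handshake that CompleteAppManifest finishes: GitHub redirects back with a short-lived code, which is exchanged once for the new app's credentials. The env var name is hypothetical, and the conversion endpoint itself needs no authentication.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()

	// The temporary code GitHub appends to the manifest redirect URL,
	// hypothetically passed in via the environment here.
	code := os.Getenv("APP_MANIFEST_CODE")

	client := github.NewClient(nil)

	cfg, _, err := client.Apps.CompleteAppManifest(ctx, code)
	if err != nil {
		log.Fatal(err)
	}

	// The returned AppConfig carries the app's credentials exactly once;
	// persist them somewhere safe instead of printing them in real code.
	fmt.Println("app id:", cfg.GetID())
	fmt.Println("webhook secret set:", cfg.GetWebhookSecret() != "")
}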
+type MarketplacePurchase struct { + Account *MarketplacePurchaseAccount `json:"account,omitempty"` + // BillingCycle can be one of the values "yearly", "monthly" or nil. + BillingCycle *string `json:"billing_cycle,omitempty"` + NextBillingDate *Timestamp `json:"next_billing_date,omitempty"` + UnitCount *int `json:"unit_count,omitempty"` + Plan *MarketplacePlan `json:"plan,omitempty"` + OnFreeTrial *bool `json:"on_free_trial,omitempty"` + FreeTrialEndsOn *Timestamp `json:"free_trial_ends_on,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +// MarketplacePendingChange represents a pending change to a GitHub Apps Marketplace Plan. +type MarketplacePendingChange struct { + EffectiveDate *Timestamp `json:"effective_date,omitempty"` + UnitCount *int `json:"unit_count,omitempty"` + ID *int64 `json:"id,omitempty"` + Plan *MarketplacePlan `json:"plan,omitempty"` +} + +// MarketplacePlanAccount represents a GitHub Account (user or organization) on a specific plan. +type MarketplacePlanAccount struct { + URL *string `json:"url,omitempty"` + Type *string `json:"type,omitempty"` + ID *int64 `json:"id,omitempty"` + Login *string `json:"login,omitempty"` + OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` + MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` + MarketplacePendingChange *MarketplacePendingChange `json:"marketplace_pending_change,omitempty"` +} + +// MarketplacePurchaseAccount represents a GitHub Account (user or organization) for a Purchase. +type MarketplacePurchaseAccount struct { + URL *string `json:"url,omitempty"` + Type *string `json:"type,omitempty"` + ID *int64 `json:"id,omitempty"` + Login *string `json:"login,omitempty"` + OrganizationBillingEmail *string `json:"organization_billing_email,omitempty"` + Email *string `json:"email,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +// ListPlans lists all plans for your Marketplace listing. +// +// GitHub API docs: https://docs.github.com/en/rest/apps#list-plans +func (s *MarketplaceService) ListPlans(ctx context.Context, opts *ListOptions) ([]*MarketplacePlan, *Response, error) { + uri := s.marketplaceURI("plans") + u, err := addOptions(uri, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var plans []*MarketplacePlan + resp, err := s.client.Do(ctx, req, &plans) + if err != nil { + return nil, resp, err + } + + return plans, resp, nil +} + +// ListPlanAccountsForPlan lists all GitHub accounts (user or organization) on a specific plan. +// +// GitHub API docs: https://docs.github.com/en/rest/apps#list-accounts-for-a-plan +func (s *MarketplaceService) ListPlanAccountsForPlan(ctx context.Context, planID int64, opts *ListOptions) ([]*MarketplacePlanAccount, *Response, error) { + uri := s.marketplaceURI(fmt.Sprintf("plans/%v/accounts", planID)) + u, err := addOptions(uri, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var accounts []*MarketplacePlanAccount + resp, err := s.client.Do(ctx, req, &accounts) + if err != nil { + return nil, resp, err + } + + return accounts, resp, nil +} + +// GetPlanAccountForAccount get GitHub account (user or organization) associated with an account. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps#get-a-subscription-plan-for-an-account +func (s *MarketplaceService) GetPlanAccountForAccount(ctx context.Context, accountID int64) (*MarketplacePlanAccount, *Response, error) { + uri := s.marketplaceURI(fmt.Sprintf("accounts/%v", accountID)) + + req, err := s.client.NewRequest("GET", uri, nil) + if err != nil { + return nil, nil, err + } + + var account *MarketplacePlanAccount + resp, err := s.client.Do(ctx, req, &account) + if err != nil { + return nil, resp, err + } + + return account, resp, nil +} + +// ListMarketplacePurchasesForUser lists all GitHub marketplace purchases made by a user. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user-stubbed +// GitHub API docs: https://docs.github.com/en/rest/apps/marketplace#list-subscriptions-for-the-authenticated-user +func (s *MarketplaceService) ListMarketplacePurchasesForUser(ctx context.Context, opts *ListOptions) ([]*MarketplacePurchase, *Response, error) { + uri := "user/marketplace_purchases" + if s.Stubbed { + uri = "user/marketplace_purchases/stubbed" + } + + u, err := addOptions(uri, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var purchases []*MarketplacePurchase + resp, err := s.client.Do(ctx, req, &purchases) + if err != nil { + return nil, resp, err + } + return purchases, resp, nil +} + +func (s *MarketplaceService) marketplaceURI(endpoint string) string { + url := "marketplace_listing" + if s.Stubbed { + url = "marketplace_listing/stubbed" + } + return url + "/" + endpoint +} diff --git a/vendor/github.com/google/go-github/v56/github/authorizations.go b/vendor/github.com/google/go-github/v56/github/authorizations.go new file mode 100644 index 000000000..ea0897e36 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/authorizations.go @@ -0,0 +1,281 @@ +// Copyright 2015 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Scope models a GitHub authorization scope. +// +// GitHub API docs: https://docs.github.com/en/rest/oauth/#scopes +type Scope string + +// This is the set of scopes for GitHub API V3 +const ( + ScopeNone Scope = "(no scope)" // REVISIT: is this actually returned, or just a documentation artifact? 
+ ScopeUser Scope = "user" + ScopeUserEmail Scope = "user:email" + ScopeUserFollow Scope = "user:follow" + ScopePublicRepo Scope = "public_repo" + ScopeRepo Scope = "repo" + ScopeRepoDeployment Scope = "repo_deployment" + ScopeRepoStatus Scope = "repo:status" + ScopeDeleteRepo Scope = "delete_repo" + ScopeNotifications Scope = "notifications" + ScopeGist Scope = "gist" + ScopeReadRepoHook Scope = "read:repo_hook" + ScopeWriteRepoHook Scope = "write:repo_hook" + ScopeAdminRepoHook Scope = "admin:repo_hook" + ScopeAdminOrgHook Scope = "admin:org_hook" + ScopeReadOrg Scope = "read:org" + ScopeWriteOrg Scope = "write:org" + ScopeAdminOrg Scope = "admin:org" + ScopeReadPublicKey Scope = "read:public_key" + ScopeWritePublicKey Scope = "write:public_key" + ScopeAdminPublicKey Scope = "admin:public_key" + ScopeReadGPGKey Scope = "read:gpg_key" + ScopeWriteGPGKey Scope = "write:gpg_key" + ScopeAdminGPGKey Scope = "admin:gpg_key" + ScopeSecurityEvents Scope = "security_events" +) + +// AuthorizationsService handles communication with the authorization related +// methods of the GitHub API. +// +// This service requires HTTP Basic Authentication; it cannot be accessed using +// an OAuth token. +// +// GitHub API docs: https://docs.github.com/en/rest/oauth-authorizations +type AuthorizationsService service + +// Authorization represents an individual GitHub authorization. +type Authorization struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Scopes []Scope `json:"scopes,omitempty"` + Token *string `json:"token,omitempty"` + TokenLastEight *string `json:"token_last_eight,omitempty"` + HashedToken *string `json:"hashed_token,omitempty"` + App *AuthorizationApp `json:"app,omitempty"` + Note *string `json:"note,omitempty"` + NoteURL *string `json:"note_url,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + Fingerprint *string `json:"fingerprint,omitempty"` + + // User is only populated by the Check and Reset methods. + User *User `json:"user,omitempty"` +} + +func (a Authorization) String() string { + return Stringify(a) +} + +// AuthorizationApp represents an individual GitHub app (in the context of authorization). +type AuthorizationApp struct { + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + ClientID *string `json:"client_id,omitempty"` +} + +func (a AuthorizationApp) String() string { + return Stringify(a) +} + +// Grant represents an OAuth application that has been granted access to an account. +type Grant struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + App *AuthorizationApp `json:"app,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Scopes []string `json:"scopes,omitempty"` +} + +func (g Grant) String() string { + return Stringify(g) +} + +// AuthorizationRequest represents a request to create an authorization. +type AuthorizationRequest struct { + Scopes []Scope `json:"scopes,omitempty"` + Note *string `json:"note,omitempty"` + NoteURL *string `json:"note_url,omitempty"` + ClientID *string `json:"client_id,omitempty"` + ClientSecret *string `json:"client_secret,omitempty"` + Fingerprint *string `json:"fingerprint,omitempty"` +} + +func (a AuthorizationRequest) String() string { + return Stringify(a) +} + +// AuthorizationUpdateRequest represents a request to update an authorization. 
+// +// Note that for any one update, you must only provide one of the "scopes" +// fields. That is, you may provide only one of "Scopes", or "AddScopes", or +// "RemoveScopes". +// +// GitHub API docs: https://docs.github.com/en/rest/oauth-authorizations#update-an-existing-authorization +type AuthorizationUpdateRequest struct { + Scopes []string `json:"scopes,omitempty"` + AddScopes []string `json:"add_scopes,omitempty"` + RemoveScopes []string `json:"remove_scopes,omitempty"` + Note *string `json:"note,omitempty"` + NoteURL *string `json:"note_url,omitempty"` + Fingerprint *string `json:"fingerprint,omitempty"` +} + +func (a AuthorizationUpdateRequest) String() string { + return Stringify(a) +} + +// Check if an OAuth token is valid for a specific app. +// +// Note that this operation requires the use of BasicAuth, but where the +// username is the OAuth application clientID, and the password is its +// clientSecret. Invalid tokens will return a 404 Not Found. +// +// The returned Authorization.User field will be populated. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/oauth-applications#check-a-token +func (s *AuthorizationsService) Check(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { + u := fmt.Sprintf("applications/%v/token", clientID) + + reqBody := &struct { + AccessToken string `json:"access_token"` + }{AccessToken: accessToken} + + req, err := s.client.NewRequest("POST", u, reqBody) + if err != nil { + return nil, nil, err + } + req.Header.Set("Accept", mediaTypeOAuthAppPreview) + + a := new(Authorization) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} + +// Reset is used to reset a valid OAuth token without end user involvement. +// Applications must save the "token" property in the response, because changes +// take effect immediately. +// +// Note that this operation requires the use of BasicAuth, but where the +// username is the OAuth application clientID, and the password is its +// clientSecret. Invalid tokens will return a 404 Not Found. +// +// The returned Authorization.User field will be populated. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/oauth-applications#reset-a-token +func (s *AuthorizationsService) Reset(ctx context.Context, clientID, accessToken string) (*Authorization, *Response, error) { + u := fmt.Sprintf("applications/%v/token", clientID) + + reqBody := &struct { + AccessToken string `json:"access_token"` + }{AccessToken: accessToken} + + req, err := s.client.NewRequest("PATCH", u, reqBody) + if err != nil { + return nil, nil, err + } + req.Header.Set("Accept", mediaTypeOAuthAppPreview) + + a := new(Authorization) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} + +// Revoke an authorization for an application. +// +// Note that this operation requires the use of BasicAuth, but where the +// username is the OAuth application clientID, and the password is its +// clientSecret. Invalid tokens will return a 404 Not Found. 
+// +// GitHub API docs: https://docs.github.com/en/rest/apps/oauth-applications#delete-an-app-token +func (s *AuthorizationsService) Revoke(ctx context.Context, clientID, accessToken string) (*Response, error) { + u := fmt.Sprintf("applications/%v/token", clientID) + + reqBody := &struct { + AccessToken string `json:"access_token"` + }{AccessToken: accessToken} + + req, err := s.client.NewRequest("DELETE", u, reqBody) + if err != nil { + return nil, err + } + req.Header.Set("Accept", mediaTypeOAuthAppPreview) + + return s.client.Do(ctx, req, nil) +} + +// DeleteGrant deletes an OAuth application grant. Deleting an application's +// grant will also delete all OAuth tokens associated with the application for +// the user. +// +// GitHub API docs: https://docs.github.com/en/rest/apps/oauth-applications#delete-an-app-authorization +func (s *AuthorizationsService) DeleteGrant(ctx context.Context, clientID, accessToken string) (*Response, error) { + u := fmt.Sprintf("applications/%v/grant", clientID) + + reqBody := &struct { + AccessToken string `json:"access_token"` + }{AccessToken: accessToken} + + req, err := s.client.NewRequest("DELETE", u, reqBody) + if err != nil { + return nil, err + } + req.Header.Set("Accept", mediaTypeOAuthAppPreview) + + return s.client.Do(ctx, req, nil) +} + +// CreateImpersonation creates an impersonation OAuth token. +// +// This requires admin permissions. With the returned Authorization.Token +// you can e.g. create or delete a user's public SSH key. NOTE: creating a +// new token automatically revokes an existing one. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#create-an-impersonation-oauth-token +func (s *AuthorizationsService) CreateImpersonation(ctx context.Context, username string, authReq *AuthorizationRequest) (*Authorization, *Response, error) { + u := fmt.Sprintf("admin/users/%v/authorizations", username) + req, err := s.client.NewRequest("POST", u, authReq) + if err != nil { + return nil, nil, err + } + + a := new(Authorization) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + return a, resp, nil +} + +// DeleteImpersonation deletes an impersonation OAuth token. +// +// NOTE: there can be only one at a time. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#delete-an-impersonation-oauth-token +func (s *AuthorizationsService) DeleteImpersonation(ctx context.Context, username string) (*Response, error) { + u := fmt.Sprintf("admin/users/%v/authorizations", username) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/billing.go b/vendor/github.com/google/go-github/v56/github/billing.go new file mode 100644 index 000000000..7a76bf86f --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/billing.go @@ -0,0 +1,201 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// BillingService provides access to the billing related functions +// in the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/billing +type BillingService service + +// ActionBilling represents a GitHub Action billing. 
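A sketch of validating a user OAuth token with AuthorizationsService.Check. As the comments above note, these endpoints authenticate with the OAuth app's client ID and secret over HTTP Basic auth; BasicAuthTransport is the go-github helper assumed here, and all env var names are placeholders.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()

	clientID := os.Getenv("OAUTH_CLIENT_ID")
	clientSecret := os.Getenv("OAUTH_CLIENT_SECRET")

	// These endpoints take the OAuth app's credentials, not a user token.
	tp := &github.BasicAuthTransport{Username: clientID, Password: clientSecret}
	client := github.NewClient(tp.Client())

	auth, _, err := client.Authorizations.Check(ctx, clientID, os.Getenv("TOKEN_TO_CHECK"))
	if err != nil {
		log.Fatal(err) // a 404 means the token is not valid for this app
	}

	fmt.Println("token belongs to:", auth.GetUser().GetLogin())
	fmt.Println("scopes:", auth.Scopes)
}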
+type ActionBilling struct { + TotalMinutesUsed float64 `json:"total_minutes_used"` + TotalPaidMinutesUsed float64 `json:"total_paid_minutes_used"` + IncludedMinutes float64 `json:"included_minutes"` + MinutesUsedBreakdown MinutesUsedBreakdown `json:"minutes_used_breakdown"` +} + +// MinutesUsedBreakdown counts the actions minutes used by machine type (e.g. UBUNTU, WINDOWS, MACOS). +type MinutesUsedBreakdown = map[string]int + +// PackageBilling represents a GitHub Package billing. +type PackageBilling struct { + TotalGigabytesBandwidthUsed int `json:"total_gigabytes_bandwidth_used"` + TotalPaidGigabytesBandwidthUsed int `json:"total_paid_gigabytes_bandwidth_used"` + IncludedGigabytesBandwidth float64 `json:"included_gigabytes_bandwidth"` +} + +// StorageBilling represents a GitHub Storage billing. +type StorageBilling struct { + DaysLeftInBillingCycle int `json:"days_left_in_billing_cycle"` + EstimatedPaidStorageForMonth float64 `json:"estimated_paid_storage_for_month"` + EstimatedStorageForMonth float64 `json:"estimated_storage_for_month"` +} + +// ActiveCommitters represents the total active committers across all repositories in an Organization. +type ActiveCommitters struct { + TotalAdvancedSecurityCommitters int `json:"total_advanced_security_committers"` + Repositories []*RepositoryActiveCommitters `json:"repositories,omitempty"` +} + +// RepositoryActiveCommitters represents active committers on each repository. +type RepositoryActiveCommitters struct { + Name *string `json:"name,omitempty"` + AdvancedSecurityCommitters *int `json:"advanced_security_committers,omitempty"` + AdvancedSecurityCommittersBreakdown []*AdvancedSecurityCommittersBreakdown `json:"advanced_security_committers_breakdown,omitempty"` +} + +// AdvancedSecurityCommittersBreakdown represents the user activity breakdown for ActiveCommitters. +type AdvancedSecurityCommittersBreakdown struct { + UserLogin *string `json:"user_login,omitempty"` + LastPushedDate *string `json:"last_pushed_date,omitempty"` +} + +// GetActionsBillingOrg returns the summary of the free and paid GitHub Actions minutes used for an Org. +// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-github-actions-billing-for-an-organization +func (s *BillingService) GetActionsBillingOrg(ctx context.Context, org string) (*ActionBilling, *Response, error) { + u := fmt.Sprintf("orgs/%v/settings/billing/actions", org) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionsOrgBilling := new(ActionBilling) + resp, err := s.client.Do(ctx, req, actionsOrgBilling) + if err != nil { + return nil, resp, err + } + + return actionsOrgBilling, resp, nil +} + +// GetPackagesBillingOrg returns the free and paid storage used for GitHub Packages in gigabytes for an Org. +// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-github-packages-billing-for-an-organization +func (s *BillingService) GetPackagesBillingOrg(ctx context.Context, org string) (*PackageBilling, *Response, error) { + u := fmt.Sprintf("orgs/%v/settings/billing/packages", org) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + packagesOrgBilling := new(PackageBilling) + resp, err := s.client.Do(ctx, req, packagesOrgBilling) + if err != nil { + return nil, resp, err + } + + return packagesOrgBilling, resp, nil +} + +// GetStorageBillingOrg returns the estimated paid and estimated total storage used for GitHub Actions +// and GitHub Packages in gigabytes for an Org. 
+// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-shared-storage-billing-for-an-organization +func (s *BillingService) GetStorageBillingOrg(ctx context.Context, org string) (*StorageBilling, *Response, error) { + u := fmt.Sprintf("orgs/%v/settings/billing/shared-storage", org) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + storageOrgBilling := new(StorageBilling) + resp, err := s.client.Do(ctx, req, storageOrgBilling) + if err != nil { + return nil, resp, err + } + + return storageOrgBilling, resp, nil +} + +// GetAdvancedSecurityActiveCommittersOrg returns the GitHub Advanced Security active committers for an organization per repository. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/billing?apiVersion=2022-11-28#get-github-advanced-security-active-committers-for-an-organization +func (s *BillingService) GetAdvancedSecurityActiveCommittersOrg(ctx context.Context, org string, opts *ListOptions) (*ActiveCommitters, *Response, error) { + u := fmt.Sprintf("orgs/%v/settings/billing/advanced-security", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + activeOrgCommitters := new(ActiveCommitters) + resp, err := s.client.Do(ctx, req, activeOrgCommitters) + if err != nil { + return nil, resp, err + } + + return activeOrgCommitters, resp, nil +} + +// GetActionsBillingUser returns the summary of the free and paid GitHub Actions minutes used for a user. +// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-github-actions-billing-for-a-user +func (s *BillingService) GetActionsBillingUser(ctx context.Context, user string) (*ActionBilling, *Response, error) { + u := fmt.Sprintf("users/%v/settings/billing/actions", user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionsUserBilling := new(ActionBilling) + resp, err := s.client.Do(ctx, req, actionsUserBilling) + if err != nil { + return nil, resp, err + } + + return actionsUserBilling, resp, nil +} + +// GetPackagesBillingUser returns the free and paid storage used for GitHub Packages in gigabytes for a user. +// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-github-packages-billing-for-a-user +func (s *BillingService) GetPackagesBillingUser(ctx context.Context, user string) (*PackageBilling, *Response, error) { + u := fmt.Sprintf("users/%v/settings/billing/packages", user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + packagesUserBilling := new(PackageBilling) + resp, err := s.client.Do(ctx, req, packagesUserBilling) + if err != nil { + return nil, resp, err + } + + return packagesUserBilling, resp, nil +} + +// GetStorageBillingUser returns the estimated paid and estimated total storage used for GitHub Actions +// and GitHub Packages in gigabytes for a user. 
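+//
+// A minimal sketch of reading the result, assuming an authenticated *Client
+// named "client", ctx a context.Context, and "octocat" a placeholder username:
+//
+//    storage, _, err := client.Billing.GetStorageBillingUser(ctx, "octocat")
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    fmt.Printf("%v days left in cycle, ~%.2f GB estimated paid storage this month\n",
+//        storage.DaysLeftInBillingCycle, storage.EstimatedPaidStorageForMonth)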
+// +// GitHub API docs: https://docs.github.com/en/rest/billing#get-shared-storage-billing-for-a-user +func (s *BillingService) GetStorageBillingUser(ctx context.Context, user string) (*StorageBilling, *Response, error) { + u := fmt.Sprintf("users/%v/settings/billing/shared-storage", user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + storageUserBilling := new(StorageBilling) + resp, err := s.client.Do(ctx, req, storageUserBilling) + if err != nil { + return nil, resp, err + } + + return storageUserBilling, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/checks.go b/vendor/github.com/google/go-github/v56/github/checks.go new file mode 100644 index 000000000..12d08530c --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/checks.go @@ -0,0 +1,451 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ChecksService provides access to the Checks API in the +// GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/ +type ChecksService service + +// CheckRun represents a GitHub check run on a repository associated with a GitHub app. +type CheckRun struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + ExternalID *string `json:"external_id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + DetailsURL *string `json:"details_url,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + CompletedAt *Timestamp `json:"completed_at,omitempty"` + Output *CheckRunOutput `json:"output,omitempty"` + Name *string `json:"name,omitempty"` + CheckSuite *CheckSuite `json:"check_suite,omitempty"` + App *App `json:"app,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` +} + +// CheckRunOutput represents the output of a CheckRun. +type CheckRunOutput struct { + Title *string `json:"title,omitempty"` + Summary *string `json:"summary,omitempty"` + Text *string `json:"text,omitempty"` + AnnotationsCount *int `json:"annotations_count,omitempty"` + AnnotationsURL *string `json:"annotations_url,omitempty"` + Annotations []*CheckRunAnnotation `json:"annotations,omitempty"` + Images []*CheckRunImage `json:"images,omitempty"` +} + +// CheckRunAnnotation represents an annotation object for a CheckRun output. +type CheckRunAnnotation struct { + Path *string `json:"path,omitempty"` + StartLine *int `json:"start_line,omitempty"` + EndLine *int `json:"end_line,omitempty"` + StartColumn *int `json:"start_column,omitempty"` + EndColumn *int `json:"end_column,omitempty"` + AnnotationLevel *string `json:"annotation_level,omitempty"` + Message *string `json:"message,omitempty"` + Title *string `json:"title,omitempty"` + RawDetails *string `json:"raw_details,omitempty"` +} + +// CheckRunImage represents an image object for a CheckRun output. +type CheckRunImage struct { + Alt *string `json:"alt,omitempty"` + ImageURL *string `json:"image_url,omitempty"` + Caption *string `json:"caption,omitempty"` +} + +// CheckSuite represents a suite of check runs. 
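+//
+// A minimal sketch of fetching one suite, assuming an authenticated *Client
+// named "client", ctx a context.Context, and placeholder owner/repo/suite ID:
+//
+//    suite, _, err := client.Checks.GetCheckSuite(ctx, "owner", "repo", 5)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    fmt.Println(suite.GetStatus(), suite.GetConclusion(), suite.GetHeadSHA())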
+type CheckSuite struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + URL *string `json:"url,omitempty"` + BeforeSHA *string `json:"before,omitempty"` + AfterSHA *string `json:"after,omitempty"` + Status *string `json:"status,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + App *App `json:"app,omitempty"` + Repository *Repository `json:"repository,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` + + // The following fields are only populated by Webhook events. + HeadCommit *Commit `json:"head_commit,omitempty"` +} + +func (c CheckRun) String() string { + return Stringify(c) +} + +func (c CheckSuite) String() string { + return Stringify(c) +} + +// GetCheckRun gets a check-run for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#get-a-check-run +func (s *ChecksService) GetCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// GetCheckSuite gets a single check suite. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/suites#get-a-check-suite +func (s *ChecksService) GetCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*CheckSuite, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v", owner, repo, checkSuiteID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkSuite := new(CheckSuite) + resp, err := s.client.Do(ctx, req, checkSuite) + if err != nil { + return nil, resp, err + } + + return checkSuite, resp, nil +} + +// CreateCheckRunOptions sets up parameters needed to create a CheckRun. +type CreateCheckRunOptions struct { + Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) + HeadSHA string `json:"head_sha"` // The SHA of the commit. (Required.) + DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) + ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) + Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) + Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) + StartedAt *Timestamp `json:"started_at,omitempty"` // The time that the check run began. (Optional.) + CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) + Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. 
(Optional) + Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) +} + +// CheckRunAction exposes further actions the integrator can perform, which a user may trigger. +type CheckRunAction struct { + Label string `json:"label"` // The text to be displayed on a button in the web UI. The maximum size is 20 characters. (Required.) + Description string `json:"description"` // A short explanation of what this action would do. The maximum size is 40 characters. (Required.) + Identifier string `json:"identifier"` // A reference for the action on the integrator's system. The maximum size is 20 characters. (Required.) +} + +// CreateCheckRun creates a check run for repository. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#create-a-check-run +func (s *ChecksService) CreateCheckRun(ctx context.Context, owner, repo string, opts CreateCheckRunOptions) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs", owner, repo) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// UpdateCheckRunOptions sets up parameters needed to update a CheckRun. +type UpdateCheckRunOptions struct { + Name string `json:"name"` // The name of the check (e.g., "code-coverage"). (Required.) + DetailsURL *string `json:"details_url,omitempty"` // The URL of the integrator's site that has the full details of the check. (Optional.) + ExternalID *string `json:"external_id,omitempty"` // A reference for the run on the integrator's system. (Optional.) + Status *string `json:"status,omitempty"` // The current status. Can be one of "queued", "in_progress", or "completed". Default: "queued". (Optional.) + Conclusion *string `json:"conclusion,omitempty"` // Can be one of "success", "failure", "neutral", "cancelled", "skipped", "timed_out", or "action_required". (Optional. Required if you provide a status of "completed".) + CompletedAt *Timestamp `json:"completed_at,omitempty"` // The time the check completed. (Optional. Required if you provide conclusion.) + Output *CheckRunOutput `json:"output,omitempty"` // Provide descriptive details about the run. (Optional) + Actions []*CheckRunAction `json:"actions,omitempty"` // Possible further actions the integrator can perform, which a user may trigger. (Optional.) +} + +// UpdateCheckRun updates a check run for a specific commit in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#update-a-check-run +func (s *ChecksService) UpdateCheckRun(ctx context.Context, owner, repo string, checkRunID int64, opts UpdateCheckRunOptions) (*CheckRun, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v", owner, repo, checkRunID) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkRun := new(CheckRun) + resp, err := s.client.Do(ctx, req, checkRun) + if err != nil { + return nil, resp, err + } + + return checkRun, resp, nil +} + +// ListCheckRunAnnotations lists the annotations for a check run. 
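+//
+// A minimal pagination sketch, assuming an authenticated *Client named
+// "client", ctx a context.Context, and placeholder owner/repo/check-run ID:
+//
+//    opts := &ListOptions{PerPage: 50}
+//    for {
+//        annotations, resp, err := client.Checks.ListCheckRunAnnotations(ctx, "owner", "repo", 42, opts)
+//        if err != nil {
+//            log.Fatal(err)
+//        }
+//        for _, a := range annotations {
+//            fmt.Printf("%s:%d %s\n", a.GetPath(), a.GetStartLine(), a.GetMessage())
+//        }
+//        if resp.NextPage == 0 {
+//            break
+//        }
+//        opts.Page = resp.NextPage
+//    }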
+// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#list-check-run-annotations +func (s *ChecksService) ListCheckRunAnnotations(ctx context.Context, owner, repo string, checkRunID int64, opts *ListOptions) ([]*CheckRunAnnotation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v/annotations", owner, repo, checkRunID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunAnnotations []*CheckRunAnnotation + resp, err := s.client.Do(ctx, req, &checkRunAnnotations) + if err != nil { + return nil, resp, err + } + + return checkRunAnnotations, resp, nil +} + +// ListCheckRunsOptions represents parameters to list check runs. +type ListCheckRunsOptions struct { + CheckName *string `url:"check_name,omitempty"` // Returns check runs with the specified name. + Status *string `url:"status,omitempty"` // Returns check runs with the specified status. Can be one of "queued", "in_progress", or "completed". + Filter *string `url:"filter,omitempty"` // Filters check runs by their completed_at timestamp. Can be one of "latest" (returning the most recent check runs) or "all". Default: "latest" + AppID *int64 `url:"app_id,omitempty"` // Filters check runs by GitHub App ID. + + ListOptions +} + +// ListCheckRunsResults represents the result of a check run list. +type ListCheckRunsResults struct { + Total *int `json:"total_count,omitempty"` + CheckRuns []*CheckRun `json:"check_runs,omitempty"` +} + +// ListCheckRunsForRef lists check runs for a specific ref. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#list-check-runs-for-a-git-reference +func (s *ChecksService) ListCheckRunsForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/check-runs", owner, repo, refURLEscape(ref)) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunResults *ListCheckRunsResults + resp, err := s.client.Do(ctx, req, &checkRunResults) + if err != nil { + return nil, resp, err + } + + return checkRunResults, resp, nil +} + +// ListCheckRunsCheckSuite lists check runs for a check suite. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#list-check-runs-in-a-check-suite +func (s *ChecksService) ListCheckRunsCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64, opts *ListCheckRunsOptions) (*ListCheckRunsResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v/check-runs", owner, repo, checkSuiteID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkRunResults *ListCheckRunsResults + resp, err := s.client.Do(ctx, req, &checkRunResults) + if err != nil { + return nil, resp, err + } + + return checkRunResults, resp, nil +} + +// ReRequestCheckRun triggers GitHub to rerequest an existing check run. 
+// +// GitHub API docs: https://docs.github.com/en/rest/checks/runs#rerequest-a-check-run +func (s *ChecksService) ReRequestCheckRun(ctx context.Context, owner, repo string, checkRunID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-runs/%v/rerequest", owner, repo, checkRunID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ListCheckSuiteOptions represents parameters to list check suites. +type ListCheckSuiteOptions struct { + CheckName *string `url:"check_name,omitempty"` // Filters checks suites by the name of the check run. + AppID *int `url:"app_id,omitempty"` // Filters check suites by GitHub App id. + + ListOptions +} + +// ListCheckSuiteResults represents the result of a check run list. +type ListCheckSuiteResults struct { + Total *int `json:"total_count,omitempty"` + CheckSuites []*CheckSuite `json:"check_suites,omitempty"` +} + +// ListCheckSuitesForRef lists check suite for a specific ref. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/suites#list-check-suites-for-a-git-reference +func (s *ChecksService) ListCheckSuitesForRef(ctx context.Context, owner, repo, ref string, opts *ListCheckSuiteOptions) (*ListCheckSuiteResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/check-suites", owner, repo, refURLEscape(ref)) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkSuiteResults *ListCheckSuiteResults + resp, err := s.client.Do(ctx, req, &checkSuiteResults) + if err != nil { + return nil, resp, err + } + + return checkSuiteResults, resp, nil +} + +// AutoTriggerCheck enables or disables automatic creation of CheckSuite events upon pushes to the repository. +type AutoTriggerCheck struct { + AppID *int64 `json:"app_id,omitempty"` // The id of the GitHub App. (Required.) + Setting *bool `json:"setting,omitempty"` // Set to "true" to enable automatic creation of CheckSuite events upon pushes to the repository, or "false" to disable them. Default: "true" (Required.) +} + +// CheckSuitePreferenceOptions set options for check suite preferences for a repository. +type CheckSuitePreferenceOptions struct { + AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. +} + +// CheckSuitePreferenceResults represents the results of the preference set operation. +type CheckSuitePreferenceResults struct { + Preferences *PreferenceList `json:"preferences,omitempty"` + Repository *Repository `json:"repository,omitempty"` +} + +// PreferenceList represents a list of auto trigger checks for repository +type PreferenceList struct { + AutoTriggerChecks []*AutoTriggerCheck `json:"auto_trigger_checks,omitempty"` // A slice of auto trigger checks that can be set for a check suite in a repository. +} + +// SetCheckSuitePreferences changes the default automatic flow when creating check suites. 
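+//
+// A minimal sketch that disables automatic check suite creation for one app,
+// assuming the package is imported as "github", an authenticated
+// *github.Client named "client", and a placeholder app ID:
+//
+//    prefs := github.CheckSuitePreferenceOptions{
+//        AutoTriggerChecks: []*github.AutoTriggerCheck{
+//            {AppID: github.Int64(12345), Setting: github.Bool(false)},
+//        },
+//    }
+//    results, _, err := client.Checks.SetCheckSuitePreferences(ctx, "owner", "repo", prefs)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    fmt.Println(results.GetPreferences())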
+// +// GitHub API docs: https://docs.github.com/en/rest/checks/suites#update-repository-preferences-for-check-suites +func (s *ChecksService) SetCheckSuitePreferences(ctx context.Context, owner, repo string, opts CheckSuitePreferenceOptions) (*CheckSuitePreferenceResults, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/preferences", owner, repo) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + var checkSuitePrefResults *CheckSuitePreferenceResults + resp, err := s.client.Do(ctx, req, &checkSuitePrefResults) + if err != nil { + return nil, resp, err + } + + return checkSuitePrefResults, resp, nil +} + +// CreateCheckSuiteOptions sets up parameters to manually create a check suites +type CreateCheckSuiteOptions struct { + HeadSHA string `json:"head_sha"` // The sha of the head commit. (Required.) + HeadBranch *string `json:"head_branch,omitempty"` // The name of the head branch where the code changes are implemented. +} + +// CreateCheckSuite manually creates a check suite for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/suites#create-a-check-suite +func (s *ChecksService) CreateCheckSuite(ctx context.Context, owner, repo string, opts CreateCheckSuiteOptions) (*CheckSuite, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites", owner, repo) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + checkSuite := new(CheckSuite) + resp, err := s.client.Do(ctx, req, checkSuite) + if err != nil { + return nil, resp, err + } + + return checkSuite, resp, nil +} + +// ReRequestCheckSuite triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/checks/suites#rerequest-a-check-suite +func (s *ChecksService) ReRequestCheckSuite(ctx context.Context, owner, repo string, checkSuiteID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/check-suites/%v/rerequest", owner, repo, checkSuiteID) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", mediaTypeCheckRunsPreview) + + resp, err := s.client.Do(ctx, req, nil) + return resp, err +} diff --git a/vendor/github.com/google/go-github/v56/github/code-scanning.go b/vendor/github.com/google/go-github/v56/github/code-scanning.go new file mode 100644 index 000000000..0ae269be6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/code-scanning.go @@ -0,0 +1,624 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strconv" + "strings" +) + +// CodeScanningService handles communication with the code scanning related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type CodeScanningService service + +// Rule represents the complete details of GitHub Code Scanning alert type. 
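+//
+// A minimal sketch of inspecting the rule behind one alert, assuming an
+// authenticated *Client named "client", ctx a context.Context, and a
+// placeholder owner/repo/alert number:
+//
+//    alert, _, err := client.CodeScanning.GetAlert(ctx, "owner", "repo", 25)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    rule := alert.GetRule()
+//    fmt.Println(rule.GetID(), rule.GetSeverity(), rule.GetDescription())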
+type Rule struct { + ID *string `json:"id,omitempty"` + Severity *string `json:"severity,omitempty"` + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + SecuritySeverityLevel *string `json:"security_severity_level,omitempty"` + FullDescription *string `json:"full_description,omitempty"` + Tags []string `json:"tags,omitempty"` + Help *string `json:"help,omitempty"` +} + +// Location represents the exact location of the GitHub Code Scanning Alert in the scanned project. +type Location struct { + Path *string `json:"path,omitempty"` + StartLine *int `json:"start_line,omitempty"` + EndLine *int `json:"end_line,omitempty"` + StartColumn *int `json:"start_column,omitempty"` + EndColumn *int `json:"end_column,omitempty"` +} + +// Message is a part of MostRecentInstance struct which provides the appropriate message when any action is performed on the analysis object. +type Message struct { + Text *string `json:"text,omitempty"` +} + +// MostRecentInstance provides details of the most recent instance of this alert for the default branch or for the specified Git reference. +type MostRecentInstance struct { + Ref *string `json:"ref,omitempty"` + AnalysisKey *string `json:"analysis_key,omitempty"` + Category *string `json:"category,omitempty"` + Environment *string `json:"environment,omitempty"` + State *string `json:"state,omitempty"` + CommitSHA *string `json:"commit_sha,omitempty"` + Message *Message `json:"message,omitempty"` + Location *Location `json:"location,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Classifications []string `json:"classifications,omitempty"` +} + +// Tool represents the tool used to generate a GitHub Code Scanning Alert. +type Tool struct { + Name *string `json:"name,omitempty"` + GUID *string `json:"guid,omitempty"` + Version *string `json:"version,omitempty"` +} + +// Alert represents an individual GitHub Code Scanning Alert on a single repository. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type Alert struct { + Number *int `json:"number,omitempty"` + Repository *Repository `json:"repository,omitempty"` + RuleID *string `json:"rule_id,omitempty"` + RuleSeverity *string `json:"rule_severity,omitempty"` + RuleDescription *string `json:"rule_description,omitempty"` + Rule *Rule `json:"rule,omitempty"` + Tool *Tool `json:"tool,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + FixedAt *Timestamp `json:"fixed_at,omitempty"` + State *string `json:"state,omitempty"` + ClosedBy *User `json:"closed_by,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + MostRecentInstance *MostRecentInstance `json:"most_recent_instance,omitempty"` + Instances []*MostRecentInstance `json:"instances,omitempty"` + DismissedBy *User `json:"dismissed_by,omitempty"` + DismissedAt *Timestamp `json:"dismissed_at,omitempty"` + DismissedReason *string `json:"dismissed_reason,omitempty"` + DismissedComment *string `json:"dismissed_comment,omitempty"` + InstancesURL *string `json:"instances_url,omitempty"` +} + +// ID returns the ID associated with an alert. It is the number at the end of the security alert's URL. +func (a *Alert) ID() int64 { + if a == nil { + return 0 + } + + s := a.GetHTMLURL() + + // Check for an ID to parse at the end of the url + if i := strings.LastIndex(s, "/"); i >= 0 { + s = s[i+1:] + } + + // Return the alert ID as a 64-bit integer. 
Unable to convert or out of range returns 0. + id, err := strconv.ParseInt(s, 10, 64) + if err != nil { + return 0 + } + + return id +} + +// AlertInstancesListOptions specifies optional parameters to the CodeScanningService.ListAlertInstances method. +type AlertInstancesListOptions struct { + // Return code scanning alert instances for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge + Ref string `url:"ref,omitempty"` + + ListOptions +} + +// AlertListOptions specifies optional parameters to the CodeScanningService.ListAlerts method. +type AlertListOptions struct { + // State of the code scanning alerts to list. Set to closed to list only closed code scanning alerts. Default: open + State string `url:"state,omitempty"` + + // Return code scanning alerts for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge + Ref string `url:"ref,omitempty"` + + // If specified, only code scanning alerts with this severity will be returned. Possible values are: critical, high, medium, low, warning, note, error. + Severity string `url:"severity,omitempty"` + + // The name of a code scanning tool. Only results by this tool will be listed. + ToolName string `url:"tool_name,omitempty"` + + ListCursorOptions + + // Add ListOptions so offset pagination with integer type "page" query parameter is accepted + // since ListCursorOptions accepts "page" as string only. + ListOptions +} + +// AnalysesListOptions specifies optional parameters to the CodeScanningService.ListAnalysesForRepo method. +type AnalysesListOptions struct { + // Return code scanning analyses belonging to the same SARIF upload. + SarifID *string `url:"sarif_id,omitempty"` + + // Return code scanning analyses for a specific branch reference. + // The ref can be formatted as refs/heads/ or simply . To reference a pull request use refs/pull//merge + Ref *string `url:"ref,omitempty"` + + ListOptions +} + +// CodeQLDatabase represents a metadata about the CodeQL database. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type CodeQLDatabase struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Language *string `json:"language,omitempty"` + Uploader *User `json:"uploader,omitempty"` + ContentType *string `json:"content_type,omitempty"` + Size *int64 `json:"size,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` +} + +// ScanningAnalysis represents an individual GitHub Code Scanning ScanningAnalysis on a single repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type ScanningAnalysis struct { + ID *int64 `json:"id,omitempty"` + Ref *string `json:"ref,omitempty"` + CommitSHA *string `json:"commit_sha,omitempty"` + AnalysisKey *string `json:"analysis_key,omitempty"` + Environment *string `json:"environment,omitempty"` + Error *string `json:"error,omitempty"` + Category *string `json:"category,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ResultsCount *int `json:"results_count,omitempty"` + RulesCount *int `json:"rules_count,omitempty"` + URL *string `json:"url,omitempty"` + SarifID *string `json:"sarif_id,omitempty"` + Tool *Tool `json:"tool,omitempty"` + Deletable *bool `json:"deletable,omitempty"` + Warning *string `json:"warning,omitempty"` +} + +// SarifAnalysis specifies the results of a code scanning job. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type SarifAnalysis struct { + CommitSHA *string `json:"commit_sha,omitempty"` + Ref *string `json:"ref,omitempty"` + Sarif *string `json:"sarif,omitempty"` + CheckoutURI *string `json:"checkout_uri,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + ToolName *string `json:"tool_name,omitempty"` +} + +// CodeScanningAlertState specifies the state of a code scanning alert. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type CodeScanningAlertState struct { + // State sets the state of the code scanning alert and is a required field. + // You must also provide DismissedReason when you set the state to "dismissed". + // State can be one of: "open", "dismissed". + State string `json:"state"` + // DismissedReason represents the reason for dismissing or closing the alert. + // It is required when the state is "dismissed". + // It can be one of: "false positive", "won't fix", "used in tests". + DismissedReason *string `json:"dismissed_reason,omitempty"` + // DismissedComment is associated with the dismissal of the alert. + DismissedComment *string `json:"dismissed_comment,omitempty"` +} + +// SarifID identifies a sarif analysis upload. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning +type SarifID struct { + ID *string `json:"id,omitempty"` + URL *string `json:"url,omitempty"` +} + +// ListAlertsForOrg lists code scanning alerts for an org. +// +// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events +// read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-an-organization +func (s *CodeScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *AlertListOptions) ([]*Alert, *Response, error) { + u := fmt.Sprintf("orgs/%v/code-scanning/alerts", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*Alert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// ListAlertsForRepo lists code scanning alerts for a repository. +// +// Lists all open code scanning alerts for the default branch (usually master) and protected branches in a repository. +// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events +// read permission to use this endpoint. 
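+//
+// A minimal filtering sketch, assuming the package is imported as "github",
+// an authenticated *github.Client named "client", and placeholder arguments:
+//
+//    opts := &github.AlertListOptions{State: "open", Severity: "high", Ref: "refs/heads/main"}
+//    alerts, _, err := client.CodeScanning.ListAlertsForRepo(ctx, "owner", "repo", opts)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    for _, a := range alerts {
+//        fmt.Printf("#%d %s (%s)\n", a.GetNumber(), a.GetRule().GetID(), a.GetState())
+//    }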
+// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-a-repository +func (s *CodeScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *AlertListOptions) ([]*Alert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*Alert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// GetAlert gets a single code scanning alert for a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// The security alert_id is the number at the end of the security alert's URL. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-alert +func (s *CodeScanningService) GetAlert(ctx context.Context, owner, repo string, id int64) (*Alert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + a := new(Alert) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} + +// UpdateAlert updates the state of a single code scanning alert for a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// The security alert_id is the number at the end of the security alert's URL. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning?apiVersion=2022-11-28#update-a-code-scanning-alert +func (s *CodeScanningService) UpdateAlert(ctx context.Context, owner, repo string, id int64, stateInfo *CodeScanningAlertState) (*Alert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v", owner, repo, id) + + req, err := s.client.NewRequest("PATCH", u, stateInfo) + if err != nil { + return nil, nil, err + } + + a := new(Alert) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} + +// ListAlertInstances lists instances of a code scanning alert. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-instances-of-a-code-scanning-alert +func (s *CodeScanningService) ListAlertInstances(ctx context.Context, owner, repo string, id int64, opts *AlertInstancesListOptions) ([]*MostRecentInstance, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/alerts/%v/instances", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alertInstances []*MostRecentInstance + resp, err := s.client.Do(ctx, req, &alertInstances) + if err != nil { + return nil, resp, err + } + + return alertInstances, resp, nil +} + +// UploadSarif uploads the result of code scanning job to GitHub. 
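+//
+// A minimal sketch of the gzip + Base64 encoding described below, assuming the
+// package is imported as "github", an authenticated *github.Client named
+// "client", rawSARIF holding the SARIF file contents as []byte, and headSHA
+// the full commit SHA that was analyzed:
+//
+//    var buf bytes.Buffer
+//    zw := gzip.NewWriter(&buf)
+//    if _, err := zw.Write(rawSARIF); err != nil {
+//        log.Fatal(err)
+//    }
+//    if err := zw.Close(); err != nil {
+//        log.Fatal(err)
+//    }
+//    analysis := &github.SarifAnalysis{
+//        CommitSHA: github.String(headSHA),
+//        Ref:       github.String("refs/heads/main"),
+//        Sarif:     github.String(base64.StdEncoding.EncodeToString(buf.Bytes())),
+//    }
+//    sarifID, _, err := client.CodeScanning.UploadSarif(ctx, "owner", "repo", analysis)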
+// +// For the parameter sarif, you must first compress your SARIF file using gzip and then translate the contents of the file into a Base64 encoding string. +// You must use an access token with the security_events scope to use this endpoint. GitHub Apps must have the security_events +// write permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#upload-an-analysis-as-sarif-data +func (s *CodeScanningService) UploadSarif(ctx context.Context, owner, repo string, sarif *SarifAnalysis) (*SarifID, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs", owner, repo) + + req, err := s.client.NewRequest("POST", u, sarif) + if err != nil { + return nil, nil, err + } + + sarifID := new(SarifID) + resp, err := s.client.Do(ctx, req, sarifID) + if err != nil { + return nil, resp, err + } + + return sarifID, resp, nil +} + +// SARIFUpload represents information about a SARIF upload. +type SARIFUpload struct { + // `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. + // `failed` files have either not been processed at all, or could only be partially processed. + ProcessingStatus *string `json:"processing_status,omitempty"` + // The REST API URL for getting the analyses associated with the upload. + AnalysesURL *string `json:"analyses_url,omitempty"` +} + +// GetSARIF gets information about a SARIF upload. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-information-about-a-sarif-upload +func (s *CodeScanningService) GetSARIF(ctx context.Context, owner, repo, sarifID string) (*SARIFUpload, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/sarifs/%v", owner, repo, sarifID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + sarifUpload := new(SARIFUpload) + resp, err := s.client.Do(ctx, req, sarifUpload) + if err != nil { + return nil, resp, err + } + + return sarifUpload, resp, nil +} + +// ListAnalysesForRepo lists code scanning analyses for a repository. +// +// Lists the details of all code scanning analyses for a repository, starting with the most recent. +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-analyses-for-a-repository +func (s *CodeScanningService) ListAnalysesForRepo(ctx context.Context, owner, repo string, opts *AnalysesListOptions) ([]*ScanningAnalysis, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var analyses []*ScanningAnalysis + resp, err := s.client.Do(ctx, req, &analyses) + if err != nil { + return nil, resp, err + } + + return analyses, resp, nil +} + +// GetAnalysis gets a single code scanning analysis for a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. 
+// +// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-analysis-for-a-repository +func (s *CodeScanningService) GetAnalysis(ctx context.Context, owner, repo string, id int64) (*ScanningAnalysis, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + analysis := new(ScanningAnalysis) + resp, err := s.client.Do(ctx, req, analysis) + if err != nil { + return nil, resp, err + } + + return analysis, resp, nil +} + +// DeleteAnalysis represents a successful deletion of a code scanning analysis. +type DeleteAnalysis struct { + // Next deletable analysis in chain, without last analysis deletion confirmation + NextAnalysisURL *string `json:"next_analysis_url,omitempty"` + // Next deletable analysis in chain, with last analysis deletion confirmation + ConfirmDeleteURL *string `json:"confirm_delete_url,omitempty"` +} + +// DeleteAnalysis deletes a single code scanning analysis from a repository. +// +// You must use an access token with the repo scope to use this endpoint. +// GitHub Apps must have the security_events read permission to use this endpoint. +// +// The security analysis_id is the ID of the analysis, as returned from the ListAnalysesForRepo operation. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#delete-a-code-scanning-analysis-from-a-repository +func (s *CodeScanningService) DeleteAnalysis(ctx context.Context, owner, repo string, id int64) (*DeleteAnalysis, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/analyses/%v", owner, repo, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, nil, err + } + + deleteAnalysis := new(DeleteAnalysis) + resp, err := s.client.Do(ctx, req, deleteAnalysis) + if err != nil { + return nil, resp, err + } + + return deleteAnalysis, resp, nil +} + +// ListCodeQLDatabases lists the CodeQL databases that are available in a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the contents read permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#list-codeql-databases-for-a-repository +func (s *CodeScanningService) ListCodeQLDatabases(ctx context.Context, owner, repo string) ([]*CodeQLDatabase, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var codeqlDatabases []*CodeQLDatabase + resp, err := s.client.Do(ctx, req, &codeqlDatabases) + if err != nil { + return nil, resp, err + } + + return codeqlDatabases, resp, nil +} + +// GetCodeQLDatabase gets a CodeQL database for a language in a repository. +// +// You must use an access token with the security_events scope to use this endpoint. +// GitHub Apps must have the contents read permission to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-codeql-database-for-a-repository +func (s *CodeScanningService) GetCodeQLDatabase(ctx context.Context, owner, repo, language string) (*CodeQLDatabase, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/code-scanning/codeql/databases/%v", owner, repo, language) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + codeqlDatabase := new(CodeQLDatabase) + resp, err := s.client.Do(ctx, req, codeqlDatabase) + if err != nil { + return nil, resp, err + } + + return codeqlDatabase, resp, nil +} + +// DefaultSetupConfiguration represents a code scanning default setup configuration. +type DefaultSetupConfiguration struct { + State *string `json:"state,omitempty"` + Languages []string `json:"languages,omitempty"` + QuerySuite *string `json:"query_suite,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +// GetDefaultSetupConfiguration gets a code scanning default setup configuration. +// +// You must use an access token with the repo scope to use this +// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write +// permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-default-setup-configuration +func (s *CodeScanningService) GetDefaultSetupConfiguration(ctx context.Context, owner, repo string) (*DefaultSetupConfiguration, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + cfg := new(DefaultSetupConfiguration) + resp, err := s.client.Do(ctx, req, cfg) + if err != nil { + return nil, resp, err + } + + return cfg, resp, nil +} + +// UpdateDefaultSetupConfigurationOptions specifies parameters to the CodeScanningService.UpdateDefaultSetupConfiguration +// method. +type UpdateDefaultSetupConfigurationOptions struct { + State string `json:"state"` + QuerySuite *string `json:"query_suite,omitempty"` + Languages []string `json:"languages,omitempty"` +} + +// UpdateDefaultSetupConfigurationResponse represents a response from updating a code scanning default setup configuration. +type UpdateDefaultSetupConfigurationResponse struct { + RunID *int64 `json:"run_id,omitempty"` + RunURL *string `json:"run_url,omitempty"` +} + +// UpdateDefaultSetupConfiguration updates a code scanning default setup configuration. +// +// You must use an access token with the repo scope to use this +// endpoint with private repos or the public_repo scope for public repos. GitHub Apps must have the repo write +// permission to use this endpoint. +// +// This method might return an AcceptedError and a status code of 202. This is because this is the status that GitHub +// returns to signify that it has now scheduled the update of the pull request branch in a background task. 
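+//
+// A minimal sketch, assuming the package is imported as "github", an
+// authenticated *github.Client named "client", and placeholder state and
+// query-suite values (consult the GitHub docs for the accepted values):
+//
+//    opts := &github.UpdateDefaultSetupConfigurationOptions{
+//        State:      "configured",
+//        QuerySuite: github.String("default"),
+//        Languages:  []string{"go"},
+//    }
+//    res, _, err := client.CodeScanning.UpdateDefaultSetupConfiguration(ctx, "owner", "repo", opts)
+//    if _, accepted := err.(*github.AcceptedError); accepted {
+//        // GitHub accepted the change with a 202; the update runs in the background.
+//    } else if err != nil {
+//        log.Fatal(err)
+//    }
+//    _ = res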
+// +// GitHub API docs: https://docs.github.com/en/rest/code-scanning/code-scanning#update-a-code-scanning-default-setup-configuration +func (s *CodeScanningService) UpdateDefaultSetupConfiguration(ctx context.Context, owner, repo string, options *UpdateDefaultSetupConfigurationOptions) (*UpdateDefaultSetupConfigurationResponse, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/code-scanning/default-setup", owner, repo) + + req, err := s.client.NewRequest("PATCH", u, options) + if err != nil { + return nil, nil, err + } + + a := new(UpdateDefaultSetupConfigurationResponse) + resp, err := s.client.Do(ctx, req, a) + if err != nil { + return nil, resp, err + } + + return a, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/codesofconduct.go b/vendor/github.com/google/go-github/v56/github/codesofconduct.go new file mode 100644 index 000000000..4318ba56d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/codesofconduct.go @@ -0,0 +1,81 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// CodesOfConductService provides access to code-of-conduct-related functions in the GitHub API. +type CodesOfConductService service + +// CodeOfConduct represents a code of conduct. +type CodeOfConduct struct { + Name *string `json:"name,omitempty"` + Key *string `json:"key,omitempty"` + URL *string `json:"url,omitempty"` + Body *string `json:"body,omitempty"` +} + +func (c *CodeOfConduct) String() string { + return Stringify(c) +} + +// List returns all codes of conduct. +// +// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-all-codes-of-conduct +func (s *CodesOfConductService) List(ctx context.Context) ([]*CodeOfConduct, *Response, error) { + req, err := s.client.NewRequest("GET", "codes_of_conduct", nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeCodesOfConductPreview) + + var cs []*CodeOfConduct + resp, err := s.client.Do(ctx, req, &cs) + if err != nil { + return nil, resp, err + } + + return cs, resp, nil +} + +// ListCodesOfConduct +// Deprecated: Use CodesOfConductService.List instead +func (c *Client) ListCodesOfConduct(ctx context.Context) ([]*CodeOfConduct, *Response, error) { + return c.CodesOfConduct.List(ctx) +} + +// Get returns an individual code of conduct. +// +// GitHub API docs: https://docs.github.com/rest/codes-of-conduct/codes-of-conduct#get-a-code-of-conduct +func (s *CodesOfConductService) Get(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { + u := fmt.Sprintf("codes_of_conduct/%s", key) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeCodesOfConductPreview) + + coc := new(CodeOfConduct) + resp, err := s.client.Do(ctx, req, coc) + if err != nil { + return nil, resp, err + } + + return coc, resp, nil +} + +// GetCodeOfConduct +// Deprecated: Use CodesOfConductService.Get instead +func (c *Client) GetCodeOfConduct(ctx context.Context, key string) (*CodeOfConduct, *Response, error) { + return c.CodesOfConduct.Get(ctx, key) +} diff --git a/vendor/github.com/google/go-github/v56/github/codespaces.go b/vendor/github.com/google/go-github/v56/github/codespaces.go new file mode 100644 index 000000000..cd8b0e5be --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/codespaces.go @@ -0,0 +1,254 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// CodespacesService handles communication with the Codespaces related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/ +type CodespacesService service + +// Codespace represents a codespace. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces +type Codespace struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + DisplayName *string `json:"display_name,omitempty"` + EnvironmentID *string `json:"environment_id,omitempty"` + Owner *User `json:"owner,omitempty"` + BillableOwner *User `json:"billable_owner,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Machine *CodespacesMachine `json:"machine,omitempty"` + DevcontainerPath *string `json:"devcontainer_path,omitempty"` + Prebuild *bool `json:"prebuild,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + LastUsedAt *Timestamp `json:"last_used_at,omitempty"` + State *string `json:"state,omitempty"` + URL *string `json:"url,omitempty"` + GitStatus *CodespacesGitStatus `json:"git_status,omitempty"` + Location *string `json:"location,omitempty"` + IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` + WebURL *string `json:"web_url,omitempty"` + MachinesURL *string `json:"machines_url,omitempty"` + StartURL *string `json:"start_url,omitempty"` + StopURL *string `json:"stop_url,omitempty"` + PullsURL *string `json:"pulls_url,omitempty"` + RecentFolders []string `json:"recent_folders,omitempty"` + RuntimeConstraints *CodespacesRuntimeConstraints `json:"runtime_constraints,omitempty"` + PendingOperation *bool `json:"pending_operation,omitempty"` + PendingOperationDisabledReason *string `json:"pending_operation_disabled_reason,omitempty"` + IdleTimeoutNotice *string `json:"idle_timeout_notice,omitempty"` + RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` + RetentionExpiresAt *Timestamp `json:"retention_expires_at,omitempty"` + LastKnownStopNotice *string `json:"last_known_stop_notice,omitempty"` +} + +// CodespacesGitStatus represents the git status of a codespace. +type CodespacesGitStatus struct { + Ahead *int `json:"ahead,omitempty"` + Behind *int `json:"behind,omitempty"` + HasUnpushedChanges *bool `json:"has_unpushed_changes,omitempty"` + HasUncommittedChanges *bool `json:"has_uncommitted_changes,omitempty"` + Ref *string `json:"ref,omitempty"` +} + +// CodespacesMachine represents the machine type of a codespace. 
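+//
+// A minimal sketch of listing codespaces and reading their machine type,
+// assuming an authenticated *Client named "client" and ctx a context.Context:
+//
+//    list, _, err := client.Codespaces.List(ctx, nil)
+//    if err != nil {
+//        log.Fatal(err)
+//    }
+//    for _, cs := range list.Codespaces {
+//        m := cs.GetMachine()
+//        fmt.Printf("%s: %d CPUs, %s\n", cs.GetName(), m.GetCPUs(), m.GetDisplayName())
+//    }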
+type CodespacesMachine struct { + Name *string `json:"name,omitempty"` + DisplayName *string `json:"display_name,omitempty"` + OperatingSystem *string `json:"operating_system,omitempty"` + StorageInBytes *int64 `json:"storage_in_bytes,omitempty"` + MemoryInBytes *int64 `json:"memory_in_bytes,omitempty"` + CPUs *int `json:"cpus,omitempty"` + PrebuildAvailability *string `json:"prebuild_availability,omitempty"` +} + +// CodespacesRuntimeConstraints represents the runtime constraints of a codespace. +type CodespacesRuntimeConstraints struct { + AllowedPortPrivacySettings []string `json:"allowed_port_privacy_settings,omitempty"` +} + +// ListCodespaces represents the response from the list codespaces endpoints. +type ListCodespaces struct { + TotalCount *int `json:"total_count,omitempty"` + Codespaces []*Codespace `json:"codespaces"` +} + +// ListInRepo lists codespaces for a user in a repository. +// +// Lists the codespaces associated with a specified repository and the authenticated user. +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#list-codespaces-in-a-repository-for-the-authenticated-user +func (s *CodespacesService) ListInRepo(ctx context.Context, owner, repo string, opts *ListOptions) (*ListCodespaces, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var codespaces *ListCodespaces + resp, err := s.client.Do(ctx, req, &codespaces) + if err != nil { + return nil, resp, err + } + + return codespaces, resp, nil +} + +// ListOptions represents the options for listing codespaces for a user. +type ListCodespacesOptions struct { + ListOptions + RepositoryID int64 `url:"repository_id,omitempty"` +} + +// List lists codespaces for an authenticated user. +// +// Lists the authenticated user's codespaces. +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have read access to the codespaces repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#list-codespaces-for-the-authenticated-user +func (s *CodespacesService) List(ctx context.Context, opts *ListCodespacesOptions) (*ListCodespaces, *Response, error) { + u := "user/codespaces" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var codespaces *ListCodespaces + resp, err := s.client.Do(ctx, req, &codespaces) + if err != nil { + return nil, resp, err + } + + return codespaces, resp, nil +} + +// CreateCodespaceOptions represents options for the creation of a codespace in a repository. +type CreateCodespaceOptions struct { + Ref *string `json:"ref,omitempty"` + // Geo represents the geographic area for this codespace. + // If not specified, the value is assigned by IP. + // This property replaces location, which is being deprecated. + // Geo can be one of: `EuropeWest`, `SoutheastAsia`, `UsEast`, `UsWest`. 
+ Geo *string `json:"geo,omitempty"` + ClientIP *string `json:"client_ip,omitempty"` + Machine *string `json:"machine,omitempty"` + DevcontainerPath *string `json:"devcontainer_path,omitempty"` + MultiRepoPermissionsOptOut *bool `json:"multi_repo_permissions_opt_out,omitempty"` + WorkingDirectory *string `json:"working_directory,omitempty"` + IdleTimeoutMinutes *int `json:"idle_timeout_minutes,omitempty"` + DisplayName *string `json:"display_name,omitempty"` + // RetentionPeriodMinutes represents the duration in minutes after codespace has gone idle in which it will be deleted. + // Must be integer minutes between 0 and 43200 (30 days). + RetentionPeriodMinutes *int `json:"retention_period_minutes,omitempty"` +} + +// CreateInRepo creates a codespace in a repository. +// +// Creates a codespace owned by the authenticated user in the specified repository. +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#create-a-codespace-in-a-repository +func (s *CodespacesService) CreateInRepo(ctx context.Context, owner, repo string, request *CreateCodespaceOptions) (*Codespace, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces", owner, repo) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + var codespace *Codespace + resp, err := s.client.Do(ctx, req, &codespace) + if err != nil { + return nil, resp, err + } + + return codespace, resp, nil +} + +// Start starts a codespace. +// +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#start-a-codespace-for-the-authenticated-user +func (s *CodespacesService) Start(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { + u := fmt.Sprintf("user/codespaces/%v/start", codespaceName) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + var codespace *Codespace + resp, err := s.client.Do(ctx, req, &codespace) + if err != nil { + return nil, resp, err + } + + return codespace, resp, nil +} + +// Stop stops a codespace. +// +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have write access to the codespaces_lifecycle_admin repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#stop-a-codespace-for-the-authenticated-user +func (s *CodespacesService) Stop(ctx context.Context, codespaceName string) (*Codespace, *Response, error) { + u := fmt.Sprintf("user/codespaces/%v/stop", codespaceName) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + var codespace *Codespace + resp, err := s.client.Do(ctx, req, &codespace) + if err != nil { + return nil, resp, err + } + + return codespace, resp, nil +} + +// Delete deletes a codespace. +// +// You must authenticate using an access token with the codespace scope to use this endpoint. +// GitHub Apps must have write access to the codespaces repository permission to use this endpoint. 
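+//
+// A minimal lifecycle sketch, assuming an authenticated *Client named
+// "client", ctx a context.Context, and name holding an existing codespace
+// name for the authenticated user:
+//
+//    if _, _, err := client.Codespaces.Stop(ctx, name); err != nil {
+//        log.Fatal(err)
+//    }
+//    if _, err := client.Codespaces.Delete(ctx, name); err != nil {
+//        log.Fatal(err)
+//    }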
+// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/codespaces?apiVersion=2022-11-28#delete-a-codespace-for-the-authenticated-user +func (s *CodespacesService) Delete(ctx context.Context, codespaceName string) (*Response, error) { + u := fmt.Sprintf("user/codespaces/%v", codespaceName) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/codespaces_secrets.go b/vendor/github.com/google/go-github/v56/github/codespaces_secrets.go new file mode 100644 index 000000000..e11c679c6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/codespaces_secrets.go @@ -0,0 +1,405 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListUserSecrets list all secrets available for a users codespace +// +// Lists all secrets available for a user's Codespaces without revealing their encrypted values +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint +// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#list-secrets-for-the-authenticated-user +func (s *CodespacesService) ListUserSecrets(ctx context.Context, opts *ListOptions) (*Secrets, *Response, error) { + u, err := addOptions("user/codespaces/secrets", opts) + if err != nil { + return nil, nil, err + } + return s.listSecrets(ctx, u) +} + +// ListOrgSecrets list all secrets available to an org +// +// Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. You must authenticate using an access token with the admin:org scope to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#list-organization-secrets +func (s *CodespacesService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { + u := fmt.Sprintf("orgs/%v/codespaces/secrets", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + return s.listSecrets(ctx, u) +} + +// ListRepoSecrets list all secrets available to a repo +// +// Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. 
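A minimal usage sketch of the codespace lifecycle methods above (CreateInRepo, Start, List, Delete), assuming a token in the GITHUB_TOKEN environment variable and placeholder owner, repo, and machine values:

	package main

	import (
		"context"
		"fmt"
		"log"
		"os"

		"github.com/google/go-github/v56/github"
	)

	func main() {
		ctx := context.Background()
		// Owner, repo, and machine name below are placeholders.
		client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

		// Create a codespace on the default branch of owner/repo.
		cs, _, err := client.Codespaces.CreateInRepo(ctx, "owner", "repo", &github.CreateCodespaceOptions{
			Machine:            github.String("basicLinux32gb"), // illustrative machine name
			IdleTimeoutMinutes: github.Int(30),
		})
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println("created:", cs.GetName())

		// Start it, then list everything the authenticated user owns.
		if _, _, err := client.Codespaces.Start(ctx, cs.GetName()); err != nil {
			log.Fatal(err)
		}
		list, _, err := client.Codespaces.List(ctx, &github.ListCodespacesOptions{})
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println("total codespaces:", list.GetTotalCount())

		// Clean up.
		if _, err := client.Codespaces.Delete(ctx, cs.GetName()); err != nil {
			log.Fatal(err)
		}
	}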
+// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/repository-secrets?apiVersion=2022-11-28#list-repository-secrets +func (s *CodespacesService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces/secrets", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + return s.listSecrets(ctx, u) +} + +func (s *CodespacesService) listSecrets(ctx context.Context, url string) (*Secrets, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + var secrets *Secrets + resp, err := s.client.Do(ctx, req, &secrets) + if err != nil { + return nil, resp, err + } + + return secrets, resp, nil +} + +// GetUserPublicKey gets the users public key for encrypting codespace secrets +// +// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. +// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#get-public-key-for-the-authenticated-user +func (s *CodespacesService) GetUserPublicKey(ctx context.Context) (*PublicKey, *Response, error) { + return s.getPublicKey(ctx, "user/codespaces/secrets/public-key") +} + +// GetOrgPublicKey gets the org public key for encrypting codespace secrets +// +// Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the admin:org scope to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#get-an-organization-public-key +func (s *CodespacesService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { + return s.getPublicKey(ctx, fmt.Sprintf("orgs/%v/codespaces/secrets/public-key", org)) +} + +// GetRepoPublicKey gets the repo public key for encrypting codespace secrets +// +// Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the repo scope. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. 
+// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/repository-secrets?apiVersion=2022-11-28#get-a-repository-public-key +func (s *CodespacesService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { + return s.getPublicKey(ctx, fmt.Sprintf("repos/%v/%v/codespaces/secrets/public-key", owner, repo)) +} + +func (s *CodespacesService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + var publicKey *PublicKey + resp, err := s.client.Do(ctx, req, &publicKey) + if err != nil { + return nil, resp, err + } + + return publicKey, resp, nil +} + +// GetUserSecret gets a users codespace secret +// +// Gets a secret available to a user's codespaces without revealing its encrypted value. +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. +// GitHub Apps must have read access to the codespaces_user_secrets user permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#get-a-secret-for-the-authenticated-user +func (s *CodespacesService) GetUserSecret(ctx context.Context, name string) (*Secret, *Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v", name) + return s.getSecret(ctx, u) +} + +// GetOrgSecret gets an org codespace secret +// +// Gets an organization secret without revealing its encrypted value. You must authenticate using an access token with the admin:org scope to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#get-an-organization-secret +func (s *CodespacesService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { + u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) + return s.getSecret(ctx, u) +} + +// GetRepoSecret gets a repo codespace secret +// +// Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/repository-secrets?apiVersion=2022-11-28#get-a-repository-secret +func (s *CodespacesService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) + return s.getSecret(ctx, u) +} + +func (s *CodespacesService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + var secret *Secret + resp, err := s.client.Do(ctx, req, &secret) + if err != nil { + return nil, resp, err + } + + return secret, resp, nil +} + +// CreateOrUpdateUserSecret creates or updates a users codespace secret +// +// Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using LibSodium. +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must also have Codespaces access to use this endpoint. 
+// GitHub Apps must have write access to the codespaces_user_secrets user permission and codespaces_secrets repository permission on all referenced repositories to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#create-or-update-a-secret-for-the-authenticated-user +func (s *CodespacesService) CreateOrUpdateUserSecret(ctx context.Context, eSecret *EncryptedSecret) (*Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v", eSecret.Name) + return s.createOrUpdateSecret(ctx, u, eSecret) +} + +// CreateOrUpdateOrgSecret creates or updates an orgs codespace secret +// +// Creates or updates an organization secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the admin:org scope to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#create-or-update-an-organization-secret +func (s *CodespacesService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *EncryptedSecret) (*Response, error) { + u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, eSecret.Name) + return s.createOrUpdateSecret(ctx, u, eSecret) +} + +// CreateOrUpdateRepoSecret creates or updates a repos codespace secret +// +// Creates or updates a repository secret with an encrypted value. Encrypt your secret using LibSodium. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/repository-secrets?apiVersion=2022-11-28#create-or-update-a-repository-secret +func (s *CodespacesService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *EncryptedSecret) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, eSecret.Name) + return s.createOrUpdateSecret(ctx, u, eSecret) +} + +func (s *CodespacesService) createOrUpdateSecret(ctx context.Context, url string, eSecret *EncryptedSecret) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, eSecret) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// DeleteUserSecret deletes a users codespace secret +// +// Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. +// GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#delete-a-secret-for-the-authenticated-user +func (s *CodespacesService) DeleteUserSecret(ctx context.Context, name string) (*Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v", name) + return s.deleteSecret(ctx, u) +} + +// DeleteOrgSecret deletes an orgs codespace secret +// +// Deletes an organization secret using the secret name. You must authenticate using an access token with the admin:org scope to use this endpoint. 
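The CreateOrUpdate* methods expect a value that has already been encrypted with LibSodium against the matching public key. A sketch of that flow for a repository-level Codespaces secret, assuming golang.org/x/crypto/nacl/box for the sealed-box step, placeholder owner/repo/secret names, and the package's shared EncryptedSecret type (defined outside this hunk):

	package main

	import (
		"context"
		"crypto/rand"
		"encoding/base64"
		"log"
		"os"

		"github.com/google/go-github/v56/github"
		"golang.org/x/crypto/nacl/box"
	)

	func main() {
		ctx := context.Background()
		client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

		// 1. Fetch the repository public key used for Codespaces secrets.
		pubKey, _, err := client.Codespaces.GetRepoPublicKey(ctx, "owner", "repo")
		if err != nil {
			log.Fatal(err)
		}
		decoded, err := base64.StdEncoding.DecodeString(pubKey.GetKey())
		if err != nil {
			log.Fatal(err)
		}
		var recipient [32]byte
		copy(recipient[:], decoded)

		// 2. Seal the plaintext with an anonymous (sealed) box, as LibSodium does.
		sealed, err := box.SealAnonymous(nil, []byte("s3cr3t-value"), &recipient, rand.Reader)
		if err != nil {
			log.Fatal(err)
		}

		// 3. Upload the base64-encoded ciphertext together with the key ID.
		_, err = client.Codespaces.CreateOrUpdateRepoSecret(ctx, "owner", "repo", &github.EncryptedSecret{
			Name:           "MY_CODESPACE_SECRET",
			KeyID:          pubKey.GetKeyID(),
			EncryptedValue: base64.StdEncoding.EncodeToString(sealed),
		})
		if err != nil {
			log.Fatal(err)
		}
	}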
+// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#delete-an-organization-secret +func (s *CodespacesService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v", org, name) + return s.deleteSecret(ctx, u) +} + +// DeleteRepoSecret deletes a repos codespace secret +// +// Deletes a secret in a repository using the secret name. You must authenticate using an access token with the repo scope to use this endpoint. GitHub Apps must have write access to the codespaces_secrets repository permission to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/repository-secrets?apiVersion=2022-11-28#delete-a-repository-secret +func (s *CodespacesService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/codespaces/secrets/%v", owner, repo, name) + return s.deleteSecret(ctx, u) +} + +func (s *CodespacesService) deleteSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// ListSelectedReposForUserSecret lists the repositories that have been granted the ability to use a user's codespace secret. +// +// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. +// GitHub Apps must have read access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#list-selected-repositories-for-a-user-secret +func (s *CodespacesService) ListSelectedReposForUserSecret(ctx context.Context, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + return s.listSelectedReposForSecret(ctx, u) +} + +// ListSelectedReposForOrgSecret lists the repositories that have been granted the ability to use an organization's codespace secret. +// +// Lists all repositories that have been selected when the visibility for repository access to a secret is set to selected. You must authenticate using an access token with the admin:org scope to use this endpoint. 
+//
+// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#list-selected-repositories-for-an-organization-secret
+func (s *CodespacesService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) {
+	u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name)
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return s.listSelectedReposForSecret(ctx, u)
+}
+
+func (s *CodespacesService) listSelectedReposForSecret(ctx context.Context, url string) (*SelectedReposList, *Response, error) {
+	req, err := s.client.NewRequest("GET", url, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var repositories *SelectedReposList
+	resp, err := s.client.Do(ctx, req, &repositories)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return repositories, resp, nil
+}
+
+// SetSelectedReposForUserSecret sets the repositories that have been granted the ability to use a user's codespace secret.
+//
+// You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint.
+// GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on all referenced repositories to use this endpoint.
+//
+// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#set-selected-repositories-for-a-user-secret
+func (s *CodespacesService) SetSelectedReposForUserSecret(ctx context.Context, name string, ids SelectedRepoIDs) (*Response, error) {
+	u := fmt.Sprintf("user/codespaces/secrets/%v/repositories", name)
+	return s.setSelectedRepoForSecret(ctx, u, ids)
+}
+
+// SetSelectedReposForOrgSecret sets the repositories that have been granted the ability to use an organization's codespace secret.
+//
+// Replaces all repositories for an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint.
+//
+// GitHub API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#set-selected-repositories-for-a-user-secret
+func (s *CodespacesService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids SelectedRepoIDs) (*Response, error) {
+	u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories", org, name)
+	return s.setSelectedRepoForSecret(ctx, u, ids)
+}
+
+func (s *CodespacesService) setSelectedRepoForSecret(ctx context.Context, url string, ids SelectedRepoIDs) (*Response, error) {
+	type repoIDs struct {
+		SelectedIDs SelectedRepoIDs `json:"selected_repository_ids"`
+	}
+
+	req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids})
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := s.client.Do(ctx, req, nil)
+	if err != nil {
+		return resp, err
+	}
+
+	return resp, nil
+}
+
+// AddSelectedRepoToUserSecret adds a repository to the list of repositories that have been granted the ability to use a user's codespace secret.
+//
+// Adds a repository to the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint.
GitHub Apps must have write access to the codespaces_user_secrets user permission and write access to the codespaces_secrets repository permission on the referenced repository to use this endpoint. +// +// Github API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#add-a-selected-repository-to-a-user-secret +func (s *CodespacesService) AddSelectedRepoToUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) + return s.addSelectedRepoToSecret(ctx, u) +} + +// AddSelectedRepoToOrgSecret adds a repository to the list of repositories that have been granted the ability to use an organization's codespace secret. +// +// Adds a repository to an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. +// +// Github API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#add-selected-repository-to-an-organization-secret +func (s *CodespacesService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID) + return s.addSelectedRepoToSecret(ctx, u) +} + +func (s *CodespacesService) addSelectedRepoToSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// RemoveSelectedRepoFromUserSecret removes a repository from the list of repositories that have been granted the ability to use a user's codespace secret. +// +// Removes a repository from the selected repositories for a user's codespace secret. You must authenticate using an access token with the codespace or codespace:secrets scope to use this endpoint. User must have Codespaces access to use this endpoint. GitHub Apps must have write access to the codespaces_user_secrets user permission to use this endpoint. +// +// Github API docs: https://docs.github.com/en/rest/codespaces/secrets?apiVersion=2022-11-28#remove-a-selected-repository-from-a-user-secret +func (s *CodespacesService) RemoveSelectedRepoFromUserSecret(ctx context.Context, name string, repo *Repository) (*Response, error) { + u := fmt.Sprintf("user/codespaces/secrets/%v/repositories/%v", name, *repo.ID) + return s.removeSelectedRepoFromSecret(ctx, u) +} + +// RemoveSelectedRepoFromOrgSecret removes a repository from the list of repositories that have been granted the ability to use an organization's codespace secret. +// +// Removes a repository from an organization secret when the visibility for repository access is set to selected. The visibility is set when you Create or update an organization secret. You must authenticate using an access token with the admin:org scope to use this endpoint. 
+//
+// GitHub API docs: https://docs.github.com/en/rest/codespaces/organization-secrets?apiVersion=2022-11-28#remove-selected-repository-from-an-organization-secret
+func (s *CodespacesService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) {
+	u := fmt.Sprintf("orgs/%v/codespaces/secrets/%v/repositories/%v", org, name, *repo.ID)
+	return s.removeSelectedRepoFromSecret(ctx, u)
+}
+
+func (s *CodespacesService) removeSelectedRepoFromSecret(ctx context.Context, url string) (*Response, error) {
+	req, err := s.client.NewRequest("DELETE", url, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := s.client.Do(ctx, req, nil)
+	if err != nil {
+		return resp, err
+	}
+
+	return resp, nil
+}
diff --git a/vendor/github.com/google/go-github/v56/github/dependabot.go b/vendor/github.com/google/go-github/v56/github/dependabot.go
new file mode 100644
index 000000000..07e68b506
--- /dev/null
+++ b/vendor/github.com/google/go-github/v56/github/dependabot.go
@@ -0,0 +1,12 @@
+// Copyright 2022 The go-github AUTHORS. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package github
+
+// DependabotService handles communication with the Dependabot related
+// methods of the GitHub API.
+//
+// GitHub API docs: https://docs.github.com/en/rest/dependabot/
+type DependabotService service
diff --git a/vendor/github.com/google/go-github/v56/github/dependabot_alerts.go b/vendor/github.com/google/go-github/v56/github/dependabot_alerts.go
new file mode 100644
index 000000000..177316b1d
--- /dev/null
+++ b/vendor/github.com/google/go-github/v56/github/dependabot_alerts.go
@@ -0,0 +1,138 @@
+// Copyright 2022 The go-github AUTHORS. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package github
+
+import (
+	"context"
+	"fmt"
+)
+
+// Dependency represents the vulnerable dependency.
+type Dependency struct {
+	Package      *VulnerabilityPackage `json:"package,omitempty"`
+	ManifestPath *string               `json:"manifest_path,omitempty"`
+	Scope        *string               `json:"scope,omitempty"`
+}
+
+// AdvisoryCVSS represents the advisory pertaining to the Common Vulnerability Scoring System.
+type AdvisoryCVSS struct {
+	Score        *float64 `json:"score,omitempty"`
+	VectorString *string  `json:"vector_string,omitempty"`
+}
+
+// AdvisoryCWEs represents the advisory pertaining to Common Weakness Enumeration.
+type AdvisoryCWEs struct {
+	CWEID *string `json:"cwe_id,omitempty"`
+	Name  *string `json:"name,omitempty"`
+}
+
+// DependabotSecurityAdvisory represents the GitHub Security Advisory.
+type DependabotSecurityAdvisory struct {
+	GHSAID          *string                  `json:"ghsa_id,omitempty"`
+	CVEID           *string                  `json:"cve_id,omitempty"`
+	Summary         *string                  `json:"summary,omitempty"`
+	Description     *string                  `json:"description,omitempty"`
+	Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"`
+	Severity        *string                  `json:"severity,omitempty"`
+	CVSS            *AdvisoryCVSS            `json:"cvss,omitempty"`
+	CWEs            []*AdvisoryCWEs          `json:"cwes,omitempty"`
+	Identifiers     []*AdvisoryIdentifier    `json:"identifiers,omitempty"`
+	References      []*AdvisoryReference     `json:"references,omitempty"`
+	PublishedAt     *Timestamp               `json:"published_at,omitempty"`
+	UpdatedAt       *Timestamp               `json:"updated_at,omitempty"`
+	WithdrawnAt     *Timestamp               `json:"withdrawn_at,omitempty"`
+}
+
+// DependabotAlert represents a Dependabot alert.
+type DependabotAlert struct { + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + Dependency *Dependency `json:"dependency,omitempty"` + SecurityAdvisory *DependabotSecurityAdvisory `json:"security_advisory,omitempty"` + SecurityVulnerability *AdvisoryVulnerability `json:"security_vulnerability,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + DismissedAt *Timestamp `json:"dismissed_at,omitempty"` + DismissedBy *User `json:"dismissed_by,omitempty"` + DismissedReason *string `json:"dismissed_reason,omitempty"` + DismissedComment *string `json:"dismissed_comment,omitempty"` + FixedAt *Timestamp `json:"fixed_at,omitempty"` + AutoDismissedAt *Timestamp `json:"auto_dismissed_at,omitempty"` + // The repository is always empty for events + Repository *Repository `json:"repository,omitempty"` +} + +// ListAlertsOptions specifies the optional parameters to the DependabotService.ListRepoAlerts +// and DependabotService.ListOrgAlerts methods. +type ListAlertsOptions struct { + State *string `url:"state,omitempty"` + Severity *string `url:"severity,omitempty"` + Ecosystem *string `url:"ecosystem,omitempty"` + Package *string `url:"package,omitempty"` + Scope *string `url:"scope,omitempty"` + Sort *string `url:"sort,omitempty"` + Direction *string `url:"direction,omitempty"` + + ListOptions + ListCursorOptions +} + +func (s *DependabotService) listAlerts(ctx context.Context, url string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*DependabotAlert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// ListRepoAlerts lists all Dependabot alerts of a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-a-repository +func (s *DependabotService) ListRepoAlerts(ctx context.Context, owner, repo string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/alerts", owner, repo) + return s.listAlerts(ctx, url, opts) +} + +// ListOrgAlerts lists all Dependabot alerts of an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization +func (s *DependabotService) ListOrgAlerts(ctx context.Context, org string, opts *ListAlertsOptions) ([]*DependabotAlert, *Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/alerts", org) + return s.listAlerts(ctx, url, opts) +} + +// GetRepoAlert gets a single repository Dependabot alert. 
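A short sketch of listing open Dependabot alerts for a repository with the options above; owner and repo are placeholders, and both filters are optional:

	package main

	import (
		"context"
		"fmt"
		"log"
		"os"

		"github.com/google/go-github/v56/github"
	)

	func main() {
		ctx := context.Background()
		client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

		// List open alerts for the Go ecosystem.
		opts := &github.ListAlertsOptions{
			State:     github.String("open"),
			Ecosystem: github.String("go"),
		}
		alerts, _, err := client.Dependabot.ListRepoAlerts(ctx, "owner", "repo", opts)
		if err != nil {
			log.Fatal(err)
		}
		for _, a := range alerts {
			fmt.Printf("#%d %s: %s\n",
				a.GetNumber(),
				a.GetSecurityAdvisory().GetSeverity(),
				a.GetSecurityAdvisory().GetSummary())
		}
	}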
+// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/alerts#get-a-dependabot-alert +func (s *DependabotService) GetRepoAlert(ctx context.Context, owner, repo string, number int) (*DependabotAlert, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/alerts/%v", owner, repo, number) + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + alert := new(DependabotAlert) + resp, err := s.client.Do(ctx, req, alert) + if err != nil { + return nil, resp, err + } + + return alert, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/dependabot_secrets.go b/vendor/github.com/google/go-github/v56/github/dependabot_secrets.go new file mode 100644 index 000000000..685f7bea8 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/dependabot_secrets.go @@ -0,0 +1,261 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +func (s *DependabotService) getPublicKey(ctx context.Context, url string) (*PublicKey, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + pubKey := new(PublicKey) + resp, err := s.client.Do(ctx, req, pubKey) + if err != nil { + return nil, resp, err + } + + return pubKey, resp, nil +} + +// GetRepoPublicKey gets a public key that should be used for Dependabot secret encryption. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#get-a-repository-public-key +func (s *DependabotService) GetRepoPublicKey(ctx context.Context, owner, repo string) (*PublicKey, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/public-key", owner, repo) + return s.getPublicKey(ctx, url) +} + +// GetOrgPublicKey gets a public key that should be used for Dependabot secret encryption. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#get-an-organization-public-key +func (s *DependabotService) GetOrgPublicKey(ctx context.Context, org string) (*PublicKey, *Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/public-key", org) + return s.getPublicKey(ctx, url) +} + +func (s *DependabotService) listSecrets(ctx context.Context, url string, opts *ListOptions) (*Secrets, *Response, error) { + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + secrets := new(Secrets) + resp, err := s.client.Do(ctx, req, &secrets) + if err != nil { + return nil, resp, err + } + + return secrets, resp, nil +} + +// ListRepoSecrets lists all Dependabot secrets available in a repository +// without revealing their encrypted values. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#list-repository-secrets +func (s *DependabotService) ListRepoSecrets(ctx context.Context, owner, repo string, opts *ListOptions) (*Secrets, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/secrets", owner, repo) + return s.listSecrets(ctx, url, opts) +} + +// ListOrgSecrets lists all Dependabot secrets available in an organization +// without revealing their encrypted values. 
+// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#list-organization-secrets +func (s *DependabotService) ListOrgSecrets(ctx context.Context, org string, opts *ListOptions) (*Secrets, *Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets", org) + return s.listSecrets(ctx, url, opts) +} + +func (s *DependabotService) getSecret(ctx context.Context, url string) (*Secret, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + secret := new(Secret) + resp, err := s.client.Do(ctx, req, secret) + if err != nil { + return nil, resp, err + } + + return secret, resp, nil +} + +// GetRepoSecret gets a single repository Dependabot secret without revealing its encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#get-a-repository-secret +func (s *DependabotService) GetRepoSecret(ctx context.Context, owner, repo, name string) (*Secret, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) + return s.getSecret(ctx, url) +} + +// GetOrgSecret gets a single organization Dependabot secret without revealing its encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#get-an-organization-secret +func (s *DependabotService) GetOrgSecret(ctx context.Context, org, name string) (*Secret, *Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) + return s.getSecret(ctx, url) +} + +// DependabotEncryptedSecret represents a secret that is encrypted using a public key for Dependabot. +// +// The value of EncryptedValue must be your secret, encrypted with +// LibSodium (see documentation here: https://libsodium.gitbook.io/doc/bindings_for_other_languages) +// using the public key retrieved using the GetPublicKey method. +type DependabotEncryptedSecret struct { + Name string `json:"-"` + KeyID string `json:"key_id"` + EncryptedValue string `json:"encrypted_value"` + Visibility string `json:"visibility,omitempty"` + SelectedRepositoryIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids,omitempty"` +} + +func (s *DependabotService) putSecret(ctx context.Context, url string, eSecret *DependabotEncryptedSecret) (*Response, error) { + req, err := s.client.NewRequest("PUT", url, eSecret) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CreateOrUpdateRepoSecret creates or updates a repository Dependabot secret with an encrypted value. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#create-or-update-a-repository-secret +func (s *DependabotService) CreateOrUpdateRepoSecret(ctx context.Context, owner, repo string, eSecret *DependabotEncryptedSecret) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, eSecret.Name) + return s.putSecret(ctx, url, eSecret) +} + +// CreateOrUpdateOrgSecret creates or updates an organization Dependabot secret with an encrypted value. 
+// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#create-or-update-an-organization-secret +func (s *DependabotService) CreateOrUpdateOrgSecret(ctx context.Context, org string, eSecret *DependabotEncryptedSecret) (*Response, error) { + repoIDs := make([]string, len(eSecret.SelectedRepositoryIDs)) + for i, secret := range eSecret.SelectedRepositoryIDs { + repoIDs[i] = fmt.Sprintf("%v", secret) + } + params := struct { + *DependabotEncryptedSecret + SelectedRepositoryIDs []string `json:"selected_repository_ids,omitempty"` + }{ + DependabotEncryptedSecret: eSecret, + SelectedRepositoryIDs: repoIDs, + } + + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, eSecret.Name) + req, err := s.client.NewRequest("PUT", url, params) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +func (s *DependabotService) deleteSecret(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteRepoSecret deletes a Dependabot secret in a repository using the secret name. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#delete-a-repository-secret +func (s *DependabotService) DeleteRepoSecret(ctx context.Context, owner, repo, name string) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/dependabot/secrets/%v", owner, repo, name) + return s.deleteSecret(ctx, url) +} + +// DeleteOrgSecret deletes a Dependabot secret in an organization using the secret name. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#delete-an-organization-secret +func (s *DependabotService) DeleteOrgSecret(ctx context.Context, org, name string) (*Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v", org, name) + return s.deleteSecret(ctx, url) +} + +// ListSelectedReposForOrgSecret lists all repositories that have access to a Dependabot secret. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#list-selected-repositories-for-an-organization-secret +func (s *DependabotService) ListSelectedReposForOrgSecret(ctx context.Context, org, name string, opts *ListOptions) (*SelectedReposList, *Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) + u, err := addOptions(url, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + result := new(SelectedReposList) + resp, err := s.client.Do(ctx, req, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// DependabotSecretsSelectedRepoIDs are the repository IDs that have access to the dependabot secrets. +type DependabotSecretsSelectedRepoIDs []int64 + +// SetSelectedReposForOrgSecret sets the repositories that have access to a Dependabot secret. 
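Because CreateOrUpdateOrgSecret serializes the selected repository IDs as strings, an organization-level Dependabot secret scoped to specific repositories can be created as below. This is a sketch with placeholder names and IDs; keyID and encryptedValue are assumed to come from GetOrgPublicKey plus the same sealed-box encryption shown earlier:

	package main

	import (
		"context"
		"log"
		"os"

		"github.com/google/go-github/v56/github"
	)

	func main() {
		ctx := context.Background()
		client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

		// keyID and encryptedValue are placeholders; in practice they come from
		// GetOrgPublicKey and a LibSodium sealed box over the plaintext.
		keyID, encryptedValue := "0123456789", "base64-sealed-box-ciphertext"

		eSecret := &github.DependabotEncryptedSecret{
			Name:                  "REGISTRY_TOKEN",
			KeyID:                 keyID,
			EncryptedValue:        encryptedValue,
			Visibility:            "selected",
			SelectedRepositoryIDs: github.DependabotSecretsSelectedRepoIDs{123456789, 987654321},
		}
		if _, err := client.Dependabot.CreateOrUpdateOrgSecret(ctx, "my-org", eSecret); err != nil {
			log.Fatal(err)
		}
	}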
+// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#set-selected-repositories-for-an-organization-secret +func (s *DependabotService) SetSelectedReposForOrgSecret(ctx context.Context, org, name string, ids DependabotSecretsSelectedRepoIDs) (*Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories", org, name) + type repoIDs struct { + SelectedIDs DependabotSecretsSelectedRepoIDs `json:"selected_repository_ids"` + } + + req, err := s.client.NewRequest("PUT", url, repoIDs{SelectedIDs: ids}) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddSelectedRepoToOrgSecret adds a repository to an organization Dependabot secret. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#add-selected-repository-to-an-organization-secret +func (s *DependabotService) AddSelectedRepoToOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) + req, err := s.client.NewRequest("PUT", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveSelectedRepoFromOrgSecret removes a repository from an organization Dependabot secret. +// +// GitHub API docs: https://docs.github.com/en/rest/dependabot/secrets#remove-selected-repository-from-an-organization-secret +func (s *DependabotService) RemoveSelectedRepoFromOrgSecret(ctx context.Context, org, name string, repo *Repository) (*Response, error) { + url := fmt.Sprintf("orgs/%v/dependabot/secrets/%v/repositories/%v", org, name, *repo.ID) + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/dependency_graph.go b/vendor/github.com/google/go-github/v56/github/dependency_graph.go new file mode 100644 index 000000000..e578965cc --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/dependency_graph.go @@ -0,0 +1,80 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +type DependencyGraphService service + +// SBOM represents a software bill of materials, which describes the +// packages/libraries that a repository depends on. +type SBOM struct { + SBOM *SBOMInfo `json:"sbom,omitempty"` +} + +// CreationInfo represents when the SBOM was created and who created it. +type CreationInfo struct { + Created *Timestamp `json:"created,omitempty"` + Creators []string `json:"creators,omitempty"` +} + +// RepoDependencies represents the dependencies of a repo. +type RepoDependencies struct { + SPDXID *string `json:"SPDXID,omitempty"` + // Package name + Name *string `json:"name,omitempty"` + VersionInfo *string `json:"versionInfo,omitempty"` + DownloadLocation *string `json:"downloadLocation,omitempty"` + FilesAnalyzed *bool `json:"filesAnalyzed,omitempty"` + LicenseConcluded *string `json:"licenseConcluded,omitempty"` + LicenseDeclared *string `json:"licenseDeclared,omitempty"` +} + +// SBOMInfo represents a software bill of materials (SBOM) using SPDX. +// SPDX is an open standard for SBOMs that +// identifies and catalogs components, licenses, copyrights, security +// references, and other metadata relating to software. 
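A sketch of fetching a repository's SPDX SBOM through the GetSBOM method defined just below, assuming the DependencyGraph service is exposed on the client like the other services and using placeholder owner/repo values:

	package main

	import (
		"context"
		"fmt"
		"log"
		"os"

		"github.com/google/go-github/v56/github"
	)

	func main() {
		ctx := context.Background()
		client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

		sbom, _, err := client.DependencyGraph.GetSBOM(ctx, "owner", "repo")
		if err != nil {
			log.Fatal(err)
		}
		// Print each package recorded in the SBOM.
		if info := sbom.GetSBOM(); info != nil {
			for _, p := range info.Packages {
				fmt.Printf("%s %s\n", p.GetName(), p.GetVersionInfo())
			}
		}
	}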
+type SBOMInfo struct { + SPDXID *string `json:"SPDXID,omitempty"` + SPDXVersion *string `json:"spdxVersion,omitempty"` + CreationInfo *CreationInfo `json:"creationInfo,omitempty"` + + // Repo name + Name *string `json:"name,omitempty"` + DataLicense *string `json:"dataLicense,omitempty"` + DocumentDescribes []string `json:"documentDescribes,omitempty"` + DocumentNamespace *string `json:"documentNamespace,omitempty"` + + // List of packages dependencies + Packages []*RepoDependencies `json:"packages,omitempty"` +} + +func (s SBOM) String() string { + return Stringify(s) +} + +// GetSBOM fetches the software bill of materials for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/dependency-graph/sboms +func (s *DependencyGraphService) GetSBOM(ctx context.Context, owner, repo string) (*SBOM, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/dependency-graph/sbom", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var sbom *SBOM + resp, err := s.client.Do(ctx, req, &sbom) + if err != nil { + return nil, resp, err + } + + return sbom, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/doc.go b/vendor/github.com/google/go-github/v56/github/doc.go new file mode 100644 index 000000000..4a9aa95c3 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/doc.go @@ -0,0 +1,194 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package github provides a client for using the GitHub API. + +Usage: + + import "github.com/google/go-github/v56/github" // with go modules enabled (GO111MODULE=on or outside GOPATH) + import "github.com/google/go-github/github" // with go modules disabled + +Construct a new GitHub client, then use the various services on the client to +access different parts of the GitHub API. For example: + + client := github.NewClient(nil) + + // list all organizations for user "willnorris" + orgs, _, err := client.Organizations.List(ctx, "willnorris", nil) + +Some API methods have optional parameters that can be passed. For example: + + client := github.NewClient(nil) + + // list public repositories for org "github" + opt := &github.RepositoryListByOrgOptions{Type: "public"} + repos, _, err := client.Repositories.ListByOrg(ctx, "github", opt) + +The services of a client divide the API into logical chunks and correspond to +the structure of the GitHub API documentation at +https://docs.github.com/en/rest . + +NOTE: Using the https://godoc.org/context package, one can easily +pass cancelation signals and deadlines to various services of the client for +handling a request. In case there is no context available, then context.Background() +can be used as a starting point. + +For more sample code snippets, head over to the https://github.com/google/go-github/tree/master/example directory. + +# Authentication + +Use Client.WithAuthToken to configure your client to authenticate using an Oauth token +(for example, a personal access token). This is what is needed for a majority of use cases +aside from GitHub Apps. + + client := github.NewClient(nil).WithAuthToken("... your access token ...") + +Note that when using an authenticated Client, all calls made by the client will +include the specified OAuth token. Therefore, authenticated clients should +almost never be shared between different users. 
+ +For API methods that require HTTP Basic Authentication, use the +BasicAuthTransport. + +GitHub Apps authentication can be provided by the +https://github.com/bradleyfalzon/ghinstallation package. +It supports both authentication as an installation, using an installation access token, +and as an app, using a JWT. + +To authenticate as an installation: + + import "github.com/bradleyfalzon/ghinstallation" + + func main() { + // Wrap the shared transport for use with the integration ID 1 authenticating with installation ID 99. + itr, err := ghinstallation.NewKeyFromFile(http.DefaultTransport, 1, 99, "2016-10-19.private-key.pem") + if err != nil { + // Handle error. + } + + // Use installation transport with client + client := github.NewClient(&http.Client{Transport: itr}) + + // Use client... + } + +To authenticate as an app, using a JWT: + + import "github.com/bradleyfalzon/ghinstallation" + + func main() { + // Wrap the shared transport for use with the application ID 1. + atr, err := ghinstallation.NewAppsTransportKeyFromFile(http.DefaultTransport, 1, "2016-10-19.private-key.pem") + if err != nil { + // Handle error. + } + + // Use app transport with client + client := github.NewClient(&http.Client{Transport: atr}) + + // Use client... + } + +# Rate Limiting + +GitHub imposes a rate limit on all API clients. Unauthenticated clients are +limited to 60 requests per hour, while authenticated clients can make up to +5,000 requests per hour. The Search API has a custom rate limit. Unauthenticated +clients are limited to 10 requests per minute, while authenticated clients +can make up to 30 requests per minute. To receive the higher rate limit when +making calls that are not issued on behalf of a user, +use UnauthenticatedRateLimitedTransport. + +The returned Response.Rate value contains the rate limit information +from the most recent API call. If a recent enough response isn't +available, you can use RateLimits to fetch the most up-to-date rate +limit data for the client. + +To detect an API rate limit error, you can check if its type is *github.RateLimitError. +For secondary rate limits, you can check if its type is *github.AbuseRateLimitError: + + repos, _, err := client.Repositories.List(ctx, "", nil) + if _, ok := err.(*github.RateLimitError); ok { + log.Println("hit rate limit") + } + if _, ok := err.(*github.AbuseRateLimitError); ok { + log.Println("hit secondary rate limit") + } + +Learn more about GitHub rate limiting at +https://docs.github.com/en/rest/rate-limit . + +# Accepted Status + +Some endpoints may return a 202 Accepted status code, meaning that the +information required is not yet ready and was scheduled to be gathered on +the GitHub side. Methods known to behave like this are documented specifying +this behavior. + +To detect this condition of error, you can check if its type is +*github.AcceptedError: + + stats, _, err := client.Repositories.ListContributorsStats(ctx, org, repo) + if _, ok := err.(*github.AcceptedError); ok { + log.Println("scheduled on GitHub side") + } + +# Conditional Requests + +The GitHub API has good support for conditional requests which will help +prevent you from burning through your rate limit, as well as help speed up your +application. go-github does not handle conditional requests directly, but is +instead designed to work with a caching http.Transport. We recommend using +https://github.com/gregjones/httpcache for that. 
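A minimal sketch of that setup, assuming the gregjones/httpcache package suggested above; the caching transport stores responses and replays their ETags so unchanged resources can be answered from cache:

	import "github.com/gregjones/httpcache"

	// Conditional-request-aware client backed by an in-memory cache.
	tp := httpcache.NewMemoryCacheTransport()
	client := github.NewClient(tp.Client())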
+ +Learn more about GitHub conditional requests at +https://docs.github.com/en/rest/overview/resources-in-the-rest-api#conditional-requests. + +# Creating and Updating Resources + +All structs for GitHub resources use pointer values for all non-repeated fields. +This allows distinguishing between unset fields and those set to a zero-value. +Helper functions have been provided to easily create these pointers for string, +bool, and int values. For example: + + // create a new private repository named "foo" + repo := &github.Repository{ + Name: github.String("foo"), + Private: github.Bool(true), + } + client.Repositories.Create(ctx, "", repo) + +Users who have worked with protocol buffers should find this pattern familiar. + +# Pagination + +All requests for resource collections (repos, pull requests, issues, etc.) +support pagination. Pagination options are described in the +github.ListOptions struct and passed to the list methods directly or as an +embedded type of a more specific list options struct (for example +github.PullRequestListOptions). Pages information is available via the +github.Response struct. + + client := github.NewClient(nil) + + opt := &github.RepositoryListByOrgOptions{ + ListOptions: github.ListOptions{PerPage: 10}, + } + // get all pages of results + var allRepos []*github.Repository + for { + repos, resp, err := client.Repositories.ListByOrg(ctx, "github", opt) + if err != nil { + return err + } + allRepos = append(allRepos, repos...) + if resp.NextPage == 0 { + break + } + opt.Page = resp.NextPage + } +*/ +package github diff --git a/vendor/github.com/google/go-github/v56/github/emojis.go b/vendor/github.com/google/go-github/v56/github/emojis.go new file mode 100644 index 000000000..15b57130b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/emojis.go @@ -0,0 +1,37 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" +) + +// EmojisService provides access to emoji-related functions in the GitHub API. +type EmojisService service + +// List returns the emojis available to use on GitHub. +// +// GitHub API docs: https://docs.github.com/rest/emojis/emojis#get-emojis +func (s *EmojisService) List(ctx context.Context) (map[string]string, *Response, error) { + req, err := s.client.NewRequest("GET", "emojis", nil) + if err != nil { + return nil, nil, err + } + + var emoji map[string]string + resp, err := s.client.Do(ctx, req, &emoji) + if err != nil { + return nil, resp, err + } + + return emoji, resp, nil +} + +// ListEmojis +// Deprecated: Use EmojisService.List instead +func (c *Client) ListEmojis(ctx context.Context) (map[string]string, *Response, error) { + return c.Emojis.List(ctx) +} diff --git a/vendor/github.com/google/go-github/v56/github/enterprise.go b/vendor/github.com/google/go-github/v56/github/enterprise.go new file mode 100644 index 000000000..1c9b06956 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/enterprise.go @@ -0,0 +1,12 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// EnterpriseService provides access to the enterprise related functions +// in the GitHub API. 
+// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/ +type EnterpriseService service diff --git a/vendor/github.com/google/go-github/v56/github/enterprise_actions_runner_groups.go b/vendor/github.com/google/go-github/v56/github/enterprise_actions_runner_groups.go new file mode 100644 index 000000000..b6bb70dae --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/enterprise_actions_runner_groups.go @@ -0,0 +1,310 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListOrganizations represents the response from the list orgs endpoints. +type ListOrganizations struct { + TotalCount *int `json:"total_count,omitempty"` + Organizations []*Organization `json:"organizations"` +} + +// EnterpriseRunnerGroup represents a self-hosted runner group configured in an enterprise. +type EnterpriseRunnerGroup struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + Default *bool `json:"default,omitempty"` + SelectedOrganizationsURL *string `json:"selected_organizations_url,omitempty"` + RunnersURL *string `json:"runners_url,omitempty"` + Inherited *bool `json:"inherited,omitempty"` + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + SelectedWorkflows []string `json:"selected_workflows,omitempty"` + WorkflowRestrictionsReadOnly *bool `json:"workflow_restrictions_read_only,omitempty"` +} + +// EnterpriseRunnerGroups represents a collection of self-hosted runner groups configured for an enterprise. +type EnterpriseRunnerGroups struct { + TotalCount *int `json:"total_count,omitempty"` + RunnerGroups []*EnterpriseRunnerGroup `json:"runner_groups"` +} + +// CreateEnterpriseRunnerGroupRequest represents a request to create a Runner group for an enterprise. +type CreateEnterpriseRunnerGroupRequest struct { + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + // List of organization IDs that can access the runner group. + SelectedOrganizationIDs []int64 `json:"selected_organization_ids,omitempty"` + // Runners represent a list of runner IDs to add to the runner group. + Runners []int64 `json:"runners,omitempty"` + // If set to True, public repos can use this runner group + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + // If true, the runner group will be restricted to running only the workflows specified in the SelectedWorkflows slice. + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + // List of workflows the runner group should be allowed to run. This setting will be ignored unless RestrictedToWorkflows is set to true. + SelectedWorkflows []string `json:"selected_workflows,omitempty"` +} + +// UpdateEnterpriseRunnerGroupRequest represents a request to update a Runner group for an enterprise. 
+type UpdateEnterpriseRunnerGroupRequest struct { + Name *string `json:"name,omitempty"` + Visibility *string `json:"visibility,omitempty"` + AllowsPublicRepositories *bool `json:"allows_public_repositories,omitempty"` + RestrictedToWorkflows *bool `json:"restricted_to_workflows,omitempty"` + SelectedWorkflows []string `json:"selected_workflows,omitempty"` +} + +// SetOrgAccessRunnerGroupRequest represents a request to replace the list of organizations +// that can access a self-hosted runner group configured in an enterprise. +type SetOrgAccessRunnerGroupRequest struct { + // Updated list of organization IDs that should be given access to the runner group. + SelectedOrganizationIDs []int64 `json:"selected_organization_ids"` +} + +// ListEnterpriseRunnerGroupOptions extend ListOptions to have the optional parameters VisibleToOrganization. +type ListEnterpriseRunnerGroupOptions struct { + ListOptions + + // Only return runner groups that are allowed to be used by this organization. + VisibleToOrganization string `url:"visible_to_organization,omitempty"` +} + +// ListRunnerGroups lists all self-hosted runner groups configured in an enterprise. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#list-self-hosted-runner-groups-for-an-enterprise +func (s *EnterpriseService) ListRunnerGroups(ctx context.Context, enterprise string, opts *ListEnterpriseRunnerGroupOptions) (*EnterpriseRunnerGroups, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + groups := &EnterpriseRunnerGroups{} + resp, err := s.client.Do(ctx, req, &groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// GetEnterpriseRunnerGroup gets a specific self-hosted runner group for an enterprise using its RunnerGroup ID. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#get-a-self-hosted-runner-group-for-an-enterprise +func (s *EnterpriseService) GetEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*EnterpriseRunnerGroup, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(EnterpriseRunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// DeleteEnterpriseRunnerGroup deletes a self-hosted runner group from an enterprise. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#delete-a-self-hosted-runner-group-from-an-enterprise +func (s *EnterpriseService) DeleteEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// CreateEnterpriseRunnerGroup creates a new self-hosted runner group for an enterprise. 
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#create-a-self-hosted-runner-group-for-an-enterprise +func (s *EnterpriseService) CreateEnterpriseRunnerGroup(ctx context.Context, enterprise string, createReq CreateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups", enterprise) + req, err := s.client.NewRequest("POST", u, createReq) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(EnterpriseRunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// UpdateEnterpriseRunnerGroup updates a self-hosted runner group for an enterprise. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#update-a-self-hosted-runner-group-for-an-enterprise +func (s *EnterpriseService) UpdateEnterpriseRunnerGroup(ctx context.Context, enterprise string, groupID int64, updateReq UpdateEnterpriseRunnerGroupRequest) (*EnterpriseRunnerGroup, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v", enterprise, groupID) + req, err := s.client.NewRequest("PATCH", u, updateReq) + if err != nil { + return nil, nil, err + } + + runnerGroup := new(EnterpriseRunnerGroup) + resp, err := s.client.Do(ctx, req, runnerGroup) + if err != nil { + return nil, resp, err + } + + return runnerGroup, resp, nil +} + +// ListOrganizationAccessRunnerGroup lists the organizations with access to a self-hosted runner group configured in an enterprise. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#list-organization-access-to-a-self-hosted-runner-group-in-an-enterprise +func (s *EnterpriseService) ListOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*ListOrganizations, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + orgs := &ListOrganizations{} + resp, err := s.client.Do(ctx, req, &orgs) + if err != nil { + return nil, resp, err + } + + return orgs, resp, nil +} + +// SetOrganizationAccessRunnerGroup replaces the list of organizations that have access to a self-hosted runner group configured in an enterprise +// with a new List of organizations. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#set-organization-access-for-a-self-hosted-runner-group-in-an-enterprise +func (s *EnterpriseService) SetOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID int64, ids SetOrgAccessRunnerGroupRequest) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations", enterprise, groupID) + + req, err := s.client.NewRequest("PUT", u, ids) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddOrganizationAccessRunnerGroup adds an organization to the list of selected organizations that can access a self-hosted runner group. +// The runner group must have visibility set to 'selected'. 
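A minimal sketch (editor's addition) of SetOrganizationAccessRunnerGroup, which replaces the entire org-access list in one call; the enterprise slug is a placeholder and the client/ctx are assumed to come from the caller.

// restrictRunnerGroup limits an existing runner group to an explicit set of organization IDs.
func restrictRunnerGroup(ctx context.Context, client *github.Client, groupID int64, orgIDs []int64) error {
	req := github.SetOrgAccessRunnerGroupRequest{SelectedOrganizationIDs: orgIDs}
	_, err := client.Enterprise.SetOrganizationAccessRunnerGroup(ctx, "my-enterprise", groupID, req)
	return err
}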
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#add-organization-access-to-a-self-hosted-runner-group-in-an-enterprise +func (s *EnterpriseService) AddOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveOrganizationAccessRunnerGroup removes an organization from the list of selected organizations that can access a self-hosted runner group. +// The runner group must have visibility set to 'selected'. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#remove-organization-access-to-a-self-hosted-runner-group-in-an-enterprise +func (s *EnterpriseService) RemoveOrganizationAccessRunnerGroup(ctx context.Context, enterprise string, groupID, orgID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/organizations/%v", enterprise, groupID, orgID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListRunnerGroupRunners lists self-hosted runners that are in a specific enterprise group. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#list-self-hosted-runners-in-a-group-for-an-enterprise +func (s *EnterpriseService) ListRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, opts *ListOptions) (*Runners, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runners := &Runners{} + resp, err := s.client.Do(ctx, req, &runners) + if err != nil { + return nil, resp, err + } + + return runners, resp, nil +} + +// SetRunnerGroupRunners replaces the list of self-hosted runners that are part of an enterprise runner group +// with a new list of runners. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#set-self-hosted-runners-in-a-group-for-an-enterprise +func (s *EnterpriseService) SetRunnerGroupRunners(ctx context.Context, enterprise string, groupID int64, ids SetRunnerGroupRunnersRequest) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners", enterprise, groupID) + + req, err := s.client.NewRequest("PUT", u, ids) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddRunnerGroupRunners adds a self-hosted runner to a runner group configured in an enterprise. 
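A sketch (editor's addition) of ListRunnerGroupRunners; it assumes the package's generated nil-safe Get* accessors on Runner, an assumed import of "fmt", and a placeholder enterprise slug.

// printGroupRunners lists the runners currently assigned to one enterprise runner group.
func printGroupRunners(ctx context.Context, client *github.Client, groupID int64) error {
	runners, _, err := client.Enterprise.ListRunnerGroupRunners(ctx, "my-enterprise", groupID, &github.ListOptions{PerPage: 100})
	if err != nil {
		return err
	}
	for _, r := range runners.Runners {
		fmt.Printf("%d\t%s\t%s\n", r.GetID(), r.GetName(), r.GetStatus())
	}
	return nil
}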
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#add-a-self-hosted-runner-to-a-group-for-an-enterprise +func (s *EnterpriseService) AddRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveRunnerGroupRunners removes a self-hosted runner from a group configured in an enterprise. +// The runner is then returned to the default group. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runner-groups?apiVersion=2022-11-28#remove-a-self-hosted-runner-from-a-group-for-an-enterprise +func (s *EnterpriseService) RemoveRunnerGroupRunners(ctx context.Context, enterprise string, groupID, runnerID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runner-groups/%v/runners/%v", enterprise, groupID, runnerID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/enterprise_actions_runners.go b/vendor/github.com/google/go-github/v56/github/enterprise_actions_runners.go new file mode 100644 index 000000000..6fc30d502 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/enterprise_actions_runners.go @@ -0,0 +1,108 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListRunnerApplicationDownloads lists self-hosted runner application binaries that can be downloaded and run. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-runner-applications-for-an-enterprise +func (s *EnterpriseService) ListRunnerApplicationDownloads(ctx context.Context, enterprise string) ([]*RunnerApplicationDownload, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runners/downloads", enterprise) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rads []*RunnerApplicationDownload + resp, err := s.client.Do(ctx, req, &rads) + if err != nil { + return nil, resp, err + } + + return rads, resp, nil +} + +// GenerateEnterpriseJITConfig generates a just-in-time configuration for an enterprise. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/actions/self-hosted-runners?apiVersion=2022-11-28#create-configuration-for-a-just-in-time-runner-for-an-enterprise +func (s *EnterpriseService) GenerateEnterpriseJITConfig(ctx context.Context, enterprise string, request *GenerateJITConfigRequest) (*JITRunnerConfig, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runners/generate-jitconfig", enterprise) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + jitConfig := new(JITRunnerConfig) + resp, err := s.client.Do(ctx, req, jitConfig) + if err != nil { + return nil, resp, err + } + + return jitConfig, resp, nil +} + +// CreateRegistrationToken creates a token that can be used to add a self-hosted runner. 
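A sketch (editor's addition) of AddRunnerGroupRunners; because a runner belongs to exactly one group, assigning it to a new group is a single call, and RemoveRunnerGroupRunners would return it to the default group. Client, ctx, and the enterprise slug are placeholders.

// assignRunnerToGroup moves a self-hosted runner into the given enterprise runner group.
func assignRunnerToGroup(ctx context.Context, client *github.Client, groupID, runnerID int64) error {
	_, err := client.Enterprise.AddRunnerGroupRunners(ctx, "my-enterprise", groupID, runnerID)
	return err
}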
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#create-a-registration-token-for-an-enterprise +func (s *EnterpriseService) CreateRegistrationToken(ctx context.Context, enterprise string) (*RegistrationToken, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runners/registration-token", enterprise) + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + registrationToken := new(RegistrationToken) + resp, err := s.client.Do(ctx, req, registrationToken) + if err != nil { + return nil, resp, err + } + + return registrationToken, resp, nil +} + +// ListRunners lists all the self-hosted runners for an enterprise. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#list-self-hosted-runners-for-an-enterprise +func (s *EnterpriseService) ListRunners(ctx context.Context, enterprise string, opts *ListOptions) (*Runners, *Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runners", enterprise) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + runners := &Runners{} + resp, err := s.client.Do(ctx, req, &runners) + if err != nil { + return nil, resp, err + } + + return runners, resp, nil +} + +// RemoveRunner forces the removal of a self-hosted runner from an enterprise using the runner id. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/self-hosted-runners#delete-a-self-hosted-runner-from-an-enterprise +func (s *EnterpriseService) RemoveRunner(ctx context.Context, enterprise string, runnerID int64) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/actions/runners/%v", enterprise, runnerID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/enterprise_audit_log.go b/vendor/github.com/google/go-github/v56/github/enterprise_audit_log.go new file mode 100644 index 000000000..406486733 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/enterprise_audit_log.go @@ -0,0 +1,35 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetAuditLog gets the audit-log entries for an enterprise.
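A sketch (editor's addition) combining CreateRegistrationToken and ListRunners defined above; it assumes an import of "fmt", a pre-built *github.Client, and a placeholder enterprise slug. The returned token is what a runner host would pass to the registration tooling.

// registerAndList prints a fresh registration token and the number of runners already registered.
func registerAndList(ctx context.Context, client *github.Client) error {
	token, _, err := client.Enterprise.CreateRegistrationToken(ctx, "my-enterprise")
	if err != nil {
		return err
	}
	fmt.Println("registration token:", token.GetToken())

	runners, _, err := client.Enterprise.ListRunners(ctx, "my-enterprise", nil)
	if err != nil {
		return err
	}
	fmt.Println("registered runners:", runners.TotalCount)
	return nil
}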
+// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/audit-log#get-the-audit-log-for-an-enterprise +func (s *EnterpriseService) GetAuditLog(ctx context.Context, enterprise string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { + u := fmt.Sprintf("enterprises/%v/audit-log", enterprise) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var auditEntries []*AuditEntry + resp, err := s.client.Do(ctx, req, &auditEntries) + if err != nil { + return nil, resp, err + } + + return auditEntries, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/enterprise_code_security_and_analysis.go b/vendor/github.com/google/go-github/v56/github/enterprise_code_security_and_analysis.go new file mode 100644 index 000000000..3980a86aa --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/enterprise_code_security_and_analysis.go @@ -0,0 +1,78 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// EnterpriseSecurityAnalysisSettings represents security analysis settings for an enterprise. +type EnterpriseSecurityAnalysisSettings struct { + AdvancedSecurityEnabledForNewRepositories *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"` + SecretScanningEnabledForNewRepositories *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"` + SecretScanningPushProtectionEnabledForNewRepositories *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"` + SecretScanningPushProtectionCustomLink *string `json:"secret_scanning_push_protection_custom_link,omitempty"` +} + +// GetCodeSecurityAndAnalysis gets code security and analysis features for an enterprise. +// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/code-security-and-analysis?apiVersion=2022-11-28#get-code-security-and-analysis-features-for-an-enterprise +func (s *EnterpriseService) GetCodeSecurityAndAnalysis(ctx context.Context, enterprise string) (*EnterpriseSecurityAnalysisSettings, *Response, error) { + u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + settings := new(EnterpriseSecurityAnalysisSettings) + resp, err := s.client.Do(ctx, req, settings) + if err != nil { + return nil, resp, err + } + + return settings, resp, nil +} + +// UpdateCodeSecurityAndAnalysis updates code security and analysis features for new repositories in an enterprise. +// +// GitHub API docs: https://docs.github.com/en/rest/enterprise-admin/code-security-and-analysis?apiVersion=2022-11-28#update-code-security-and-analysis-features-for-an-enterprise +func (s *EnterpriseService) UpdateCodeSecurityAndAnalysis(ctx context.Context, enterprise string, settings *EnterpriseSecurityAnalysisSettings) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/code_security_and_analysis", enterprise) + req, err := s.client.NewRequest("PATCH", u, settings) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// EnableDisableSecurityFeature enables or disables a security feature for all repositories in an enterprise. 
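A sketch (editor's addition) of the read-modify-write pattern for the code security settings defined above; client, ctx, and the enterprise slug are assumed to come from the caller.

// enableSecretScanningForNewRepos turns on secret scanning for new repositories across the enterprise.
func enableSecretScanningForNewRepos(ctx context.Context, client *github.Client) error {
	settings, _, err := client.Enterprise.GetCodeSecurityAndAnalysis(ctx, "my-enterprise")
	if err != nil {
		return err
	}
	settings.SecretScanningEnabledForNewRepositories = github.Bool(true)
	_, err = client.Enterprise.UpdateCodeSecurityAndAnalysis(ctx, "my-enterprise", settings)
	return err
}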
+// +// Valid values for securityProduct: "advanced_security", "secret_scanning", "secret_scanning_push_protection". +// Valid values for enablement: "enable_all", "disable_all". +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/enterprise-admin/code-security-and-analysis?apiVersion=2022-11-28#enable-or-disable-a-security-feature +func (s *EnterpriseService) EnableDisableSecurityFeature(ctx context.Context, enterprise, securityProduct, enablement string) (*Response, error) { + u := fmt.Sprintf("enterprises/%v/%v/%v", enterprise, securityProduct, enablement) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/event.go b/vendor/github.com/google/go-github/v56/github/event.go new file mode 100644 index 000000000..e98606bce --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/event.go @@ -0,0 +1,54 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "encoding/json" +) + +// Event represents a GitHub event. +type Event struct { + Type *string `json:"type,omitempty"` + Public *bool `json:"public,omitempty"` + RawPayload *json.RawMessage `json:"payload,omitempty"` + Repo *Repository `json:"repo,omitempty"` + Actor *User `json:"actor,omitempty"` + Org *Organization `json:"org,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ID *string `json:"id,omitempty"` +} + +func (e Event) String() string { + return Stringify(e) +} + +// ParsePayload parses the event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +func (e *Event) ParsePayload() (interface{}, error) { + // It would be nice if e.Type were the snake_case name of the event, + // but the existing interface uses the struct name instead. + payload := EventForType(typeToMessageMapping[e.GetType()]) + + if err := json.Unmarshal(e.GetRawPayload(), &payload); err != nil { + return nil, err + } + + return payload, nil +} + +// Payload returns the parsed event payload. For recognized event types, +// a value of the corresponding struct type will be returned. +// +// Deprecated: Use ParsePayload instead, which returns an error +// rather than panics if JSON unmarshaling raw payload fails. +func (e *Event) Payload() (payload interface{}) { + var err error + payload, err = e.ParsePayload() + if err != nil { + panic(err) + } + return payload +} diff --git a/vendor/github.com/google/go-github/v56/github/event_types.go b/vendor/github.com/google/go-github/v56/github/event_types.go new file mode 100644 index 000000000..e91d22686 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/event_types.go @@ -0,0 +1,1789 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// These event types are shared between the Events API and used as Webhook payloads. + +package github + +import "encoding/json" + +// RequestedAction is included in a CheckRunEvent when a user has invoked an action, +// i.e. when the CheckRunEvent's Action field is "requested_action". 
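A sketch (editor's addition) of Event.ParsePayload defined above: the returned interface{} is type-switched onto the concrete event structs. PushEvent and the Get* accessors are other types and generated helpers from this package; an import of "fmt" is assumed.

// describeEvent summarizes an Event returned by the Events API.
func describeEvent(ev *github.Event) (string, error) {
	payload, err := ev.ParsePayload()
	if err != nil {
		return "", err
	}
	switch p := payload.(type) {
	case *github.PushEvent:
		return fmt.Sprintf("push to %s", p.GetRef()), nil
	case *github.CheckRunEvent:
		return fmt.Sprintf("check run %s", p.GetAction()), nil
	default:
		return fmt.Sprintf("unhandled %s event", ev.GetType()), nil
	}
}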
+type RequestedAction struct { + Identifier string `json:"identifier"` // The integrator reference of the action requested by the user. +} + +// BranchProtectionRuleEvent is triggered when a branch protection rule is "created", "edited", or "deleted". +// The Webhook event name is "branch_protection_rule". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#branch_protection_rule +type BranchProtectionRuleEvent struct { + Action *string `json:"action,omitempty"` + Rule *BranchProtectionRule `json:"rule,omitempty"` + Changes *ProtectionChanges `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// CheckRunEvent is triggered when a check run is "created", "completed", or "rerequested". +// The Webhook event name is "check_run". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#check_run +type CheckRunEvent struct { + CheckRun *CheckRun `json:"check_run,omitempty"` + // The action performed. Possible values are: "created", "completed", "rerequested" or "requested_action". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The action requested by the user. Populated when the Action is "requested_action". + RequestedAction *RequestedAction `json:"requested_action,omitempty"` +} + +// CheckSuiteEvent is triggered when a check suite is "completed", "requested", or "rerequested". +// The Webhook event name is "check_suite". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#check_suite +type CheckSuiteEvent struct { + CheckSuite *CheckSuite `json:"check_suite,omitempty"` + // The action performed. Possible values are: "completed", "requested" or "rerequested". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// CommitCommentEvent is triggered when a commit comment is created. +// The Webhook event name is "commit_comment". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#commit_comment +type CommitCommentEvent struct { + Comment *RepositoryComment `json:"comment,omitempty"` + + // The following fields are only populated by Webhook events. + Action *string `json:"action,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// ContentReferenceEvent is triggered when the body or comment of an issue or +// pull request includes a URL that matches a configured content reference +// domain. +// The Webhook event name is "content_reference".
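A sketch (editor's addition) of how a webhook receiver might consume the event structs above. It relies on ValidatePayload, WebHookType, and ParseWebHook, which are the webhook helpers defined elsewhere in this package, plus assumed imports of "log", "net/http", and the library itself; the webhook secret is a placeholder.

// handleWebhook validates a delivery and reacts to check_run events.
func handleWebhook(w http.ResponseWriter, r *http.Request) {
	payload, err := github.ValidatePayload(r, []byte("my-webhook-secret"))
	if err != nil {
		http.Error(w, "bad signature", http.StatusForbidden)
		return
	}
	event, err := github.ParseWebHook(github.WebHookType(r), payload)
	if err != nil {
		http.Error(w, "unparseable payload", http.StatusBadRequest)
		return
	}
	if cr, ok := event.(*github.CheckRunEvent); ok {
		log.Printf("check run %q on %s", cr.GetAction(), cr.GetRepo().GetFullName())
	}
	w.WriteHeader(http.StatusOK)
}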
+// +// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#content_reference +type ContentReferenceEvent struct { + Action *string `json:"action,omitempty"` + ContentReference *ContentReference `json:"content_reference,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// CreateEvent represents a created repository, branch, or tag. +// The Webhook event name is "create". +// +// Note: webhooks will not receive this event for created repositories. +// Additionally, webhooks will not receive this event for tags if more +// than three tags are pushed at once. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/github-event-types#createevent +type CreateEvent struct { + Ref *string `json:"ref,omitempty"` + // RefType is the object that was created. Possible values are: "repository", "branch", "tag". + RefType *string `json:"ref_type,omitempty"` + MasterBranch *string `json:"master_branch,omitempty"` + Description *string `json:"description,omitempty"` + PusherType *string `json:"pusher_type,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// DeleteEvent represents a deleted branch or tag. +// The Webhook event name is "delete". +// +// Note: webhooks will not receive this event for tags if more than three tags +// are deleted at once. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/github-event-types#deleteevent +type DeleteEvent struct { + Ref *string `json:"ref,omitempty"` + // RefType is the object that was deleted. Possible values are: "branch", "tag". + RefType *string `json:"ref_type,omitempty"` + + // The following fields are only populated by Webhook events. + PusherType *string `json:"pusher_type,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// DependabotAlertEvent is triggered when there is activity relating to Dependabot alerts. +// The Webhook event name is "dependabot_alert". +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert +type DependabotAlertEvent struct { + Action *string `json:"action,omitempty"` + Alert *DependabotAlert `json:"alert,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` +} + +// DeployKeyEvent is triggered when a deploy key is added or removed from a repository. +// The Webhook event name is "deploy_key". 
+// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#deploy_key +type DeployKeyEvent struct { + // Action is the action that was performed. Possible values are: + // "created" or "deleted". + Action *string `json:"action,omitempty"` + + // The deploy key resource. + Key *Key `json:"key,omitempty"` + + // The Repository where the event occurred + Repo *Repository `json:"repository,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` + + // The following fields are only populated by Webhook events. + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// DeploymentEvent represents a deployment. +// The Webhook event name is "deployment". +// +// Events of this type are not visible in timelines, they are only used to trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#deployment +type DeploymentEvent struct { + Deployment *Deployment `json:"deployment,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Workflow *Workflow `json:"workflow,omitempty"` + WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` + + // The following fields are only populated by Webhook events. + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// DeploymentProtectionRuleEvent represents a deployment protection rule event. +// The Webhook event name is "deployment_protection_rule". +// +// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment_protection_rule +type DeploymentProtectionRuleEvent struct { + Action *string `json:"action,omitempty"` + Environment *string `json:"environment,omitempty"` + Event *string `json:"event,omitempty"` + + // The URL Github provides for a third-party to use in order to pass/fail a deployment gate + DeploymentCallbackURL *string `json:"deployment_callback_url,omitempty"` + Deployment *Deployment `json:"deployment,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Organization *Organization `json:"organization,omitempty"` + PullRequests []*PullRequest `json:"pull_requests,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// DeploymentStatusEvent represents a deployment status. +// The Webhook event name is "deployment_status". +// +// Events of this type are not visible in timelines, they are only used to trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#deployment_status +type DeploymentStatusEvent struct { + Deployment *Deployment `json:"deployment,omitempty"` + DeploymentStatus *DeploymentStatus `json:"deployment_status,omitempty"` + Repo *Repository `json:"repository,omitempty"` + + // The following fields are only populated by Webhook events. + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. 
+ Org *Organization `json:"organization,omitempty"` +} + +// DiscussionCommentEvent represents a webhook event for a comment on discussion. +// The Webhook event name is "discussion_comment". +// +// GitHub API docs: https://docs.github.com/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion_comment +type DiscussionCommentEvent struct { + // Action is the action that was performed on the comment. + // Possible values are: "created", "edited", "deleted". ** check what all can be added + Action *string `json:"action,omitempty"` + Discussion *Discussion `json:"discussion,omitempty"` + Comment *CommentDiscussion `json:"comment,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// CommentDiscussion represents a comment in a GitHub DiscussionCommentEvent. +type CommentDiscussion struct { + AuthorAssociation *string `json:"author_association,omitempty"` + Body *string `json:"body,omitempty"` + ChildCommentCount *int `json:"child_comment_count,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + DiscussionID *int64 `json:"discussion_id,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + ParentID *int64 `json:"parent_id,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + User *User `json:"user,omitempty"` +} + +// DiscussionEvent represents a webhook event for a discussion. +// The Webhook event name is "discussion". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion +type DiscussionEvent struct { + // Action is the action that was performed. Possible values are: + // created, edited, deleted, pinned, unpinned, locked, unlocked, + // transferred, category_changed, answered, or unanswered. + Action *string `json:"action,omitempty"` + Discussion *Discussion `json:"discussion,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// Discussion represents a discussion in a GitHub DiscussionEvent. +type Discussion struct { + RepositoryURL *string `json:"repository_url,omitempty"` + DiscussionCategory *DiscussionCategory `json:"category,omitempty"` + AnswerHTMLURL *string `json:"answer_html_url,omitempty"` + AnswerChosenAt *Timestamp `json:"answer_chosen_at,omitempty"` + AnswerChosenBy *string `json:"answer_chosen_by,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Number *int `json:"number,omitempty"` + Title *string `json:"title,omitempty"` + User *User `json:"user,omitempty"` + State *string `json:"state,omitempty"` + Locked *bool `json:"locked,omitempty"` + Comments *int `json:"comments,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + AuthorAssociation *string `json:"author_association,omitempty"` + ActiveLockReason *string `json:"active_lock_reason,omitempty"` + Body *string `json:"body,omitempty"` +} + +// DiscussionCategory represents a discussion category in a GitHub DiscussionEvent. 
+type DiscussionCategory struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + RepositoryID *int64 `json:"repository_id,omitempty"` + Emoji *string `json:"emoji,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Slug *string `json:"slug,omitempty"` + IsAnswerable *bool `json:"is_answerable,omitempty"` +} + +// ForkEvent is triggered when a user forks a repository. +// The Webhook event name is "fork". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#fork +type ForkEvent struct { + // Forkee is the created repository. + Forkee *Repository `json:"forkee,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// GitHubAppAuthorizationEvent is triggered when a user's authorization for a +// GitHub Application is revoked. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#github_app_authorization +type GitHubAppAuthorizationEvent struct { + // The action performed. Possible value is: "revoked". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// Page represents a single Wiki page. +type Page struct { + PageName *string `json:"page_name,omitempty"` + Title *string `json:"title,omitempty"` + Summary *string `json:"summary,omitempty"` + Action *string `json:"action,omitempty"` + SHA *string `json:"sha,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` +} + +// GollumEvent is triggered when a Wiki page is created or updated. +// The Webhook event name is "gollum". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#gollum +type GollumEvent struct { + Pages []*Page `json:"pages,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// EditChange represents the changes when an issue, pull request, comment, +// or repository has been edited. +type EditChange struct { + Title *EditTitle `json:"title,omitempty"` + Body *EditBody `json:"body,omitempty"` + Base *EditBase `json:"base,omitempty"` + Repo *EditRepo `json:"repository,omitempty"` + Owner *EditOwner `json:"owner,omitempty"` +} + +// EditTitle represents a pull-request title change. +type EditTitle struct { + From *string `json:"from,omitempty"` +} + +// EditBody represents a change of pull-request body. +type EditBody struct { + From *string `json:"from,omitempty"` +} + +// EditBase represents the change of a pull-request base branch. +type EditBase struct { + Ref *EditRef `json:"ref,omitempty"` + SHA *EditSHA `json:"sha,omitempty"` +} + +// EditRef represents a ref change of a pull-request. 
+type EditRef struct { + From *string `json:"from,omitempty"` +} + +// EditRepo represents a change of repository name. +type EditRepo struct { + Name *RepoName `json:"name,omitempty"` +} + +// EditOwner represents a change of repository ownership. +type EditOwner struct { + OwnerInfo *OwnerInfo `json:"from,omitempty"` +} + +// OwnerInfo represents the account info of the owner of the repo (could be User or Organization but both are User structs). +type OwnerInfo struct { + User *User `json:"user,omitempty"` + Org *User `json:"organization,omitempty"` +} + +// RepoName represents a change of repository name. +type RepoName struct { + From *string `json:"from,omitempty"` +} + +// EditSHA represents a sha change of a pull-request. +type EditSHA struct { + From *string `json:"from,omitempty"` +} + +// ProjectChange represents the changes when a project has been edited. +type ProjectChange struct { + Name *ProjectName `json:"name,omitempty"` + Body *ProjectBody `json:"body,omitempty"` +} + +// ProjectName represents a project name change. +type ProjectName struct { + From *string `json:"from,omitempty"` +} + +// ProjectBody represents a project body change. +type ProjectBody struct { + From *string `json:"from,omitempty"` +} + +// ProjectCardChange represents the changes when a project card has been edited. +type ProjectCardChange struct { + Note *ProjectCardNote `json:"note,omitempty"` +} + +// ProjectCardNote represents a change of a note of a project card. +type ProjectCardNote struct { + From *string `json:"from,omitempty"` +} + +// ProjectColumnChange represents the changes when a project column has been edited. +type ProjectColumnChange struct { + Name *ProjectColumnName `json:"name,omitempty"` +} + +// ProjectColumnName represents a project column name change. +type ProjectColumnName struct { + From *string `json:"from,omitempty"` +} + +// TeamChange represents the changes when a team has been edited. +type TeamChange struct { + Description *TeamDescription `json:"description,omitempty"` + Name *TeamName `json:"name,omitempty"` + Privacy *TeamPrivacy `json:"privacy,omitempty"` + Repository *TeamRepository `json:"repository,omitempty"` +} + +// TeamDescription represents a team description change. +type TeamDescription struct { + From *string `json:"from,omitempty"` +} + +// TeamName represents a team name change. +type TeamName struct { + From *string `json:"from,omitempty"` +} + +// TeamPrivacy represents a team privacy change. +type TeamPrivacy struct { + From *string `json:"from,omitempty"` +} + +// TeamRepository represents a team repository permission change. +type TeamRepository struct { + Permissions *TeamPermissions `json:"permissions,omitempty"` +} + +// TeamPermissions represents a team permission change. +type TeamPermissions struct { + From *TeamPermissionsFrom `json:"from,omitempty"` +} + +// TeamPermissionsFrom represents a team permission change. +type TeamPermissionsFrom struct { + Admin *bool `json:"admin,omitempty"` + Pull *bool `json:"pull,omitempty"` + Push *bool `json:"push,omitempty"` +} + +// InstallationEvent is triggered when a GitHub App has been installed, uninstalled, suspend, unsuspended +// or new permissions have been accepted. +// The Webhook event name is "installation". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#installation +type InstallationEvent struct { + // The action that was performed. Can be either "created", "deleted", "suspend", "unsuspend" or "new_permissions_accepted". 
+ Action *string `json:"action,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Requester *User `json:"requester,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// InstallationRepositoriesEvent is triggered when a repository is added or +// removed from an installation. The Webhook event name is "installation_repositories". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#installation_repositories +type InstallationRepositoriesEvent struct { + // The action that was performed. Can be either "added" or "removed". + Action *string `json:"action,omitempty"` + RepositoriesAdded []*Repository `json:"repositories_added,omitempty"` + RepositoriesRemoved []*Repository `json:"repositories_removed,omitempty"` + RepositorySelection *string `json:"repository_selection,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// InstallationLoginChange represents a change in login on an installation. +type InstallationLoginChange struct { + From *string `json:"from,omitempty"` +} + +// InstallationSlugChange represents a change in slug on an installation. +type InstallationSlugChange struct { + From *string `json:"from,omitempty"` +} + +// InstallationChanges represents a change in slug or login on an installation. +type InstallationChanges struct { + Login *InstallationLoginChange `json:"login,omitempty"` + Slug *InstallationSlugChange `json:"slug,omitempty"` +} + +// InstallationTargetEvent is triggered when there is activity on an installation from a user or organization account. +// The Webhook event name is "installation_target". +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_target +type InstallationTargetEvent struct { + Account *User `json:"account,omitempty"` + Action *string `json:"action,omitempty"` + Changes *InstallationChanges `json:"changes,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + TargetType *string `json:"target_type,omitempty"` +} + +// IssueCommentEvent is triggered when an issue comment is created on an issue +// or pull request. +// The Webhook event name is "issue_comment". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#issue_comment +type IssueCommentEvent struct { + // Action is the action that was performed on the comment. + // Possible values are: "created", "edited", "deleted". + Action *string `json:"action,omitempty"` + Issue *Issue `json:"issue,omitempty"` + Comment *IssueComment `json:"comment,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Changes *EditChange `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` +} + +// IssuesEvent is triggered when an issue is opened, edited, deleted, transferred, +// pinned, unpinned, closed, reopened, assigned, unassigned, labeled, unlabeled, +// locked, unlocked, milestoned, or demilestoned. +// The Webhook event name is "issues". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#issues +type IssuesEvent struct { + // Action is the action that was performed. Possible values are: "opened", + // "edited", "deleted", "transferred", "pinned", "unpinned", "closed", "reopened", + // "assigned", "unassigned", "labeled", "unlabeled", "locked", "unlocked", + // "milestoned", or "demilestoned". + Action *string `json:"action,omitempty"` + Issue *Issue `json:"issue,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Label *Label `json:"label,omitempty"` + + // The following fields are only populated by Webhook events. + Changes *EditChange `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// LabelEvent is triggered when a repository's label is created, edited, or deleted. +// The Webhook event name is "label" +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#label +type LabelEvent struct { + // Action is the action that was performed. Possible values are: + // "created", "edited", "deleted" + Action *string `json:"action,omitempty"` + Label *Label `json:"label,omitempty"` + Changes *EditChange `json:"changes,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// MarketplacePurchaseEvent is triggered when a user purchases, cancels, or changes +// their GitHub Marketplace plan. +// Webhook event name "marketplace_purchase". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#marketplace_purchase +type MarketplacePurchaseEvent struct { + // Action is the action that was performed. Possible values are: + // "purchased", "cancelled", "pending_change", "pending_change_cancelled", "changed". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. 
+ EffectiveDate *Timestamp `json:"effective_date,omitempty"` + MarketplacePurchase *MarketplacePurchase `json:"marketplace_purchase,omitempty"` + PreviousMarketplacePurchase *MarketplacePurchase `json:"previous_marketplace_purchase,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// MemberEvent is triggered when a user is added as a collaborator to a repository. +// The Webhook event name is "member". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#member +type MemberEvent struct { + // Action is the action that was performed. Possible value is: "added". + Action *string `json:"action,omitempty"` + Member *User `json:"member,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// MembershipEvent is triggered when a user is added or removed from a team. +// The Webhook event name is "membership". +// +// Events of this type are not visible in timelines, they are only used to +// trigger organization webhooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#membership +type MembershipEvent struct { + // Action is the action that was performed. Possible values are: "added", "removed". + Action *string `json:"action,omitempty"` + // Scope is the scope of the membership. Possible value is: "team". + Scope *string `json:"scope,omitempty"` + Member *User `json:"member,omitempty"` + Team *Team `json:"team,omitempty"` + + // The following fields are only populated by Webhook events. + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// MergeGroup represents the merge group in a merge queue. +type MergeGroup struct { + // The SHA of the merge group. + HeadSHA *string `json:"head_sha,omitempty"` + // The full ref of the merge group. + HeadRef *string `json:"head_ref,omitempty"` + // The SHA of the merge group's parent commit. + BaseSHA *string `json:"base_sha,omitempty"` + // The full ref of the branch the merge group will be merged into. + BaseRef *string `json:"base_ref,omitempty"` + // An expanded representation of the head_sha commit. + HeadCommit *Commit `json:"head_commit,omitempty"` +} + +// MergeGroupEvent represents activity related to merge groups in a merge queue. The type of activity is specified +// in the action property of the payload object. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#merge_group +type MergeGroupEvent struct { + // The action that was performed. Currently, can only be checks_requested. + Action *string `json:"action,omitempty"` + // The merge group. + MergeGroup *MergeGroup `json:"merge_group,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// MetaEvent is triggered when the webhook that this event is configured on is deleted. +// This event will only listen for changes to the particular hook the event is installed on. +// Therefore, it must be selected for each hook that you'd like to receive meta events for. +// The Webhook event name is "meta". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#meta +type MetaEvent struct { + // Action is the action that was performed. Possible value is: "deleted". + Action *string `json:"action,omitempty"` + // The ID of the modified webhook. + HookID *int64 `json:"hook_id,omitempty"` + // The modified webhook. + // This will contain different keys based on the type of webhook it is: repository, + // organization, business, app, or GitHub Marketplace. + Hook *Hook `json:"hook,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// MilestoneEvent is triggered when a milestone is created, closed, opened, edited, or deleted. +// The Webhook event name is "milestone". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#milestone +type MilestoneEvent struct { + // Action is the action that was performed. Possible values are: + // "created", "closed", "opened", "edited", "deleted" + Action *string `json:"action,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + + // The following fields are only populated by Webhook events. + Changes *EditChange `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Org *Organization `json:"organization,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// OrganizationEvent is triggered when an organization is deleted and renamed, and when a user is added, +// removed, or invited to an organization. +// Events of this type are not visible in timelines. These events are only used to trigger organization hooks. +// Webhook event name is "organization". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#organization +type OrganizationEvent struct { + // Action is the action that was performed. + // Possible values are: "deleted", "renamed", "member_added", "member_removed", or "member_invited". + Action *string `json:"action,omitempty"` + + // Invitation is the invitation for the user or email if the action is "member_invited". + Invitation *Invitation `json:"invitation,omitempty"` + + // Membership is the membership between the user and the organization. + // Not present when the action is "member_invited". + Membership *Membership `json:"membership,omitempty"` + + Organization *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// OrgBlockEvent is triggered when an organization blocks or unblocks a user. +// The Webhook event name is "org_block". 
+// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#org_block +type OrgBlockEvent struct { + // Action is the action that was performed. + // Can be "blocked" or "unblocked". + Action *string `json:"action,omitempty"` + BlockedUser *User `json:"blocked_user,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` +} + +// PackageEvent represents activity related to GitHub Packages. +// The Webhook event name is "package". +// +// This event is triggered when a GitHub Package is published or updated. +// +// GitHub API docs: https://developer.github.com/webhooks/event-payloads/#package +type PackageEvent struct { + // Action is the action that was performed. + // Can be "published" or "updated". + Action *string `json:"action,omitempty"` + Package *Package `json:"package,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` +} + +// PageBuildEvent represents an attempted build of a GitHub Pages site, whether +// successful or not. +// The Webhook event name is "page_build". +// +// This event is triggered on push to a GitHub Pages enabled branch (gh-pages +// for project pages, master for user and organization pages). +// +// Events of this type are not visible in timelines, they are only used to trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#page_build +type PageBuildEvent struct { + Build *PagesBuild `json:"build,omitempty"` + + // The following fields are only populated by Webhook events. + ID *int64 `json:"id,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// PersonalAccessTokenRequestEvent occurs when there is activity relating to a +// request for a fine-grained personal access token to access resources that +// belong to a resource owner that requires approval for token access. +// The webhook event name is "personal_access_token_request". +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#personal_access_token_request +type PersonalAccessTokenRequestEvent struct { + // Action is the action that was performed. Possible values are: + // "approved", "cancelled", "created" or "denied" + Action *string `json:"action,omitempty"` + PersonalAccessTokenRequest *PersonalAccessTokenRequest `json:"personal_access_token_request,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// PersonalAccessTokenRequest contains the details of a PersonalAccessTokenRequestEvent. +type PersonalAccessTokenRequest struct { + // Unique identifier of the request for access via fine-grained personal + // access token. Used as the pat_request_id parameter in the list and review + // API calls. 
+ ID *int64 `json:"id,omitempty"` + Owner *User `json:"owner,omitempty"` + + // New requested permissions, categorized by type of permission. + PermissionsAdded *PersonalAccessTokenPermissions `json:"permissions_added,omitempty"` + + // Requested permissions that elevate access for a previously approved + // request for access, categorized by type of permission. + PermissionsUpgraded *PersonalAccessTokenPermissions `json:"permissions_upgraded,omitempty"` + + // Permissions requested, categorized by type of permission. + // This field incorporates permissions_added and permissions_upgraded. + PermissionsResult *PersonalAccessTokenPermissions `json:"permissions_result,omitempty"` + + // Type of repository selection requested. Possible values are: + // "none", "all" or "subset" + RepositorySelection *string `json:"repository_selection,omitempty"` + + // The number of repositories the token is requesting access to. + // This field is only populated when repository_selection is subset. + RepositoryCount *int64 `json:"repository_count,omitempty"` + + // An array of repository objects the token is requesting access to. + // This field is only populated when repository_selection is subset. + Repositories []*Repository `json:"repositories,omitempty"` + + // Date and time when the request for access was created. + CreatedAt *Timestamp `json:"created_at,omitempty"` + + // Whether the associated fine-grained personal access token has expired. + TokenExpired *bool `json:"token_expired,omitempty"` + + // Date and time when the associated fine-grained personal access token expires. + TokenExpiresAt *Timestamp `json:"token_expires_at,omitempty"` + + // Date and time when the associated fine-grained personal access token was last used for authentication. + TokenLastUsedAt *Timestamp `json:"token_last_used_at,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// PersonalAccessTokenPermissions represents the original or newly requested +// scope of permissions for a fine-grained personal access token within a PersonalAccessTokenRequest. +type PersonalAccessTokenPermissions struct { + Org map[string]string `json:"organization,omitempty"` + Repo map[string]string `json:"repository,omitempty"` + Other map[string]string `json:"other,omitempty"` +} + +// PingEvent is triggered when a Webhook is added to GitHub. +// +// GitHub API docs: https://developer.github.com/webhooks/#ping-event +type PingEvent struct { + // Random string of GitHub zen. + Zen *string `json:"zen,omitempty"` + // The ID of the webhook that triggered the ping. + HookID *int64 `json:"hook_id,omitempty"` + // The webhook configuration. + Hook *Hook `json:"hook,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// ProjectEvent is triggered when project is created, modified or deleted. +// The webhook event name is "project". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#project +type ProjectEvent struct { + Action *string `json:"action,omitempty"` + Changes *ProjectChange `json:"changes,omitempty"` + Project *Project `json:"project,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// ProjectCardEvent is triggered when a project card is created, updated, moved, converted to an issue, or deleted. +// The webhook event name is "project_card". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#project_card +type ProjectCardEvent struct { + Action *string `json:"action,omitempty"` + Changes *ProjectCardChange `json:"changes,omitempty"` + AfterID *int64 `json:"after_id,omitempty"` + ProjectCard *ProjectCard `json:"project_card,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// ProjectColumnEvent is triggered when a project column is created, updated, moved, or deleted. +// The webhook event name is "project_column". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#project_column +type ProjectColumnEvent struct { + Action *string `json:"action,omitempty"` + Changes *ProjectColumnChange `json:"changes,omitempty"` + AfterID *int64 `json:"after_id,omitempty"` + ProjectColumn *ProjectColumn `json:"project_column,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// ProjectV2Event is triggered when there is activity relating to an organization-level project. +// The Webhook event name is "projects_v2". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2 +type ProjectV2Event struct { + Action *string `json:"action,omitempty"` + ProjectsV2 *ProjectsV2 `json:"projects_v2,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// ProjectsV2 represents a projects v2 project. +type ProjectsV2 struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Creator *User `json:"creator,omitempty"` + Title *string `json:"title,omitempty"` + Description *string `json:"description,omitempty"` + Public *bool `json:"public,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + DeletedAt *Timestamp `json:"deleted_at,omitempty"` + Number *int `json:"number,omitempty"` + ShortDescription *string `json:"short_description,omitempty"` + DeletedBy *User `json:"deleted_by,omitempty"` +} + +// ProjectV2ItemEvent is triggered when there is activity relating to an item on an organization-level project. +// The Webhook event name is "projects_v2_item". 
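+//
+// A small handling sketch (illustrative only), assuming the usual generated
+// Get* accessors and the standard log package:
+//
+//	switch e.GetAction() {
+//	case "archived", "restored":
+//		if c := e.GetChanges().GetArchivedAt(); c != nil {
+//			log.Printf("archived_at changed from %v to %v", c.GetFrom(), c.GetTo())
+//		}
+//	}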
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item
+type ProjectV2ItemEvent struct {
+	Action        *string              `json:"action,omitempty"`
+	Changes       *ProjectV2ItemChange `json:"changes,omitempty"`
+	ProjectV2Item *ProjectV2Item       `json:"projects_v2_item,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+	Installation *Installation `json:"installation,omitempty"`
+	Org          *Organization `json:"organization,omitempty"`
+	Sender       *User         `json:"sender,omitempty"`
+}
+
+// ProjectV2ItemChange represents a project v2 item change.
+type ProjectV2ItemChange struct {
+	ArchivedAt *ArchivedAt `json:"archived_at,omitempty"`
+}
+
+// ArchivedAt represents an archiving date change.
+type ArchivedAt struct {
+	From *Timestamp `json:"from,omitempty"`
+	To   *Timestamp `json:"to,omitempty"`
+}
+
+// ProjectV2Item represents an item belonging to a project.
+type ProjectV2Item struct {
+	ID            *int64     `json:"id,omitempty"`
+	NodeID        *string    `json:"node_id,omitempty"`
+	ProjectNodeID *string    `json:"project_node_id,omitempty"`
+	ContentNodeID *string    `json:"content_node_id,omitempty"`
+	ContentType   *string    `json:"content_type,omitempty"`
+	Creator       *User      `json:"creator,omitempty"`
+	CreatedAt     *Timestamp `json:"created_at,omitempty"`
+	UpdatedAt     *Timestamp `json:"updated_at,omitempty"`
+	ArchivedAt    *Timestamp `json:"archived_at,omitempty"`
+}
+
+// PublicEvent is triggered when a private repository is open sourced.
+// According to GitHub: "Without a doubt: the best GitHub event."
+// The Webhook event name is "public".
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#public
+type PublicEvent struct {
+	// The following fields are only populated by Webhook events.
+	Repo         *Repository   `json:"repository,omitempty"`
+	Sender       *User         `json:"sender,omitempty"`
+	Installation *Installation `json:"installation,omitempty"`
+
+	// The following field is only present when the webhook is triggered on
+	// a repository belonging to an organization.
+	Org *Organization `json:"organization,omitempty"`
+}
+
+// PullRequestEvent is triggered when a pull request is assigned, unassigned, labeled,
+// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review,
+// locked, unlocked, a pull request review is requested, or a review request is removed.
+// The Webhook event name is "pull_request".
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/github-event-types#pullrequestevent
+type PullRequestEvent struct {
+	// Action is the action that was performed. Possible values are:
+	// "assigned", "unassigned", "review_requested", "review_request_removed", "labeled", "unlabeled",
+	// "opened", "edited", "closed", "ready_for_review", "locked", "unlocked", or "reopened".
+	// If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits.
+	// If the action is "closed" and the "merged" key is "true", the pull request was merged.
+	// While webhooks are also triggered when a pull request is synchronized, Events API timelines
+	// don't include pull request events with the "synchronize" action.
+	Action      *string      `json:"action,omitempty"`
+	Assignee    *User        `json:"assignee,omitempty"`
+	Number      *int         `json:"number,omitempty"`
+	PullRequest *PullRequest `json:"pull_request,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+ Changes *EditChange `json:"changes,omitempty"` + // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. + // A request affecting multiple reviewers at once is split into multiple + // such event deliveries, each with a single, different RequestedReviewer. + RequestedReviewer *User `json:"requested_reviewer,omitempty"` + // In the event that a team is requested instead of a user, "requested_team" gets sent in place of + // "requested_user" with the same delivery behavior. + RequestedTeam *Team `json:"requested_team,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` + + // The following fields are only populated when the Action is "synchronize". + Before *string `json:"before,omitempty"` + After *string `json:"after,omitempty"` + + // The following will be populated if the event was performed by an App + PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` +} + +// PullRequestReviewEvent is triggered when a review is submitted on a pull +// request. +// The Webhook event name is "pull_request_review". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review +type PullRequestReviewEvent struct { + // Action is always "submitted". + Action *string `json:"action,omitempty"` + Review *PullRequestReview `json:"review,omitempty"` + PullRequest *PullRequest `json:"pull_request,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` +} + +// PullRequestReviewCommentEvent is triggered when a comment is created on a +// portion of the unified diff of a pull request. +// The Webhook event name is "pull_request_review_comment". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_comment +type PullRequestReviewCommentEvent struct { + // Action is the action that was performed on the comment. + // Possible values are: "created", "edited", "deleted". + Action *string `json:"action,omitempty"` + PullRequest *PullRequest `json:"pull_request,omitempty"` + Comment *PullRequestComment `json:"comment,omitempty"` + + // The following fields are only populated by Webhook events. + Changes *EditChange `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// PullRequestReviewThreadEvent is triggered when a comment made as part of a +// review of a pull request is marked resolved or unresolved. +// The Webhook event name is "pull_request_review_thread". 
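+//
+// A minimal delivery-handling sketch, assuming an incoming *http.Request r, a
+// webhook secret, and this package's webhook helpers (ValidatePayload,
+// WebHookType, ParseWebHook):
+//
+//	payload, err := github.ValidatePayload(r, secret)
+//	if err != nil { /* reject the delivery */ }
+//	event, err := github.ParseWebHook(github.WebHookType(r), payload)
+//	if err != nil { /* unknown event type */ }
+//	if e, ok := event.(*github.PullRequestReviewThreadEvent); ok {
+//		log.Printf("review thread %s on PR #%d", e.GetAction(), e.GetPullRequest().GetNumber())
+//	}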
+// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#pull_request_review_thread +type PullRequestReviewThreadEvent struct { + // Action is the action that was performed on the comment. + // Possible values are: "resolved", "unresolved". + Action *string `json:"action,omitempty"` + Thread *PullRequestThread `json:"thread,omitempty"` + PullRequest *PullRequest `json:"pull_request,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// PullRequestTargetEvent is triggered when a pull request is assigned, unassigned, labeled, +// unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review, +// locked, unlocked, a pull request review is requested, or a review request is removed. +// The Webhook event name is "pull_request_target". +// +// GitHub API docs: https://docs.github.com/en/actions/events-that-trigger-workflows#pull_request_target +type PullRequestTargetEvent struct { + // Action is the action that was performed. Possible values are: + // "assigned", "unassigned", "labeled", "unlabeled", "opened", "edited", "closed", "reopened", + // "ready_for_review", "locked", "unlocked", "review_requested" or "review_request_removed". + // If the action is "closed" and the "merged" key is "false", the pull request was closed with unmerged commits. + // If the action is "closed" and the "merged" key is "true", the pull request was merged. + // While webhooks are also triggered when a pull request is synchronized, Events API timelines + // don't include pull request events with the "synchronize" action. + Action *string `json:"action,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Number *int `json:"number,omitempty"` + PullRequest *PullRequest `json:"pull_request,omitempty"` + + // The following fields are only populated by Webhook events. + Changes *EditChange `json:"changes,omitempty"` + // RequestedReviewer is populated in "review_requested", "review_request_removed" event deliveries. + // A request affecting multiple reviewers at once is split into multiple + // such event deliveries, each with a single, different RequestedReviewer. + RequestedReviewer *User `json:"requested_reviewer,omitempty"` + // In the event that a team is requested instead of a user, "requested_team" gets sent in place of + // "requested_user" with the same delivery behavior. + RequestedTeam *Team `json:"requested_team,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Label *Label `json:"label,omitempty"` // Populated in "labeled" event deliveries. + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` + + // The following fields are only populated when the Action is "synchronize". 
+ Before *string `json:"before,omitempty"` + After *string `json:"after,omitempty"` + + // The following will be populated if the event was performed by an App + PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` +} + +// PushEvent represents a git push to a GitHub repository. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#push +type PushEvent struct { + PushID *int64 `json:"push_id,omitempty"` + Head *string `json:"head,omitempty"` + Ref *string `json:"ref,omitempty"` + Size *int `json:"size,omitempty"` + Commits []*HeadCommit `json:"commits,omitempty"` + Before *string `json:"before,omitempty"` + DistinctSize *int `json:"distinct_size,omitempty"` + + // The following fields are only populated by Webhook events. + Action *string `json:"action,omitempty"` + After *string `json:"after,omitempty"` + Created *bool `json:"created,omitempty"` + Deleted *bool `json:"deleted,omitempty"` + Forced *bool `json:"forced,omitempty"` + BaseRef *string `json:"base_ref,omitempty"` + Compare *string `json:"compare,omitempty"` + Repo *PushEventRepository `json:"repository,omitempty"` + HeadCommit *HeadCommit `json:"head_commit,omitempty"` + Pusher *User `json:"pusher,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Organization *Organization `json:"organization,omitempty"` +} + +func (p PushEvent) String() string { + return Stringify(p) +} + +// HeadCommit represents a git commit in a GitHub PushEvent. +type HeadCommit struct { + Message *string `json:"message,omitempty"` + Author *CommitAuthor `json:"author,omitempty"` + URL *string `json:"url,omitempty"` + Distinct *bool `json:"distinct,omitempty"` + + // The following fields are only populated by Events API. + SHA *string `json:"sha,omitempty"` + + // The following fields are only populated by Webhook events. + ID *string `json:"id,omitempty"` + TreeID *string `json:"tree_id,omitempty"` + Timestamp *Timestamp `json:"timestamp,omitempty"` + Committer *CommitAuthor `json:"committer,omitempty"` + Added []string `json:"added,omitempty"` + Removed []string `json:"removed,omitempty"` + Modified []string `json:"modified,omitempty"` +} + +func (h HeadCommit) String() string { + return Stringify(h) +} + +// PushEventRepository represents the repo object in a PushEvent payload. 
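+//
+// In webhook handlers this type is reached through PushEvent.Repo; a brief
+// nil-safe access sketch, assuming the generated Get* accessors:
+//
+//	func handlePush(e *github.PushEvent) {
+//		log.Printf("%s pushed %d commit(s) to %s on %s",
+//			e.GetSender().GetLogin(), len(e.Commits), e.GetRef(), e.GetRepo().GetFullName())
+//	}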
+type PushEventRepository struct {
+	ID              *int64     `json:"id,omitempty"`
+	NodeID          *string    `json:"node_id,omitempty"`
+	Name            *string    `json:"name,omitempty"`
+	FullName        *string    `json:"full_name,omitempty"`
+	Owner           *User      `json:"owner,omitempty"`
+	Private         *bool      `json:"private,omitempty"`
+	Description     *string    `json:"description,omitempty"`
+	Fork            *bool      `json:"fork,omitempty"`
+	CreatedAt       *Timestamp `json:"created_at,omitempty"`
+	PushedAt        *Timestamp `json:"pushed_at,omitempty"`
+	UpdatedAt       *Timestamp `json:"updated_at,omitempty"`
+	Homepage        *string    `json:"homepage,omitempty"`
+	PullsURL        *string    `json:"pulls_url,omitempty"`
+	Size            *int       `json:"size,omitempty"`
+	StargazersCount *int       `json:"stargazers_count,omitempty"`
+	WatchersCount   *int       `json:"watchers_count,omitempty"`
+	Language        *string    `json:"language,omitempty"`
+	HasIssues       *bool      `json:"has_issues,omitempty"`
+	HasDownloads    *bool      `json:"has_downloads,omitempty"`
+	HasWiki         *bool      `json:"has_wiki,omitempty"`
+	HasPages        *bool      `json:"has_pages,omitempty"`
+	ForksCount      *int       `json:"forks_count,omitempty"`
+	Archived        *bool      `json:"archived,omitempty"`
+	Disabled        *bool      `json:"disabled,omitempty"`
+	OpenIssuesCount *int       `json:"open_issues_count,omitempty"`
+	DefaultBranch   *string    `json:"default_branch,omitempty"`
+	MasterBranch    *string    `json:"master_branch,omitempty"`
+	Organization    *string    `json:"organization,omitempty"`
+	URL             *string    `json:"url,omitempty"`
+	ArchiveURL      *string    `json:"archive_url,omitempty"`
+	HTMLURL         *string    `json:"html_url,omitempty"`
+	StatusesURL     *string    `json:"statuses_url,omitempty"`
+	GitURL          *string    `json:"git_url,omitempty"`
+	SSHURL          *string    `json:"ssh_url,omitempty"`
+	CloneURL        *string    `json:"clone_url,omitempty"`
+	SVNURL          *string    `json:"svn_url,omitempty"`
+	Topics          []string   `json:"topics,omitempty"`
+}
+
+// PushEventRepoOwner is a basic representation of user/org in a PushEvent payload.
+type PushEventRepoOwner struct {
+	Name  *string `json:"name,omitempty"`
+	Email *string `json:"email,omitempty"`
+}
+
+// ReleaseEvent is triggered when a release is published, unpublished, created,
+// edited, deleted, or prereleased.
+// The Webhook event name is "release".
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#release
+type ReleaseEvent struct {
+	// Action is the action that was performed. Possible values are: "published", "unpublished",
+	// "created", "edited", "deleted", or "prereleased".
+	Action  *string            `json:"action,omitempty"`
+	Release *RepositoryRelease `json:"release,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+	Repo         *Repository   `json:"repository,omitempty"`
+	Sender       *User         `json:"sender,omitempty"`
+	Installation *Installation `json:"installation,omitempty"`
+
+	// The following field is only present when the webhook is triggered on
+	// a repository belonging to an organization.
+	Org *Organization `json:"organization,omitempty"`
+}
+
+// RepositoryEvent is triggered when a repository is created, archived, unarchived,
+// renamed, edited, transferred, made public, or made private. Organization hooks are
+// also triggered when a repository is deleted.
+// The Webhook event name is "repository".
+//
+// Events of this type are not visible in timelines, they are only used to
+// trigger organization webhooks.
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#repository
+type RepositoryEvent struct {
+	// Action is the action that was performed. Possible values are: "created",
+	// "deleted" (organization hooks only), "archived", "unarchived", "edited", "renamed",
+	// "transferred", "publicized", or "privatized".
+	Action *string     `json:"action,omitempty"`
+	Repo   *Repository `json:"repository,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+	Changes      *EditChange   `json:"changes,omitempty"`
+	Org          *Organization `json:"organization,omitempty"`
+	Sender       *User         `json:"sender,omitempty"`
+	Installation *Installation `json:"installation,omitempty"`
+}
+
+// RepositoryDispatchEvent is triggered when a client sends a POST request to the repository dispatch event endpoint.
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#repository_dispatch
+type RepositoryDispatchEvent struct {
+	// Action is the event_type that was submitted with the repository dispatch payload. Value can be any string.
+	Action        *string         `json:"action,omitempty"`
+	Branch        *string         `json:"branch,omitempty"`
+	ClientPayload json.RawMessage `json:"client_payload,omitempty"`
+	Repo          *Repository     `json:"repository,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+	Org          *Organization `json:"organization,omitempty"`
+	Sender       *User         `json:"sender,omitempty"`
+	Installation *Installation `json:"installation,omitempty"`
+}
+
+// RepositoryImportEvent represents the activity related to a repository being imported to GitHub.
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_import
+type RepositoryImportEvent struct {
+	// Status represents the final state of the import. This can be one of "success", "cancelled", or "failure".
+	Status *string       `json:"status,omitempty"`
+	Repo   *Repository   `json:"repository,omitempty"`
+	Org    *Organization `json:"organization,omitempty"`
+	Sender *User         `json:"sender,omitempty"`
+}
+
+// RepositoryVulnerabilityAlertEvent is triggered when a security alert is created, dismissed, or resolved.
+//
+// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#repository_vulnerability_alert
+type RepositoryVulnerabilityAlertEvent struct {
+	// Action is the action that was performed. Possible values are: "create", "dismiss", "resolve".
+	Action *string `json:"action,omitempty"`
+
+	// The security alert of the vulnerable dependency.
+	Alert *RepositoryVulnerabilityAlert `json:"alert,omitempty"`
+
+	// The repository of the vulnerable dependency.
+	Repository *Repository `json:"repository,omitempty"`
+
+	// The following fields are only populated by Webhook events.
+	Installation *Installation `json:"installation,omitempty"`
+
+	// The user that triggered the event.
+	Sender *User `json:"sender,omitempty"`
+
+	// The following field is only present when the webhook is triggered on
+	// a repository belonging to an organization.
+	Org *Organization `json:"organization,omitempty"`
+}
+
+// RepositoryVulnerabilityAlert represents a repository security alert.
+type RepositoryVulnerabilityAlert struct { + ID *int64 `json:"id,omitempty"` + AffectedRange *string `json:"affected_range,omitempty"` + AffectedPackageName *string `json:"affected_package_name,omitempty"` + ExternalReference *string `json:"external_reference,omitempty"` + ExternalIdentifier *string `json:"external_identifier,omitempty"` + GitHubSecurityAdvisoryID *string `json:"ghsa_id,omitempty"` + Severity *string `json:"severity,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + FixedIn *string `json:"fixed_in,omitempty"` + Dismisser *User `json:"dismisser,omitempty"` + DismissReason *string `json:"dismiss_reason,omitempty"` + DismissedAt *Timestamp `json:"dismissed_at,omitempty"` +} + +// SecretScanningAlertEvent is triggered when a secret scanning alert occurs in a repository. +// The Webhook name is secret_scanning_alert. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#secret_scanning_alert +type SecretScanningAlertEvent struct { + // Action is the action that was performed. Possible values are: "created", "resolved", or "reopened". + Action *string `json:"action,omitempty"` + + // Alert is the secret scanning alert involved in the event. + Alert *SecretScanningAlert `json:"alert,omitempty"` + + // Only populated by the "resolved" and "reopen" actions + Sender *User `json:"sender,omitempty"` + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// SecurityAndAnalysisEvent is triggered when code security and analysis features +// are enabled or disabled for a repository. +// +// GitHub API docs: https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#security_and_analysis +type SecurityAndAnalysisEvent struct { + Changes *SecurityAndAnalysisChange `json:"changes,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// SecurityAndAnalysisChange represents the changes when security and analysis +// features are enabled or disabled for a repository. +type SecurityAndAnalysisChange struct { + From *SecurityAndAnalysisChangeFrom `json:"from,omitempty"` +} + +// SecurityAndAnalysisChangeFrom represents which change was made when security +// and analysis features are enabled or disabled for a repository. +type SecurityAndAnalysisChangeFrom struct { + SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` +} + +// StarEvent is triggered when a star is added or removed from a repository. +// The Webhook event name is "star". +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#star +type StarEvent struct { + // Action is the action that was performed. Possible values are: "created" or "deleted". + Action *string `json:"action,omitempty"` + + // StarredAt is the time the star was created. It will be null for the "deleted" action. + StarredAt *Timestamp `json:"starred_at,omitempty"` + + // The following fields are only populated by Webhook events. 
+ Org *Organization `json:"organization,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// StatusEvent is triggered when the status of a Git commit changes. +// The Webhook event name is "status". +// +// Events of this type are not visible in timelines, they are only used to +// trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#status +type StatusEvent struct { + SHA *string `json:"sha,omitempty"` + // State is the new state. Possible values are: "pending", "success", "failure", "error". + State *string `json:"state,omitempty"` + Description *string `json:"description,omitempty"` + TargetURL *string `json:"target_url,omitempty"` + Branches []*Branch `json:"branches,omitempty"` + + // The following fields are only populated by Webhook events. + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Context *string `json:"context,omitempty"` + Commit *RepositoryCommit `json:"commit,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// TeamEvent is triggered when an organization's team is created, modified or deleted. +// The Webhook event name is "team". +// +// Events of this type are not visible in timelines. These events are only used +// to trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#team +type TeamEvent struct { + Action *string `json:"action,omitempty"` + Team *Team `json:"team,omitempty"` + Changes *TeamChange `json:"changes,omitempty"` + Repo *Repository `json:"repository,omitempty"` + + // The following fields are only populated by Webhook events. + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// TeamAddEvent is triggered when a repository is added to a team. +// The Webhook event name is "team_add". +// +// Events of this type are not visible in timelines. These events are only used +// to trigger hooks. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#team_add +type TeamAddEvent struct { + Team *Team `json:"team,omitempty"` + Repo *Repository `json:"repository,omitempty"` + + // The following fields are only populated by Webhook events. + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// UserEvent is triggered when a user is created or deleted. +// The Webhook event name is "user". +// +// Only global webhooks can subscribe to this event type. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/activity/events/types/#userevent-enterprise +type UserEvent struct { + User *User `json:"user,omitempty"` + // The action performed. Possible values are: "created" or "deleted". 
+ Action *string `json:"action,omitempty"` + Enterprise *Enterprise `json:"enterprise,omitempty"` + Sender *User `json:"sender,omitempty"` + + // The following fields are only populated by Webhook events. + Installation *Installation `json:"installation,omitempty"` +} + +// WatchEvent is related to starring a repository, not watching. See this API +// blog post for an explanation: https://developer.github.com/changes/2012-09-05-watcher-api/ +// +// The event’s actor is the user who starred a repository, and the event’s +// repository is the repository that was starred. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#watch +type WatchEvent struct { + // Action is the action that was performed. Possible value is: "started". + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` + + // The following field is only present when the webhook is triggered on + // a repository belonging to an organization. + Org *Organization `json:"organization,omitempty"` +} + +// WorkflowDispatchEvent is triggered when someone triggers a workflow run on GitHub or +// sends a POST request to the create a workflow dispatch event endpoint. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch +type WorkflowDispatchEvent struct { + Inputs json.RawMessage `json:"inputs,omitempty"` + Ref *string `json:"ref,omitempty"` + Workflow *string `json:"workflow,omitempty"` + + // The following fields are only populated by Webhook events. + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// WorkflowJobEvent is triggered when a job is queued, started or completed. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job +type WorkflowJobEvent struct { + WorkflowJob *WorkflowJob `json:"workflow_job,omitempty"` + + Action *string `json:"action,omitempty"` + + // The following fields are only populated by Webhook events. + + // Org is not nil when the webhook is configured for an organization or the event + // occurs from activity in a repository owned by an organization. + Org *Organization `json:"organization,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// WorkflowRunEvent is triggered when a GitHub Actions workflow run is requested or completed. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#workflow_run +type WorkflowRunEvent struct { + Action *string `json:"action,omitempty"` + Workflow *Workflow `json:"workflow,omitempty"` + WorkflowRun *WorkflowRun `json:"workflow_run,omitempty"` + + // The following fields are only populated by Webhook events. + Org *Organization `json:"organization,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` + Installation *Installation `json:"installation,omitempty"` +} + +// SecurityAdvisory represents the advisory object in SecurityAdvisoryEvent payload. 
+// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory +type SecurityAdvisory struct { + CVSS *AdvisoryCVSS `json:"cvss,omitempty"` + CWEs []*AdvisoryCWEs `json:"cwes,omitempty"` + GHSAID *string `json:"ghsa_id,omitempty"` + Summary *string `json:"summary,omitempty"` + Description *string `json:"description,omitempty"` + Severity *string `json:"severity,omitempty"` + Identifiers []*AdvisoryIdentifier `json:"identifiers,omitempty"` + References []*AdvisoryReference `json:"references,omitempty"` + PublishedAt *Timestamp `json:"published_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + WithdrawnAt *Timestamp `json:"withdrawn_at,omitempty"` + Vulnerabilities []*AdvisoryVulnerability `json:"vulnerabilities,omitempty"` + CVEID *string `json:"cve_id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Author *User `json:"author,omitempty"` + Publisher *User `json:"publisher,omitempty"` + State *string `json:"state,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + Submission *SecurityAdvisorySubmission `json:"submission,omitempty"` + CWEIDs []string `json:"cwe_ids,omitempty"` + Credits []*RepoAdvisoryCredit `json:"credits,omitempty"` + CreditsDetailed []*RepoAdvisoryCreditDetailed `json:"credits_detailed,omitempty"` + CollaboratingUsers []*User `json:"collaborating_users,omitempty"` + CollaboratingTeams []*Team `json:"collaborating_teams,omitempty"` + PrivateFork *Repository `json:"private_fork,omitempty"` +} + +// AdvisoryIdentifier represents the identifier for a Security Advisory. +type AdvisoryIdentifier struct { + Value *string `json:"value,omitempty"` + Type *string `json:"type,omitempty"` +} + +// AdvisoryReference represents the reference url for the security advisory. +type AdvisoryReference struct { + URL *string `json:"url,omitempty"` +} + +// AdvisoryVulnerability represents the vulnerability object for a Security Advisory. +type AdvisoryVulnerability struct { + Package *VulnerabilityPackage `json:"package,omitempty"` + Severity *string `json:"severity,omitempty"` + VulnerableVersionRange *string `json:"vulnerable_version_range,omitempty"` + FirstPatchedVersion *FirstPatchedVersion `json:"first_patched_version,omitempty"` + + // PatchedVersions and VulnerableFunctions are used in the following APIs: + // - https://docs.github.com/en/rest/security-advisories/repository-advisories?apiVersion=2022-11-28#list-repository-security-advisories-for-an-organization + // - https://docs.github.com/en/rest/security-advisories/repository-advisories?apiVersion=2022-11-28#list-repository-security-advisories + PatchedVersions *string `json:"patched_versions,omitempty"` + VulnerableFunctions []string `json:"vulnerable_functions,omitempty"` +} + +// VulnerabilityPackage represents the package object for an Advisory Vulnerability. +type VulnerabilityPackage struct { + Ecosystem *string `json:"ecosystem,omitempty"` + Name *string `json:"name,omitempty"` +} + +// FirstPatchedVersion represents the identifier for the first patched version of that vulnerability. +type FirstPatchedVersion struct { + Identifier *string `json:"identifier,omitempty"` +} + +// SecurityAdvisoryEvent is triggered when a security-related vulnerability is found in software on GitHub. 
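+//
+// A short sketch of reading the attached advisory, assuming the generated Get*
+// accessors:
+//
+//	adv := e.GetSecurityAdvisory()
+//	for _, v := range adv.Vulnerabilities {
+//		log.Printf("%s/%s: %s", v.GetPackage().GetEcosystem(), v.GetPackage().GetName(), v.GetVulnerableVersionRange())
+//	}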
+// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory +type SecurityAdvisoryEvent struct { + Action *string `json:"action,omitempty"` + SecurityAdvisory *SecurityAdvisory `json:"security_advisory,omitempty"` + + // The following fields are only populated by Webhook events. + Enterprise *Enterprise `json:"enterprise,omitempty"` + Installation *Installation `json:"installation,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Sender *User `json:"sender,omitempty"` +} + +// CodeScanningAlertEvent is triggered when a code scanning finds a potential vulnerability or error in your code. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#code_scanning_alert +type CodeScanningAlertEvent struct { + Action *string `json:"action,omitempty"` + Alert *Alert `json:"alert,omitempty"` + Ref *string `json:"ref,omitempty"` + // CommitOID is the commit SHA of the code scanning alert + CommitOID *string `json:"commit_oid,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Org *Organization `json:"organization,omitempty"` + Sender *User `json:"sender,omitempty"` + + Installation *Installation `json:"installation,omitempty"` +} diff --git a/vendor/github.com/google/go-github/v56/github/gists.go b/vendor/github.com/google/go-github/v56/github/gists.go new file mode 100644 index 000000000..80961fcb9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/gists.go @@ -0,0 +1,368 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "time" +) + +// GistsService handles communication with the Gist related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/gists +type GistsService service + +// Gist represents a GitHub's gist. +type Gist struct { + ID *string `json:"id,omitempty"` + Description *string `json:"description,omitempty"` + Public *bool `json:"public,omitempty"` + Owner *User `json:"owner,omitempty"` + Files map[GistFilename]GistFile `json:"files,omitempty"` + Comments *int `json:"comments,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + GitPullURL *string `json:"git_pull_url,omitempty"` + GitPushURL *string `json:"git_push_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +func (g Gist) String() string { + return Stringify(g) +} + +// GistFilename represents filename on a gist. +type GistFilename string + +// GistFile represents a file on a gist. +type GistFile struct { + Size *int `json:"size,omitempty"` + Filename *string `json:"filename,omitempty"` + Language *string `json:"language,omitempty"` + Type *string `json:"type,omitempty"` + RawURL *string `json:"raw_url,omitempty"` + Content *string `json:"content,omitempty"` +} + +func (g GistFile) String() string { + return Stringify(g) +} + +// GistCommit represents a commit on a gist. 
+type GistCommit struct {
+	URL          *string      `json:"url,omitempty"`
+	Version      *string      `json:"version,omitempty"`
+	User         *User        `json:"user,omitempty"`
+	ChangeStatus *CommitStats `json:"change_status,omitempty"`
+	CommittedAt  *Timestamp   `json:"committed_at,omitempty"`
+	NodeID       *string      `json:"node_id,omitempty"`
+}
+
+func (gc GistCommit) String() string {
+	return Stringify(gc)
+}
+
+// GistFork represents a fork of a gist.
+type GistFork struct {
+	URL       *string    `json:"url,omitempty"`
+	User      *User      `json:"user,omitempty"`
+	ID        *string    `json:"id,omitempty"`
+	CreatedAt *Timestamp `json:"created_at,omitempty"`
+	UpdatedAt *Timestamp `json:"updated_at,omitempty"`
+	NodeID    *string    `json:"node_id,omitempty"`
+}
+
+func (gf GistFork) String() string {
+	return Stringify(gf)
+}
+
+// GistListOptions specifies the optional parameters to the
+// GistsService.List, GistsService.ListAll, and GistsService.ListStarred methods.
+type GistListOptions struct {
+	// Since filters Gists by time.
+	Since time.Time `url:"since,omitempty"`
+
+	ListOptions
+}
+
+// List gists for a user. Passing the empty string will list
+// all public gists if called anonymously. However, if the call
+// is authenticated, it will return all gists for the authenticated
+// user.
+//
+// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-gists-for-the-authenticated-user
+// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-gists-for-a-user
+func (s *GistsService) List(ctx context.Context, user string, opts *GistListOptions) ([]*Gist, *Response, error) {
+	var u string
+	if user != "" {
+		u = fmt.Sprintf("users/%v/gists", user)
+	} else {
+		u = "gists"
+	}
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var gists []*Gist
+	resp, err := s.client.Do(ctx, req, &gists)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return gists, resp, nil
+}
+
+// ListAll lists all public gists.
+//
+// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-public-gists
+func (s *GistsService) ListAll(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) {
+	u, err := addOptions("gists/public", opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var gists []*Gist
+	resp, err := s.client.Do(ctx, req, &gists)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return gists, resp, nil
+}
+
+// ListStarred lists starred gists of the authenticated user.
+//
+// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-starred-gists
+func (s *GistsService) ListStarred(ctx context.Context, opts *GistListOptions) ([]*Gist, *Response, error) {
+	u, err := addOptions("gists/starred", opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var gists []*Gist
+	resp, err := s.client.Do(ctx, req, &gists)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return gists, resp, nil
+}
+
+// Get a single gist.
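+//
+// A minimal call sketch, assuming a configured *Client (client), a context (ctx)
+// and a gist ID in scope:
+//
+//	gist, _, err := client.Gists.Get(ctx, id)
+//	if err != nil {
+//		// handle error
+//	}
+//	fmt.Println(gist.GetDescription())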
+// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#get-a-gist +func (s *GistsService) Get(ctx context.Context, id string) (*Gist, *Response, error) { + u := fmt.Sprintf("gists/%v", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + gist := new(Gist) + resp, err := s.client.Do(ctx, req, gist) + if err != nil { + return nil, resp, err + } + + return gist, resp, nil +} + +// GetRevision gets a specific revision of a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#get-a-gist-revision +func (s *GistsService) GetRevision(ctx context.Context, id, sha string) (*Gist, *Response, error) { + u := fmt.Sprintf("gists/%v/%v", id, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + gist := new(Gist) + resp, err := s.client.Do(ctx, req, gist) + if err != nil { + return nil, resp, err + } + + return gist, resp, nil +} + +// Create a gist for authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#create-a-gist +func (s *GistsService) Create(ctx context.Context, gist *Gist) (*Gist, *Response, error) { + u := "gists" + req, err := s.client.NewRequest("POST", u, gist) + if err != nil { + return nil, nil, err + } + + g := new(Gist) + resp, err := s.client.Do(ctx, req, g) + if err != nil { + return nil, resp, err + } + + return g, resp, nil +} + +// Edit a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#update-a-gist +func (s *GistsService) Edit(ctx context.Context, id string, gist *Gist) (*Gist, *Response, error) { + u := fmt.Sprintf("gists/%v", id) + req, err := s.client.NewRequest("PATCH", u, gist) + if err != nil { + return nil, nil, err + } + + g := new(Gist) + resp, err := s.client.Do(ctx, req, g) + if err != nil { + return nil, resp, err + } + + return g, resp, nil +} + +// ListCommits lists commits of a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-gist-commits +func (s *GistsService) ListCommits(ctx context.Context, id string, opts *ListOptions) ([]*GistCommit, *Response, error) { + u := fmt.Sprintf("gists/%v/commits", id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var gistCommits []*GistCommit + resp, err := s.client.Do(ctx, req, &gistCommits) + if err != nil { + return nil, resp, err + } + + return gistCommits, resp, nil +} + +// Delete a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#delete-a-gist +func (s *GistsService) Delete(ctx context.Context, id string) (*Response, error) { + u := fmt.Sprintf("gists/%v", id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Star a gist on behalf of authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#star-a-gist +func (s *GistsService) Star(ctx context.Context, id string) (*Response, error) { + u := fmt.Sprintf("gists/%v/star", id) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unstar a gist on a behalf of authenticated user. 
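+//
+// Star, Unstar and IsStarred all target the gists/{id}/star endpoint; a brief
+// sketch, assuming client, ctx and a gist ID in scope:
+//
+//	if starred, _, err := client.Gists.IsStarred(ctx, id); err == nil && starred {
+//		_, _ = client.Gists.Unstar(ctx, id)
+//	}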
+// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#unstar-a-gist +func (s *GistsService) Unstar(ctx context.Context, id string) (*Response, error) { + u := fmt.Sprintf("gists/%v/star", id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// IsStarred checks if a gist is starred by authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#check-if-a-gist-is-starred +func (s *GistsService) IsStarred(ctx context.Context, id string) (bool, *Response, error) { + u := fmt.Sprintf("gists/%v/star", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + starred, err := parseBoolResponse(err) + return starred, resp, err +} + +// Fork a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#fork-a-gist +func (s *GistsService) Fork(ctx context.Context, id string) (*Gist, *Response, error) { + u := fmt.Sprintf("gists/%v/forks", id) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + g := new(Gist) + resp, err := s.client.Do(ctx, req, g) + if err != nil { + return nil, resp, err + } + + return g, resp, nil +} + +// ListForks lists forks of a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/gists#list-gist-forks +func (s *GistsService) ListForks(ctx context.Context, id string, opts *ListOptions) ([]*GistFork, *Response, error) { + u := fmt.Sprintf("gists/%v/forks", id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var gistForks []*GistFork + resp, err := s.client.Do(ctx, req, &gistForks) + if err != nil { + return nil, resp, err + } + + return gistForks, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/gists_comments.go b/vendor/github.com/google/go-github/v56/github/gists_comments.go new file mode 100644 index 000000000..ee0fbfa45 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/gists_comments.go @@ -0,0 +1,118 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GistComment represents a Gist comment. +type GistComment struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Body *string `json:"body,omitempty"` + User *User `json:"user,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` +} + +func (g GistComment) String() string { + return Stringify(g) +} + +// ListComments lists all comments for a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/comments#list-gist-comments +func (s *GistsService) ListComments(ctx context.Context, gistID string, opts *ListOptions) ([]*GistComment, *Response, error) { + u := fmt.Sprintf("gists/%v/comments", gistID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var comments []*GistComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// GetComment retrieves a single comment from a gist. 
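+//
+// A brief sketch, assuming client, ctx, a gist ID and a comment ID in scope:
+//
+//	c, _, err := client.Gists.GetComment(ctx, gistID, commentID)
+//	if err == nil {
+//		fmt.Println(c.GetBody())
+//	}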
+// +// GitHub API docs: https://docs.github.com/en/rest/gists/comments#get-a-gist-comment +func (s *GistsService) GetComment(ctx context.Context, gistID string, commentID int64) (*GistComment, *Response, error) { + u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + c := new(GistComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// CreateComment creates a comment for a gist. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/comments#create-a-gist-comment +func (s *GistsService) CreateComment(ctx context.Context, gistID string, comment *GistComment) (*GistComment, *Response, error) { + u := fmt.Sprintf("gists/%v/comments", gistID) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(GistComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// EditComment edits an existing gist comment. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/comments#update-a-gist-comment +func (s *GistsService) EditComment(ctx context.Context, gistID string, commentID int64, comment *GistComment) (*GistComment, *Response, error) { + u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(GistComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// DeleteComment deletes a gist comment. +// +// GitHub API docs: https://docs.github.com/en/rest/gists/comments#delete-a-gist-comment +func (s *GistsService) DeleteComment(ctx context.Context, gistID string, commentID int64) (*Response, error) { + u := fmt.Sprintf("gists/%v/comments/%v", gistID, commentID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/git.go b/vendor/github.com/google/go-github/v56/github/git.go new file mode 100644 index 000000000..8960de7b1 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git.go @@ -0,0 +1,12 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// GitService handles communication with the git data related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/git/ +type GitService service diff --git a/vendor/github.com/google/go-github/v56/github/git_blobs.go b/vendor/github.com/google/go-github/v56/github/git_blobs.go new file mode 100644 index 000000000..da0485ccb --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git_blobs.go @@ -0,0 +1,82 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "fmt" +) + +// Blob represents a blob object. 
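+//
+// Content is typically returned base64 encoded (see the Encoding field); for raw
+// bytes, GetBlobRaw below avoids manual decoding. A small sketch, assuming
+// client, ctx and owner/repo/sha in scope:
+//
+//	raw, _, err := client.Git.GetBlobRaw(ctx, owner, repo, sha)
+//	if err != nil {
+//		// handle error
+//	}
+//	fmt.Printf("%d bytes\n", len(raw))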
+type Blob struct { + Content *string `json:"content,omitempty"` + Encoding *string `json:"encoding,omitempty"` + SHA *string `json:"sha,omitempty"` + Size *int `json:"size,omitempty"` + URL *string `json:"url,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +// GetBlob fetches a blob from a repo given a SHA. +// +// GitHub API docs: https://docs.github.com/en/rest/git/blobs#get-a-blob +func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha string) (*Blob, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + blob := new(Blob) + resp, err := s.client.Do(ctx, req, blob) + if err != nil { + return nil, resp, err + } + + return blob, resp, nil +} + +// GetBlobRaw fetches a blob's contents from a repo. +// Unlike GetBlob, it returns the raw bytes rather than the base64-encoded data. +// +// GitHub API docs: https://docs.github.com/en/rest/git/blobs#get-a-blob +func (s *GitService) GetBlobRaw(ctx context.Context, owner, repo, sha string) ([]byte, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/blobs/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", "application/vnd.github.v3.raw") + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return nil, resp, err + } + + return buf.Bytes(), resp, nil +} + +// CreateBlob creates a blob object. +// +// GitHub API docs: https://docs.github.com/en/rest/git/blobs#create-a-blob +func (s *GitService) CreateBlob(ctx context.Context, owner string, repo string, blob *Blob) (*Blob, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/blobs", owner, repo) + req, err := s.client.NewRequest("POST", u, blob) + if err != nil { + return nil, nil, err + } + + t := new(Blob) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/git_commits.go b/vendor/github.com/google/go-github/v56/github/git_commits.go new file mode 100644 index 000000000..1a683bc34 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git_commits.go @@ -0,0 +1,221 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "strings" +) + +// SignatureVerification represents GPG signature verification. +type SignatureVerification struct { + Verified *bool `json:"verified,omitempty"` + Reason *string `json:"reason,omitempty"` + Signature *string `json:"signature,omitempty"` + Payload *string `json:"payload,omitempty"` +} + +// MessageSigner is used by GitService.CreateCommit to sign a commit. +// +// To create a MessageSigner that signs a commit with a [golang.org/x/crypto/openpgp.Entity], +// or [github.com/ProtonMail/go-crypto/openpgp.Entity], use: +// +// commit.Signer = github.MessageSignerFunc(func(w io.Writer, r io.Reader) error { +// return openpgp.ArmoredDetachSign(w, openpgpEntity, r, nil) +// }) +type MessageSigner interface { + Sign(w io.Writer, r io.Reader) error +} + +// MessageSignerFunc is a single function implementation of MessageSigner. 
+type MessageSignerFunc func(w io.Writer, r io.Reader) error + +func (f MessageSignerFunc) Sign(w io.Writer, r io.Reader) error { + return f(w, r) +} + +// Commit represents a GitHub commit. +type Commit struct { + SHA *string `json:"sha,omitempty"` + Author *CommitAuthor `json:"author,omitempty"` + Committer *CommitAuthor `json:"committer,omitempty"` + Message *string `json:"message,omitempty"` + Tree *Tree `json:"tree,omitempty"` + Parents []*Commit `json:"parents,omitempty"` + Stats *CommitStats `json:"stats,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + URL *string `json:"url,omitempty"` + Verification *SignatureVerification `json:"verification,omitempty"` + NodeID *string `json:"node_id,omitempty"` + + // CommentCount is the number of GitHub comments on the commit. This + // is only populated for requests that fetch GitHub data like + // Pulls.ListCommits, Repositories.ListCommits, etc. + CommentCount *int `json:"comment_count,omitempty"` +} + +func (c Commit) String() string { + return Stringify(c) +} + +// CommitAuthor represents the author or committer of a commit. The commit +// author may not correspond to a GitHub User. +type CommitAuthor struct { + Date *Timestamp `json:"date,omitempty"` + Name *string `json:"name,omitempty"` + Email *string `json:"email,omitempty"` + + // The following fields are only populated by Webhook events. + Login *string `json:"username,omitempty"` // Renamed for go-github consistency. +} + +func (c CommitAuthor) String() string { + return Stringify(c) +} + +// GetCommit fetches the Commit object for a given SHA. +// +// GitHub API docs: https://docs.github.com/en/rest/git/commits#get-a-commit +func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, sha string) (*Commit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/commits/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + c := new(Commit) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// createCommit represents the body of a CreateCommit request. +type createCommit struct { + Author *CommitAuthor `json:"author,omitempty"` + Committer *CommitAuthor `json:"committer,omitempty"` + Message *string `json:"message,omitempty"` + Tree *string `json:"tree,omitempty"` + Parents []string `json:"parents,omitempty"` + Signature *string `json:"signature,omitempty"` +} + +type CreateCommitOptions struct { + // CreateCommit will sign the commit with this signer. See MessageSigner doc for more details. + // Ignored on commits where Verification.Signature is defined. + Signer MessageSigner +} + +// CreateCommit creates a new commit in a repository. +// commit must not be nil. +// +// The commit.Committer is optional and will be filled with the commit.Author +// data if omitted. If the commit.Author is omitted, it will be filled in with +// the authenticated user’s information and the current date. 
+// +// GitHub API docs: https://docs.github.com/en/rest/git/commits#create-a-commit +func (s *GitService) CreateCommit(ctx context.Context, owner string, repo string, commit *Commit, opts *CreateCommitOptions) (*Commit, *Response, error) { + if commit == nil { + return nil, nil, fmt.Errorf("commit must be provided") + } + if opts == nil { + opts = &CreateCommitOptions{} + } + + u := fmt.Sprintf("repos/%v/%v/git/commits", owner, repo) + + parents := make([]string, len(commit.Parents)) + for i, parent := range commit.Parents { + parents[i] = *parent.SHA + } + + body := &createCommit{ + Author: commit.Author, + Committer: commit.Committer, + Message: commit.Message, + Parents: parents, + } + if commit.Tree != nil { + body.Tree = commit.Tree.SHA + } + switch { + case commit.Verification != nil: + body.Signature = commit.Verification.Signature + case opts.Signer != nil: + signature, err := createSignature(opts.Signer, body) + if err != nil { + return nil, nil, err + } + body.Signature = &signature + } + + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + c := new(Commit) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +func createSignature(signer MessageSigner, commit *createCommit) (string, error) { + if signer == nil { + return "", errors.New("createSignature: invalid parameters") + } + + message, err := createSignatureMessage(commit) + if err != nil { + return "", err + } + + var writer bytes.Buffer + err = signer.Sign(&writer, strings.NewReader(message)) + if err != nil { + return "", err + } + + return writer.String(), nil +} + +func createSignatureMessage(commit *createCommit) (string, error) { + if commit == nil || commit.Message == nil || *commit.Message == "" || commit.Author == nil { + return "", errors.New("createSignatureMessage: invalid parameters") + } + + var message []string + + if commit.Tree != nil { + message = append(message, fmt.Sprintf("tree %s", *commit.Tree)) + } + + for _, parent := range commit.Parents { + message = append(message, fmt.Sprintf("parent %s", parent)) + } + + message = append(message, fmt.Sprintf("author %s <%s> %d %s", commit.Author.GetName(), commit.Author.GetEmail(), commit.Author.GetDate().Unix(), commit.Author.GetDate().Format("-0700"))) + + committer := commit.Committer + if committer == nil { + committer = commit.Author + } + + // There needs to be a double newline after committer + message = append(message, fmt.Sprintf("committer %s <%s> %d %s\n", committer.GetName(), committer.GetEmail(), committer.GetDate().Unix(), committer.GetDate().Format("-0700"))) + message = append(message, *commit.Message) + + return strings.Join(message, "\n"), nil +} diff --git a/vendor/github.com/google/go-github/v56/github/git_refs.go b/vendor/github.com/google/go-github/v56/github/git_refs.go new file mode 100644 index 000000000..e839c30f6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git_refs.go @@ -0,0 +1,175 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/url" + "strings" +) + +// Reference represents a GitHub reference. 
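CreateCommit assembles the signature payload in the fixed tree/parent/author/committer/message layout built by createSignatureMessage and passes it to the configured MessageSigner. A sketch of wiring a signer into a commit; the signer below is a deliberate placeholder that just copies the payload, where a real caller would plug in openpgp.ArmoredDetachSign as the MessageSigner doc comment shows, and the SHAs and repository names are placeholders as well:

package main

import (
	"context"
	"io"
	"log"
	"os"
	"time"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "octocat", "hello-world" // placeholders

	// Placeholder signer: copies the payload instead of producing a real
	// signature. Swap in an openpgp detached-sign call for actual signing.
	signer := github.MessageSignerFunc(func(w io.Writer, r io.Reader) error {
		_, err := io.Copy(w, r)
		return err
	})

	now := github.Timestamp{Time: time.Now()}
	commit := &github.Commit{
		Message: github.String("docs: update README"),
		Tree:    &github.Tree{SHA: github.String("treesha")},     // placeholder tree SHA
		Parents: []*github.Commit{{SHA: github.String("parent")}}, // placeholder parent SHA
		Author: &github.CommitAuthor{
			Name:  github.String("Octo Cat"),
			Email: github.String("octocat@example.com"),
			Date:  &now,
		},
	}

	created, _, err := client.Git.CreateCommit(ctx, owner, repo, commit,
		&github.CreateCommitOptions{Signer: signer})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("created commit %s", created.GetSHA())
}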
+type Reference struct { + Ref *string `json:"ref"` + URL *string `json:"url"` + Object *GitObject `json:"object"` + NodeID *string `json:"node_id,omitempty"` +} + +func (r Reference) String() string { + return Stringify(r) +} + +// GitObject represents a Git object. +type GitObject struct { + Type *string `json:"type"` + SHA *string `json:"sha"` + URL *string `json:"url"` +} + +func (o GitObject) String() string { + return Stringify(o) +} + +// createRefRequest represents the payload for creating a reference. +type createRefRequest struct { + Ref *string `json:"ref"` + SHA *string `json:"sha"` +} + +// updateRefRequest represents the payload for updating a reference. +type updateRefRequest struct { + SHA *string `json:"sha"` + Force *bool `json:"force"` +} + +// GetRef fetches a single reference in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/git/refs#get-a-reference +func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref string) (*Reference, *Response, error) { + ref = strings.TrimPrefix(ref, "refs/") + u := fmt.Sprintf("repos/%v/%v/git/ref/%v", owner, repo, refURLEscape(ref)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + r := new(Reference) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// refURLEscape escapes every path segment of the given ref. Those must +// not contain escaped "/" - as "%2F" - or github will not recognize it. +func refURLEscape(ref string) string { + parts := strings.Split(ref, "/") + for i, s := range parts { + parts[i] = url.PathEscape(s) + } + return strings.Join(parts, "/") +} + +// ReferenceListOptions specifies optional parameters to the +// GitService.ListMatchingRefs method. +type ReferenceListOptions struct { + Ref string `url:"-"` + + ListOptions +} + +// ListMatchingRefs lists references in a repository that match a supplied ref. +// Use an empty ref to list all references. +// +// GitHub API docs: https://docs.github.com/en/rest/git/refs#list-matching-references +func (s *GitService) ListMatchingRefs(ctx context.Context, owner, repo string, opts *ReferenceListOptions) ([]*Reference, *Response, error) { + var ref string + if opts != nil { + ref = strings.TrimPrefix(opts.Ref, "refs/") + } + u := fmt.Sprintf("repos/%v/%v/git/matching-refs/%v", owner, repo, refURLEscape(ref)) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rs []*Reference + resp, err := s.client.Do(ctx, req, &rs) + if err != nil { + return nil, resp, err + } + + return rs, resp, nil +} + +// CreateRef creates a new ref in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/git/refs#create-a-reference +func (s *GitService) CreateRef(ctx context.Context, owner string, repo string, ref *Reference) (*Reference, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/refs", owner, repo) + req, err := s.client.NewRequest("POST", u, &createRefRequest{ + // back-compat with previous behavior that didn't require 'refs/' prefix + Ref: String("refs/" + strings.TrimPrefix(*ref.Ref, "refs/")), + SHA: ref.Object.SHA, + }) + if err != nil { + return nil, nil, err + } + + r := new(Reference) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// UpdateRef updates an existing ref in a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/git/refs#update-a-reference +func (s *GitService) UpdateRef(ctx context.Context, owner string, repo string, ref *Reference, force bool) (*Reference, *Response, error) { + refPath := strings.TrimPrefix(*ref.Ref, "refs/") + u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(refPath)) + req, err := s.client.NewRequest("PATCH", u, &updateRefRequest{ + SHA: ref.Object.SHA, + Force: &force, + }) + if err != nil { + return nil, nil, err + } + + r := new(Reference) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// DeleteRef deletes a ref from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/git/refs#delete-a-reference +func (s *GitService) DeleteRef(ctx context.Context, owner string, repo string, ref string) (*Response, error) { + ref = strings.TrimPrefix(ref, "refs/") + u := fmt.Sprintf("repos/%v/%v/git/refs/%v", owner, repo, refURLEscape(ref)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/git_tags.go b/vendor/github.com/google/go-github/v56/github/git_tags.go new file mode 100644 index 000000000..30d7b2c2d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git_tags.go @@ -0,0 +1,84 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Tag represents a tag object. +type Tag struct { + Tag *string `json:"tag,omitempty"` + SHA *string `json:"sha,omitempty"` + URL *string `json:"url,omitempty"` + Message *string `json:"message,omitempty"` + Tagger *CommitAuthor `json:"tagger,omitempty"` + Object *GitObject `json:"object,omitempty"` + Verification *SignatureVerification `json:"verification,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +// createTagRequest represents the body of a CreateTag request. This is mostly +// identical to Tag with the exception that the object SHA and Type are +// top-level fields, rather than being nested inside a JSON object. +type createTagRequest struct { + Tag *string `json:"tag,omitempty"` + Message *string `json:"message,omitempty"` + Object *string `json:"object,omitempty"` + Type *string `json:"type,omitempty"` + Tagger *CommitAuthor `json:"tagger,omitempty"` +} + +// GetTag fetches a tag from a repo given a SHA. +// +// GitHub API docs: https://docs.github.com/en/rest/git/tags#get-a-tag +func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha string) (*Tag, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/tags/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + tag := new(Tag) + resp, err := s.client.Do(ctx, req, tag) + if err != nil { + return nil, resp, err + } + + return tag, resp, nil +} + +// CreateTag creates a tag object. 
+// +// GitHub API docs: https://docs.github.com/en/rest/git/tags#create-a-tag-object +func (s *GitService) CreateTag(ctx context.Context, owner string, repo string, tag *Tag) (*Tag, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/tags", owner, repo) + + // convert Tag into a createTagRequest + tagRequest := &createTagRequest{ + Tag: tag.Tag, + Message: tag.Message, + Tagger: tag.Tagger, + } + if tag.Object != nil { + tagRequest.Object = tag.Object.SHA + tagRequest.Type = tag.Object.Type + } + + req, err := s.client.NewRequest("POST", u, tagRequest) + if err != nil { + return nil, nil, err + } + + t := new(Tag) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/git_trees.go b/vendor/github.com/google/go-github/v56/github/git_trees.go new file mode 100644 index 000000000..db28976e0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/git_trees.go @@ -0,0 +1,162 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" +) + +// Tree represents a GitHub tree. +type Tree struct { + SHA *string `json:"sha,omitempty"` + Entries []*TreeEntry `json:"tree,omitempty"` + + // Truncated is true if the number of items in the tree + // exceeded GitHub's maximum limit and the Entries were truncated + // in the response. Only populated for requests that fetch + // trees like Git.GetTree. + Truncated *bool `json:"truncated,omitempty"` +} + +func (t Tree) String() string { + return Stringify(t) +} + +// TreeEntry represents the contents of a tree structure. TreeEntry can +// represent either a blob, a commit (in the case of a submodule), or another +// tree. +type TreeEntry struct { + SHA *string `json:"sha,omitempty"` + Path *string `json:"path,omitempty"` + Mode *string `json:"mode,omitempty"` + Type *string `json:"type,omitempty"` + Size *int `json:"size,omitempty"` + Content *string `json:"content,omitempty"` + URL *string `json:"url,omitempty"` +} + +func (t TreeEntry) String() string { + return Stringify(t) +} + +// treeEntryWithFileDelete is used internally to delete a file whose +// Content and SHA fields are empty. It does this by removing the "omitempty" +// tag modifier on the SHA field which causes the GitHub API to receive +// {"sha":null} and thereby delete the file. 
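Creating an annotated tag with this service is a two-step flow: CreateTag only stores the tag object, and a matching refs/tags/... reference still has to be created for the tag to become reachable in the repository. A sketch of both steps, with placeholder repository values and commit SHA:

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "octocat", "hello-world" // placeholders

	// Step 1: create the annotated tag object pointing at a commit.
	tag, _, err := client.Git.CreateTag(ctx, owner, repo, &github.Tag{
		Tag:     github.String("v1.2.3"),
		Message: github.String("release v1.2.3"),
		Object:  &github.GitObject{SHA: github.String("abc123"), Type: github.String("commit")}, // placeholder SHA
	})
	if err != nil {
		log.Fatal(err)
	}

	// Step 2: point refs/tags/v1.2.3 at the new tag object so it shows up.
	_, _, err = client.Git.CreateRef(ctx, owner, repo, &github.Reference{
		Ref:    github.String("refs/tags/v1.2.3"),
		Object: &github.GitObject{SHA: tag.SHA},
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("tagged %s", tag.GetSHA())
}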
+type treeEntryWithFileDelete struct { + SHA *string `json:"sha"` + Path *string `json:"path,omitempty"` + Mode *string `json:"mode,omitempty"` + Type *string `json:"type,omitempty"` + Size *int `json:"size,omitempty"` + Content *string `json:"content,omitempty"` + URL *string `json:"url,omitempty"` +} + +func (t *TreeEntry) MarshalJSON() ([]byte, error) { + if t.SHA == nil && t.Content == nil { + return json.Marshal(struct { + SHA *string `json:"sha"` + Path *string `json:"path,omitempty"` + Mode *string `json:"mode,omitempty"` + Type *string `json:"type,omitempty"` + }{ + nil, + t.Path, + t.Mode, + t.Type, + }) + } + return json.Marshal(struct { + SHA *string `json:"sha,omitempty"` + Path *string `json:"path,omitempty"` + Mode *string `json:"mode,omitempty"` + Type *string `json:"type,omitempty"` + Size *int `json:"size,omitempty"` + Content *string `json:"content,omitempty"` + URL *string `json:"url,omitempty"` + }{ + SHA: t.SHA, + Path: t.Path, + Mode: t.Mode, + Type: t.Type, + Size: t.Size, + Content: t.Content, + URL: t.URL, + }) +} + +// GetTree fetches the Tree object for a given sha hash from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/git/trees#get-a-tree +func (s *GitService) GetTree(ctx context.Context, owner string, repo string, sha string, recursive bool) (*Tree, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/trees/%v", owner, repo, sha) + if recursive { + u += "?recursive=1" + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + t := new(Tree) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// createTree represents the body of a CreateTree request. +type createTree struct { + BaseTree string `json:"base_tree,omitempty"` + Entries []interface{} `json:"tree"` +} + +// CreateTree creates a new tree in a repository. If both a tree and a nested +// path modifying that tree are specified, it will overwrite the contents of +// that tree with the new path contents and write a new tree out. +// +// GitHub API docs: https://docs.github.com/en/rest/git/trees#create-a-tree +func (s *GitService) CreateTree(ctx context.Context, owner string, repo string, baseTree string, entries []*TreeEntry) (*Tree, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/git/trees", owner, repo) + + newEntries := make([]interface{}, 0, len(entries)) + for _, entry := range entries { + if entry.Content == nil && entry.SHA == nil { + newEntries = append(newEntries, treeEntryWithFileDelete{ + Path: entry.Path, + Mode: entry.Mode, + Type: entry.Type, + Size: entry.Size, + URL: entry.URL, + }) + continue + } + newEntries = append(newEntries, entry) + } + + body := &createTree{ + BaseTree: baseTree, + Entries: newEntries, + } + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + t := new(Tree) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/github-accessors.go b/vendor/github.com/google/go-github/v56/github/github-accessors.go new file mode 100644 index 000000000..199c9b8a3 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/github-accessors.go @@ -0,0 +1,25079 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
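CreateTree above distinguishes three kinds of entries: new content (Content set), an existing object (SHA set), and a deletion (both nil), where the treeEntryWithFileDelete / MarshalJSON path serializes "sha": null so the API removes the path. A sketch that updates one file and deletes another in a single tree, with placeholder repository values and base tree SHA:

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "octocat", "hello-world" // placeholders
	baseTree := "treesha"                   // placeholder SHA of the tree being modified

	entries := []*github.TreeEntry{
		{
			// Content set: the file is created or updated at this path.
			Path:    github.String("docs/README.md"),
			Mode:    github.String("100644"),
			Type:    github.String("blob"),
			Content: github.String("# Hello\n"),
		},
		{
			// Neither SHA nor Content set: marshaled with "sha": null,
			// which tells the API to delete this path from the tree.
			Path: github.String("docs/OLD.md"),
			Mode: github.String("100644"),
			Type: github.String("blob"),
		},
	}

	tree, _, err := client.Git.CreateTree(ctx, owner, repo, baseTree, entries)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("new tree: %s", tree.GetSHA())
}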
+ +// Code generated by gen-accessors; DO NOT EDIT. +// Instead, please run "go generate ./..." as described here: +// https://github.com/google/go-github/blob/master/CONTRIBUTING.md#submitting-a-patch + +package github + +import ( + "encoding/json" + "time" +) + +// GetRetryAfter returns the RetryAfter field if it's non-nil, zero value otherwise. +func (a *AbuseRateLimitError) GetRetryAfter() time.Duration { + if a == nil || a.RetryAfter == nil { + return 0 + } + return *a.RetryAfter +} + +// GetGithubOwnedAllowed returns the GithubOwnedAllowed field if it's non-nil, zero value otherwise. +func (a *ActionsAllowed) GetGithubOwnedAllowed() bool { + if a == nil || a.GithubOwnedAllowed == nil { + return false + } + return *a.GithubOwnedAllowed +} + +// GetVerifiedAllowed returns the VerifiedAllowed field if it's non-nil, zero value otherwise. +func (a *ActionsAllowed) GetVerifiedAllowed() bool { + if a == nil || a.VerifiedAllowed == nil { + return false + } + return *a.VerifiedAllowed +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetKey() string { + if a == nil || a.Key == nil { + return "" + } + return *a.Key +} + +// GetLastAccessedAt returns the LastAccessedAt field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetLastAccessedAt() Timestamp { + if a == nil || a.LastAccessedAt == nil { + return Timestamp{} + } + return *a.LastAccessedAt +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetRef() string { + if a == nil || a.Ref == nil { + return "" + } + return *a.Ref +} + +// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetSizeInBytes() int64 { + if a == nil || a.SizeInBytes == nil { + return 0 + } + return *a.SizeInBytes +} + +// GetVersion returns the Version field if it's non-nil, zero value otherwise. +func (a *ActionsCache) GetVersion() string { + if a == nil || a.Version == nil { + return "" + } + return *a.Version +} + +// GetDirection returns the Direction field if it's non-nil, zero value otherwise. +func (a *ActionsCacheListOptions) GetDirection() string { + if a == nil || a.Direction == nil { + return "" + } + return *a.Direction +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (a *ActionsCacheListOptions) GetKey() string { + if a == nil || a.Key == nil { + return "" + } + return *a.Key +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (a *ActionsCacheListOptions) GetRef() string { + if a == nil || a.Ref == nil { + return "" + } + return *a.Ref +} + +// GetSort returns the Sort field if it's non-nil, zero value otherwise. +func (a *ActionsCacheListOptions) GetSort() string { + if a == nil || a.Sort == nil { + return "" + } + return *a.Sort +} + +// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. 
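Because nearly every struct field in this package is a pointer, the generated Get* accessors let callers read optional fields without nil checks: each one returns the zero value when either the receiver or the field is nil. A small sketch of the difference, using ActionsCache purely as an example type:

package main

import (
	"fmt"

	"github.com/google/go-github/v56/github"
)

func main() {
	var cache *github.ActionsCache // nil receiver on purpose

	// Safe: the accessor checks both the receiver and the field for nil
	// and falls back to the zero value instead of panicking.
	fmt.Println(cache.GetKey() == "") // true, no panic

	cache = &github.ActionsCache{Key: github.String("Linux-build-cache")}
	fmt.Println(cache.GetKey()) // "Linux-build-cache"

	// Unsafe by contrast: dereferencing the pointer field directly panics
	// whenever the field was omitted in the API response.
	// fmt.Println(*cache.Ref) // would panic: Ref is nil
}

The same nil-safe pattern holds for every accessor in the remainder of this generated file.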
+func (a *ActionsPermissions) GetAllowedActions() string { + if a == nil || a.AllowedActions == nil { + return "" + } + return *a.AllowedActions +} + +// GetEnabledRepositories returns the EnabledRepositories field if it's non-nil, zero value otherwise. +func (a *ActionsPermissions) GetEnabledRepositories() string { + if a == nil || a.EnabledRepositories == nil { + return "" + } + return *a.EnabledRepositories +} + +// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. +func (a *ActionsPermissions) GetSelectedActionsURL() string { + if a == nil || a.SelectedActionsURL == nil { + return "" + } + return *a.SelectedActionsURL +} + +// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsEnterprise) GetAllowedActions() string { + if a == nil || a.AllowedActions == nil { + return "" + } + return *a.AllowedActions +} + +// GetEnabledOrganizations returns the EnabledOrganizations field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsEnterprise) GetEnabledOrganizations() string { + if a == nil || a.EnabledOrganizations == nil { + return "" + } + return *a.EnabledOrganizations +} + +// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsEnterprise) GetSelectedActionsURL() string { + if a == nil || a.SelectedActionsURL == nil { + return "" + } + return *a.SelectedActionsURL +} + +// GetAllowedActions returns the AllowedActions field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsRepository) GetAllowedActions() string { + if a == nil || a.AllowedActions == nil { + return "" + } + return *a.AllowedActions +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsRepository) GetEnabled() bool { + if a == nil || a.Enabled == nil { + return false + } + return *a.Enabled +} + +// GetSelectedActionsURL returns the SelectedActionsURL field if it's non-nil, zero value otherwise. +func (a *ActionsPermissionsRepository) GetSelectedActionsURL() string { + if a == nil || a.SelectedActionsURL == nil { + return "" + } + return *a.SelectedActionsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *ActionsVariable) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. +func (a *ActionsVariable) GetSelectedRepositoriesURL() string { + if a == nil || a.SelectedRepositoriesURL == nil { + return "" + } + return *a.SelectedRepositoriesURL +} + +// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. +func (a *ActionsVariable) GetSelectedRepositoryIDs() *SelectedRepoIDs { + if a == nil { + return nil + } + return a.SelectedRepositoryIDs +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *ActionsVariable) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (a *ActionsVariable) GetVisibility() string { + if a == nil || a.Visibility == nil { + return "" + } + return *a.Visibility +} + +// GetCountryCode returns the CountryCode field if it's non-nil, zero value otherwise. 
+func (a *ActorLocation) GetCountryCode() string { + if a == nil || a.CountryCode == nil { + return "" + } + return *a.CountryCode +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *AdminEnforcedChanges) GetFrom() bool { + if a == nil || a.From == nil { + return false + } + return *a.From +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *AdminEnforcement) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetComments returns the Comments field. +func (a *AdminStats) GetComments() *CommentStats { + if a == nil { + return nil + } + return a.Comments +} + +// GetGists returns the Gists field. +func (a *AdminStats) GetGists() *GistStats { + if a == nil { + return nil + } + return a.Gists +} + +// GetHooks returns the Hooks field. +func (a *AdminStats) GetHooks() *HookStats { + if a == nil { + return nil + } + return a.Hooks +} + +// GetIssues returns the Issues field. +func (a *AdminStats) GetIssues() *IssueStats { + if a == nil { + return nil + } + return a.Issues +} + +// GetMilestones returns the Milestones field. +func (a *AdminStats) GetMilestones() *MilestoneStats { + if a == nil { + return nil + } + return a.Milestones +} + +// GetOrgs returns the Orgs field. +func (a *AdminStats) GetOrgs() *OrgStats { + if a == nil { + return nil + } + return a.Orgs +} + +// GetPages returns the Pages field. +func (a *AdminStats) GetPages() *PageStats { + if a == nil { + return nil + } + return a.Pages +} + +// GetPulls returns the Pulls field. +func (a *AdminStats) GetPulls() *PullStats { + if a == nil { + return nil + } + return a.Pulls +} + +// GetRepos returns the Repos field. +func (a *AdminStats) GetRepos() *RepoStats { + if a == nil { + return nil + } + return a.Repos +} + +// GetUsers returns the Users field. +func (a *AdminStats) GetUsers() *UserStats { + if a == nil { + return nil + } + return a.Users +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (a *AdvancedSecurity) GetStatus() string { + if a == nil || a.Status == nil { + return "" + } + return *a.Status +} + +// GetLastPushedDate returns the LastPushedDate field if it's non-nil, zero value otherwise. +func (a *AdvancedSecurityCommittersBreakdown) GetLastPushedDate() string { + if a == nil || a.LastPushedDate == nil { + return "" + } + return *a.LastPushedDate +} + +// GetUserLogin returns the UserLogin field if it's non-nil, zero value otherwise. +func (a *AdvancedSecurityCommittersBreakdown) GetUserLogin() string { + if a == nil || a.UserLogin == nil { + return "" + } + return *a.UserLogin +} + +// GetScore returns the Score field. +func (a *AdvisoryCVSS) GetScore() *float64 { + if a == nil { + return nil + } + return a.Score +} + +// GetVectorString returns the VectorString field if it's non-nil, zero value otherwise. +func (a *AdvisoryCVSS) GetVectorString() string { + if a == nil || a.VectorString == nil { + return "" + } + return *a.VectorString +} + +// GetCWEID returns the CWEID field if it's non-nil, zero value otherwise. +func (a *AdvisoryCWEs) GetCWEID() string { + if a == nil || a.CWEID == nil { + return "" + } + return *a.CWEID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *AdvisoryCWEs) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. 
+func (a *AdvisoryIdentifier) GetType() string { + if a == nil || a.Type == nil { + return "" + } + return *a.Type +} + +// GetValue returns the Value field if it's non-nil, zero value otherwise. +func (a *AdvisoryIdentifier) GetValue() string { + if a == nil || a.Value == nil { + return "" + } + return *a.Value +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *AdvisoryReference) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetFirstPatchedVersion returns the FirstPatchedVersion field. +func (a *AdvisoryVulnerability) GetFirstPatchedVersion() *FirstPatchedVersion { + if a == nil { + return nil + } + return a.FirstPatchedVersion +} + +// GetPackage returns the Package field. +func (a *AdvisoryVulnerability) GetPackage() *VulnerabilityPackage { + if a == nil { + return nil + } + return a.Package +} + +// GetPatchedVersions returns the PatchedVersions field if it's non-nil, zero value otherwise. +func (a *AdvisoryVulnerability) GetPatchedVersions() string { + if a == nil || a.PatchedVersions == nil { + return "" + } + return *a.PatchedVersions +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (a *AdvisoryVulnerability) GetSeverity() string { + if a == nil || a.Severity == nil { + return "" + } + return *a.Severity +} + +// GetVulnerableVersionRange returns the VulnerableVersionRange field if it's non-nil, zero value otherwise. +func (a *AdvisoryVulnerability) GetVulnerableVersionRange() string { + if a == nil || a.VulnerableVersionRange == nil { + return "" + } + return *a.VulnerableVersionRange +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (a *Alert) GetClosedAt() Timestamp { + if a == nil || a.ClosedAt == nil { + return Timestamp{} + } + return *a.ClosedAt +} + +// GetClosedBy returns the ClosedBy field. +func (a *Alert) GetClosedBy() *User { + if a == nil { + return nil + } + return a.ClosedBy +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *Alert) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. +func (a *Alert) GetDismissedAt() Timestamp { + if a == nil || a.DismissedAt == nil { + return Timestamp{} + } + return *a.DismissedAt +} + +// GetDismissedBy returns the DismissedBy field. +func (a *Alert) GetDismissedBy() *User { + if a == nil { + return nil + } + return a.DismissedBy +} + +// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. +func (a *Alert) GetDismissedComment() string { + if a == nil || a.DismissedComment == nil { + return "" + } + return *a.DismissedComment +} + +// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. +func (a *Alert) GetDismissedReason() string { + if a == nil || a.DismissedReason == nil { + return "" + } + return *a.DismissedReason +} + +// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. +func (a *Alert) GetFixedAt() Timestamp { + if a == nil || a.FixedAt == nil { + return Timestamp{} + } + return *a.FixedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
+func (a *Alert) GetHTMLURL() string { + if a == nil || a.HTMLURL == nil { + return "" + } + return *a.HTMLURL +} + +// GetInstancesURL returns the InstancesURL field if it's non-nil, zero value otherwise. +func (a *Alert) GetInstancesURL() string { + if a == nil || a.InstancesURL == nil { + return "" + } + return *a.InstancesURL +} + +// GetMostRecentInstance returns the MostRecentInstance field. +func (a *Alert) GetMostRecentInstance() *MostRecentInstance { + if a == nil { + return nil + } + return a.MostRecentInstance +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (a *Alert) GetNumber() int { + if a == nil || a.Number == nil { + return 0 + } + return *a.Number +} + +// GetRepository returns the Repository field. +func (a *Alert) GetRepository() *Repository { + if a == nil { + return nil + } + return a.Repository +} + +// GetRule returns the Rule field. +func (a *Alert) GetRule() *Rule { + if a == nil { + return nil + } + return a.Rule +} + +// GetRuleDescription returns the RuleDescription field if it's non-nil, zero value otherwise. +func (a *Alert) GetRuleDescription() string { + if a == nil || a.RuleDescription == nil { + return "" + } + return *a.RuleDescription +} + +// GetRuleID returns the RuleID field if it's non-nil, zero value otherwise. +func (a *Alert) GetRuleID() string { + if a == nil || a.RuleID == nil { + return "" + } + return *a.RuleID +} + +// GetRuleSeverity returns the RuleSeverity field if it's non-nil, zero value otherwise. +func (a *Alert) GetRuleSeverity() string { + if a == nil || a.RuleSeverity == nil { + return "" + } + return *a.RuleSeverity +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (a *Alert) GetState() string { + if a == nil || a.State == nil { + return "" + } + return *a.State +} + +// GetTool returns the Tool field. +func (a *Alert) GetTool() *Tool { + if a == nil { + return nil + } + return a.Tool +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *Alert) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *Alert) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *AllowDeletionsEnforcementLevelChanges) GetFrom() string { + if a == nil || a.From == nil { + return "" + } + return *a.From +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (a *AllowForkSyncing) GetEnabled() bool { + if a == nil || a.Enabled == nil { + return false + } + return *a.Enabled +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (a *AnalysesListOptions) GetRef() string { + if a == nil || a.Ref == nil { + return "" + } + return *a.Ref +} + +// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. +func (a *AnalysesListOptions) GetSarifID() string { + if a == nil || a.SarifID == nil { + return "" + } + return *a.SarifID +} + +// GetSSHKeyFingerprints returns the SSHKeyFingerprints map if it's non-nil, an empty map otherwise. 
+func (a *APIMeta) GetSSHKeyFingerprints() map[string]string { + if a == nil || a.SSHKeyFingerprints == nil { + return map[string]string{} + } + return a.SSHKeyFingerprints +} + +// GetVerifiablePasswordAuthentication returns the VerifiablePasswordAuthentication field if it's non-nil, zero value otherwise. +func (a *APIMeta) GetVerifiablePasswordAuthentication() bool { + if a == nil || a.VerifiablePasswordAuthentication == nil { + return false + } + return *a.VerifiablePasswordAuthentication +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *App) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (a *App) GetDescription() string { + if a == nil || a.Description == nil { + return "" + } + return *a.Description +} + +// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. +func (a *App) GetExternalURL() string { + if a == nil || a.ExternalURL == nil { + return "" + } + return *a.ExternalURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (a *App) GetHTMLURL() string { + if a == nil || a.HTMLURL == nil { + return "" + } + return *a.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *App) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetInstallationsCount returns the InstallationsCount field if it's non-nil, zero value otherwise. +func (a *App) GetInstallationsCount() int { + if a == nil || a.InstallationsCount == nil { + return 0 + } + return *a.InstallationsCount +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *App) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (a *App) GetNodeID() string { + if a == nil || a.NodeID == nil { + return "" + } + return *a.NodeID +} + +// GetOwner returns the Owner field. +func (a *App) GetOwner() *User { + if a == nil { + return nil + } + return a.Owner +} + +// GetPermissions returns the Permissions field. +func (a *App) GetPermissions() *InstallationPermissions { + if a == nil { + return nil + } + return a.Permissions +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (a *App) GetSlug() string { + if a == nil || a.Slug == nil { + return "" + } + return *a.Slug +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *App) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetClientID() string { + if a == nil || a.ClientID == nil { + return "" + } + return *a.ClientID +} + +// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetClientSecret() string { + if a == nil || a.ClientSecret == nil { + return "" + } + return *a.ClientSecret +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
+func (a *AppConfig) GetDescription() string { + if a == nil || a.Description == nil { + return "" + } + return *a.Description +} + +// GetExternalURL returns the ExternalURL field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetExternalURL() string { + if a == nil || a.ExternalURL == nil { + return "" + } + return *a.ExternalURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetHTMLURL() string { + if a == nil || a.HTMLURL == nil { + return "" + } + return *a.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetNodeID() string { + if a == nil || a.NodeID == nil { + return "" + } + return *a.NodeID +} + +// GetOwner returns the Owner field. +func (a *AppConfig) GetOwner() *User { + if a == nil { + return nil + } + return a.Owner +} + +// GetPEM returns the PEM field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetPEM() string { + if a == nil || a.PEM == nil { + return "" + } + return *a.PEM +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetSlug() string { + if a == nil || a.Slug == nil { + return "" + } + return *a.Slug +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetWebhookSecret returns the WebhookSecret field if it's non-nil, zero value otherwise. +func (a *AppConfig) GetWebhookSecret() string { + if a == nil || a.WebhookSecret == nil { + return "" + } + return *a.WebhookSecret +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *ArchivedAt) GetFrom() Timestamp { + if a == nil || a.From == nil { + return Timestamp{} + } + return *a.From +} + +// GetTo returns the To field if it's non-nil, zero value otherwise. +func (a *ArchivedAt) GetTo() Timestamp { + if a == nil || a.To == nil { + return Timestamp{} + } + return *a.To +} + +// GetArchiveDownloadURL returns the ArchiveDownloadURL field if it's non-nil, zero value otherwise. +func (a *Artifact) GetArchiveDownloadURL() string { + if a == nil || a.ArchiveDownloadURL == nil { + return "" + } + return *a.ArchiveDownloadURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *Artifact) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetExpired returns the Expired field if it's non-nil, zero value otherwise. +func (a *Artifact) GetExpired() bool { + if a == nil || a.Expired == nil { + return false + } + return *a.Expired +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (a *Artifact) GetExpiresAt() Timestamp { + if a == nil || a.ExpiresAt == nil { + return Timestamp{} + } + return *a.ExpiresAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (a *Artifact) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *Artifact) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (a *Artifact) GetNodeID() string { + if a == nil || a.NodeID == nil { + return "" + } + return *a.NodeID +} + +// GetSizeInBytes returns the SizeInBytes field if it's non-nil, zero value otherwise. +func (a *Artifact) GetSizeInBytes() int64 { + if a == nil || a.SizeInBytes == nil { + return 0 + } + return *a.SizeInBytes +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *Artifact) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *Artifact) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetWorkflowRun returns the WorkflowRun field. +func (a *Artifact) GetWorkflowRun() *ArtifactWorkflowRun { + if a == nil { + return nil + } + return a.WorkflowRun +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (a *ArtifactList) GetTotalCount() int64 { + if a == nil || a.TotalCount == nil { + return 0 + } + return *a.TotalCount +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (a *ArtifactWorkflowRun) GetHeadBranch() string { + if a == nil || a.HeadBranch == nil { + return "" + } + return *a.HeadBranch +} + +// GetHeadRepositoryID returns the HeadRepositoryID field if it's non-nil, zero value otherwise. +func (a *ArtifactWorkflowRun) GetHeadRepositoryID() int64 { + if a == nil || a.HeadRepositoryID == nil { + return 0 + } + return *a.HeadRepositoryID +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (a *ArtifactWorkflowRun) GetHeadSHA() string { + if a == nil || a.HeadSHA == nil { + return "" + } + return *a.HeadSHA +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *ArtifactWorkflowRun) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. +func (a *ArtifactWorkflowRun) GetRepositoryID() int64 { + if a == nil || a.RepositoryID == nil { + return 0 + } + return *a.RepositoryID +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (a *Attachment) GetBody() string { + if a == nil || a.Body == nil { + return "" + } + return *a.Body +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *Attachment) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (a *Attachment) GetTitle() string { + if a == nil || a.Title == nil { + return "" + } + return *a.Title +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetAction() string { + if a == nil || a.Action == nil { + return "" + } + return *a.Action +} + +// GetActive returns the Active field if it's non-nil, zero value otherwise. 
+func (a *AuditEntry) GetActive() bool { + if a == nil || a.Active == nil { + return false + } + return *a.Active +} + +// GetActiveWas returns the ActiveWas field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetActiveWas() bool { + if a == nil || a.ActiveWas == nil { + return false + } + return *a.ActiveWas +} + +// GetActor returns the Actor field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetActor() string { + if a == nil || a.Actor == nil { + return "" + } + return *a.Actor +} + +// GetActorIP returns the ActorIP field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetActorIP() string { + if a == nil || a.ActorIP == nil { + return "" + } + return *a.ActorIP +} + +// GetActorLocation returns the ActorLocation field. +func (a *AuditEntry) GetActorLocation() *ActorLocation { + if a == nil { + return nil + } + return a.ActorLocation +} + +// GetBlockedUser returns the BlockedUser field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetBlockedUser() string { + if a == nil || a.BlockedUser == nil { + return "" + } + return *a.BlockedUser +} + +// GetBusiness returns the Business field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetBusiness() string { + if a == nil || a.Business == nil { + return "" + } + return *a.Business +} + +// GetCancelledAt returns the CancelledAt field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetCancelledAt() Timestamp { + if a == nil || a.CancelledAt == nil { + return Timestamp{} + } + return *a.CancelledAt +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetCompletedAt() Timestamp { + if a == nil || a.CompletedAt == nil { + return Timestamp{} + } + return *a.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetConclusion() string { + if a == nil || a.Conclusion == nil { + return "" + } + return *a.Conclusion +} + +// GetConfig returns the Config field. +func (a *AuditEntry) GetConfig() *HookConfig { + if a == nil { + return nil + } + return a.Config +} + +// GetConfigWas returns the ConfigWas field. +func (a *AuditEntry) GetConfigWas() *HookConfig { + if a == nil { + return nil + } + return a.ConfigWas +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetContentType() string { + if a == nil || a.ContentType == nil { + return "" + } + return *a.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetData returns the Data field. +func (a *AuditEntry) GetData() *AuditEntryData { + if a == nil { + return nil + } + return a.Data +} + +// GetDeployKeyFingerprint returns the DeployKeyFingerprint field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetDeployKeyFingerprint() string { + if a == nil || a.DeployKeyFingerprint == nil { + return "" + } + return *a.DeployKeyFingerprint +} + +// GetDocumentID returns the DocumentID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetDocumentID() string { + if a == nil || a.DocumentID == nil { + return "" + } + return *a.DocumentID +} + +// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. 
+func (a *AuditEntry) GetEmoji() string { + if a == nil || a.Emoji == nil { + return "" + } + return *a.Emoji +} + +// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetEnvironmentName() string { + if a == nil || a.EnvironmentName == nil { + return "" + } + return *a.EnvironmentName +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetEvent() string { + if a == nil || a.Event == nil { + return "" + } + return *a.Event +} + +// GetExplanation returns the Explanation field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetExplanation() string { + if a == nil || a.Explanation == nil { + return "" + } + return *a.Explanation +} + +// GetExternalIdentityNameID returns the ExternalIdentityNameID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetExternalIdentityNameID() string { + if a == nil || a.ExternalIdentityNameID == nil { + return "" + } + return *a.ExternalIdentityNameID +} + +// GetExternalIdentityUsername returns the ExternalIdentityUsername field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetExternalIdentityUsername() string { + if a == nil || a.ExternalIdentityUsername == nil { + return "" + } + return *a.ExternalIdentityUsername +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetFingerprint() string { + if a == nil || a.Fingerprint == nil { + return "" + } + return *a.Fingerprint +} + +// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetHashedToken() string { + if a == nil || a.HashedToken == nil { + return "" + } + return *a.HashedToken +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetHeadBranch() string { + if a == nil || a.HeadBranch == nil { + return "" + } + return *a.HeadBranch +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetHeadSHA() string { + if a == nil || a.HeadSHA == nil { + return "" + } + return *a.HeadSHA +} + +// GetHookID returns the HookID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetHookID() int64 { + if a == nil || a.HookID == nil { + return 0 + } + return *a.HookID +} + +// GetIsHostedRunner returns the IsHostedRunner field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetIsHostedRunner() bool { + if a == nil || a.IsHostedRunner == nil { + return false + } + return *a.IsHostedRunner +} + +// GetJobName returns the JobName field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetJobName() string { + if a == nil || a.JobName == nil { + return "" + } + return *a.JobName +} + +// GetJobWorkflowRef returns the JobWorkflowRef field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetJobWorkflowRef() string { + if a == nil || a.JobWorkflowRef == nil { + return "" + } + return *a.JobWorkflowRef +} + +// GetLimitedAvailability returns the LimitedAvailability field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetLimitedAvailability() bool { + if a == nil || a.LimitedAvailability == nil { + return false + } + return *a.LimitedAvailability +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. 
+func (a *AuditEntry) GetMessage() string { + if a == nil || a.Message == nil { + return "" + } + return *a.Message +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetOAuthApplicationID returns the OAuthApplicationID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOAuthApplicationID() int64 { + if a == nil || a.OAuthApplicationID == nil { + return 0 + } + return *a.OAuthApplicationID +} + +// GetOldPermission returns the OldPermission field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOldPermission() string { + if a == nil || a.OldPermission == nil { + return "" + } + return *a.OldPermission +} + +// GetOldUser returns the OldUser field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOldUser() string { + if a == nil || a.OldUser == nil { + return "" + } + return *a.OldUser +} + +// GetOpenSSHPublicKey returns the OpenSSHPublicKey field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOpenSSHPublicKey() string { + if a == nil || a.OpenSSHPublicKey == nil { + return "" + } + return *a.OpenSSHPublicKey +} + +// GetOperationType returns the OperationType field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOperationType() string { + if a == nil || a.OperationType == nil { + return "" + } + return *a.OperationType +} + +// GetOrg returns the Org field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOrg() string { + if a == nil || a.Org == nil { + return "" + } + return *a.Org +} + +// GetOrgID returns the OrgID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetOrgID() int64 { + if a == nil || a.OrgID == nil { + return 0 + } + return *a.OrgID +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetPermission() string { + if a == nil || a.Permission == nil { + return "" + } + return *a.Permission +} + +// GetPreviousVisibility returns the PreviousVisibility field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetPreviousVisibility() string { + if a == nil || a.PreviousVisibility == nil { + return "" + } + return *a.PreviousVisibility +} + +// GetProgrammaticAccessType returns the ProgrammaticAccessType field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetProgrammaticAccessType() string { + if a == nil || a.ProgrammaticAccessType == nil { + return "" + } + return *a.ProgrammaticAccessType +} + +// GetPullRequestID returns the PullRequestID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetPullRequestID() int64 { + if a == nil || a.PullRequestID == nil { + return 0 + } + return *a.PullRequestID +} + +// GetPullRequestTitle returns the PullRequestTitle field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetPullRequestTitle() string { + if a == nil || a.PullRequestTitle == nil { + return "" + } + return *a.PullRequestTitle +} + +// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetPullRequestURL() string { + if a == nil || a.PullRequestURL == nil { + return "" + } + return *a.PullRequestURL +} + +// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. 
+func (a *AuditEntry) GetReadOnly() string { + if a == nil || a.ReadOnly == nil { + return "" + } + return *a.ReadOnly +} + +// GetRepo returns the Repo field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRepo() string { + if a == nil || a.Repo == nil { + return "" + } + return *a.Repo +} + +// GetRepository returns the Repository field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRepository() string { + if a == nil || a.Repository == nil { + return "" + } + return *a.Repository +} + +// GetRepositoryPublic returns the RepositoryPublic field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRepositoryPublic() bool { + if a == nil || a.RepositoryPublic == nil { + return false + } + return *a.RepositoryPublic +} + +// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunAttempt() int64 { + if a == nil || a.RunAttempt == nil { + return 0 + } + return *a.RunAttempt +} + +// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunnerGroupID() int64 { + if a == nil || a.RunnerGroupID == nil { + return 0 + } + return *a.RunnerGroupID +} + +// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunnerGroupName() string { + if a == nil || a.RunnerGroupName == nil { + return "" + } + return *a.RunnerGroupName +} + +// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunnerID() int64 { + if a == nil || a.RunnerID == nil { + return 0 + } + return *a.RunnerID +} + +// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunnerName() string { + if a == nil || a.RunnerName == nil { + return "" + } + return *a.RunnerName +} + +// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetRunNumber() int64 { + if a == nil || a.RunNumber == nil { + return 0 + } + return *a.RunNumber +} + +// GetSourceVersion returns the SourceVersion field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetSourceVersion() string { + if a == nil || a.SourceVersion == nil { + return "" + } + return *a.SourceVersion +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetStartedAt() Timestamp { + if a == nil || a.StartedAt == nil { + return Timestamp{} + } + return *a.StartedAt +} + +// GetTargetLogin returns the TargetLogin field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTargetLogin() string { + if a == nil || a.TargetLogin == nil { + return "" + } + return *a.TargetLogin +} + +// GetTargetVersion returns the TargetVersion field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTargetVersion() string { + if a == nil || a.TargetVersion == nil { + return "" + } + return *a.TargetVersion +} + +// GetTeam returns the Team field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTeam() string { + if a == nil || a.Team == nil { + return "" + } + return *a.Team +} + +// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTimestamp() Timestamp { + if a == nil || a.Timestamp == nil { + return Timestamp{} + } + return *a.Timestamp +} + +// GetTokenID returns the TokenID field if it's non-nil, zero value otherwise. 
+func (a *AuditEntry) GetTokenID() int64 { + if a == nil || a.TokenID == nil { + return 0 + } + return *a.TokenID +} + +// GetTokenScopes returns the TokenScopes field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTokenScopes() string { + if a == nil || a.TokenScopes == nil { + return "" + } + return *a.TokenScopes +} + +// GetTopic returns the Topic field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTopic() string { + if a == nil || a.Topic == nil { + return "" + } + return *a.Topic +} + +// GetTransportProtocol returns the TransportProtocol field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTransportProtocol() int { + if a == nil || a.TransportProtocol == nil { + return 0 + } + return *a.TransportProtocol +} + +// GetTransportProtocolName returns the TransportProtocolName field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTransportProtocolName() string { + if a == nil || a.TransportProtocolName == nil { + return "" + } + return *a.TransportProtocolName +} + +// GetTriggerID returns the TriggerID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetTriggerID() int64 { + if a == nil || a.TriggerID == nil { + return 0 + } + return *a.TriggerID +} + +// GetUser returns the User field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetUser() string { + if a == nil || a.User == nil { + return "" + } + return *a.User +} + +// GetUserAgent returns the UserAgent field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetUserAgent() string { + if a == nil || a.UserAgent == nil { + return "" + } + return *a.UserAgent +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetVisibility() string { + if a == nil || a.Visibility == nil { + return "" + } + return *a.Visibility +} + +// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetWorkflowID() int64 { + if a == nil || a.WorkflowID == nil { + return 0 + } + return *a.WorkflowID +} + +// GetWorkflowRunID returns the WorkflowRunID field if it's non-nil, zero value otherwise. +func (a *AuditEntry) GetWorkflowRunID() int64 { + if a == nil || a.WorkflowRunID == nil { + return 0 + } + return *a.WorkflowRunID +} + +// GetOldLogin returns the OldLogin field if it's non-nil, zero value otherwise. +func (a *AuditEntryData) GetOldLogin() string { + if a == nil || a.OldLogin == nil { + return "" + } + return *a.OldLogin +} + +// GetOldName returns the OldName field if it's non-nil, zero value otherwise. +func (a *AuditEntryData) GetOldName() string { + if a == nil || a.OldName == nil { + return "" + } + return *a.OldName +} + +// GetApp returns the App field. +func (a *Authorization) GetApp() *AuthorizationApp { + if a == nil { + return nil + } + return a.App +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (a *Authorization) GetCreatedAt() Timestamp { + if a == nil || a.CreatedAt == nil { + return Timestamp{} + } + return *a.CreatedAt +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (a *Authorization) GetFingerprint() string { + if a == nil || a.Fingerprint == nil { + return "" + } + return *a.Fingerprint +} + +// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. 
+func (a *Authorization) GetHashedToken() string { + if a == nil || a.HashedToken == nil { + return "" + } + return *a.HashedToken +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *Authorization) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetNote returns the Note field if it's non-nil, zero value otherwise. +func (a *Authorization) GetNote() string { + if a == nil || a.Note == nil { + return "" + } + return *a.Note +} + +// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. +func (a *Authorization) GetNoteURL() string { + if a == nil || a.NoteURL == nil { + return "" + } + return *a.NoteURL +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (a *Authorization) GetToken() string { + if a == nil || a.Token == nil { + return "" + } + return *a.Token +} + +// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. +func (a *Authorization) GetTokenLastEight() string { + if a == nil || a.TokenLastEight == nil { + return "" + } + return *a.TokenLastEight +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (a *Authorization) GetUpdatedAt() Timestamp { + if a == nil || a.UpdatedAt == nil { + return Timestamp{} + } + return *a.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *Authorization) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetUser returns the User field. +func (a *Authorization) GetUser() *User { + if a == nil { + return nil + } + return a.User +} + +// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. +func (a *AuthorizationApp) GetClientID() string { + if a == nil || a.ClientID == nil { + return "" + } + return *a.ClientID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (a *AuthorizationApp) GetName() string { + if a == nil || a.Name == nil { + return "" + } + return *a.Name +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (a *AuthorizationApp) GetURL() string { + if a == nil || a.URL == nil { + return "" + } + return *a.URL +} + +// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. +func (a *AuthorizationRequest) GetClientID() string { + if a == nil || a.ClientID == nil { + return "" + } + return *a.ClientID +} + +// GetClientSecret returns the ClientSecret field if it's non-nil, zero value otherwise. +func (a *AuthorizationRequest) GetClientSecret() string { + if a == nil || a.ClientSecret == nil { + return "" + } + return *a.ClientSecret +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (a *AuthorizationRequest) GetFingerprint() string { + if a == nil || a.Fingerprint == nil { + return "" + } + return *a.Fingerprint +} + +// GetNote returns the Note field if it's non-nil, zero value otherwise. +func (a *AuthorizationRequest) GetNote() string { + if a == nil || a.Note == nil { + return "" + } + return *a.Note +} + +// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. +func (a *AuthorizationRequest) GetNoteURL() string { + if a == nil || a.NoteURL == nil { + return "" + } + return *a.NoteURL +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. 
+func (a *AuthorizationUpdateRequest) GetFingerprint() string { + if a == nil || a.Fingerprint == nil { + return "" + } + return *a.Fingerprint +} + +// GetNote returns the Note field if it's non-nil, zero value otherwise. +func (a *AuthorizationUpdateRequest) GetNote() string { + if a == nil || a.Note == nil { + return "" + } + return *a.Note +} + +// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. +func (a *AuthorizationUpdateRequest) GetNoteURL() string { + if a == nil || a.NoteURL == nil { + return "" + } + return *a.NoteURL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *AuthorizedActorsOnly) GetFrom() bool { + if a == nil || a.From == nil { + return false + } + return *a.From +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (a *AuthorizedDismissalActorsOnlyChanges) GetFrom() bool { + if a == nil || a.From == nil { + return false + } + return *a.From +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (a *Autolink) GetID() int64 { + if a == nil || a.ID == nil { + return 0 + } + return *a.ID +} + +// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. +func (a *Autolink) GetIsAlphanumeric() bool { + if a == nil || a.IsAlphanumeric == nil { + return false + } + return *a.IsAlphanumeric +} + +// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. +func (a *Autolink) GetKeyPrefix() string { + if a == nil || a.KeyPrefix == nil { + return "" + } + return *a.KeyPrefix +} + +// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. +func (a *Autolink) GetURLTemplate() string { + if a == nil || a.URLTemplate == nil { + return "" + } + return *a.URLTemplate +} + +// GetIsAlphanumeric returns the IsAlphanumeric field if it's non-nil, zero value otherwise. +func (a *AutolinkOptions) GetIsAlphanumeric() bool { + if a == nil || a.IsAlphanumeric == nil { + return false + } + return *a.IsAlphanumeric +} + +// GetKeyPrefix returns the KeyPrefix field if it's non-nil, zero value otherwise. +func (a *AutolinkOptions) GetKeyPrefix() string { + if a == nil || a.KeyPrefix == nil { + return "" + } + return *a.KeyPrefix +} + +// GetURLTemplate returns the URLTemplate field if it's non-nil, zero value otherwise. +func (a *AutolinkOptions) GetURLTemplate() string { + if a == nil || a.URLTemplate == nil { + return "" + } + return *a.URLTemplate +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (a *AutomatedSecurityFixes) GetEnabled() bool { + if a == nil || a.Enabled == nil { + return false + } + return *a.Enabled +} + +// GetPaused returns the Paused field if it's non-nil, zero value otherwise. +func (a *AutomatedSecurityFixes) GetPaused() bool { + if a == nil || a.Paused == nil { + return false + } + return *a.Paused +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (a *AutoTriggerCheck) GetAppID() int64 { + if a == nil || a.AppID == nil { + return 0 + } + return *a.AppID +} + +// GetSetting returns the Setting field if it's non-nil, zero value otherwise. +func (a *AutoTriggerCheck) GetSetting() bool { + if a == nil || a.Setting == nil { + return false + } + return *a.Setting +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. 
+func (b *Blob) GetContent() string { + if b == nil || b.Content == nil { + return "" + } + return *b.Content +} + +// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. +func (b *Blob) GetEncoding() string { + if b == nil || b.Encoding == nil { + return "" + } + return *b.Encoding +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (b *Blob) GetNodeID() string { + if b == nil || b.NodeID == nil { + return "" + } + return *b.NodeID +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (b *Blob) GetSHA() string { + if b == nil || b.SHA == nil { + return "" + } + return *b.SHA +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (b *Blob) GetSize() int { + if b == nil || b.Size == nil { + return 0 + } + return *b.Size +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (b *Blob) GetURL() string { + if b == nil || b.URL == nil { + return "" + } + return *b.URL +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (b *BlockCreations) GetEnabled() bool { + if b == nil || b.Enabled == nil { + return false + } + return *b.Enabled +} + +// GetCommit returns the Commit field. +func (b *Branch) GetCommit() *RepositoryCommit { + if b == nil { + return nil + } + return b.Commit +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (b *Branch) GetName() string { + if b == nil || b.Name == nil { + return "" + } + return *b.Name +} + +// GetProtected returns the Protected field if it's non-nil, zero value otherwise. +func (b *Branch) GetProtected() bool { + if b == nil || b.Protected == nil { + return false + } + return *b.Protected +} + +// GetCommit returns the Commit field. +func (b *BranchCommit) GetCommit() *Commit { + if b == nil { + return nil + } + return b.Commit +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (b *BranchCommit) GetName() string { + if b == nil || b.Name == nil { + return "" + } + return *b.Name +} + +// GetProtected returns the Protected field if it's non-nil, zero value otherwise. +func (b *BranchCommit) GetProtected() bool { + if b == nil || b.Protected == nil { + return false + } + return *b.Protected +} + +// GetProtected returns the Protected field if it's non-nil, zero value otherwise. +func (b *BranchListOptions) GetProtected() bool { + if b == nil || b.Protected == nil { + return false + } + return *b.Protected +} + +// GetCustomBranchPolicies returns the CustomBranchPolicies field if it's non-nil, zero value otherwise. +func (b *BranchPolicy) GetCustomBranchPolicies() bool { + if b == nil || b.CustomBranchPolicies == nil { + return false + } + return *b.CustomBranchPolicies +} + +// GetProtectedBranches returns the ProtectedBranches field if it's non-nil, zero value otherwise. +func (b *BranchPolicy) GetProtectedBranches() bool { + if b == nil || b.ProtectedBranches == nil { + return false + } + return *b.ProtectedBranches +} + +// GetAdminEnforced returns the AdminEnforced field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetAdminEnforced() bool { + if b == nil || b.AdminEnforced == nil { + return false + } + return *b.AdminEnforced +} + +// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field if it's non-nil, zero value otherwise. 
+func (b *BranchProtectionRule) GetAllowDeletionsEnforcementLevel() string { + if b == nil || b.AllowDeletionsEnforcementLevel == nil { + return "" + } + return *b.AllowDeletionsEnforcementLevel +} + +// GetAllowForcePushesEnforcementLevel returns the AllowForcePushesEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetAllowForcePushesEnforcementLevel() string { + if b == nil || b.AllowForcePushesEnforcementLevel == nil { + return "" + } + return *b.AllowForcePushesEnforcementLevel +} + +// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetAuthorizedActorsOnly() bool { + if b == nil || b.AuthorizedActorsOnly == nil { + return false + } + return *b.AuthorizedActorsOnly +} + +// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetAuthorizedDismissalActorsOnly() bool { + if b == nil || b.AuthorizedDismissalActorsOnly == nil { + return false + } + return *b.AuthorizedDismissalActorsOnly +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetCreatedAt() Timestamp { + if b == nil || b.CreatedAt == nil { + return Timestamp{} + } + return *b.CreatedAt +} + +// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetDismissStaleReviewsOnPush() bool { + if b == nil || b.DismissStaleReviewsOnPush == nil { + return false + } + return *b.DismissStaleReviewsOnPush +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetID() int64 { + if b == nil || b.ID == nil { + return 0 + } + return *b.ID +} + +// GetIgnoreApprovalsFromContributors returns the IgnoreApprovalsFromContributors field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetIgnoreApprovalsFromContributors() bool { + if b == nil || b.IgnoreApprovalsFromContributors == nil { + return false + } + return *b.IgnoreApprovalsFromContributors +} + +// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetLinearHistoryRequirementEnforcementLevel() string { + if b == nil || b.LinearHistoryRequirementEnforcementLevel == nil { + return "" + } + return *b.LinearHistoryRequirementEnforcementLevel +} + +// GetMergeQueueEnforcementLevel returns the MergeQueueEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetMergeQueueEnforcementLevel() string { + if b == nil || b.MergeQueueEnforcementLevel == nil { + return "" + } + return *b.MergeQueueEnforcementLevel +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetName() string { + if b == nil || b.Name == nil { + return "" + } + return *b.Name +} + +// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetPullRequestReviewsEnforcementLevel() string { + if b == nil || b.PullRequestReviewsEnforcementLevel == nil { + return "" + } + return *b.PullRequestReviewsEnforcementLevel +} + +// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. 
+func (b *BranchProtectionRule) GetRepositoryID() int64 { + if b == nil || b.RepositoryID == nil { + return 0 + } + return *b.RepositoryID +} + +// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetRequireCodeOwnerReview() bool { + if b == nil || b.RequireCodeOwnerReview == nil { + return false + } + return *b.RequireCodeOwnerReview +} + +// GetRequiredApprovingReviewCount returns the RequiredApprovingReviewCount field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetRequiredApprovingReviewCount() int { + if b == nil || b.RequiredApprovingReviewCount == nil { + return 0 + } + return *b.RequiredApprovingReviewCount +} + +// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetRequiredConversationResolutionLevel() string { + if b == nil || b.RequiredConversationResolutionLevel == nil { + return "" + } + return *b.RequiredConversationResolutionLevel +} + +// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetRequiredDeploymentsEnforcementLevel() string { + if b == nil || b.RequiredDeploymentsEnforcementLevel == nil { + return "" + } + return *b.RequiredDeploymentsEnforcementLevel +} + +// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetRequiredStatusChecksEnforcementLevel() string { + if b == nil || b.RequiredStatusChecksEnforcementLevel == nil { + return "" + } + return *b.RequiredStatusChecksEnforcementLevel +} + +// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetSignatureRequirementEnforcementLevel() string { + if b == nil || b.SignatureRequirementEnforcementLevel == nil { + return "" + } + return *b.SignatureRequirementEnforcementLevel +} + +// GetStrictRequiredStatusChecksPolicy returns the StrictRequiredStatusChecksPolicy field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetStrictRequiredStatusChecksPolicy() bool { + if b == nil || b.StrictRequiredStatusChecksPolicy == nil { + return false + } + return *b.StrictRequiredStatusChecksPolicy +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRule) GetUpdatedAt() Timestamp { + if b == nil || b.UpdatedAt == nil { + return Timestamp{} + } + return *b.UpdatedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (b *BranchProtectionRuleEvent) GetAction() string { + if b == nil || b.Action == nil { + return "" + } + return *b.Action +} + +// GetChanges returns the Changes field. +func (b *BranchProtectionRuleEvent) GetChanges() *ProtectionChanges { + if b == nil { + return nil + } + return b.Changes +} + +// GetInstallation returns the Installation field. +func (b *BranchProtectionRuleEvent) GetInstallation() *Installation { + if b == nil { + return nil + } + return b.Installation +} + +// GetOrg returns the Org field. +func (b *BranchProtectionRuleEvent) GetOrg() *Organization { + if b == nil { + return nil + } + return b.Org +} + +// GetRepo returns the Repo field. 
+func (b *BranchProtectionRuleEvent) GetRepo() *Repository { + if b == nil { + return nil + } + return b.Repo +} + +// GetRule returns the Rule field. +func (b *BranchProtectionRuleEvent) GetRule() *BranchProtectionRule { + if b == nil { + return nil + } + return b.Rule +} + +// GetSender returns the Sender field. +func (b *BranchProtectionRuleEvent) GetSender() *User { + if b == nil { + return nil + } + return b.Sender +} + +// GetActorID returns the ActorID field if it's non-nil, zero value otherwise. +func (b *BypassActor) GetActorID() int64 { + if b == nil || b.ActorID == nil { + return 0 + } + return *b.ActorID +} + +// GetActorType returns the ActorType field if it's non-nil, zero value otherwise. +func (b *BypassActor) GetActorType() string { + if b == nil || b.ActorType == nil { + return "" + } + return *b.ActorType +} + +// GetBypassMode returns the BypassMode field if it's non-nil, zero value otherwise. +func (b *BypassActor) GetBypassMode() string { + if b == nil || b.BypassMode == nil { + return "" + } + return *b.BypassMode +} + +// GetApp returns the App field. +func (c *CheckRun) GetApp() *App { + if c == nil { + return nil + } + return c.App +} + +// GetCheckSuite returns the CheckSuite field. +func (c *CheckRun) GetCheckSuite() *CheckSuite { + if c == nil { + return nil + } + return c.CheckSuite +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetCompletedAt() Timestamp { + if c == nil || c.CompletedAt == nil { + return Timestamp{} + } + return *c.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetConclusion() string { + if c == nil || c.Conclusion == nil { + return "" + } + return *c.Conclusion +} + +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetDetailsURL() string { + if c == nil || c.DetailsURL == nil { + return "" + } + return *c.DetailsURL +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetExternalID() string { + if c == nil || c.ExternalID == nil { + return "" + } + return *c.ExternalID +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetHeadSHA() string { + if c == nil || c.HeadSHA == nil { + return "" + } + return *c.HeadSHA +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetOutput returns the Output field. +func (c *CheckRun) GetOutput() *CheckRunOutput { + if c == nil { + return nil + } + return c.Output +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. 
+func (c *CheckRun) GetStartedAt() Timestamp { + if c == nil || c.StartedAt == nil { + return Timestamp{} + } + return *c.StartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CheckRun) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAnnotationLevel returns the AnnotationLevel field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetAnnotationLevel() string { + if c == nil || c.AnnotationLevel == nil { + return "" + } + return *c.AnnotationLevel +} + +// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetEndColumn() int { + if c == nil || c.EndColumn == nil { + return 0 + } + return *c.EndColumn +} + +// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetEndLine() int { + if c == nil || c.EndLine == nil { + return 0 + } + return *c.EndLine +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetMessage() string { + if c == nil || c.Message == nil { + return "" + } + return *c.Message +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetPath() string { + if c == nil || c.Path == nil { + return "" + } + return *c.Path +} + +// GetRawDetails returns the RawDetails field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetRawDetails() string { + if c == nil || c.RawDetails == nil { + return "" + } + return *c.RawDetails +} + +// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetStartColumn() int { + if c == nil || c.StartColumn == nil { + return 0 + } + return *c.StartColumn +} + +// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetStartLine() int { + if c == nil || c.StartLine == nil { + return 0 + } + return *c.StartLine +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (c *CheckRunAnnotation) GetTitle() string { + if c == nil || c.Title == nil { + return "" + } + return *c.Title +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CheckRunEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" + } + return *c.Action +} + +// GetCheckRun returns the CheckRun field. +func (c *CheckRunEvent) GetCheckRun() *CheckRun { + if c == nil { + return nil + } + return c.CheckRun +} + +// GetInstallation returns the Installation field. +func (c *CheckRunEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetOrg returns the Org field. +func (c *CheckRunEvent) GetOrg() *Organization { + if c == nil { + return nil + } + return c.Org +} + +// GetRepo returns the Repo field. +func (c *CheckRunEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetRequestedAction returns the RequestedAction field. +func (c *CheckRunEvent) GetRequestedAction() *RequestedAction { + if c == nil { + return nil + } + return c.RequestedAction +} + +// GetSender returns the Sender field. 
+func (c *CheckRunEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetAlt returns the Alt field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetAlt() string { + if c == nil || c.Alt == nil { + return "" + } + return *c.Alt +} + +// GetCaption returns the Caption field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetCaption() string { + if c == nil || c.Caption == nil { + return "" + } + return *c.Caption +} + +// GetImageURL returns the ImageURL field if it's non-nil, zero value otherwise. +func (c *CheckRunImage) GetImageURL() string { + if c == nil || c.ImageURL == nil { + return "" + } + return *c.ImageURL +} + +// GetAnnotationsCount returns the AnnotationsCount field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetAnnotationsCount() int { + if c == nil || c.AnnotationsCount == nil { + return 0 + } + return *c.AnnotationsCount +} + +// GetAnnotationsURL returns the AnnotationsURL field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetAnnotationsURL() string { + if c == nil || c.AnnotationsURL == nil { + return "" + } + return *c.AnnotationsURL +} + +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetSummary() string { + if c == nil || c.Summary == nil { + return "" + } + return *c.Summary +} + +// GetText returns the Text field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetText() string { + if c == nil || c.Text == nil { + return "" + } + return *c.Text +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (c *CheckRunOutput) GetTitle() string { + if c == nil || c.Title == nil { + return "" + } + return *c.Title +} + +// GetAfterSHA returns the AfterSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetAfterSHA() string { + if c == nil || c.AfterSHA == nil { + return "" + } + return *c.AfterSHA +} + +// GetApp returns the App field. +func (c *CheckSuite) GetApp() *App { + if c == nil { + return nil + } + return c.App +} + +// GetBeforeSHA returns the BeforeSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetBeforeSHA() string { + if c == nil || c.BeforeSHA == nil { + return "" + } + return *c.BeforeSHA +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetConclusion() string { + if c == nil || c.Conclusion == nil { + return "" + } + return *c.Conclusion +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetHeadBranch() string { + if c == nil || c.HeadBranch == nil { + return "" + } + return *c.HeadBranch +} + +// GetHeadCommit returns the HeadCommit field. +func (c *CheckSuite) GetHeadCommit() *Commit { + if c == nil { + return nil + } + return c.HeadCommit +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetHeadSHA() string { + if c == nil || c.HeadSHA == nil { + return "" + } + return *c.HeadSHA +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (c *CheckSuite) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetRepository returns the Repository field. +func (c *CheckSuite) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CheckSuite) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CheckSuiteEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" + } + return *c.Action +} + +// GetCheckSuite returns the CheckSuite field. +func (c *CheckSuiteEvent) GetCheckSuite() *CheckSuite { + if c == nil { + return nil + } + return c.CheckSuite +} + +// GetInstallation returns the Installation field. +func (c *CheckSuiteEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetOrg returns the Org field. +func (c *CheckSuiteEvent) GetOrg() *Organization { + if c == nil { + return nil + } + return c.Org +} + +// GetRepo returns the Repo field. +func (c *CheckSuiteEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *CheckSuiteEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetPreferences returns the Preferences field. +func (c *CheckSuitePreferenceResults) GetPreferences() *PreferenceList { + if c == nil { + return nil + } + return c.Preferences +} + +// GetRepository returns the Repository field. +func (c *CheckSuitePreferenceResults) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetBody() string { + if c == nil || c.Body == nil { + return "" + } + return *c.Body +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetKey() string { + if c == nil || c.Key == nil { + return "" + } + return *c.Key +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CodeOfConduct) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetSuggestion returns the Suggestion field if it's non-nil, zero value otherwise. +func (c *CodeownersError) GetSuggestion() string { + if c == nil || c.Suggestion == nil { + return "" + } + return *c.Suggestion +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. 
+func (c *CodeQLDatabase) GetContentType() string { + if c == nil || c.ContentType == nil { + return "" + } + return *c.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetLanguage() string { + if c == nil || c.Language == nil { + return "" + } + return *c.Language +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetSize() int64 { + if c == nil || c.Size == nil { + return 0 + } + return *c.Size +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetUploader returns the Uploader field. +func (c *CodeQLDatabase) GetUploader() *User { + if c == nil { + return nil + } + return c.Uploader +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CodeQLDatabase) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetPath() string { + if c == nil || c.Path == nil { + return "" + } + return *c.Path +} + +// GetRepository returns the Repository field. +func (c *CodeResult) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CodeResult) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CodeScanningAlertEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" + } + return *c.Action +} + +// GetAlert returns the Alert field. +func (c *CodeScanningAlertEvent) GetAlert() *Alert { + if c == nil { + return nil + } + return c.Alert +} + +// GetCommitOID returns the CommitOID field if it's non-nil, zero value otherwise. +func (c *CodeScanningAlertEvent) GetCommitOID() string { + if c == nil || c.CommitOID == nil { + return "" + } + return *c.CommitOID +} + +// GetInstallation returns the Installation field. +func (c *CodeScanningAlertEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetOrg returns the Org field. 
+func (c *CodeScanningAlertEvent) GetOrg() *Organization { + if c == nil { + return nil + } + return c.Org +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (c *CodeScanningAlertEvent) GetRef() string { + if c == nil || c.Ref == nil { + return "" + } + return *c.Ref +} + +// GetRepo returns the Repo field. +func (c *CodeScanningAlertEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *CodeScanningAlertEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. +func (c *CodeScanningAlertState) GetDismissedComment() string { + if c == nil || c.DismissedComment == nil { + return "" + } + return *c.DismissedComment +} + +// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. +func (c *CodeScanningAlertState) GetDismissedReason() string { + if c == nil || c.DismissedReason == nil { + return "" + } + return *c.DismissedReason +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (c *CodeSearchResult) GetIncompleteResults() bool { + if c == nil || c.IncompleteResults == nil { + return false + } + return *c.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CodeSearchResult) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetBillableOwner returns the BillableOwner field. +func (c *Codespace) GetBillableOwner() *User { + if c == nil { + return nil + } + return c.BillableOwner +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *Codespace) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. +func (c *Codespace) GetDevcontainerPath() string { + if c == nil || c.DevcontainerPath == nil { + return "" + } + return *c.DevcontainerPath +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (c *Codespace) GetDisplayName() string { + if c == nil || c.DisplayName == nil { + return "" + } + return *c.DisplayName +} + +// GetEnvironmentID returns the EnvironmentID field if it's non-nil, zero value otherwise. +func (c *Codespace) GetEnvironmentID() string { + if c == nil || c.EnvironmentID == nil { + return "" + } + return *c.EnvironmentID +} + +// GetGitStatus returns the GitStatus field. +func (c *Codespace) GetGitStatus() *CodespacesGitStatus { + if c == nil { + return nil + } + return c.GitStatus +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *Codespace) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. +func (c *Codespace) GetIdleTimeoutMinutes() int { + if c == nil || c.IdleTimeoutMinutes == nil { + return 0 + } + return *c.IdleTimeoutMinutes +} + +// GetIdleTimeoutNotice returns the IdleTimeoutNotice field if it's non-nil, zero value otherwise. 
+func (c *Codespace) GetIdleTimeoutNotice() string { + if c == nil || c.IdleTimeoutNotice == nil { + return "" + } + return *c.IdleTimeoutNotice +} + +// GetLastKnownStopNotice returns the LastKnownStopNotice field if it's non-nil, zero value otherwise. +func (c *Codespace) GetLastKnownStopNotice() string { + if c == nil || c.LastKnownStopNotice == nil { + return "" + } + return *c.LastKnownStopNotice +} + +// GetLastUsedAt returns the LastUsedAt field if it's non-nil, zero value otherwise. +func (c *Codespace) GetLastUsedAt() Timestamp { + if c == nil || c.LastUsedAt == nil { + return Timestamp{} + } + return *c.LastUsedAt +} + +// GetLocation returns the Location field if it's non-nil, zero value otherwise. +func (c *Codespace) GetLocation() string { + if c == nil || c.Location == nil { + return "" + } + return *c.Location +} + +// GetMachine returns the Machine field. +func (c *Codespace) GetMachine() *CodespacesMachine { + if c == nil { + return nil + } + return c.Machine +} + +// GetMachinesURL returns the MachinesURL field if it's non-nil, zero value otherwise. +func (c *Codespace) GetMachinesURL() string { + if c == nil || c.MachinesURL == nil { + return "" + } + return *c.MachinesURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *Codespace) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetOwner returns the Owner field. +func (c *Codespace) GetOwner() *User { + if c == nil { + return nil + } + return c.Owner +} + +// GetPendingOperation returns the PendingOperation field if it's non-nil, zero value otherwise. +func (c *Codespace) GetPendingOperation() bool { + if c == nil || c.PendingOperation == nil { + return false + } + return *c.PendingOperation +} + +// GetPendingOperationDisabledReason returns the PendingOperationDisabledReason field if it's non-nil, zero value otherwise. +func (c *Codespace) GetPendingOperationDisabledReason() string { + if c == nil || c.PendingOperationDisabledReason == nil { + return "" + } + return *c.PendingOperationDisabledReason +} + +// GetPrebuild returns the Prebuild field if it's non-nil, zero value otherwise. +func (c *Codespace) GetPrebuild() bool { + if c == nil || c.Prebuild == nil { + return false + } + return *c.Prebuild +} + +// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. +func (c *Codespace) GetPullsURL() string { + if c == nil || c.PullsURL == nil { + return "" + } + return *c.PullsURL +} + +// GetRepository returns the Repository field. +func (c *Codespace) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetRetentionExpiresAt returns the RetentionExpiresAt field if it's non-nil, zero value otherwise. +func (c *Codespace) GetRetentionExpiresAt() Timestamp { + if c == nil || c.RetentionExpiresAt == nil { + return Timestamp{} + } + return *c.RetentionExpiresAt +} + +// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. +func (c *Codespace) GetRetentionPeriodMinutes() int { + if c == nil || c.RetentionPeriodMinutes == nil { + return 0 + } + return *c.RetentionPeriodMinutes +} + +// GetRuntimeConstraints returns the RuntimeConstraints field. +func (c *Codespace) GetRuntimeConstraints() *CodespacesRuntimeConstraints { + if c == nil { + return nil + } + return c.RuntimeConstraints +} + +// GetStartURL returns the StartURL field if it's non-nil, zero value otherwise. 
+func (c *Codespace) GetStartURL() string { + if c == nil || c.StartURL == nil { + return "" + } + return *c.StartURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (c *Codespace) GetState() string { + if c == nil || c.State == nil { + return "" + } + return *c.State +} + +// GetStopURL returns the StopURL field if it's non-nil, zero value otherwise. +func (c *Codespace) GetStopURL() string { + if c == nil || c.StopURL == nil { + return "" + } + return *c.StopURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *Codespace) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *Codespace) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetWebURL returns the WebURL field if it's non-nil, zero value otherwise. +func (c *Codespace) GetWebURL() string { + if c == nil || c.WebURL == nil { + return "" + } + return *c.WebURL +} + +// GetAhead returns the Ahead field if it's non-nil, zero value otherwise. +func (c *CodespacesGitStatus) GetAhead() int { + if c == nil || c.Ahead == nil { + return 0 + } + return *c.Ahead +} + +// GetBehind returns the Behind field if it's non-nil, zero value otherwise. +func (c *CodespacesGitStatus) GetBehind() int { + if c == nil || c.Behind == nil { + return 0 + } + return *c.Behind +} + +// GetHasUncommittedChanges returns the HasUncommittedChanges field if it's non-nil, zero value otherwise. +func (c *CodespacesGitStatus) GetHasUncommittedChanges() bool { + if c == nil || c.HasUncommittedChanges == nil { + return false + } + return *c.HasUncommittedChanges +} + +// GetHasUnpushedChanges returns the HasUnpushedChanges field if it's non-nil, zero value otherwise. +func (c *CodespacesGitStatus) GetHasUnpushedChanges() bool { + if c == nil || c.HasUnpushedChanges == nil { + return false + } + return *c.HasUnpushedChanges +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (c *CodespacesGitStatus) GetRef() string { + if c == nil || c.Ref == nil { + return "" + } + return *c.Ref +} + +// GetCPUs returns the CPUs field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetCPUs() int { + if c == nil || c.CPUs == nil { + return 0 + } + return *c.CPUs +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetDisplayName() string { + if c == nil || c.DisplayName == nil { + return "" + } + return *c.DisplayName +} + +// GetMemoryInBytes returns the MemoryInBytes field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetMemoryInBytes() int64 { + if c == nil || c.MemoryInBytes == nil { + return 0 + } + return *c.MemoryInBytes +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetOperatingSystem returns the OperatingSystem field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetOperatingSystem() string { + if c == nil || c.OperatingSystem == nil { + return "" + } + return *c.OperatingSystem +} + +// GetPrebuildAvailability returns the PrebuildAvailability field if it's non-nil, zero value otherwise. 
+func (c *CodespacesMachine) GetPrebuildAvailability() string { + if c == nil || c.PrebuildAvailability == nil { + return "" + } + return *c.PrebuildAvailability +} + +// GetStorageInBytes returns the StorageInBytes field if it's non-nil, zero value otherwise. +func (c *CodespacesMachine) GetStorageInBytes() int64 { + if c == nil || c.StorageInBytes == nil { + return 0 + } + return *c.StorageInBytes +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *CollaboratorInvitation) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CollaboratorInvitation) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CollaboratorInvitation) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetInvitee returns the Invitee field. +func (c *CollaboratorInvitation) GetInvitee() *User { + if c == nil { + return nil + } + return c.Invitee +} + +// GetInviter returns the Inviter field. +func (c *CollaboratorInvitation) GetInviter() *User { + if c == nil { + return nil + } + return c.Inviter +} + +// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. +func (c *CollaboratorInvitation) GetPermissions() string { + if c == nil || c.Permissions == nil { + return "" + } + return *c.Permissions +} + +// GetRepo returns the Repo field. +func (c *CollaboratorInvitation) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CollaboratorInvitation) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetCommitURL() string { + if c == nil || c.CommitURL == nil { + return "" + } + return *c.CommitURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetRepositoryURL() string { + if c == nil || c.RepositoryURL == nil { + return "" + } + return *c.RepositoryURL +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetState() string { + if c == nil || c.State == nil { + return "" + } + return *c.State +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (c *CombinedStatus) GetTotalCount() int { + if c == nil || c.TotalCount == nil { + return 0 + } + return *c.TotalCount +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *Comment) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. 
+func (c *CommentDiscussion) GetAuthorAssociation() string { + if c == nil || c.AuthorAssociation == nil { + return "" + } + return *c.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetBody() string { + if c == nil || c.Body == nil { + return "" + } + return *c.Body +} + +// GetChildCommentCount returns the ChildCommentCount field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetChildCommentCount() int { + if c == nil || c.ChildCommentCount == nil { + return 0 + } + return *c.ChildCommentCount +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetCreatedAt() Timestamp { + if c == nil || c.CreatedAt == nil { + return Timestamp{} + } + return *c.CreatedAt +} + +// GetDiscussionID returns the DiscussionID field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetDiscussionID() int64 { + if c == nil || c.DiscussionID == nil { + return 0 + } + return *c.DiscussionID +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetParentID returns the ParentID field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetParentID() int64 { + if c == nil || c.ParentID == nil { + return 0 + } + return *c.ParentID +} + +// GetReactions returns the Reactions field. +func (c *CommentDiscussion) GetReactions() *Reactions { + if c == nil { + return nil + } + return c.Reactions +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetRepositoryURL() string { + if c == nil || c.RepositoryURL == nil { + return "" + } + return *c.RepositoryURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CommentDiscussion) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetUser returns the User field. +func (c *CommentDiscussion) GetUser() *User { + if c == nil { + return nil + } + return c.User +} + +// GetTotalCommitComments returns the TotalCommitComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalCommitComments() int { + if c == nil || c.TotalCommitComments == nil { + return 0 + } + return *c.TotalCommitComments +} + +// GetTotalGistComments returns the TotalGistComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalGistComments() int { + if c == nil || c.TotalGistComments == nil { + return 0 + } + return *c.TotalGistComments +} + +// GetTotalIssueComments returns the TotalIssueComments field if it's non-nil, zero value otherwise. +func (c *CommentStats) GetTotalIssueComments() int { + if c == nil || c.TotalIssueComments == nil { + return 0 + } + return *c.TotalIssueComments +} + +// GetTotalPullRequestComments returns the TotalPullRequestComments field if it's non-nil, zero value otherwise. 
+func (c *CommentStats) GetTotalPullRequestComments() int { + if c == nil || c.TotalPullRequestComments == nil { + return 0 + } + return *c.TotalPullRequestComments +} + +// GetAuthor returns the Author field. +func (c *Commit) GetAuthor() *CommitAuthor { + if c == nil { + return nil + } + return c.Author +} + +// GetCommentCount returns the CommentCount field if it's non-nil, zero value otherwise. +func (c *Commit) GetCommentCount() int { + if c == nil || c.CommentCount == nil { + return 0 + } + return *c.CommentCount +} + +// GetCommitter returns the Committer field. +func (c *Commit) GetCommitter() *CommitAuthor { + if c == nil { + return nil + } + return c.Committer +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *Commit) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (c *Commit) GetMessage() string { + if c == nil || c.Message == nil { + return "" + } + return *c.Message +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *Commit) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *Commit) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetStats returns the Stats field. +func (c *Commit) GetStats() *CommitStats { + if c == nil { + return nil + } + return c.Stats +} + +// GetTree returns the Tree field. +func (c *Commit) GetTree() *Tree { + if c == nil { + return nil + } + return c.Tree +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *Commit) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetVerification returns the Verification field. +func (c *Commit) GetVerification() *SignatureVerification { + if c == nil { + return nil + } + return c.Verification +} + +// GetDate returns the Date field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetDate() Timestamp { + if c == nil || c.Date == nil { + return Timestamp{} + } + return *c.Date +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetEmail() string { + if c == nil || c.Email == nil { + return "" + } + return *c.Email +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetLogin() string { + if c == nil || c.Login == nil { + return "" + } + return *c.Login +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CommitAuthor) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *CommitCommentEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" + } + return *c.Action +} + +// GetComment returns the Comment field. +func (c *CommitCommentEvent) GetComment() *RepositoryComment { + if c == nil { + return nil + } + return c.Comment +} + +// GetInstallation returns the Installation field. +func (c *CommitCommentEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetOrg returns the Org field. 
+func (c *CommitCommentEvent) GetOrg() *Organization { + if c == nil { + return nil + } + return c.Org +} + +// GetRepo returns the Repo field. +func (c *CommitCommentEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *CommitCommentEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetAdditions() int { + if c == nil || c.Additions == nil { + return 0 + } + return *c.Additions +} + +// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetBlobURL() string { + if c == nil || c.BlobURL == nil { + return "" + } + return *c.BlobURL +} + +// GetChanges returns the Changes field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetChanges() int { + if c == nil || c.Changes == nil { + return 0 + } + return *c.Changes +} + +// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetContentsURL() string { + if c == nil || c.ContentsURL == nil { + return "" + } + return *c.ContentsURL +} + +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetDeletions() int { + if c == nil || c.Deletions == nil { + return 0 + } + return *c.Deletions +} + +// GetFilename returns the Filename field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetFilename() string { + if c == nil || c.Filename == nil { + return "" + } + return *c.Filename +} + +// GetPatch returns the Patch field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetPatch() string { + if c == nil || c.Patch == nil { + return "" + } + return *c.Patch +} + +// GetPreviousFilename returns the PreviousFilename field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetPreviousFilename() string { + if c == nil || c.PreviousFilename == nil { + return "" + } + return *c.PreviousFilename +} + +// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetRawURL() string { + if c == nil || c.RawURL == nil { + return "" + } + return *c.RawURL +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CommitFile) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetAuthor returns the Author field. +func (c *CommitResult) GetAuthor() *User { + if c == nil { + return nil + } + return c.Author +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetCommentsURL() string { + if c == nil || c.CommentsURL == nil { + return "" + } + return *c.CommentsURL +} + +// GetCommit returns the Commit field. +func (c *CommitResult) GetCommit() *Commit { + if c == nil { + return nil + } + return c.Commit +} + +// GetCommitter returns the Committer field. +func (c *CommitResult) GetCommitter() *User { + if c == nil { + return nil + } + return c.Committer +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetRepository returns the Repository field. 
+func (c *CommitResult) GetRepository() *Repository { + if c == nil { + return nil + } + return c.Repository +} + +// GetScore returns the Score field. +func (c *CommitResult) GetScore() *float64 { + if c == nil { + return nil + } + return c.Score +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetSHA() string { + if c == nil || c.SHA == nil { + return "" + } + return *c.SHA +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CommitResult) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAheadBy returns the AheadBy field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetAheadBy() int { + if c == nil || c.AheadBy == nil { + return 0 + } + return *c.AheadBy +} + +// GetBaseCommit returns the BaseCommit field. +func (c *CommitsComparison) GetBaseCommit() *RepositoryCommit { + if c == nil { + return nil + } + return c.BaseCommit +} + +// GetBehindBy returns the BehindBy field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetBehindBy() int { + if c == nil || c.BehindBy == nil { + return 0 + } + return *c.BehindBy +} + +// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetDiffURL() string { + if c == nil || c.DiffURL == nil { + return "" + } + return *c.DiffURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetMergeBaseCommit returns the MergeBaseCommit field. +func (c *CommitsComparison) GetMergeBaseCommit() *RepositoryCommit { + if c == nil { + return nil + } + return c.MergeBaseCommit +} + +// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetPatchURL() string { + if c == nil || c.PatchURL == nil { + return "" + } + return *c.PatchURL +} + +// GetPermalinkURL returns the PermalinkURL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetPermalinkURL() string { + if c == nil || c.PermalinkURL == nil { + return "" + } + return *c.PermalinkURL +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetTotalCommits returns the TotalCommits field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetTotalCommits() int { + if c == nil || c.TotalCommits == nil { + return 0 + } + return *c.TotalCommits +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *CommitsComparison) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (c *CommitsSearchResult) GetIncompleteResults() bool { + if c == nil || c.IncompleteResults == nil { + return false + } + return *c.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CommitsSearchResult) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. 
+func (c *CommitStats) GetAdditions() int { + if c == nil || c.Additions == nil { + return 0 + } + return *c.Additions +} + +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (c *CommitStats) GetDeletions() int { + if c == nil || c.Deletions == nil { + return 0 + } + return *c.Deletions +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *CommitStats) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetCodeOfConduct returns the CodeOfConduct field. +func (c *CommunityHealthFiles) GetCodeOfConduct() *Metric { + if c == nil { + return nil + } + return c.CodeOfConduct +} + +// GetCodeOfConductFile returns the CodeOfConductFile field. +func (c *CommunityHealthFiles) GetCodeOfConductFile() *Metric { + if c == nil { + return nil + } + return c.CodeOfConductFile +} + +// GetContributing returns the Contributing field. +func (c *CommunityHealthFiles) GetContributing() *Metric { + if c == nil { + return nil + } + return c.Contributing +} + +// GetIssueTemplate returns the IssueTemplate field. +func (c *CommunityHealthFiles) GetIssueTemplate() *Metric { + if c == nil { + return nil + } + return c.IssueTemplate +} + +// GetLicense returns the License field. +func (c *CommunityHealthFiles) GetLicense() *Metric { + if c == nil { + return nil + } + return c.License +} + +// GetPullRequestTemplate returns the PullRequestTemplate field. +func (c *CommunityHealthFiles) GetPullRequestTemplate() *Metric { + if c == nil { + return nil + } + return c.PullRequestTemplate +} + +// GetReadme returns the Readme field. +func (c *CommunityHealthFiles) GetReadme() *Metric { + if c == nil { + return nil + } + return c.Readme +} + +// GetContentReportsEnabled returns the ContentReportsEnabled field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetContentReportsEnabled() bool { + if c == nil || c.ContentReportsEnabled == nil { + return false + } + return *c.ContentReportsEnabled +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetDescription() string { + if c == nil || c.Description == nil { + return "" + } + return *c.Description +} + +// GetDocumentation returns the Documentation field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetDocumentation() string { + if c == nil || c.Documentation == nil { + return "" + } + return *c.Documentation +} + +// GetFiles returns the Files field. +func (c *CommunityHealthMetrics) GetFiles() *CommunityHealthFiles { + if c == nil { + return nil + } + return c.Files +} + +// GetHealthPercentage returns the HealthPercentage field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetHealthPercentage() int { + if c == nil || c.HealthPercentage == nil { + return 0 + } + return *c.HealthPercentage +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (c *CommunityHealthMetrics) GetUpdatedAt() Timestamp { + if c == nil || c.UpdatedAt == nil { + return Timestamp{} + } + return *c.UpdatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *ContentReference) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
+func (c *ContentReference) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetReference returns the Reference field if it's non-nil, zero value otherwise. +func (c *ContentReference) GetReference() string { + if c == nil || c.Reference == nil { + return "" + } + return *c.Reference +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (c *ContentReferenceEvent) GetAction() string { + if c == nil || c.Action == nil { + return "" + } + return *c.Action +} + +// GetContentReference returns the ContentReference field. +func (c *ContentReferenceEvent) GetContentReference() *ContentReference { + if c == nil { + return nil + } + return c.ContentReference +} + +// GetInstallation returns the Installation field. +func (c *ContentReferenceEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetRepo returns the Repo field. +func (c *ContentReferenceEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *ContentReferenceEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetAvatarURL() string { + if c == nil || c.AvatarURL == nil { + return "" + } + return *c.AvatarURL +} + +// GetContributions returns the Contributions field if it's non-nil, zero value otherwise. +func (c *Contributor) GetContributions() int { + if c == nil || c.Contributions == nil { + return 0 + } + return *c.Contributions +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (c *Contributor) GetEmail() string { + if c == nil || c.Email == nil { + return "" + } + return *c.Email +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetEventsURL() string { + if c == nil || c.EventsURL == nil { + return "" + } + return *c.EventsURL +} + +// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetFollowersURL() string { + if c == nil || c.FollowersURL == nil { + return "" + } + return *c.FollowersURL +} + +// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetFollowingURL() string { + if c == nil || c.FollowingURL == nil { + return "" + } + return *c.FollowingURL +} + +// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetGistsURL() string { + if c == nil || c.GistsURL == nil { + return "" + } + return *c.GistsURL +} + +// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. +func (c *Contributor) GetGravatarID() string { + if c == nil || c.GravatarID == nil { + return "" + } + return *c.GravatarID +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetHTMLURL() string { + if c == nil || c.HTMLURL == nil { + return "" + } + return *c.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *Contributor) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. 
+func (c *Contributor) GetLogin() string { + if c == nil || c.Login == nil { + return "" + } + return *c.Login +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *Contributor) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (c *Contributor) GetNodeID() string { + if c == nil || c.NodeID == nil { + return "" + } + return *c.NodeID +} + +// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetOrganizationsURL() string { + if c == nil || c.OrganizationsURL == nil { + return "" + } + return *c.OrganizationsURL +} + +// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetReceivedEventsURL() string { + if c == nil || c.ReceivedEventsURL == nil { + return "" + } + return *c.ReceivedEventsURL +} + +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetReposURL() string { + if c == nil || c.ReposURL == nil { + return "" + } + return *c.ReposURL +} + +// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. +func (c *Contributor) GetSiteAdmin() bool { + if c == nil || c.SiteAdmin == nil { + return false + } + return *c.SiteAdmin +} + +// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetStarredURL() string { + if c == nil || c.StarredURL == nil { + return "" + } + return *c.StarredURL +} + +// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetSubscriptionsURL() string { + if c == nil || c.SubscriptionsURL == nil { + return "" + } + return *c.SubscriptionsURL +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (c *Contributor) GetType() string { + if c == nil || c.Type == nil { + return "" + } + return *c.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (c *Contributor) GetURL() string { + if c == nil || c.URL == nil { + return "" + } + return *c.URL +} + +// GetAuthor returns the Author field. +func (c *ContributorStats) GetAuthor() *Contributor { + if c == nil { + return nil + } + return c.Author +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (c *ContributorStats) GetTotal() int { + if c == nil || c.Total == nil { + return 0 + } + return *c.Total +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetCompletedAt() Timestamp { + if c == nil || c.CompletedAt == nil { + return Timestamp{} + } + return *c.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetConclusion() string { + if c == nil || c.Conclusion == nil { + return "" + } + return *c.Conclusion +} + +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetDetailsURL() string { + if c == nil || c.DetailsURL == nil { + return "" + } + return *c.DetailsURL +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. 
+func (c *CreateCheckRunOptions) GetExternalID() string { + if c == nil || c.ExternalID == nil { + return "" + } + return *c.ExternalID +} + +// GetOutput returns the Output field. +func (c *CreateCheckRunOptions) GetOutput() *CheckRunOutput { + if c == nil { + return nil + } + return c.Output +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetStartedAt() Timestamp { + if c == nil || c.StartedAt == nil { + return Timestamp{} + } + return *c.StartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (c *CreateCheckRunOptions) GetStatus() string { + if c == nil || c.Status == nil { + return "" + } + return *c.Status +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (c *CreateCheckSuiteOptions) GetHeadBranch() string { + if c == nil || c.HeadBranch == nil { + return "" + } + return *c.HeadBranch +} + +// GetClientIP returns the ClientIP field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetClientIP() string { + if c == nil || c.ClientIP == nil { + return "" + } + return *c.ClientIP +} + +// GetDevcontainerPath returns the DevcontainerPath field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetDevcontainerPath() string { + if c == nil || c.DevcontainerPath == nil { + return "" + } + return *c.DevcontainerPath +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetDisplayName() string { + if c == nil || c.DisplayName == nil { + return "" + } + return *c.DisplayName +} + +// GetGeo returns the Geo field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetGeo() string { + if c == nil || c.Geo == nil { + return "" + } + return *c.Geo +} + +// GetIdleTimeoutMinutes returns the IdleTimeoutMinutes field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetIdleTimeoutMinutes() int { + if c == nil || c.IdleTimeoutMinutes == nil { + return 0 + } + return *c.IdleTimeoutMinutes +} + +// GetMachine returns the Machine field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetMachine() string { + if c == nil || c.Machine == nil { + return "" + } + return *c.Machine +} + +// GetMultiRepoPermissionsOptOut returns the MultiRepoPermissionsOptOut field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetMultiRepoPermissionsOptOut() bool { + if c == nil || c.MultiRepoPermissionsOptOut == nil { + return false + } + return *c.MultiRepoPermissionsOptOut +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetRef() string { + if c == nil || c.Ref == nil { + return "" + } + return *c.Ref +} + +// GetRetentionPeriodMinutes returns the RetentionPeriodMinutes field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetRetentionPeriodMinutes() int { + if c == nil || c.RetentionPeriodMinutes == nil { + return 0 + } + return *c.RetentionPeriodMinutes +} + +// GetWorkingDirectory returns the WorkingDirectory field if it's non-nil, zero value otherwise. +func (c *CreateCodespaceOptions) GetWorkingDirectory() string { + if c == nil || c.WorkingDirectory == nil { + return "" + } + return *c.WorkingDirectory +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. 
+func (c *CreateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { + if c == nil || c.AllowsPublicRepositories == nil { + return false + } + return *c.AllowsPublicRepositories +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CreateEnterpriseRunnerGroupRequest) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. +func (c *CreateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { + if c == nil || c.RestrictedToWorkflows == nil { + return false + } + return *c.RestrictedToWorkflows +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (c *CreateEnterpriseRunnerGroupRequest) GetVisibility() string { + if c == nil || c.Visibility == nil { + return "" + } + return *c.Visibility +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetDescription() string { + if c == nil || c.Description == nil { + return "" + } + return *c.Description +} + +// GetInstallation returns the Installation field. +func (c *CreateEvent) GetInstallation() *Installation { + if c == nil { + return nil + } + return c.Installation +} + +// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetMasterBranch() string { + if c == nil || c.MasterBranch == nil { + return "" + } + return *c.MasterBranch +} + +// GetOrg returns the Org field. +func (c *CreateEvent) GetOrg() *Organization { + if c == nil { + return nil + } + return c.Org +} + +// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetPusherType() string { + if c == nil || c.PusherType == nil { + return "" + } + return *c.PusherType +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetRef() string { + if c == nil || c.Ref == nil { + return "" + } + return *c.Ref +} + +// GetRefType returns the RefType field if it's non-nil, zero value otherwise. +func (c *CreateEvent) GetRefType() string { + if c == nil || c.RefType == nil { + return "" + } + return *c.RefType +} + +// GetRepo returns the Repo field. +func (c *CreateEvent) GetRepo() *Repository { + if c == nil { + return nil + } + return c.Repo +} + +// GetSender returns the Sender field. +func (c *CreateEvent) GetSender() *User { + if c == nil { + return nil + } + return c.Sender +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (c *CreateOrgInvitationOptions) GetEmail() string { + if c == nil || c.Email == nil { + return "" + } + return *c.Email +} + +// GetInviteeID returns the InviteeID field if it's non-nil, zero value otherwise. +func (c *CreateOrgInvitationOptions) GetInviteeID() int64 { + if c == nil || c.InviteeID == nil { + return 0 + } + return *c.InviteeID +} + +// GetRole returns the Role field if it's non-nil, zero value otherwise. +func (c *CreateOrgInvitationOptions) GetRole() string { + if c == nil || c.Role == nil { + return "" + } + return *c.Role +} + +// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. +func (c *CreateOrUpdateCustomRoleOptions) GetBaseRole() string { + if c == nil || c.BaseRole == nil { + return "" + } + return *c.BaseRole +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
+func (c *CreateOrUpdateCustomRoleOptions) GetDescription() string { + if c == nil || c.Description == nil { + return "" + } + return *c.Description +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CreateOrUpdateCustomRoleOptions) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (c *CreateProtectedChanges) GetFrom() bool { + if c == nil || c.From == nil { + return false + } + return *c.From +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. +func (c *CreateRunnerGroupRequest) GetAllowsPublicRepositories() bool { + if c == nil || c.AllowsPublicRepositories == nil { + return false + } + return *c.AllowsPublicRepositories +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CreateRunnerGroupRequest) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. +func (c *CreateRunnerGroupRequest) GetRestrictedToWorkflows() bool { + if c == nil || c.RestrictedToWorkflows == nil { + return false + } + return *c.RestrictedToWorkflows +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (c *CreateRunnerGroupRequest) GetVisibility() string { + if c == nil || c.Visibility == nil { + return "" + } + return *c.Visibility +} + +// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. +func (c *CreateUpdateEnvironment) GetCanAdminsBypass() bool { + if c == nil || c.CanAdminsBypass == nil { + return false + } + return *c.CanAdminsBypass +} + +// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. +func (c *CreateUpdateEnvironment) GetDeploymentBranchPolicy() *BranchPolicy { + if c == nil { + return nil + } + return c.DeploymentBranchPolicy +} + +// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. +func (c *CreateUpdateEnvironment) GetPreventSelfReview() bool { + if c == nil || c.PreventSelfReview == nil { + return false + } + return *c.PreventSelfReview +} + +// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. +func (c *CreateUpdateEnvironment) GetWaitTimer() int { + if c == nil || c.WaitTimer == nil { + return 0 + } + return *c.WaitTimer +} + +// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. +func (c *CreateUpdateRequiredWorkflowOptions) GetRepositoryID() int64 { + if c == nil || c.RepositoryID == nil { + return 0 + } + return *c.RepositoryID +} + +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (c *CreateUpdateRequiredWorkflowOptions) GetScope() string { + if c == nil || c.Scope == nil { + return "" + } + return *c.Scope +} + +// GetSelectedRepositoryIDs returns the SelectedRepositoryIDs field. +func (c *CreateUpdateRequiredWorkflowOptions) GetSelectedRepositoryIDs() *SelectedRepoIDs { + if c == nil { + return nil + } + return c.SelectedRepositoryIDs +} + +// GetWorkflowFilePath returns the WorkflowFilePath field if it's non-nil, zero value otherwise. 
+func (c *CreateUpdateRequiredWorkflowOptions) GetWorkflowFilePath() string { + if c == nil || c.WorkflowFilePath == nil { + return "" + } + return *c.WorkflowFilePath +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (c *CreateUserProjectOptions) GetBody() string { + if c == nil || c.Body == nil { + return "" + } + return *c.Body +} + +// GetCreated returns the Created field if it's non-nil, zero value otherwise. +func (c *CreationInfo) GetCreated() Timestamp { + if c == nil || c.Created == nil { + return Timestamp{} + } + return *c.Created +} + +// GetAuthorizedCredentialExpiresAt returns the AuthorizedCredentialExpiresAt field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialExpiresAt() Timestamp { + if c == nil || c.AuthorizedCredentialExpiresAt == nil { + return Timestamp{} + } + return *c.AuthorizedCredentialExpiresAt +} + +// GetAuthorizedCredentialID returns the AuthorizedCredentialID field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialID() int64 { + if c == nil || c.AuthorizedCredentialID == nil { + return 0 + } + return *c.AuthorizedCredentialID +} + +// GetAuthorizedCredentialNote returns the AuthorizedCredentialNote field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialNote() string { + if c == nil || c.AuthorizedCredentialNote == nil { + return "" + } + return *c.AuthorizedCredentialNote +} + +// GetAuthorizedCredentialTitle returns the AuthorizedCredentialTitle field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetAuthorizedCredentialTitle() string { + if c == nil || c.AuthorizedCredentialTitle == nil { + return "" + } + return *c.AuthorizedCredentialTitle +} + +// GetCredentialAccessedAt returns the CredentialAccessedAt field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialAccessedAt() Timestamp { + if c == nil || c.CredentialAccessedAt == nil { + return Timestamp{} + } + return *c.CredentialAccessedAt +} + +// GetCredentialAuthorizedAt returns the CredentialAuthorizedAt field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialAuthorizedAt() Timestamp { + if c == nil || c.CredentialAuthorizedAt == nil { + return Timestamp{} + } + return *c.CredentialAuthorizedAt +} + +// GetCredentialID returns the CredentialID field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialID() int64 { + if c == nil || c.CredentialID == nil { + return 0 + } + return *c.CredentialID +} + +// GetCredentialType returns the CredentialType field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetCredentialType() string { + if c == nil || c.CredentialType == nil { + return "" + } + return *c.CredentialType +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetFingerprint() string { + if c == nil || c.Fingerprint == nil { + return "" + } + return *c.Fingerprint +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (c *CredentialAuthorization) GetLogin() string { + if c == nil || c.Login == nil { + return "" + } + return *c.Login +} + +// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. 
+func (c *CredentialAuthorization) GetTokenLastEight() string { + if c == nil || c.TokenLastEight == nil { + return "" + } + return *c.TokenLastEight +} + +// GetBaseRole returns the BaseRole field if it's non-nil, zero value otherwise. +func (c *CustomRepoRoles) GetBaseRole() string { + if c == nil || c.BaseRole == nil { + return "" + } + return *c.BaseRole +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (c *CustomRepoRoles) GetDescription() string { + if c == nil || c.Description == nil { + return "" + } + return *c.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (c *CustomRepoRoles) GetID() int64 { + if c == nil || c.ID == nil { + return 0 + } + return *c.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (c *CustomRepoRoles) GetName() string { + if c == nil || c.Name == nil { + return "" + } + return *c.Name +} + +// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. +func (d *DefaultSetupConfiguration) GetQuerySuite() string { + if d == nil || d.QuerySuite == nil { + return "" + } + return *d.QuerySuite +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DefaultSetupConfiguration) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DefaultSetupConfiguration) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetConfirmDeleteURL returns the ConfirmDeleteURL field if it's non-nil, zero value otherwise. +func (d *DeleteAnalysis) GetConfirmDeleteURL() string { + if d == nil || d.ConfirmDeleteURL == nil { + return "" + } + return *d.ConfirmDeleteURL +} + +// GetNextAnalysisURL returns the NextAnalysisURL field if it's non-nil, zero value otherwise. +func (d *DeleteAnalysis) GetNextAnalysisURL() string { + if d == nil || d.NextAnalysisURL == nil { + return "" + } + return *d.NextAnalysisURL +} + +// GetInstallation returns the Installation field. +func (d *DeleteEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrg returns the Org field. +func (d *DeleteEvent) GetOrg() *Organization { + if d == nil { + return nil + } + return d.Org +} + +// GetPusherType returns the PusherType field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetPusherType() string { + if d == nil || d.PusherType == nil { + return "" + } + return *d.PusherType +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRefType returns the RefType field if it's non-nil, zero value otherwise. +func (d *DeleteEvent) GetRefType() string { + if d == nil || d.RefType == nil { + return "" + } + return *d.RefType +} + +// GetRepo returns the Repo field. +func (d *DeleteEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeleteEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAutoDismissedAt returns the AutoDismissedAt field if it's non-nil, zero value otherwise. 
+func (d *DependabotAlert) GetAutoDismissedAt() Timestamp { + if d == nil || d.AutoDismissedAt == nil { + return Timestamp{} + } + return *d.AutoDismissedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetDependency returns the Dependency field. +func (d *DependabotAlert) GetDependency() *Dependency { + if d == nil { + return nil + } + return d.Dependency +} + +// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetDismissedAt() Timestamp { + if d == nil || d.DismissedAt == nil { + return Timestamp{} + } + return *d.DismissedAt +} + +// GetDismissedBy returns the DismissedBy field. +func (d *DependabotAlert) GetDismissedBy() *User { + if d == nil { + return nil + } + return d.DismissedBy +} + +// GetDismissedComment returns the DismissedComment field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetDismissedComment() string { + if d == nil || d.DismissedComment == nil { + return "" + } + return *d.DismissedComment +} + +// GetDismissedReason returns the DismissedReason field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetDismissedReason() string { + if d == nil || d.DismissedReason == nil { + return "" + } + return *d.DismissedReason +} + +// GetFixedAt returns the FixedAt field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetFixedAt() Timestamp { + if d == nil || d.FixedAt == nil { + return Timestamp{} + } + return *d.FixedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetHTMLURL() string { + if d == nil || d.HTMLURL == nil { + return "" + } + return *d.HTMLURL +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetNumber() int { + if d == nil || d.Number == nil { + return 0 + } + return *d.Number +} + +// GetRepository returns the Repository field. +func (d *DependabotAlert) GetRepository() *Repository { + if d == nil { + return nil + } + return d.Repository +} + +// GetSecurityAdvisory returns the SecurityAdvisory field. +func (d *DependabotAlert) GetSecurityAdvisory() *DependabotSecurityAdvisory { + if d == nil { + return nil + } + return d.SecurityAdvisory +} + +// GetSecurityVulnerability returns the SecurityVulnerability field. +func (d *DependabotAlert) GetSecurityVulnerability() *AdvisoryVulnerability { + if d == nil { + return nil + } + return d.SecurityVulnerability +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *DependabotAlert) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (d *DependabotAlertEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetAlert returns the Alert field. 
+func (d *DependabotAlertEvent) GetAlert() *DependabotAlert { + if d == nil { + return nil + } + return d.Alert +} + +// GetEnterprise returns the Enterprise field. +func (d *DependabotAlertEvent) GetEnterprise() *Enterprise { + if d == nil { + return nil + } + return d.Enterprise +} + +// GetInstallation returns the Installation field. +func (d *DependabotAlertEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrganization returns the Organization field. +func (d *DependabotAlertEvent) GetOrganization() *Organization { + if d == nil { + return nil + } + return d.Organization +} + +// GetRepo returns the Repo field. +func (d *DependabotAlertEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DependabotAlertEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetCVEID() string { + if d == nil || d.CVEID == nil { + return "" + } + return *d.CVEID +} + +// GetCVSS returns the CVSS field. +func (d *DependabotSecurityAdvisory) GetCVSS() *AdvisoryCVSS { + if d == nil { + return nil + } + return d.CVSS +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetGHSAID() string { + if d == nil || d.GHSAID == nil { + return "" + } + return *d.GHSAID +} + +// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetPublishedAt() Timestamp { + if d == nil || d.PublishedAt == nil { + return Timestamp{} + } + return *d.PublishedAt +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetSeverity() string { + if d == nil || d.Severity == nil { + return "" + } + return *d.Severity +} + +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetSummary() string { + if d == nil || d.Summary == nil { + return "" + } + return *d.Summary +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityAdvisory) GetWithdrawnAt() Timestamp { + if d == nil || d.WithdrawnAt == nil { + return Timestamp{} + } + return *d.WithdrawnAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (d *DependabotSecurityUpdates) GetStatus() string { + if d == nil || d.Status == nil { + return "" + } + return *d.Status +} + +// GetManifestPath returns the ManifestPath field if it's non-nil, zero value otherwise. +func (d *Dependency) GetManifestPath() string { + if d == nil || d.ManifestPath == nil { + return "" + } + return *d.ManifestPath +} + +// GetPackage returns the Package field. 
+func (d *Dependency) GetPackage() *VulnerabilityPackage { + if d == nil { + return nil + } + return d.Package +} + +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (d *Dependency) GetScope() string { + if d == nil || d.Scope == nil { + return "" + } + return *d.Scope +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (d *DeployKeyEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetInstallation returns the Installation field. +func (d *DeployKeyEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetKey returns the Key field. +func (d *DeployKeyEvent) GetKey() *Key { + if d == nil { + return nil + } + return d.Key +} + +// GetOrganization returns the Organization field. +func (d *DeployKeyEvent) GetOrganization() *Organization { + if d == nil { + return nil + } + return d.Organization +} + +// GetRepo returns the Repo field. +func (d *DeployKeyEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeployKeyEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *Deployment) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetCreator returns the Creator field. +func (d *Deployment) GetCreator() *User { + if d == nil { + return nil + } + return d.Creator +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *Deployment) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *Deployment) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *Deployment) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *Deployment) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *Deployment) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetRepositoryURL() string { + if d == nil || d.RepositoryURL == nil { + return "" + } + return *d.RepositoryURL +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (d *Deployment) GetSHA() string { + if d == nil || d.SHA == nil { + return "" + } + return *d.SHA +} + +// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetStatusesURL() string { + if d == nil || d.StatusesURL == nil { + return "" + } + return *d.StatusesURL +} + +// GetTask returns the Task field if it's non-nil, zero value otherwise. +func (d *Deployment) GetTask() string { + if d == nil || d.Task == nil { + return "" + } + return *d.Task +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (d *Deployment) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *Deployment) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicy) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicy) GetName() string { + if d == nil || d.Name == nil { + return "" + } + return *d.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicy) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicy) GetType() string { + if d == nil || d.Type == nil { + return "" + } + return *d.Type +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicyRequest) GetName() string { + if d == nil || d.Name == nil { + return "" + } + return *d.Name +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicyRequest) GetType() string { + if d == nil || d.Type == nil { + return "" + } + return *d.Type +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (d *DeploymentBranchPolicyResponse) GetTotalCount() int { + if d == nil || d.TotalCount == nil { + return 0 + } + return *d.TotalCount +} + +// GetDeployment returns the Deployment field. +func (d *DeploymentEvent) GetDeployment() *Deployment { + if d == nil { + return nil + } + return d.Deployment +} + +// GetInstallation returns the Installation field. +func (d *DeploymentEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrg returns the Org field. +func (d *DeploymentEvent) GetOrg() *Organization { + if d == nil { + return nil + } + return d.Org +} + +// GetRepo returns the Repo field. +func (d *DeploymentEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeploymentEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetWorkflow returns the Workflow field. +func (d *DeploymentEvent) GetWorkflow() *Workflow { + if d == nil { + return nil + } + return d.Workflow +} + +// GetWorkflowRun returns the WorkflowRun field. +func (d *DeploymentEvent) GetWorkflowRun() *WorkflowRun { + if d == nil { + return nil + } + return d.WorkflowRun +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (d *DeploymentProtectionRuleEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetDeployment returns the Deployment field. +func (d *DeploymentProtectionRuleEvent) GetDeployment() *Deployment { + if d == nil { + return nil + } + return d.Deployment +} + +// GetDeploymentCallbackURL returns the DeploymentCallbackURL field if it's non-nil, zero value otherwise. 
+func (d *DeploymentProtectionRuleEvent) GetDeploymentCallbackURL() string { + if d == nil || d.DeploymentCallbackURL == nil { + return "" + } + return *d.DeploymentCallbackURL +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *DeploymentProtectionRuleEvent) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (d *DeploymentProtectionRuleEvent) GetEvent() string { + if d == nil || d.Event == nil { + return "" + } + return *d.Event +} + +// GetInstallation returns the Installation field. +func (d *DeploymentProtectionRuleEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrganization returns the Organization field. +func (d *DeploymentProtectionRuleEvent) GetOrganization() *Organization { + if d == nil { + return nil + } + return d.Organization +} + +// GetRepo returns the Repo field. +func (d *DeploymentProtectionRuleEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeploymentProtectionRuleEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAutoMerge returns the AutoMerge field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetAutoMerge() bool { + if d == nil || d.AutoMerge == nil { + return false + } + return *d.AutoMerge +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetProductionEnvironment returns the ProductionEnvironment field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetProductionEnvironment() bool { + if d == nil || d.ProductionEnvironment == nil { + return false + } + return *d.ProductionEnvironment +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetRef() string { + if d == nil || d.Ref == nil { + return "" + } + return *d.Ref +} + +// GetRequiredContexts returns the RequiredContexts field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetRequiredContexts() []string { + if d == nil || d.RequiredContexts == nil { + return nil + } + return *d.RequiredContexts +} + +// GetTask returns the Task field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetTask() string { + if d == nil || d.Task == nil { + return "" + } + return *d.Task +} + +// GetTransientEnvironment returns the TransientEnvironment field if it's non-nil, zero value otherwise. +func (d *DeploymentRequest) GetTransientEnvironment() bool { + if d == nil || d.TransientEnvironment == nil { + return false + } + return *d.TransientEnvironment +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetCreator returns the Creator field. 
+func (d *DeploymentStatus) GetCreator() *User { + if d == nil { + return nil + } + return d.Creator +} + +// GetDeploymentURL returns the DeploymentURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetDeploymentURL() string { + if d == nil || d.DeploymentURL == nil { + return "" + } + return *d.DeploymentURL +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetEnvironmentURL() string { + if d == nil || d.EnvironmentURL == nil { + return "" + } + return *d.EnvironmentURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetLogURL() string { + if d == nil || d.LogURL == nil { + return "" + } + return *d.LogURL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetRepositoryURL() string { + if d == nil || d.RepositoryURL == nil { + return "" + } + return *d.RepositoryURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetTargetURL() string { + if d == nil || d.TargetURL == nil { + return "" + } + return *d.TargetURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatus) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetDeployment returns the Deployment field. +func (d *DeploymentStatusEvent) GetDeployment() *Deployment { + if d == nil { + return nil + } + return d.Deployment +} + +// GetDeploymentStatus returns the DeploymentStatus field. +func (d *DeploymentStatusEvent) GetDeploymentStatus() *DeploymentStatus { + if d == nil { + return nil + } + return d.DeploymentStatus +} + +// GetInstallation returns the Installation field. +func (d *DeploymentStatusEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrg returns the Org field. +func (d *DeploymentStatusEvent) GetOrg() *Organization { + if d == nil { + return nil + } + return d.Org +} + +// GetRepo returns the Repo field. 
+func (d *DeploymentStatusEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DeploymentStatusEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAutoInactive returns the AutoInactive field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetAutoInactive() bool { + if d == nil || d.AutoInactive == nil { + return false + } + return *d.AutoInactive +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetEnvironment() string { + if d == nil || d.Environment == nil { + return "" + } + return *d.Environment +} + +// GetEnvironmentURL returns the EnvironmentURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetEnvironmentURL() string { + if d == nil || d.EnvironmentURL == nil { + return "" + } + return *d.EnvironmentURL +} + +// GetLogURL returns the LogURL field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetLogURL() string { + if d == nil || d.LogURL == nil { + return "" + } + return *d.LogURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *DeploymentStatusRequest) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (d *Discussion) GetActiveLockReason() string { + if d == nil || d.ActiveLockReason == nil { + return "" + } + return *d.ActiveLockReason +} + +// GetAnswerChosenAt returns the AnswerChosenAt field if it's non-nil, zero value otherwise. +func (d *Discussion) GetAnswerChosenAt() Timestamp { + if d == nil || d.AnswerChosenAt == nil { + return Timestamp{} + } + return *d.AnswerChosenAt +} + +// GetAnswerChosenBy returns the AnswerChosenBy field if it's non-nil, zero value otherwise. +func (d *Discussion) GetAnswerChosenBy() string { + if d == nil || d.AnswerChosenBy == nil { + return "" + } + return *d.AnswerChosenBy +} + +// GetAnswerHTMLURL returns the AnswerHTMLURL field if it's non-nil, zero value otherwise. +func (d *Discussion) GetAnswerHTMLURL() string { + if d == nil || d.AnswerHTMLURL == nil { + return "" + } + return *d.AnswerHTMLURL +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (d *Discussion) GetAuthorAssociation() string { + if d == nil || d.AuthorAssociation == nil { + return "" + } + return *d.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (d *Discussion) GetBody() string { + if d == nil || d.Body == nil { + return "" + } + return *d.Body +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (d *Discussion) GetComments() int { + if d == nil || d.Comments == nil { + return 0 + } + return *d.Comments +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *Discussion) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetDiscussionCategory returns the DiscussionCategory field. 
+func (d *Discussion) GetDiscussionCategory() *DiscussionCategory { + if d == nil { + return nil + } + return d.DiscussionCategory +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (d *Discussion) GetHTMLURL() string { + if d == nil || d.HTMLURL == nil { + return "" + } + return *d.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *Discussion) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetLocked returns the Locked field if it's non-nil, zero value otherwise. +func (d *Discussion) GetLocked() bool { + if d == nil || d.Locked == nil { + return false + } + return *d.Locked +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *Discussion) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (d *Discussion) GetNumber() int { + if d == nil || d.Number == nil { + return 0 + } + return *d.Number +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (d *Discussion) GetRepositoryURL() string { + if d == nil || d.RepositoryURL == nil { + return "" + } + return *d.RepositoryURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (d *Discussion) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (d *Discussion) GetTitle() string { + if d == nil || d.Title == nil { + return "" + } + return *d.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *Discussion) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetUser returns the User field. +func (d *Discussion) GetUser() *User { + if d == nil { + return nil + } + return d.User +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetDescription() string { + if d == nil || d.Description == nil { + return "" + } + return *d.Description +} + +// GetEmoji returns the Emoji field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetEmoji() string { + if d == nil || d.Emoji == nil { + return "" + } + return *d.Emoji +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetID() int64 { + if d == nil || d.ID == nil { + return 0 + } + return *d.ID +} + +// GetIsAnswerable returns the IsAnswerable field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetIsAnswerable() bool { + if d == nil || d.IsAnswerable == nil { + return false + } + return *d.IsAnswerable +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetName() string { + if d == nil || d.Name == nil { + return "" + } + return *d.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
+func (d *DiscussionCategory) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetRepositoryID() int64 { + if d == nil || d.RepositoryID == nil { + return 0 + } + return *d.RepositoryID +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetSlug() string { + if d == nil || d.Slug == nil { + return "" + } + return *d.Slug +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionCategory) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetAuthor returns the Author field. +func (d *DiscussionComment) GetAuthor() *User { + if d == nil { + return nil + } + return d.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBody() string { + if d == nil || d.Body == nil { + return "" + } + return *d.Body +} + +// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBodyHTML() string { + if d == nil || d.BodyHTML == nil { + return "" + } + return *d.BodyHTML +} + +// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetBodyVersion() string { + if d == nil || d.BodyVersion == nil { + return "" + } + return *d.BodyVersion +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetCreatedAt() Timestamp { + if d == nil || d.CreatedAt == nil { + return Timestamp{} + } + return *d.CreatedAt +} + +// GetDiscussionURL returns the DiscussionURL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetDiscussionURL() string { + if d == nil || d.DiscussionURL == nil { + return "" + } + return *d.DiscussionURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetHTMLURL() string { + if d == nil || d.HTMLURL == nil { + return "" + } + return *d.HTMLURL +} + +// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetLastEditedAt() Timestamp { + if d == nil || d.LastEditedAt == nil { + return Timestamp{} + } + return *d.LastEditedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetNodeID() string { + if d == nil || d.NodeID == nil { + return "" + } + return *d.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetNumber() int { + if d == nil || d.Number == nil { + return 0 + } + return *d.Number +} + +// GetReactions returns the Reactions field. +func (d *DiscussionComment) GetReactions() *Reactions { + if d == nil { + return nil + } + return d.Reactions +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetUpdatedAt() Timestamp { + if d == nil || d.UpdatedAt == nil { + return Timestamp{} + } + return *d.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (d *DiscussionComment) GetURL() string { + if d == nil || d.URL == nil { + return "" + } + return *d.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. 
+func (d *DiscussionCommentEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetComment returns the Comment field. +func (d *DiscussionCommentEvent) GetComment() *CommentDiscussion { + if d == nil { + return nil + } + return d.Comment +} + +// GetDiscussion returns the Discussion field. +func (d *DiscussionCommentEvent) GetDiscussion() *Discussion { + if d == nil { + return nil + } + return d.Discussion +} + +// GetInstallation returns the Installation field. +func (d *DiscussionCommentEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrg returns the Org field. +func (d *DiscussionCommentEvent) GetOrg() *Organization { + if d == nil { + return nil + } + return d.Org +} + +// GetRepo returns the Repo field. +func (d *DiscussionCommentEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DiscussionCommentEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (d *DiscussionEvent) GetAction() string { + if d == nil || d.Action == nil { + return "" + } + return *d.Action +} + +// GetDiscussion returns the Discussion field. +func (d *DiscussionEvent) GetDiscussion() *Discussion { + if d == nil { + return nil + } + return d.Discussion +} + +// GetInstallation returns the Installation field. +func (d *DiscussionEvent) GetInstallation() *Installation { + if d == nil { + return nil + } + return d.Installation +} + +// GetOrg returns the Org field. +func (d *DiscussionEvent) GetOrg() *Organization { + if d == nil { + return nil + } + return d.Org +} + +// GetRepo returns the Repo field. +func (d *DiscussionEvent) GetRepo() *Repository { + if d == nil { + return nil + } + return d.Repo +} + +// GetSender returns the Sender field. +func (d *DiscussionEvent) GetSender() *User { + if d == nil { + return nil + } + return d.Sender +} + +// GetApps returns the Apps field if it's non-nil, zero value otherwise. +func (d *DismissalRestrictionsRequest) GetApps() []string { + if d == nil || d.Apps == nil { + return nil + } + return *d.Apps +} + +// GetTeams returns the Teams field if it's non-nil, zero value otherwise. +func (d *DismissalRestrictionsRequest) GetTeams() []string { + if d == nil || d.Teams == nil { + return nil + } + return *d.Teams +} + +// GetUsers returns the Users field if it's non-nil, zero value otherwise. +func (d *DismissalRestrictionsRequest) GetUsers() []string { + if d == nil || d.Users == nil { + return nil + } + return *d.Users +} + +// GetDismissalCommitID returns the DismissalCommitID field if it's non-nil, zero value otherwise. +func (d *DismissedReview) GetDismissalCommitID() string { + if d == nil || d.DismissalCommitID == nil { + return "" + } + return *d.DismissalCommitID +} + +// GetDismissalMessage returns the DismissalMessage field if it's non-nil, zero value otherwise. +func (d *DismissedReview) GetDismissalMessage() string { + if d == nil || d.DismissalMessage == nil { + return "" + } + return *d.DismissalMessage +} + +// GetReviewID returns the ReviewID field if it's non-nil, zero value otherwise. +func (d *DismissedReview) GetReviewID() int64 { + if d == nil || d.ReviewID == nil { + return 0 + } + return *d.ReviewID +} + +// GetState returns the State field if it's non-nil, zero value otherwise. 
+func (d *DismissedReview) GetState() string { + if d == nil || d.State == nil { + return "" + } + return *d.State +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (d *DismissStaleReviewsOnPushChanges) GetFrom() bool { + if d == nil || d.From == nil { + return false + } + return *d.From +} + +// GetClientPayload returns the ClientPayload field if it's non-nil, zero value otherwise. +func (d *DispatchRequestOptions) GetClientPayload() json.RawMessage { + if d == nil || d.ClientPayload == nil { + return json.RawMessage{} + } + return *d.ClientPayload +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetBody() string { + if d == nil || d.Body == nil { + return "" + } + return *d.Body +} + +// GetLine returns the Line field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetLine() int { + if d == nil || d.Line == nil { + return 0 + } + return *d.Line +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetPath() string { + if d == nil || d.Path == nil { + return "" + } + return *d.Path +} + +// GetPosition returns the Position field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetPosition() int { + if d == nil || d.Position == nil { + return 0 + } + return *d.Position +} + +// GetSide returns the Side field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetSide() string { + if d == nil || d.Side == nil { + return "" + } + return *d.Side +} + +// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetStartLine() int { + if d == nil || d.StartLine == nil { + return 0 + } + return *d.StartLine +} + +// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. +func (d *DraftReviewComment) GetStartSide() string { + if d == nil || d.StartSide == nil { + return "" + } + return *d.StartSide +} + +// GetRef returns the Ref field. +func (e *EditBase) GetRef() *EditRef { + if e == nil { + return nil + } + return e.Ref +} + +// GetSHA returns the SHA field. +func (e *EditBase) GetSHA() *EditSHA { + if e == nil { + return nil + } + return e.SHA +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (e *EditBody) GetFrom() string { + if e == nil || e.From == nil { + return "" + } + return *e.From +} + +// GetBase returns the Base field. +func (e *EditChange) GetBase() *EditBase { + if e == nil { + return nil + } + return e.Base +} + +// GetBody returns the Body field. +func (e *EditChange) GetBody() *EditBody { + if e == nil { + return nil + } + return e.Body +} + +// GetOwner returns the Owner field. +func (e *EditChange) GetOwner() *EditOwner { + if e == nil { + return nil + } + return e.Owner +} + +// GetRepo returns the Repo field. +func (e *EditChange) GetRepo() *EditRepo { + if e == nil { + return nil + } + return e.Repo +} + +// GetTitle returns the Title field. +func (e *EditChange) GetTitle() *EditTitle { + if e == nil { + return nil + } + return e.Title +} + +// GetOwnerInfo returns the OwnerInfo field. +func (e *EditOwner) GetOwnerInfo() *OwnerInfo { + if e == nil { + return nil + } + return e.OwnerInfo +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (e *EditRef) GetFrom() string { + if e == nil || e.From == nil { + return "" + } + return *e.From +} + +// GetName returns the Name field. 
+func (e *EditRepo) GetName() *RepoName { + if e == nil { + return nil + } + return e.Name +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (e *EditSHA) GetFrom() string { + if e == nil || e.From == nil { + return "" + } + return *e.From +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (e *EditTitle) GetFrom() string { + if e == nil || e.From == nil { + return "" + } + return *e.From +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetAvatarURL() string { + if e == nil || e.AvatarURL == nil { + return "" + } + return *e.AvatarURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetCreatedAt() Timestamp { + if e == nil || e.CreatedAt == nil { + return Timestamp{} + } + return *e.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetDescription() string { + if e == nil || e.Description == nil { + return "" + } + return *e.Description +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetHTMLURL() string { + if e == nil || e.HTMLURL == nil { + return "" + } + return *e.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetID() int { + if e == nil || e.ID == nil { + return 0 + } + return *e.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetName() string { + if e == nil || e.Name == nil { + return "" + } + return *e.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetNodeID() string { + if e == nil || e.NodeID == nil { + return "" + } + return *e.NodeID +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetSlug() string { + if e == nil || e.Slug == nil { + return "" + } + return *e.Slug +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetUpdatedAt() Timestamp { + if e == nil || e.UpdatedAt == nil { + return Timestamp{} + } + return *e.UpdatedAt +} + +// GetWebsiteURL returns the WebsiteURL field if it's non-nil, zero value otherwise. +func (e *Enterprise) GetWebsiteURL() string { + if e == nil || e.WebsiteURL == nil { + return "" + } + return *e.WebsiteURL +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetAllowsPublicRepositories() bool { + if e == nil || e.AllowsPublicRepositories == nil { + return false + } + return *e.AllowsPublicRepositories +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetDefault() bool { + if e == nil || e.Default == nil { + return false + } + return *e.Default +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetID() int64 { + if e == nil || e.ID == nil { + return 0 + } + return *e.ID +} + +// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetInherited() bool { + if e == nil || e.Inherited == nil { + return false + } + return *e.Inherited +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. 
+func (e *EnterpriseRunnerGroup) GetName() string { + if e == nil || e.Name == nil { + return "" + } + return *e.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetRestrictedToWorkflows() bool { + if e == nil || e.RestrictedToWorkflows == nil { + return false + } + return *e.RestrictedToWorkflows +} + +// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetRunnersURL() string { + if e == nil || e.RunnersURL == nil { + return "" + } + return *e.RunnersURL +} + +// GetSelectedOrganizationsURL returns the SelectedOrganizationsURL field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetSelectedOrganizationsURL() string { + if e == nil || e.SelectedOrganizationsURL == nil { + return "" + } + return *e.SelectedOrganizationsURL +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetVisibility() string { + if e == nil || e.Visibility == nil { + return "" + } + return *e.Visibility +} + +// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroup) GetWorkflowRestrictionsReadOnly() bool { + if e == nil || e.WorkflowRestrictionsReadOnly == nil { + return false + } + return *e.WorkflowRestrictionsReadOnly +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (e *EnterpriseRunnerGroups) GetTotalCount() int { + if e == nil || e.TotalCount == nil { + return 0 + } + return *e.TotalCount +} + +// GetAdvancedSecurityEnabledForNewRepositories returns the AdvancedSecurityEnabledForNewRepositories field if it's non-nil, zero value otherwise. +func (e *EnterpriseSecurityAnalysisSettings) GetAdvancedSecurityEnabledForNewRepositories() bool { + if e == nil || e.AdvancedSecurityEnabledForNewRepositories == nil { + return false + } + return *e.AdvancedSecurityEnabledForNewRepositories +} + +// GetSecretScanningEnabledForNewRepositories returns the SecretScanningEnabledForNewRepositories field if it's non-nil, zero value otherwise. +func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningEnabledForNewRepositories() bool { + if e == nil || e.SecretScanningEnabledForNewRepositories == nil { + return false + } + return *e.SecretScanningEnabledForNewRepositories +} + +// GetSecretScanningPushProtectionCustomLink returns the SecretScanningPushProtectionCustomLink field if it's non-nil, zero value otherwise. +func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionCustomLink() string { + if e == nil || e.SecretScanningPushProtectionCustomLink == nil { + return "" + } + return *e.SecretScanningPushProtectionCustomLink +} + +// GetSecretScanningPushProtectionEnabledForNewRepositories returns the SecretScanningPushProtectionEnabledForNewRepositories field if it's non-nil, zero value otherwise. +func (e *EnterpriseSecurityAnalysisSettings) GetSecretScanningPushProtectionEnabledForNewRepositories() bool { + if e == nil || e.SecretScanningPushProtectionEnabledForNewRepositories == nil { + return false + } + return *e.SecretScanningPushProtectionEnabledForNewRepositories +} + +// GetCanAdminsBypass returns the CanAdminsBypass field if it's non-nil, zero value otherwise. 
+func (e *Environment) GetCanAdminsBypass() bool { + if e == nil || e.CanAdminsBypass == nil { + return false + } + return *e.CanAdminsBypass +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (e *Environment) GetCreatedAt() Timestamp { + if e == nil || e.CreatedAt == nil { + return Timestamp{} + } + return *e.CreatedAt +} + +// GetDeploymentBranchPolicy returns the DeploymentBranchPolicy field. +func (e *Environment) GetDeploymentBranchPolicy() *BranchPolicy { + if e == nil { + return nil + } + return e.DeploymentBranchPolicy +} + +// GetEnvironmentName returns the EnvironmentName field if it's non-nil, zero value otherwise. +func (e *Environment) GetEnvironmentName() string { + if e == nil || e.EnvironmentName == nil { + return "" + } + return *e.EnvironmentName +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (e *Environment) GetHTMLURL() string { + if e == nil || e.HTMLURL == nil { + return "" + } + return *e.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *Environment) GetID() int64 { + if e == nil || e.ID == nil { + return 0 + } + return *e.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (e *Environment) GetName() string { + if e == nil || e.Name == nil { + return "" + } + return *e.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (e *Environment) GetNodeID() string { + if e == nil || e.NodeID == nil { + return "" + } + return *e.NodeID +} + +// GetOwner returns the Owner field if it's non-nil, zero value otherwise. +func (e *Environment) GetOwner() string { + if e == nil || e.Owner == nil { + return "" + } + return *e.Owner +} + +// GetRepo returns the Repo field if it's non-nil, zero value otherwise. +func (e *Environment) GetRepo() string { + if e == nil || e.Repo == nil { + return "" + } + return *e.Repo +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (e *Environment) GetUpdatedAt() Timestamp { + if e == nil || e.UpdatedAt == nil { + return Timestamp{} + } + return *e.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (e *Environment) GetURL() string { + if e == nil || e.URL == nil { + return "" + } + return *e.URL +} + +// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. +func (e *Environment) GetWaitTimer() int { + if e == nil || e.WaitTimer == nil { + return 0 + } + return *e.WaitTimer +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (e *EnvResponse) GetTotalCount() int { + if e == nil || e.TotalCount == nil { + return 0 + } + return *e.TotalCount +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *EnvReviewers) GetID() int64 { + if e == nil || e.ID == nil { + return 0 + } + return *e.ID +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (e *EnvReviewers) GetType() string { + if e == nil || e.Type == nil { + return "" + } + return *e.Type +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (e *ErrorBlock) GetCreatedAt() Timestamp { + if e == nil || e.CreatedAt == nil { + return Timestamp{} + } + return *e.CreatedAt +} + +// GetBlock returns the Block field. 
+func (e *ErrorResponse) GetBlock() *ErrorBlock { + if e == nil { + return nil + } + return e.Block +} + +// GetActor returns the Actor field. +func (e *Event) GetActor() *User { + if e == nil { + return nil + } + return e.Actor +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (e *Event) GetCreatedAt() Timestamp { + if e == nil || e.CreatedAt == nil { + return Timestamp{} + } + return *e.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (e *Event) GetID() string { + if e == nil || e.ID == nil { + return "" + } + return *e.ID +} + +// GetOrg returns the Org field. +func (e *Event) GetOrg() *Organization { + if e == nil { + return nil + } + return e.Org +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (e *Event) GetPublic() bool { + if e == nil || e.Public == nil { + return false + } + return *e.Public +} + +// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. +func (e *Event) GetRawPayload() json.RawMessage { + if e == nil || e.RawPayload == nil { + return json.RawMessage{} + } + return *e.RawPayload +} + +// GetRepo returns the Repo field. +func (e *Event) GetRepo() *Repository { + if e == nil { + return nil + } + return e.Repo +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (e *Event) GetType() string { + if e == nil || e.Type == nil { + return "" + } + return *e.Type +} + +// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. +func (e *ExternalGroup) GetGroupID() int64 { + if e == nil || e.GroupID == nil { + return 0 + } + return *e.GroupID +} + +// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. +func (e *ExternalGroup) GetGroupName() string { + if e == nil || e.GroupName == nil { + return "" + } + return *e.GroupName +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (e *ExternalGroup) GetUpdatedAt() Timestamp { + if e == nil || e.UpdatedAt == nil { + return Timestamp{} + } + return *e.UpdatedAt +} + +// GetMemberEmail returns the MemberEmail field if it's non-nil, zero value otherwise. +func (e *ExternalGroupMember) GetMemberEmail() string { + if e == nil || e.MemberEmail == nil { + return "" + } + return *e.MemberEmail +} + +// GetMemberID returns the MemberID field if it's non-nil, zero value otherwise. +func (e *ExternalGroupMember) GetMemberID() int64 { + if e == nil || e.MemberID == nil { + return 0 + } + return *e.MemberID +} + +// GetMemberLogin returns the MemberLogin field if it's non-nil, zero value otherwise. +func (e *ExternalGroupMember) GetMemberLogin() string { + if e == nil || e.MemberLogin == nil { + return "" + } + return *e.MemberLogin +} + +// GetMemberName returns the MemberName field if it's non-nil, zero value otherwise. +func (e *ExternalGroupMember) GetMemberName() string { + if e == nil || e.MemberName == nil { + return "" + } + return *e.MemberName +} + +// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. +func (e *ExternalGroupTeam) GetTeamID() int64 { + if e == nil || e.TeamID == nil { + return 0 + } + return *e.TeamID +} + +// GetTeamName returns the TeamName field if it's non-nil, zero value otherwise. +func (e *ExternalGroupTeam) GetTeamName() string { + if e == nil || e.TeamName == nil { + return "" + } + return *e.TeamName +} + +// GetHRef returns the HRef field if it's non-nil, zero value otherwise. 
+func (f *FeedLink) GetHRef() string { + if f == nil || f.HRef == nil { + return "" + } + return *f.HRef +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (f *FeedLink) GetType() string { + if f == nil || f.Type == nil { + return "" + } + return *f.Type +} + +// GetCurrentUser returns the CurrentUser field. +func (f *FeedLinks) GetCurrentUser() *FeedLink { + if f == nil { + return nil + } + return f.CurrentUser +} + +// GetCurrentUserActor returns the CurrentUserActor field. +func (f *FeedLinks) GetCurrentUserActor() *FeedLink { + if f == nil { + return nil + } + return f.CurrentUserActor +} + +// GetCurrentUserOrganization returns the CurrentUserOrganization field. +func (f *FeedLinks) GetCurrentUserOrganization() *FeedLink { + if f == nil { + return nil + } + return f.CurrentUserOrganization +} + +// GetCurrentUserPublic returns the CurrentUserPublic field. +func (f *FeedLinks) GetCurrentUserPublic() *FeedLink { + if f == nil { + return nil + } + return f.CurrentUserPublic +} + +// GetTimeline returns the Timeline field. +func (f *FeedLinks) GetTimeline() *FeedLink { + if f == nil { + return nil + } + return f.Timeline +} + +// GetUser returns the User field. +func (f *FeedLinks) GetUser() *FeedLink { + if f == nil { + return nil + } + return f.User +} + +// GetCurrentUserActorURL returns the CurrentUserActorURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserActorURL() string { + if f == nil || f.CurrentUserActorURL == nil { + return "" + } + return *f.CurrentUserActorURL +} + +// GetCurrentUserOrganizationURL returns the CurrentUserOrganizationURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserOrganizationURL() string { + if f == nil || f.CurrentUserOrganizationURL == nil { + return "" + } + return *f.CurrentUserOrganizationURL +} + +// GetCurrentUserPublicURL returns the CurrentUserPublicURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserPublicURL() string { + if f == nil || f.CurrentUserPublicURL == nil { + return "" + } + return *f.CurrentUserPublicURL +} + +// GetCurrentUserURL returns the CurrentUserURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetCurrentUserURL() string { + if f == nil || f.CurrentUserURL == nil { + return "" + } + return *f.CurrentUserURL +} + +// GetLinks returns the Links field. +func (f *Feeds) GetLinks() *FeedLinks { + if f == nil { + return nil + } + return f.Links +} + +// GetTimelineURL returns the TimelineURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetTimelineURL() string { + if f == nil || f.TimelineURL == nil { + return "" + } + return *f.TimelineURL +} + +// GetUserURL returns the UserURL field if it's non-nil, zero value otherwise. +func (f *Feeds) GetUserURL() string { + if f == nil || f.UserURL == nil { + return "" + } + return *f.UserURL +} + +// GetIdentifier returns the Identifier field if it's non-nil, zero value otherwise. +func (f *FirstPatchedVersion) GetIdentifier() string { + if f == nil || f.Identifier == nil { + return "" + } + return *f.Identifier +} + +// GetForkee returns the Forkee field. +func (f *ForkEvent) GetForkee() *Repository { + if f == nil { + return nil + } + return f.Forkee +} + +// GetInstallation returns the Installation field. +func (f *ForkEvent) GetInstallation() *Installation { + if f == nil { + return nil + } + return f.Installation +} + +// GetRepo returns the Repo field. 
+func (f *ForkEvent) GetRepo() *Repository { + if f == nil { + return nil + } + return f.Repo +} + +// GetSender returns the Sender field. +func (f *ForkEvent) GetSender() *User { + if f == nil { + return nil + } + return f.Sender +} + +// GetWorkFolder returns the WorkFolder field if it's non-nil, zero value otherwise. +func (g *GenerateJITConfigRequest) GetWorkFolder() string { + if g == nil || g.WorkFolder == nil { + return "" + } + return *g.WorkFolder +} + +// GetPreviousTagName returns the PreviousTagName field if it's non-nil, zero value otherwise. +func (g *GenerateNotesOptions) GetPreviousTagName() string { + if g == nil || g.PreviousTagName == nil { + return "" + } + return *g.PreviousTagName +} + +// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. +func (g *GenerateNotesOptions) GetTargetCommitish() string { + if g == nil || g.TargetCommitish == nil { + return "" + } + return *g.TargetCommitish +} + +// GetInclude returns the Include field if it's non-nil, zero value otherwise. +func (g *GetAuditLogOptions) GetInclude() string { + if g == nil || g.Include == nil { + return "" + } + return *g.Include +} + +// GetOrder returns the Order field if it's non-nil, zero value otherwise. +func (g *GetAuditLogOptions) GetOrder() string { + if g == nil || g.Order == nil { + return "" + } + return *g.Order +} + +// GetPhrase returns the Phrase field if it's non-nil, zero value otherwise. +func (g *GetAuditLogOptions) GetPhrase() string { + if g == nil || g.Phrase == nil { + return "" + } + return *g.Phrase +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (g *Gist) GetComments() int { + if g == nil || g.Comments == nil { + return 0 + } + return *g.Comments +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *Gist) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (g *Gist) GetDescription() string { + if g == nil || g.Description == nil { + return "" + } + return *g.Description +} + +// GetFiles returns the Files map if it's non-nil, an empty map otherwise. +func (g *Gist) GetFiles() map[GistFilename]GistFile { + if g == nil || g.Files == nil { + return map[GistFilename]GistFile{} + } + return g.Files +} + +// GetGitPullURL returns the GitPullURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetGitPullURL() string { + if g == nil || g.GitPullURL == nil { + return "" + } + return *g.GitPullURL +} + +// GetGitPushURL returns the GitPushURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetGitPushURL() string { + if g == nil || g.GitPushURL == nil { + return "" + } + return *g.GitPushURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (g *Gist) GetHTMLURL() string { + if g == nil || g.HTMLURL == nil { + return "" + } + return *g.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *Gist) GetID() string { + if g == nil || g.ID == nil { + return "" + } + return *g.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *Gist) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetOwner returns the Owner field. 
+func (g *Gist) GetOwner() *User { + if g == nil { + return nil + } + return g.Owner +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (g *Gist) GetPublic() bool { + if g == nil || g.Public == nil { + return false + } + return *g.Public +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (g *Gist) GetUpdatedAt() Timestamp { + if g == nil || g.UpdatedAt == nil { + return Timestamp{} + } + return *g.UpdatedAt +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (g *GistComment) GetBody() string { + if g == nil || g.Body == nil { + return "" + } + return *g.Body +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GistComment) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *GistComment) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistComment) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistComment) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetChangeStatus returns the ChangeStatus field. +func (g *GistCommit) GetChangeStatus() *CommitStats { + if g == nil { + return nil + } + return g.ChangeStatus +} + +// GetCommittedAt returns the CommittedAt field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetCommittedAt() Timestamp { + if g == nil || g.CommittedAt == nil { + return Timestamp{} + } + return *g.CommittedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistCommit) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetVersion returns the Version field if it's non-nil, zero value otherwise. +func (g *GistCommit) GetVersion() string { + if g == nil || g.Version == nil { + return "" + } + return *g.Version +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. +func (g *GistFile) GetContent() string { + if g == nil || g.Content == nil { + return "" + } + return *g.Content +} + +// GetFilename returns the Filename field if it's non-nil, zero value otherwise. +func (g *GistFile) GetFilename() string { + if g == nil || g.Filename == nil { + return "" + } + return *g.Filename +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (g *GistFile) GetLanguage() string { + if g == nil || g.Language == nil { + return "" + } + return *g.Language +} + +// GetRawURL returns the RawURL field if it's non-nil, zero value otherwise. +func (g *GistFile) GetRawURL() string { + if g == nil || g.RawURL == nil { + return "" + } + return *g.RawURL +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. 
+func (g *GistFile) GetSize() int { + if g == nil || g.Size == nil { + return 0 + } + return *g.Size +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (g *GistFile) GetType() string { + if g == nil || g.Type == nil { + return "" + } + return *g.Type +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GistFork) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *GistFork) GetID() string { + if g == nil || g.ID == nil { + return "" + } + return *g.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (g *GistFork) GetNodeID() string { + if g == nil || g.NodeID == nil { + return "" + } + return *g.NodeID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (g *GistFork) GetUpdatedAt() Timestamp { + if g == nil || g.UpdatedAt == nil { + return Timestamp{} + } + return *g.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *GistFork) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetUser returns the User field. +func (g *GistFork) GetUser() *User { + if g == nil { + return nil + } + return g.User +} + +// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetPrivateGists() int { + if g == nil || g.PrivateGists == nil { + return 0 + } + return *g.PrivateGists +} + +// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetPublicGists() int { + if g == nil || g.PublicGists == nil { + return 0 + } + return *g.PublicGists +} + +// GetTotalGists returns the TotalGists field if it's non-nil, zero value otherwise. +func (g *GistStats) GetTotalGists() int { + if g == nil || g.TotalGists == nil { + return 0 + } + return *g.TotalGists +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (g *GitHubAppAuthorizationEvent) GetAction() string { + if g == nil || g.Action == nil { + return "" + } + return *g.Action +} + +// GetInstallation returns the Installation field. +func (g *GitHubAppAuthorizationEvent) GetInstallation() *Installation { + if g == nil { + return nil + } + return g.Installation +} + +// GetSender returns the Sender field. +func (g *GitHubAppAuthorizationEvent) GetSender() *User { + if g == nil { + return nil + } + return g.Sender +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (g *Gitignore) GetName() string { + if g == nil || g.Name == nil { + return "" + } + return *g.Name +} + +// GetSource returns the Source field if it's non-nil, zero value otherwise. +func (g *Gitignore) GetSource() string { + if g == nil || g.Source == nil { + return "" + } + return *g.Source +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (g *GitObject) GetSHA() string { + if g == nil || g.SHA == nil { + return "" + } + return *g.SHA +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (g *GitObject) GetType() string { + if g == nil || g.Type == nil { + return "" + } + return *g.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. 
+func (g *GitObject) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetInstallation returns the Installation field. +func (g *GollumEvent) GetInstallation() *Installation { + if g == nil { + return nil + } + return g.Installation +} + +// GetOrg returns the Org field. +func (g *GollumEvent) GetOrg() *Organization { + if g == nil { + return nil + } + return g.Org +} + +// GetRepo returns the Repo field. +func (g *GollumEvent) GetRepo() *Repository { + if g == nil { + return nil + } + return g.Repo +} + +// GetSender returns the Sender field. +func (g *GollumEvent) GetSender() *User { + if g == nil { + return nil + } + return g.Sender +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (g *GPGEmail) GetEmail() string { + if g == nil || g.Email == nil { + return "" + } + return *g.Email +} + +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (g *GPGEmail) GetVerified() bool { + if g == nil || g.Verified == nil { + return false + } + return *g.Verified +} + +// GetCanCertify returns the CanCertify field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanCertify() bool { + if g == nil || g.CanCertify == nil { + return false + } + return *g.CanCertify +} + +// GetCanEncryptComms returns the CanEncryptComms field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanEncryptComms() bool { + if g == nil || g.CanEncryptComms == nil { + return false + } + return *g.CanEncryptComms +} + +// GetCanEncryptStorage returns the CanEncryptStorage field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanEncryptStorage() bool { + if g == nil || g.CanEncryptStorage == nil { + return false + } + return *g.CanEncryptStorage +} + +// GetCanSign returns the CanSign field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCanSign() bool { + if g == nil || g.CanSign == nil { + return false + } + return *g.CanSign +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetExpiresAt() Timestamp { + if g == nil || g.ExpiresAt == nil { + return Timestamp{} + } + return *g.ExpiresAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetKeyID() string { + if g == nil || g.KeyID == nil { + return "" + } + return *g.KeyID +} + +// GetPrimaryKeyID returns the PrimaryKeyID field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetPrimaryKeyID() int64 { + if g == nil || g.PrimaryKeyID == nil { + return 0 + } + return *g.PrimaryKeyID +} + +// GetPublicKey returns the PublicKey field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetPublicKey() string { + if g == nil || g.PublicKey == nil { + return "" + } + return *g.PublicKey +} + +// GetRawKey returns the RawKey field if it's non-nil, zero value otherwise. +func (g *GPGKey) GetRawKey() string { + if g == nil || g.RawKey == nil { + return "" + } + return *g.RawKey +} + +// GetApp returns the App field. 
+func (g *Grant) GetApp() *AuthorizationApp { + if g == nil { + return nil + } + return g.App +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (g *Grant) GetCreatedAt() Timestamp { + if g == nil || g.CreatedAt == nil { + return Timestamp{} + } + return *g.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (g *Grant) GetID() int64 { + if g == nil || g.ID == nil { + return 0 + } + return *g.ID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (g *Grant) GetUpdatedAt() Timestamp { + if g == nil || g.UpdatedAt == nil { + return Timestamp{} + } + return *g.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (g *Grant) GetURL() string { + if g == nil || g.URL == nil { + return "" + } + return *g.URL +} + +// GetAuthor returns the Author field. +func (h *HeadCommit) GetAuthor() *CommitAuthor { + if h == nil { + return nil + } + return h.Author +} + +// GetCommitter returns the Committer field. +func (h *HeadCommit) GetCommitter() *CommitAuthor { + if h == nil { + return nil + } + return h.Committer +} + +// GetDistinct returns the Distinct field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetDistinct() bool { + if h == nil || h.Distinct == nil { + return false + } + return *h.Distinct +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetID() string { + if h == nil || h.ID == nil { + return "" + } + return *h.ID +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetMessage() string { + if h == nil || h.Message == nil { + return "" + } + return *h.Message +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetSHA() string { + if h == nil || h.SHA == nil { + return "" + } + return *h.SHA +} + +// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetTimestamp() Timestamp { + if h == nil || h.Timestamp == nil { + return Timestamp{} + } + return *h.Timestamp +} + +// GetTreeID returns the TreeID field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetTreeID() string { + if h == nil || h.TreeID == nil { + return "" + } + return *h.TreeID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (h *HeadCommit) GetURL() string { + if h == nil || h.URL == nil { + return "" + } + return *h.URL +} + +// GetActive returns the Active field if it's non-nil, zero value otherwise. +func (h *Hook) GetActive() bool { + if h == nil || h.Active == nil { + return false + } + return *h.Active +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (h *Hook) GetCreatedAt() Timestamp { + if h == nil || h.CreatedAt == nil { + return Timestamp{} + } + return *h.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (h *Hook) GetID() int64 { + if h == nil || h.ID == nil { + return 0 + } + return *h.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (h *Hook) GetName() string { + if h == nil || h.Name == nil { + return "" + } + return *h.Name +} + +// GetPingURL returns the PingURL field if it's non-nil, zero value otherwise. 
+func (h *Hook) GetPingURL() string { + if h == nil || h.PingURL == nil { + return "" + } + return *h.PingURL +} + +// GetTestURL returns the TestURL field if it's non-nil, zero value otherwise. +func (h *Hook) GetTestURL() string { + if h == nil || h.TestURL == nil { + return "" + } + return *h.TestURL +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (h *Hook) GetType() string { + if h == nil || h.Type == nil { + return "" + } + return *h.Type +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (h *Hook) GetUpdatedAt() Timestamp { + if h == nil || h.UpdatedAt == nil { + return Timestamp{} + } + return *h.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (h *Hook) GetURL() string { + if h == nil || h.URL == nil { + return "" + } + return *h.URL +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (h *HookConfig) GetContentType() string { + if h == nil || h.ContentType == nil { + return "" + } + return *h.ContentType +} + +// GetInsecureSSL returns the InsecureSSL field if it's non-nil, zero value otherwise. +func (h *HookConfig) GetInsecureSSL() string { + if h == nil || h.InsecureSSL == nil { + return "" + } + return *h.InsecureSSL +} + +// GetSecret returns the Secret field if it's non-nil, zero value otherwise. +func (h *HookConfig) GetSecret() string { + if h == nil || h.Secret == nil { + return "" + } + return *h.Secret +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (h *HookConfig) GetURL() string { + if h == nil || h.URL == nil { + return "" + } + return *h.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetAction() string { + if h == nil || h.Action == nil { + return "" + } + return *h.Action +} + +// GetDeliveredAt returns the DeliveredAt field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetDeliveredAt() Timestamp { + if h == nil || h.DeliveredAt == nil { + return Timestamp{} + } + return *h.DeliveredAt +} + +// GetDuration returns the Duration field. +func (h *HookDelivery) GetDuration() *float64 { + if h == nil { + return nil + } + return h.Duration +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetEvent() string { + if h == nil || h.Event == nil { + return "" + } + return *h.Event +} + +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetGUID() string { + if h == nil || h.GUID == nil { + return "" + } + return *h.GUID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetID() int64 { + if h == nil || h.ID == nil { + return 0 + } + return *h.ID +} + +// GetInstallationID returns the InstallationID field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetInstallationID() int64 { + if h == nil || h.InstallationID == nil { + return 0 + } + return *h.InstallationID +} + +// GetRedelivery returns the Redelivery field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetRedelivery() bool { + if h == nil || h.Redelivery == nil { + return false + } + return *h.Redelivery +} + +// GetRepositoryID returns the RepositoryID field if it's non-nil, zero value otherwise. 
+func (h *HookDelivery) GetRepositoryID() int64 { + if h == nil || h.RepositoryID == nil { + return 0 + } + return *h.RepositoryID +} + +// GetRequest returns the Request field. +func (h *HookDelivery) GetRequest() *HookRequest { + if h == nil { + return nil + } + return h.Request +} + +// GetResponse returns the Response field. +func (h *HookDelivery) GetResponse() *HookResponse { + if h == nil { + return nil + } + return h.Response +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetStatus() string { + if h == nil || h.Status == nil { + return "" + } + return *h.Status +} + +// GetStatusCode returns the StatusCode field if it's non-nil, zero value otherwise. +func (h *HookDelivery) GetStatusCode() int { + if h == nil || h.StatusCode == nil { + return 0 + } + return *h.StatusCode +} + +// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. +func (h *HookRequest) GetHeaders() map[string]string { + if h == nil || h.Headers == nil { + return map[string]string{} + } + return h.Headers +} + +// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. +func (h *HookRequest) GetRawPayload() json.RawMessage { + if h == nil || h.RawPayload == nil { + return json.RawMessage{} + } + return *h.RawPayload +} + +// GetHeaders returns the Headers map if it's non-nil, an empty map otherwise. +func (h *HookResponse) GetHeaders() map[string]string { + if h == nil || h.Headers == nil { + return map[string]string{} + } + return h.Headers +} + +// GetRawPayload returns the RawPayload field if it's non-nil, zero value otherwise. +func (h *HookResponse) GetRawPayload() json.RawMessage { + if h == nil || h.RawPayload == nil { + return json.RawMessage{} + } + return *h.RawPayload +} + +// GetActiveHooks returns the ActiveHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetActiveHooks() int { + if h == nil || h.ActiveHooks == nil { + return 0 + } + return *h.ActiveHooks +} + +// GetInactiveHooks returns the InactiveHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetInactiveHooks() int { + if h == nil || h.InactiveHooks == nil { + return 0 + } + return *h.InactiveHooks +} + +// GetTotalHooks returns the TotalHooks field if it's non-nil, zero value otherwise. +func (h *HookStats) GetTotalHooks() int { + if h == nil || h.TotalHooks == nil { + return 0 + } + return *h.TotalHooks +} + +// GetGroupDescription returns the GroupDescription field if it's non-nil, zero value otherwise. +func (i *IDPGroup) GetGroupDescription() string { + if i == nil || i.GroupDescription == nil { + return "" + } + return *i.GroupDescription +} + +// GetGroupID returns the GroupID field if it's non-nil, zero value otherwise. +func (i *IDPGroup) GetGroupID() string { + if i == nil || i.GroupID == nil { + return "" + } + return *i.GroupID +} + +// GetGroupName returns the GroupName field if it's non-nil, zero value otherwise. +func (i *IDPGroup) GetGroupName() string { + if i == nil || i.GroupName == nil { + return "" + } + return *i.GroupName +} + +// GetAuthorsCount returns the AuthorsCount field if it's non-nil, zero value otherwise. +func (i *Import) GetAuthorsCount() int { + if i == nil || i.AuthorsCount == nil { + return 0 + } + return *i.AuthorsCount +} + +// GetAuthorsURL returns the AuthorsURL field if it's non-nil, zero value otherwise. 
+func (i *Import) GetAuthorsURL() string { + if i == nil || i.AuthorsURL == nil { + return "" + } + return *i.AuthorsURL +} + +// GetCommitCount returns the CommitCount field if it's non-nil, zero value otherwise. +func (i *Import) GetCommitCount() int { + if i == nil || i.CommitCount == nil { + return 0 + } + return *i.CommitCount +} + +// GetFailedStep returns the FailedStep field if it's non-nil, zero value otherwise. +func (i *Import) GetFailedStep() string { + if i == nil || i.FailedStep == nil { + return "" + } + return *i.FailedStep +} + +// GetHasLargeFiles returns the HasLargeFiles field if it's non-nil, zero value otherwise. +func (i *Import) GetHasLargeFiles() bool { + if i == nil || i.HasLargeFiles == nil { + return false + } + return *i.HasLargeFiles +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Import) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetHumanName returns the HumanName field if it's non-nil, zero value otherwise. +func (i *Import) GetHumanName() string { + if i == nil || i.HumanName == nil { + return "" + } + return *i.HumanName +} + +// GetLargeFilesCount returns the LargeFilesCount field if it's non-nil, zero value otherwise. +func (i *Import) GetLargeFilesCount() int { + if i == nil || i.LargeFilesCount == nil { + return 0 + } + return *i.LargeFilesCount +} + +// GetLargeFilesSize returns the LargeFilesSize field if it's non-nil, zero value otherwise. +func (i *Import) GetLargeFilesSize() int { + if i == nil || i.LargeFilesSize == nil { + return 0 + } + return *i.LargeFilesSize +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (i *Import) GetMessage() string { + if i == nil || i.Message == nil { + return "" + } + return *i.Message +} + +// GetPercent returns the Percent field if it's non-nil, zero value otherwise. +func (i *Import) GetPercent() int { + if i == nil || i.Percent == nil { + return 0 + } + return *i.Percent +} + +// GetPushPercent returns the PushPercent field if it's non-nil, zero value otherwise. +func (i *Import) GetPushPercent() int { + if i == nil || i.PushPercent == nil { + return 0 + } + return *i.PushPercent +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (i *Import) GetRepositoryURL() string { + if i == nil || i.RepositoryURL == nil { + return "" + } + return *i.RepositoryURL +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (i *Import) GetStatus() string { + if i == nil || i.Status == nil { + return "" + } + return *i.Status +} + +// GetStatusText returns the StatusText field if it's non-nil, zero value otherwise. +func (i *Import) GetStatusText() string { + if i == nil || i.StatusText == nil { + return "" + } + return *i.StatusText +} + +// GetTFVCProject returns the TFVCProject field if it's non-nil, zero value otherwise. +func (i *Import) GetTFVCProject() string { + if i == nil || i.TFVCProject == nil { + return "" + } + return *i.TFVCProject +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *Import) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUseLFS returns the UseLFS field if it's non-nil, zero value otherwise. +func (i *Import) GetUseLFS() string { + if i == nil || i.UseLFS == nil { + return "" + } + return *i.UseLFS +} + +// GetVCS returns the VCS field if it's non-nil, zero value otherwise. 
+func (i *Import) GetVCS() string { + if i == nil || i.VCS == nil { + return "" + } + return *i.VCS +} + +// GetVCSPassword returns the VCSPassword field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSPassword() string { + if i == nil || i.VCSPassword == nil { + return "" + } + return *i.VCSPassword +} + +// GetVCSURL returns the VCSURL field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSURL() string { + if i == nil || i.VCSURL == nil { + return "" + } + return *i.VCSURL +} + +// GetVCSUsername returns the VCSUsername field if it's non-nil, zero value otherwise. +func (i *Import) GetVCSUsername() string { + if i == nil || i.VCSUsername == nil { + return "" + } + return *i.VCSUsername +} + +// GetAccessTokensURL returns the AccessTokensURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetAccessTokensURL() string { + if i == nil || i.AccessTokensURL == nil { + return "" + } + return *i.AccessTokensURL +} + +// GetAccount returns the Account field. +func (i *Installation) GetAccount() *User { + if i == nil { + return nil + } + return i.Account +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (i *Installation) GetAppID() int64 { + if i == nil || i.AppID == nil { + return 0 + } + return *i.AppID +} + +// GetAppSlug returns the AppSlug field if it's non-nil, zero value otherwise. +func (i *Installation) GetAppSlug() string { + if i == nil || i.AppSlug == nil { + return "" + } + return *i.AppSlug +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *Installation) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetHasMultipleSingleFiles returns the HasMultipleSingleFiles field if it's non-nil, zero value otherwise. +func (i *Installation) GetHasMultipleSingleFiles() bool { + if i == nil || i.HasMultipleSingleFiles == nil { + return false + } + return *i.HasMultipleSingleFiles +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Installation) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *Installation) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetPermissions returns the Permissions field. +func (i *Installation) GetPermissions() *InstallationPermissions { + if i == nil { + return nil + } + return i.Permissions +} + +// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. +func (i *Installation) GetRepositoriesURL() string { + if i == nil || i.RepositoriesURL == nil { + return "" + } + return *i.RepositoriesURL +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. +func (i *Installation) GetRepositorySelection() string { + if i == nil || i.RepositorySelection == nil { + return "" + } + return *i.RepositorySelection +} + +// GetSingleFileName returns the SingleFileName field if it's non-nil, zero value otherwise. 
+func (i *Installation) GetSingleFileName() string { + if i == nil || i.SingleFileName == nil { + return "" + } + return *i.SingleFileName +} + +// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. +func (i *Installation) GetSuspendedAt() Timestamp { + if i == nil || i.SuspendedAt == nil { + return Timestamp{} + } + return *i.SuspendedAt +} + +// GetSuspendedBy returns the SuspendedBy field. +func (i *Installation) GetSuspendedBy() *User { + if i == nil { + return nil + } + return i.SuspendedBy +} + +// GetTargetID returns the TargetID field if it's non-nil, zero value otherwise. +func (i *Installation) GetTargetID() int64 { + if i == nil || i.TargetID == nil { + return 0 + } + return *i.TargetID +} + +// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. +func (i *Installation) GetTargetType() string { + if i == nil || i.TargetType == nil { + return "" + } + return *i.TargetType +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *Installation) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetLogin returns the Login field. +func (i *InstallationChanges) GetLogin() *InstallationLoginChange { + if i == nil { + return nil + } + return i.Login +} + +// GetSlug returns the Slug field. +func (i *InstallationChanges) GetSlug() *InstallationSlugChange { + if i == nil { + return nil + } + return i.Slug +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetInstallation returns the Installation field. +func (i *InstallationEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetOrg returns the Org field. +func (i *InstallationEvent) GetOrg() *Organization { + if i == nil { + return nil + } + return i.Org +} + +// GetRequester returns the Requester field. +func (i *InstallationEvent) GetRequester() *User { + if i == nil { + return nil + } + return i.Requester +} + +// GetSender returns the Sender field. +func (i *InstallationEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (i *InstallationLoginChange) GetFrom() string { + if i == nil || i.From == nil { + return "" + } + return *i.From +} + +// GetActions returns the Actions field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetActions() string { + if i == nil || i.Actions == nil { + return "" + } + return *i.Actions +} + +// GetAdministration returns the Administration field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetAdministration() string { + if i == nil || i.Administration == nil { + return "" + } + return *i.Administration +} + +// GetBlocking returns the Blocking field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetBlocking() string { + if i == nil || i.Blocking == nil { + return "" + } + return *i.Blocking +} + +// GetChecks returns the Checks field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetChecks() string { + if i == nil || i.Checks == nil { + return "" + } + return *i.Checks +} + +// GetContentReferences returns the ContentReferences field if it's non-nil, zero value otherwise. 
+func (i *InstallationPermissions) GetContentReferences() string { + if i == nil || i.ContentReferences == nil { + return "" + } + return *i.ContentReferences +} + +// GetContents returns the Contents field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetContents() string { + if i == nil || i.Contents == nil { + return "" + } + return *i.Contents +} + +// GetDeployments returns the Deployments field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetDeployments() string { + if i == nil || i.Deployments == nil { + return "" + } + return *i.Deployments +} + +// GetEmails returns the Emails field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetEmails() string { + if i == nil || i.Emails == nil { + return "" + } + return *i.Emails +} + +// GetEnvironments returns the Environments field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetEnvironments() string { + if i == nil || i.Environments == nil { + return "" + } + return *i.Environments +} + +// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetFollowers() string { + if i == nil || i.Followers == nil { + return "" + } + return *i.Followers +} + +// GetIssues returns the Issues field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetIssues() string { + if i == nil || i.Issues == nil { + return "" + } + return *i.Issues +} + +// GetMembers returns the Members field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetMembers() string { + if i == nil || i.Members == nil { + return "" + } + return *i.Members +} + +// GetMetadata returns the Metadata field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetMetadata() string { + if i == nil || i.Metadata == nil { + return "" + } + return *i.Metadata +} + +// GetOrganizationAdministration returns the OrganizationAdministration field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationAdministration() string { + if i == nil || i.OrganizationAdministration == nil { + return "" + } + return *i.OrganizationAdministration +} + +// GetOrganizationCustomRoles returns the OrganizationCustomRoles field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationCustomRoles() string { + if i == nil || i.OrganizationCustomRoles == nil { + return "" + } + return *i.OrganizationCustomRoles +} + +// GetOrganizationHooks returns the OrganizationHooks field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationHooks() string { + if i == nil || i.OrganizationHooks == nil { + return "" + } + return *i.OrganizationHooks +} + +// GetOrganizationPackages returns the OrganizationPackages field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationPackages() string { + if i == nil || i.OrganizationPackages == nil { + return "" + } + return *i.OrganizationPackages +} + +// GetOrganizationPlan returns the OrganizationPlan field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationPlan() string { + if i == nil || i.OrganizationPlan == nil { + return "" + } + return *i.OrganizationPlan +} + +// GetOrganizationPreReceiveHooks returns the OrganizationPreReceiveHooks field if it's non-nil, zero value otherwise. 
+func (i *InstallationPermissions) GetOrganizationPreReceiveHooks() string { + if i == nil || i.OrganizationPreReceiveHooks == nil { + return "" + } + return *i.OrganizationPreReceiveHooks +} + +// GetOrganizationProjects returns the OrganizationProjects field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationProjects() string { + if i == nil || i.OrganizationProjects == nil { + return "" + } + return *i.OrganizationProjects +} + +// GetOrganizationSecrets returns the OrganizationSecrets field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationSecrets() string { + if i == nil || i.OrganizationSecrets == nil { + return "" + } + return *i.OrganizationSecrets +} + +// GetOrganizationSelfHostedRunners returns the OrganizationSelfHostedRunners field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationSelfHostedRunners() string { + if i == nil || i.OrganizationSelfHostedRunners == nil { + return "" + } + return *i.OrganizationSelfHostedRunners +} + +// GetOrganizationUserBlocking returns the OrganizationUserBlocking field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetOrganizationUserBlocking() string { + if i == nil || i.OrganizationUserBlocking == nil { + return "" + } + return *i.OrganizationUserBlocking +} + +// GetPackages returns the Packages field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetPackages() string { + if i == nil || i.Packages == nil { + return "" + } + return *i.Packages +} + +// GetPages returns the Pages field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetPages() string { + if i == nil || i.Pages == nil { + return "" + } + return *i.Pages +} + +// GetPullRequests returns the PullRequests field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetPullRequests() string { + if i == nil || i.PullRequests == nil { + return "" + } + return *i.PullRequests +} + +// GetRepositoryHooks returns the RepositoryHooks field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetRepositoryHooks() string { + if i == nil || i.RepositoryHooks == nil { + return "" + } + return *i.RepositoryHooks +} + +// GetRepositoryPreReceiveHooks returns the RepositoryPreReceiveHooks field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetRepositoryPreReceiveHooks() string { + if i == nil || i.RepositoryPreReceiveHooks == nil { + return "" + } + return *i.RepositoryPreReceiveHooks +} + +// GetRepositoryProjects returns the RepositoryProjects field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetRepositoryProjects() string { + if i == nil || i.RepositoryProjects == nil { + return "" + } + return *i.RepositoryProjects +} + +// GetSecrets returns the Secrets field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetSecrets() string { + if i == nil || i.Secrets == nil { + return "" + } + return *i.Secrets +} + +// GetSecretScanningAlerts returns the SecretScanningAlerts field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetSecretScanningAlerts() string { + if i == nil || i.SecretScanningAlerts == nil { + return "" + } + return *i.SecretScanningAlerts +} + +// GetSecurityEvents returns the SecurityEvents field if it's non-nil, zero value otherwise. 
+func (i *InstallationPermissions) GetSecurityEvents() string { + if i == nil || i.SecurityEvents == nil { + return "" + } + return *i.SecurityEvents +} + +// GetSingleFile returns the SingleFile field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetSingleFile() string { + if i == nil || i.SingleFile == nil { + return "" + } + return *i.SingleFile +} + +// GetStatuses returns the Statuses field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetStatuses() string { + if i == nil || i.Statuses == nil { + return "" + } + return *i.Statuses +} + +// GetTeamDiscussions returns the TeamDiscussions field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetTeamDiscussions() string { + if i == nil || i.TeamDiscussions == nil { + return "" + } + return *i.TeamDiscussions +} + +// GetVulnerabilityAlerts returns the VulnerabilityAlerts field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetVulnerabilityAlerts() string { + if i == nil || i.VulnerabilityAlerts == nil { + return "" + } + return *i.VulnerabilityAlerts +} + +// GetWorkflows returns the Workflows field if it's non-nil, zero value otherwise. +func (i *InstallationPermissions) GetWorkflows() string { + if i == nil || i.Workflows == nil { + return "" + } + return *i.Workflows +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationRepositoriesEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetInstallation returns the Installation field. +func (i *InstallationRepositoriesEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetOrg returns the Org field. +func (i *InstallationRepositoriesEvent) GetOrg() *Organization { + if i == nil { + return nil + } + return i.Org +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. +func (i *InstallationRepositoriesEvent) GetRepositorySelection() string { + if i == nil || i.RepositorySelection == nil { + return "" + } + return *i.RepositorySelection +} + +// GetSender returns the Sender field. +func (i *InstallationRepositoriesEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetAccount returns the Account field. +func (i *InstallationRequest) GetAccount() *User { + if i == nil { + return nil + } + return i.Account +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *InstallationRequest) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *InstallationRequest) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *InstallationRequest) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetRequester returns the Requester field. +func (i *InstallationRequest) GetRequester() *User { + if i == nil { + return nil + } + return i.Requester +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (i *InstallationSlugChange) GetFrom() string { + if i == nil || i.From == nil { + return "" + } + return *i.From +} + +// GetAccount returns the Account field. 
+func (i *InstallationTargetEvent) GetAccount() *User { + if i == nil { + return nil + } + return i.Account +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *InstallationTargetEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetChanges returns the Changes field. +func (i *InstallationTargetEvent) GetChanges() *InstallationChanges { + if i == nil { + return nil + } + return i.Changes +} + +// GetEnterprise returns the Enterprise field. +func (i *InstallationTargetEvent) GetEnterprise() *Enterprise { + if i == nil { + return nil + } + return i.Enterprise +} + +// GetInstallation returns the Installation field. +func (i *InstallationTargetEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetOrganization returns the Organization field. +func (i *InstallationTargetEvent) GetOrganization() *Organization { + if i == nil { + return nil + } + return i.Organization +} + +// GetRepository returns the Repository field. +func (i *InstallationTargetEvent) GetRepository() *Repository { + if i == nil { + return nil + } + return i.Repository +} + +// GetSender returns the Sender field. +func (i *InstallationTargetEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetTargetType returns the TargetType field if it's non-nil, zero value otherwise. +func (i *InstallationTargetEvent) GetTargetType() string { + if i == nil || i.TargetType == nil { + return "" + } + return *i.TargetType +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetExpiresAt() Timestamp { + if i == nil || i.ExpiresAt == nil { + return Timestamp{} + } + return *i.ExpiresAt +} + +// GetPermissions returns the Permissions field. +func (i *InstallationToken) GetPermissions() *InstallationPermissions { + if i == nil { + return nil + } + return i.Permissions +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (i *InstallationToken) GetToken() string { + if i == nil || i.Token == nil { + return "" + } + return *i.Token +} + +// GetPermissions returns the Permissions field. +func (i *InstallationTokenOptions) GetPermissions() *InstallationPermissions { + if i == nil { + return nil + } + return i.Permissions +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetExpiresAt() Timestamp { + if i == nil || i.ExpiresAt == nil { + return Timestamp{} + } + return *i.ExpiresAt +} + +// GetLimit returns the Limit field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetLimit() string { + if i == nil || i.Limit == nil { + return "" + } + return *i.Limit +} + +// GetOrigin returns the Origin field if it's non-nil, zero value otherwise. +func (i *InteractionRestriction) GetOrigin() string { + if i == nil || i.Origin == nil { + return "" + } + return *i.Origin +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *Invitation) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (i *Invitation) GetEmail() string { + if i == nil || i.Email == nil { + return "" + } + return *i.Email +} + +// GetFailedAt returns the FailedAt field if it's non-nil, zero value otherwise. 
+func (i *Invitation) GetFailedAt() Timestamp { + if i == nil || i.FailedAt == nil { + return Timestamp{} + } + return *i.FailedAt +} + +// GetFailedReason returns the FailedReason field if it's non-nil, zero value otherwise. +func (i *Invitation) GetFailedReason() string { + if i == nil || i.FailedReason == nil { + return "" + } + return *i.FailedReason +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Invitation) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetInvitationTeamURL returns the InvitationTeamURL field if it's non-nil, zero value otherwise. +func (i *Invitation) GetInvitationTeamURL() string { + if i == nil || i.InvitationTeamURL == nil { + return "" + } + return *i.InvitationTeamURL +} + +// GetInviter returns the Inviter field. +func (i *Invitation) GetInviter() *User { + if i == nil { + return nil + } + return i.Inviter +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (i *Invitation) GetLogin() string { + if i == nil || i.Login == nil { + return "" + } + return *i.Login +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *Invitation) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetRole returns the Role field if it's non-nil, zero value otherwise. +func (i *Invitation) GetRole() string { + if i == nil || i.Role == nil { + return "" + } + return *i.Role +} + +// GetTeamCount returns the TeamCount field if it's non-nil, zero value otherwise. +func (i *Invitation) GetTeamCount() int { + if i == nil || i.TeamCount == nil { + return 0 + } + return *i.TeamCount +} + +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (i *Issue) GetActiveLockReason() string { + if i == nil || i.ActiveLockReason == nil { + return "" + } + return *i.ActiveLockReason +} + +// GetAssignee returns the Assignee field. +func (i *Issue) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (i *Issue) GetAuthorAssociation() string { + if i == nil || i.AuthorAssociation == nil { + return "" + } + return *i.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (i *Issue) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (i *Issue) GetClosedAt() Timestamp { + if i == nil || i.ClosedAt == nil { + return Timestamp{} + } + return *i.ClosedAt +} + +// GetClosedBy returns the ClosedBy field. +func (i *Issue) GetClosedBy() *User { + if i == nil { + return nil + } + return i.ClosedBy +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (i *Issue) GetComments() int { + if i == nil || i.Comments == nil { + return 0 + } + return *i.Comments +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetCommentsURL() string { + if i == nil || i.CommentsURL == nil { + return "" + } + return *i.CommentsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
+func (i *Issue) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetEventsURL() string { + if i == nil || i.EventsURL == nil { + return "" + } + return *i.EventsURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *Issue) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetLabelsURL() string { + if i == nil || i.LabelsURL == nil { + return "" + } + return *i.LabelsURL +} + +// GetLocked returns the Locked field if it's non-nil, zero value otherwise. +func (i *Issue) GetLocked() bool { + if i == nil || i.Locked == nil { + return false + } + return *i.Locked +} + +// GetMilestone returns the Milestone field. +func (i *Issue) GetMilestone() *Milestone { + if i == nil { + return nil + } + return i.Milestone +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *Issue) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (i *Issue) GetNumber() int { + if i == nil || i.Number == nil { + return 0 + } + return *i.Number +} + +// GetPullRequestLinks returns the PullRequestLinks field. +func (i *Issue) GetPullRequestLinks() *PullRequestLinks { + if i == nil { + return nil + } + return i.PullRequestLinks +} + +// GetReactions returns the Reactions field. +func (i *Issue) GetReactions() *Reactions { + if i == nil { + return nil + } + return i.Reactions +} + +// GetRepository returns the Repository field. +func (i *Issue) GetRepository() *Repository { + if i == nil { + return nil + } + return i.Repository +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (i *Issue) GetRepositoryURL() string { + if i == nil || i.RepositoryURL == nil { + return "" + } + return *i.RepositoryURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (i *Issue) GetState() string { + if i == nil || i.State == nil { + return "" + } + return *i.State +} + +// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. +func (i *Issue) GetStateReason() string { + if i == nil || i.StateReason == nil { + return "" + } + return *i.StateReason +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (i *Issue) GetTitle() string { + if i == nil || i.Title == nil { + return "" + } + return *i.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *Issue) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *Issue) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUser returns the User field. +func (i *Issue) GetUser() *User { + if i == nil { + return nil + } + return i.User +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. 
+func (i *IssueComment) GetAuthorAssociation() string { + if i == nil || i.AuthorAssociation == nil { + return "" + } + return *i.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetHTMLURL() string { + if i == nil || i.HTMLURL == nil { + return "" + } + return *i.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetIssueURL() string { + if i == nil || i.IssueURL == nil { + return "" + } + return *i.IssueURL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetNodeID() string { + if i == nil || i.NodeID == nil { + return "" + } + return *i.NodeID +} + +// GetReactions returns the Reactions field. +func (i *IssueComment) GetReactions() *Reactions { + if i == nil { + return nil + } + return i.Reactions +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *IssueComment) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetUser returns the User field. +func (i *IssueComment) GetUser() *User { + if i == nil { + return nil + } + return i.User +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *IssueCommentEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetChanges returns the Changes field. +func (i *IssueCommentEvent) GetChanges() *EditChange { + if i == nil { + return nil + } + return i.Changes +} + +// GetComment returns the Comment field. +func (i *IssueCommentEvent) GetComment() *IssueComment { + if i == nil { + return nil + } + return i.Comment +} + +// GetInstallation returns the Installation field. +func (i *IssueCommentEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetIssue returns the Issue field. +func (i *IssueCommentEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetOrganization returns the Organization field. +func (i *IssueCommentEvent) GetOrganization() *Organization { + if i == nil { + return nil + } + return i.Organization +} + +// GetRepo returns the Repo field. +func (i *IssueCommentEvent) GetRepo() *Repository { + if i == nil { + return nil + } + return i.Repo +} + +// GetSender returns the Sender field. +func (i *IssueCommentEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetActor returns the Actor field. +func (i *IssueEvent) GetActor() *User { + if i == nil { + return nil + } + return i.Actor +} + +// GetAssignee returns the Assignee field. 
+func (i *IssueEvent) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetAssigner returns the Assigner field. +func (i *IssueEvent) GetAssigner() *User { + if i == nil { + return nil + } + return i.Assigner +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetCommitID() string { + if i == nil || i.CommitID == nil { + return "" + } + return *i.CommitID +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetDismissedReview returns the DismissedReview field. +func (i *IssueEvent) GetDismissedReview() *DismissedReview { + if i == nil { + return nil + } + return i.DismissedReview +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetEvent() string { + if i == nil || i.Event == nil { + return "" + } + return *i.Event +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetID() int64 { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetIssue returns the Issue field. +func (i *IssueEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetLabel returns the Label field. +func (i *IssueEvent) GetLabel() *Label { + if i == nil { + return nil + } + return i.Label +} + +// GetLockReason returns the LockReason field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetLockReason() string { + if i == nil || i.LockReason == nil { + return "" + } + return *i.LockReason +} + +// GetMilestone returns the Milestone field. +func (i *IssueEvent) GetMilestone() *Milestone { + if i == nil { + return nil + } + return i.Milestone +} + +// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. +func (i *IssueEvent) GetPerformedViaGithubApp() *App { + if i == nil { + return nil + } + return i.PerformedViaGithubApp +} + +// GetProjectCard returns the ProjectCard field. +func (i *IssueEvent) GetProjectCard() *ProjectCard { + if i == nil { + return nil + } + return i.ProjectCard +} + +// GetRename returns the Rename field. +func (i *IssueEvent) GetRename() *Rename { + if i == nil { + return nil + } + return i.Rename +} + +// GetRequestedReviewer returns the RequestedReviewer field. +func (i *IssueEvent) GetRequestedReviewer() *User { + if i == nil { + return nil + } + return i.RequestedReviewer +} + +// GetRequestedTeam returns the RequestedTeam field. +func (i *IssueEvent) GetRequestedTeam() *Team { + if i == nil { + return nil + } + return i.RequestedTeam +} + +// GetReviewRequester returns the ReviewRequester field. +func (i *IssueEvent) GetReviewRequester() *User { + if i == nil { + return nil + } + return i.ReviewRequester +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. +func (i *IssueImport) GetAssignee() string { + if i == nil || i.Assignee == nil { + return "" + } + return *i.Assignee +} + +// GetClosed returns the Closed field if it's non-nil, zero value otherwise. 
+func (i *IssueImport) GetClosed() bool { + if i == nil || i.Closed == nil { + return false + } + return *i.Closed +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (i *IssueImport) GetClosedAt() Timestamp { + if i == nil || i.ClosedAt == nil { + return Timestamp{} + } + return *i.ClosedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueImport) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. +func (i *IssueImport) GetMilestone() int { + if i == nil || i.Milestone == nil { + return 0 + } + return *i.Milestone +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *IssueImport) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetCode returns the Code field if it's non-nil, zero value otherwise. +func (i *IssueImportError) GetCode() string { + if i == nil || i.Code == nil { + return "" + } + return *i.Code +} + +// GetField returns the Field field if it's non-nil, zero value otherwise. +func (i *IssueImportError) GetField() string { + if i == nil || i.Field == nil { + return "" + } + return *i.Field +} + +// GetLocation returns the Location field if it's non-nil, zero value otherwise. +func (i *IssueImportError) GetLocation() string { + if i == nil || i.Location == nil { + return "" + } + return *i.Location +} + +// GetResource returns the Resource field if it's non-nil, zero value otherwise. +func (i *IssueImportError) GetResource() string { + if i == nil || i.Resource == nil { + return "" + } + return *i.Resource +} + +// GetValue returns the Value field if it's non-nil, zero value otherwise. +func (i *IssueImportError) GetValue() string { + if i == nil || i.Value == nil { + return "" + } + return *i.Value +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetCreatedAt() Timestamp { + if i == nil || i.CreatedAt == nil { + return Timestamp{} + } + return *i.CreatedAt +} + +// GetDocumentationURL returns the DocumentationURL field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetDocumentationURL() string { + if i == nil || i.DocumentationURL == nil { + return "" + } + return *i.DocumentationURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetID() int { + if i == nil || i.ID == nil { + return 0 + } + return *i.ID +} + +// GetImportIssuesURL returns the ImportIssuesURL field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetImportIssuesURL() string { + if i == nil || i.ImportIssuesURL == nil { + return "" + } + return *i.ImportIssuesURL +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetMessage() string { + if i == nil || i.Message == nil { + return "" + } + return *i.Message +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetRepositoryURL() string { + if i == nil || i.RepositoryURL == nil { + return "" + } + return *i.RepositoryURL +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. 
+func (i *IssueImportResponse) GetStatus() string { + if i == nil || i.Status == nil { + return "" + } + return *i.Status +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetUpdatedAt() Timestamp { + if i == nil || i.UpdatedAt == nil { + return Timestamp{} + } + return *i.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (i *IssueImportResponse) GetURL() string { + if i == nil || i.URL == nil { + return "" + } + return *i.URL +} + +// GetDirection returns the Direction field if it's non-nil, zero value otherwise. +func (i *IssueListCommentsOptions) GetDirection() string { + if i == nil || i.Direction == nil { + return "" + } + return *i.Direction +} + +// GetSince returns the Since field if it's non-nil, zero value otherwise. +func (i *IssueListCommentsOptions) GetSince() time.Time { + if i == nil || i.Since == nil { + return time.Time{} + } + return *i.Since +} + +// GetSort returns the Sort field if it's non-nil, zero value otherwise. +func (i *IssueListCommentsOptions) GetSort() string { + if i == nil || i.Sort == nil { + return "" + } + return *i.Sort +} + +// GetAssignee returns the Assignee field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetAssignee() string { + if i == nil || i.Assignee == nil { + return "" + } + return *i.Assignee +} + +// GetAssignees returns the Assignees field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetAssignees() []string { + if i == nil || i.Assignees == nil { + return nil + } + return *i.Assignees +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetBody() string { + if i == nil || i.Body == nil { + return "" + } + return *i.Body +} + +// GetLabels returns the Labels field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetLabels() []string { + if i == nil || i.Labels == nil { + return nil + } + return *i.Labels +} + +// GetMilestone returns the Milestone field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetMilestone() int { + if i == nil || i.Milestone == nil { + return 0 + } + return *i.Milestone +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetState() string { + if i == nil || i.State == nil { + return "" + } + return *i.State +} + +// GetStateReason returns the StateReason field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetStateReason() string { + if i == nil || i.StateReason == nil { + return "" + } + return *i.StateReason +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (i *IssueRequest) GetTitle() string { + if i == nil || i.Title == nil { + return "" + } + return *i.Title +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (i *IssuesEvent) GetAction() string { + if i == nil || i.Action == nil { + return "" + } + return *i.Action +} + +// GetAssignee returns the Assignee field. +func (i *IssuesEvent) GetAssignee() *User { + if i == nil { + return nil + } + return i.Assignee +} + +// GetChanges returns the Changes field. +func (i *IssuesEvent) GetChanges() *EditChange { + if i == nil { + return nil + } + return i.Changes +} + +// GetInstallation returns the Installation field. +func (i *IssuesEvent) GetInstallation() *Installation { + if i == nil { + return nil + } + return i.Installation +} + +// GetIssue returns the Issue field. 
+func (i *IssuesEvent) GetIssue() *Issue { + if i == nil { + return nil + } + return i.Issue +} + +// GetLabel returns the Label field. +func (i *IssuesEvent) GetLabel() *Label { + if i == nil { + return nil + } + return i.Label +} + +// GetMilestone returns the Milestone field. +func (i *IssuesEvent) GetMilestone() *Milestone { + if i == nil { + return nil + } + return i.Milestone +} + +// GetOrg returns the Org field. +func (i *IssuesEvent) GetOrg() *Organization { + if i == nil { + return nil + } + return i.Org +} + +// GetRepo returns the Repo field. +func (i *IssuesEvent) GetRepo() *Repository { + if i == nil { + return nil + } + return i.Repo +} + +// GetSender returns the Sender field. +func (i *IssuesEvent) GetSender() *User { + if i == nil { + return nil + } + return i.Sender +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (i *IssuesSearchResult) GetIncompleteResults() bool { + if i == nil || i.IncompleteResults == nil { + return false + } + return *i.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (i *IssuesSearchResult) GetTotal() int { + if i == nil || i.Total == nil { + return 0 + } + return *i.Total +} + +// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetClosedIssues() int { + if i == nil || i.ClosedIssues == nil { + return 0 + } + return *i.ClosedIssues +} + +// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetOpenIssues() int { + if i == nil || i.OpenIssues == nil { + return 0 + } + return *i.OpenIssues +} + +// GetTotalIssues returns the TotalIssues field if it's non-nil, zero value otherwise. +func (i *IssueStats) GetTotalIssues() int { + if i == nil || i.TotalIssues == nil { + return 0 + } + return *i.TotalIssues +} + +// GetEncodedJITConfig returns the EncodedJITConfig field if it's non-nil, zero value otherwise. +func (j *JITRunnerConfig) GetEncodedJITConfig() string { + if j == nil || j.EncodedJITConfig == nil { + return "" + } + return *j.EncodedJITConfig +} + +// GetRunner returns the Runner field. +func (j *JITRunnerConfig) GetRunner() *Runner { + if j == nil { + return nil + } + return j.Runner +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (j *Jobs) GetTotalCount() int { + if j == nil || j.TotalCount == nil { + return 0 + } + return *j.TotalCount +} + +// GetAddedBy returns the AddedBy field if it's non-nil, zero value otherwise. +func (k *Key) GetAddedBy() string { + if k == nil || k.AddedBy == nil { + return "" + } + return *k.AddedBy +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (k *Key) GetCreatedAt() Timestamp { + if k == nil || k.CreatedAt == nil { + return Timestamp{} + } + return *k.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (k *Key) GetID() int64 { + if k == nil || k.ID == nil { + return 0 + } + return *k.ID +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (k *Key) GetKey() string { + if k == nil || k.Key == nil { + return "" + } + return *k.Key +} + +// GetLastUsed returns the LastUsed field if it's non-nil, zero value otherwise. +func (k *Key) GetLastUsed() Timestamp { + if k == nil || k.LastUsed == nil { + return Timestamp{} + } + return *k.LastUsed +} + +// GetReadOnly returns the ReadOnly field if it's non-nil, zero value otherwise. 
+func (k *Key) GetReadOnly() bool { + if k == nil || k.ReadOnly == nil { + return false + } + return *k.ReadOnly +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (k *Key) GetTitle() string { + if k == nil || k.Title == nil { + return "" + } + return *k.Title +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (k *Key) GetURL() string { + if k == nil || k.URL == nil { + return "" + } + return *k.URL +} + +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (k *Key) GetVerified() bool { + if k == nil || k.Verified == nil { + return false + } + return *k.Verified +} + +// GetColor returns the Color field if it's non-nil, zero value otherwise. +func (l *Label) GetColor() string { + if l == nil || l.Color == nil { + return "" + } + return *l.Color +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (l *Label) GetDefault() bool { + if l == nil || l.Default == nil { + return false + } + return *l.Default +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (l *Label) GetDescription() string { + if l == nil || l.Description == nil { + return "" + } + return *l.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (l *Label) GetID() int64 { + if l == nil || l.ID == nil { + return 0 + } + return *l.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *Label) GetName() string { + if l == nil || l.Name == nil { + return "" + } + return *l.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (l *Label) GetNodeID() string { + if l == nil || l.NodeID == nil { + return "" + } + return *l.NodeID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *Label) GetURL() string { + if l == nil || l.URL == nil { + return "" + } + return *l.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (l *LabelEvent) GetAction() string { + if l == nil || l.Action == nil { + return "" + } + return *l.Action +} + +// GetChanges returns the Changes field. +func (l *LabelEvent) GetChanges() *EditChange { + if l == nil { + return nil + } + return l.Changes +} + +// GetInstallation returns the Installation field. +func (l *LabelEvent) GetInstallation() *Installation { + if l == nil { + return nil + } + return l.Installation +} + +// GetLabel returns the Label field. +func (l *LabelEvent) GetLabel() *Label { + if l == nil { + return nil + } + return l.Label +} + +// GetOrg returns the Org field. +func (l *LabelEvent) GetOrg() *Organization { + if l == nil { + return nil + } + return l.Org +} + +// GetRepo returns the Repo field. +func (l *LabelEvent) GetRepo() *Repository { + if l == nil { + return nil + } + return l.Repo +} + +// GetSender returns the Sender field. +func (l *LabelEvent) GetSender() *User { + if l == nil { + return nil + } + return l.Sender +} + +// GetColor returns the Color field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetColor() string { + if l == nil || l.Color == nil { + return "" + } + return *l.Color +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetDefault() bool { + if l == nil || l.Default == nil { + return false + } + return *l.Default +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. 
+func (l *LabelResult) GetDescription() string { + if l == nil || l.Description == nil { + return "" + } + return *l.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetID() int64 { + if l == nil || l.ID == nil { + return 0 + } + return *l.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetName() string { + if l == nil || l.Name == nil { + return "" + } + return *l.Name +} + +// GetScore returns the Score field. +func (l *LabelResult) GetScore() *float64 { + if l == nil { + return nil + } + return l.Score +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *LabelResult) GetURL() string { + if l == nil || l.URL == nil { + return "" + } + return *l.URL +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (l *LabelsSearchResult) GetIncompleteResults() bool { + if l == nil || l.IncompleteResults == nil { + return false + } + return *l.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (l *LabelsSearchResult) GetTotal() int { + if l == nil || l.Total == nil { + return 0 + } + return *l.Total +} + +// GetOID returns the OID field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetOID() string { + if l == nil || l.OID == nil { + return "" + } + return *l.OID +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetPath() string { + if l == nil || l.Path == nil { + return "" + } + return *l.Path +} + +// GetRefName returns the RefName field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetRefName() string { + if l == nil || l.RefName == nil { + return "" + } + return *l.RefName +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (l *LargeFile) GetSize() int { + if l == nil || l.Size == nil { + return 0 + } + return *l.Size +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (l *License) GetBody() string { + if l == nil || l.Body == nil { + return "" + } + return *l.Body +} + +// GetConditions returns the Conditions field if it's non-nil, zero value otherwise. +func (l *License) GetConditions() []string { + if l == nil || l.Conditions == nil { + return nil + } + return *l.Conditions +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (l *License) GetDescription() string { + if l == nil || l.Description == nil { + return "" + } + return *l.Description +} + +// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. +func (l *License) GetFeatured() bool { + if l == nil || l.Featured == nil { + return false + } + return *l.Featured +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (l *License) GetHTMLURL() string { + if l == nil || l.HTMLURL == nil { + return "" + } + return *l.HTMLURL +} + +// GetImplementation returns the Implementation field if it's non-nil, zero value otherwise. +func (l *License) GetImplementation() string { + if l == nil || l.Implementation == nil { + return "" + } + return *l.Implementation +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (l *License) GetKey() string { + if l == nil || l.Key == nil { + return "" + } + return *l.Key +} + +// GetLimitations returns the Limitations field if it's non-nil, zero value otherwise. 
+func (l *License) GetLimitations() []string { + if l == nil || l.Limitations == nil { + return nil + } + return *l.Limitations +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (l *License) GetName() string { + if l == nil || l.Name == nil { + return "" + } + return *l.Name +} + +// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. +func (l *License) GetPermissions() []string { + if l == nil || l.Permissions == nil { + return nil + } + return *l.Permissions +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (l *License) GetSPDXID() string { + if l == nil || l.SPDXID == nil { + return "" + } + return *l.SPDXID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (l *License) GetURL() string { + if l == nil || l.URL == nil { + return "" + } + return *l.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (l *LinearHistoryRequirementEnforcementLevelChanges) GetFrom() string { + if l == nil || l.From == nil { + return "" + } + return *l.From +} + +// GetDirection returns the Direction field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetDirection() string { + if l == nil || l.Direction == nil { + return "" + } + return *l.Direction +} + +// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetEcosystem() string { + if l == nil || l.Ecosystem == nil { + return "" + } + return *l.Ecosystem +} + +// GetPackage returns the Package field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetPackage() string { + if l == nil || l.Package == nil { + return "" + } + return *l.Package +} + +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetScope() string { + if l == nil || l.Scope == nil { + return "" + } + return *l.Scope +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetSeverity() string { + if l == nil || l.Severity == nil { + return "" + } + return *l.Severity +} + +// GetSort returns the Sort field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetSort() string { + if l == nil || l.Sort == nil { + return "" + } + return *l.Sort +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (l *ListAlertsOptions) GetState() string { + if l == nil || l.State == nil { + return "" + } + return *l.State +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetAppID() int64 { + if l == nil || l.AppID == nil { + return 0 + } + return *l.AppID +} + +// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetCheckName() string { + if l == nil || l.CheckName == nil { + return "" + } + return *l.CheckName +} + +// GetFilter returns the Filter field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetFilter() string { + if l == nil || l.Filter == nil { + return "" + } + return *l.Filter +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (l *ListCheckRunsOptions) GetStatus() string { + if l == nil || l.Status == nil { + return "" + } + return *l.Status +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. 
+func (l *ListCheckRunsResults) GetTotal() int { + if l == nil || l.Total == nil { + return 0 + } + return *l.Total +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteOptions) GetAppID() int { + if l == nil || l.AppID == nil { + return 0 + } + return *l.AppID +} + +// GetCheckName returns the CheckName field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteOptions) GetCheckName() string { + if l == nil || l.CheckName == nil { + return "" + } + return *l.CheckName +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (l *ListCheckSuiteResults) GetTotal() int { + if l == nil || l.Total == nil { + return 0 + } + return *l.Total +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (l *ListCodespaces) GetTotalCount() int { + if l == nil || l.TotalCount == nil { + return 0 + } + return *l.TotalCount +} + +// GetAffiliation returns the Affiliation field if it's non-nil, zero value otherwise. +func (l *ListCollaboratorOptions) GetAffiliation() string { + if l == nil || l.Affiliation == nil { + return "" + } + return *l.Affiliation +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (l *ListExternalGroupsOptions) GetDisplayName() string { + if l == nil || l.DisplayName == nil { + return "" + } + return *l.DisplayName +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (l *ListOrganizations) GetTotalCount() int { + if l == nil || l.TotalCount == nil { + return 0 + } + return *l.TotalCount +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (l *ListRepositories) GetTotalCount() int { + if l == nil || l.TotalCount == nil { + return 0 + } + return *l.TotalCount +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. +func (l *ListSCIMProvisionedIdentitiesOptions) GetCount() int { + if l == nil || l.Count == nil { + return 0 + } + return *l.Count +} + +// GetFilter returns the Filter field if it's non-nil, zero value otherwise. +func (l *ListSCIMProvisionedIdentitiesOptions) GetFilter() string { + if l == nil || l.Filter == nil { + return "" + } + return *l.Filter +} + +// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. +func (l *ListSCIMProvisionedIdentitiesOptions) GetStartIndex() int { + if l == nil || l.StartIndex == nil { + return 0 + } + return *l.StartIndex +} + +// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. +func (l *Location) GetEndColumn() int { + if l == nil || l.EndColumn == nil { + return 0 + } + return *l.EndColumn +} + +// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. +func (l *Location) GetEndLine() int { + if l == nil || l.EndLine == nil { + return 0 + } + return *l.EndLine +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (l *Location) GetPath() string { + if l == nil || l.Path == nil { + return "" + } + return *l.Path +} + +// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. +func (l *Location) GetStartColumn() int { + if l == nil || l.StartColumn == nil { + return 0 + } + return *l.StartColumn +} + +// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. 
+func (l *Location) GetStartLine() int { + if l == nil || l.StartLine == nil { + return 0 + } + return *l.StartLine +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. +func (l *LockBranch) GetEnabled() bool { + if l == nil || l.Enabled == nil { + return false + } + return *l.Enabled +} + +// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetEffectiveDate() Timestamp { + if m == nil || m.EffectiveDate == nil { + return Timestamp{} + } + return *m.EffectiveDate +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetPlan returns the Plan field. +func (m *MarketplacePendingChange) GetPlan() *MarketplacePlan { + if m == nil { + return nil + } + return m.Plan +} + +// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. +func (m *MarketplacePendingChange) GetUnitCount() int { + if m == nil || m.UnitCount == nil { + return 0 + } + return *m.UnitCount +} + +// GetAccountsURL returns the AccountsURL field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetAccountsURL() string { + if m == nil || m.AccountsURL == nil { + return "" + } + return *m.AccountsURL +} + +// GetBullets returns the Bullets field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetBullets() []string { + if m == nil || m.Bullets == nil { + return nil + } + return *m.Bullets +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetDescription() string { + if m == nil || m.Description == nil { + return "" + } + return *m.Description +} + +// GetHasFreeTrial returns the HasFreeTrial field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetHasFreeTrial() bool { + if m == nil || m.HasFreeTrial == nil { + return false + } + return *m.HasFreeTrial +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetMonthlyPriceInCents returns the MonthlyPriceInCents field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetMonthlyPriceInCents() int { + if m == nil || m.MonthlyPriceInCents == nil { + return 0 + } + return *m.MonthlyPriceInCents +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetName() string { + if m == nil || m.Name == nil { + return "" + } + return *m.Name +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetNumber() int { + if m == nil || m.Number == nil { + return 0 + } + return *m.Number +} + +// GetPriceModel returns the PriceModel field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetPriceModel() string { + if m == nil || m.PriceModel == nil { + return "" + } + return *m.PriceModel +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetState() string { + if m == nil || m.State == nil { + return "" + } + return *m.State +} + +// GetUnitName returns the UnitName field if it's non-nil, zero value otherwise. 
+func (m *MarketplacePlan) GetUnitName() string { + if m == nil || m.UnitName == nil { + return "" + } + return *m.UnitName +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetYearlyPriceInCents returns the YearlyPriceInCents field if it's non-nil, zero value otherwise. +func (m *MarketplacePlan) GetYearlyPriceInCents() int { + if m == nil || m.YearlyPriceInCents == nil { + return 0 + } + return *m.YearlyPriceInCents +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetLogin() string { + if m == nil || m.Login == nil { + return "" + } + return *m.Login +} + +// GetMarketplacePendingChange returns the MarketplacePendingChange field. +func (m *MarketplacePlanAccount) GetMarketplacePendingChange() *MarketplacePendingChange { + if m == nil { + return nil + } + return m.MarketplacePendingChange +} + +// GetMarketplacePurchase returns the MarketplacePurchase field. +func (m *MarketplacePlanAccount) GetMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil + } + return m.MarketplacePurchase +} + +// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetOrganizationBillingEmail() string { + if m == nil || m.OrganizationBillingEmail == nil { + return "" + } + return *m.OrganizationBillingEmail +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetType() string { + if m == nil || m.Type == nil { + return "" + } + return *m.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *MarketplacePlanAccount) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetAccount returns the Account field. +func (m *MarketplacePurchase) GetAccount() *MarketplacePurchaseAccount { + if m == nil { + return nil + } + return m.Account +} + +// GetBillingCycle returns the BillingCycle field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetBillingCycle() string { + if m == nil || m.BillingCycle == nil { + return "" + } + return *m.BillingCycle +} + +// GetFreeTrialEndsOn returns the FreeTrialEndsOn field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetFreeTrialEndsOn() Timestamp { + if m == nil || m.FreeTrialEndsOn == nil { + return Timestamp{} + } + return *m.FreeTrialEndsOn +} + +// GetNextBillingDate returns the NextBillingDate field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetNextBillingDate() Timestamp { + if m == nil || m.NextBillingDate == nil { + return Timestamp{} + } + return *m.NextBillingDate +} + +// GetOnFreeTrial returns the OnFreeTrial field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetOnFreeTrial() bool { + if m == nil || m.OnFreeTrial == nil { + return false + } + return *m.OnFreeTrial +} + +// GetPlan returns the Plan field. +func (m *MarketplacePurchase) GetPlan() *MarketplacePlan { + if m == nil { + return nil + } + return m.Plan +} + +// GetUnitCount returns the UnitCount field if it's non-nil, zero value otherwise. 
+func (m *MarketplacePurchase) GetUnitCount() int { + if m == nil || m.UnitCount == nil { + return 0 + } + return *m.UnitCount +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchase) GetUpdatedAt() Timestamp { + if m == nil || m.UpdatedAt == nil { + return Timestamp{} + } + return *m.UpdatedAt +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetEmail() string { + if m == nil || m.Email == nil { + return "" + } + return *m.Email +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetLogin() string { + if m == nil || m.Login == nil { + return "" + } + return *m.Login +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetNodeID() string { + if m == nil || m.NodeID == nil { + return "" + } + return *m.NodeID +} + +// GetOrganizationBillingEmail returns the OrganizationBillingEmail field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetOrganizationBillingEmail() string { + if m == nil || m.OrganizationBillingEmail == nil { + return "" + } + return *m.OrganizationBillingEmail +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetType() string { + if m == nil || m.Type == nil { + return "" + } + return *m.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseAccount) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetEffectiveDate returns the EffectiveDate field if it's non-nil, zero value otherwise. +func (m *MarketplacePurchaseEvent) GetEffectiveDate() Timestamp { + if m == nil || m.EffectiveDate == nil { + return Timestamp{} + } + return *m.EffectiveDate +} + +// GetInstallation returns the Installation field. +func (m *MarketplacePurchaseEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetMarketplacePurchase returns the MarketplacePurchase field. +func (m *MarketplacePurchaseEvent) GetMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil + } + return m.MarketplacePurchase +} + +// GetOrg returns the Org field. +func (m *MarketplacePurchaseEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetPreviousMarketplacePurchase returns the PreviousMarketplacePurchase field. +func (m *MarketplacePurchaseEvent) GetPreviousMarketplacePurchase() *MarketplacePurchase { + if m == nil { + return nil + } + return m.PreviousMarketplacePurchase +} + +// GetSender returns the Sender field. +func (m *MarketplacePurchaseEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetText returns the Text field if it's non-nil, zero value otherwise. 
+func (m *Match) GetText() string { + if m == nil || m.Text == nil { + return "" + } + return *m.Text +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MemberEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetInstallation returns the Installation field. +func (m *MemberEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetMember returns the Member field. +func (m *MemberEvent) GetMember() *User { + if m == nil { + return nil + } + return m.Member +} + +// GetOrg returns the Org field. +func (m *MemberEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetRepo returns the Repo field. +func (m *MemberEvent) GetRepo() *Repository { + if m == nil { + return nil + } + return m.Repo +} + +// GetSender returns the Sender field. +func (m *MemberEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetOrganization returns the Organization field. +func (m *Membership) GetOrganization() *Organization { + if m == nil { + return nil + } + return m.Organization +} + +// GetOrganizationURL returns the OrganizationURL field if it's non-nil, zero value otherwise. +func (m *Membership) GetOrganizationURL() string { + if m == nil || m.OrganizationURL == nil { + return "" + } + return *m.OrganizationURL +} + +// GetRole returns the Role field if it's non-nil, zero value otherwise. +func (m *Membership) GetRole() string { + if m == nil || m.Role == nil { + return "" + } + return *m.Role +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Membership) GetState() string { + if m == nil || m.State == nil { + return "" + } + return *m.State +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Membership) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetUser returns the User field. +func (m *Membership) GetUser() *User { + if m == nil { + return nil + } + return m.User +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MembershipEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetInstallation returns the Installation field. +func (m *MembershipEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetMember returns the Member field. +func (m *MembershipEvent) GetMember() *User { + if m == nil { + return nil + } + return m.Member +} + +// GetOrg returns the Org field. +func (m *MembershipEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (m *MembershipEvent) GetScope() string { + if m == nil || m.Scope == nil { + return "" + } + return *m.Scope +} + +// GetSender returns the Sender field. +func (m *MembershipEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetTeam returns the Team field. +func (m *MembershipEvent) GetTeam() *Team { + if m == nil { + return nil + } + return m.Team +} + +// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. +func (m *MergeGroup) GetBaseRef() string { + if m == nil || m.BaseRef == nil { + return "" + } + return *m.BaseRef +} + +// GetBaseSHA returns the BaseSHA field if it's non-nil, zero value otherwise. 
+func (m *MergeGroup) GetBaseSHA() string { + if m == nil || m.BaseSHA == nil { + return "" + } + return *m.BaseSHA +} + +// GetHeadCommit returns the HeadCommit field. +func (m *MergeGroup) GetHeadCommit() *Commit { + if m == nil { + return nil + } + return m.HeadCommit +} + +// GetHeadRef returns the HeadRef field if it's non-nil, zero value otherwise. +func (m *MergeGroup) GetHeadRef() string { + if m == nil || m.HeadRef == nil { + return "" + } + return *m.HeadRef +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (m *MergeGroup) GetHeadSHA() string { + if m == nil || m.HeadSHA == nil { + return "" + } + return *m.HeadSHA +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MergeGroupEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetInstallation returns the Installation field. +func (m *MergeGroupEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetMergeGroup returns the MergeGroup field. +func (m *MergeGroupEvent) GetMergeGroup() *MergeGroup { + if m == nil { + return nil + } + return m.MergeGroup +} + +// GetOrg returns the Org field. +func (m *MergeGroupEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetRepo returns the Repo field. +func (m *MergeGroupEvent) GetRepo() *Repository { + if m == nil { + return nil + } + return m.Repo +} + +// GetSender returns the Sender field. +func (m *MergeGroupEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetText returns the Text field if it's non-nil, zero value otherwise. +func (m *Message) GetText() string { + if m == nil || m.Text == nil { + return "" + } + return *m.Text +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MetaEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetHook returns the Hook field. +func (m *MetaEvent) GetHook() *Hook { + if m == nil { + return nil + } + return m.Hook +} + +// GetHookID returns the HookID field if it's non-nil, zero value otherwise. +func (m *MetaEvent) GetHookID() int64 { + if m == nil || m.HookID == nil { + return 0 + } + return *m.HookID +} + +// GetInstallation returns the Installation field. +func (m *MetaEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetOrg returns the Org field. +func (m *MetaEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetRepo returns the Repo field. +func (m *MetaEvent) GetRepo() *Repository { + if m == nil { + return nil + } + return m.Repo +} + +// GetSender returns the Sender field. +func (m *MetaEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (m *Metric) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { + return "" + } + return *m.HTMLURL +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (m *Metric) GetKey() string { + if m == nil || m.Key == nil { + return "" + } + return *m.Key +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (m *Metric) GetName() string { + if m == nil || m.Name == nil { + return "" + } + return *m.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
+func (m *Metric) GetNodeID() string { + if m == nil || m.NodeID == nil { + return "" + } + return *m.NodeID +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (m *Metric) GetSPDXID() string { + if m == nil || m.SPDXID == nil { + return "" + } + return *m.SPDXID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Metric) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (m *Migration) GetCreatedAt() string { + if m == nil || m.CreatedAt == nil { + return "" + } + return *m.CreatedAt +} + +// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. +func (m *Migration) GetExcludeAttachments() bool { + if m == nil || m.ExcludeAttachments == nil { + return false + } + return *m.ExcludeAttachments +} + +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (m *Migration) GetGUID() string { + if m == nil || m.GUID == nil { + return "" + } + return *m.GUID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *Migration) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. +func (m *Migration) GetLockRepositories() bool { + if m == nil || m.LockRepositories == nil { + return false + } + return *m.LockRepositories +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Migration) GetState() string { + if m == nil || m.State == nil { + return "" + } + return *m.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (m *Migration) GetUpdatedAt() string { + if m == nil || m.UpdatedAt == nil { + return "" + } + return *m.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Migration) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetClosedAt() Timestamp { + if m == nil || m.ClosedAt == nil { + return Timestamp{} + } + return *m.ClosedAt +} + +// GetClosedIssues returns the ClosedIssues field if it's non-nil, zero value otherwise. +func (m *Milestone) GetClosedIssues() int { + if m == nil || m.ClosedIssues == nil { + return 0 + } + return *m.ClosedIssues +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetCreatedAt() Timestamp { + if m == nil || m.CreatedAt == nil { + return Timestamp{} + } + return *m.CreatedAt +} + +// GetCreator returns the Creator field. +func (m *Milestone) GetCreator() *User { + if m == nil { + return nil + } + return m.Creator +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (m *Milestone) GetDescription() string { + if m == nil || m.Description == nil { + return "" + } + return *m.Description +} + +// GetDueOn returns the DueOn field if it's non-nil, zero value otherwise. +func (m *Milestone) GetDueOn() Timestamp { + if m == nil || m.DueOn == nil { + return Timestamp{} + } + return *m.DueOn +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
+func (m *Milestone) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { + return "" + } + return *m.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (m *Milestone) GetID() int64 { + if m == nil || m.ID == nil { + return 0 + } + return *m.ID +} + +// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. +func (m *Milestone) GetLabelsURL() string { + if m == nil || m.LabelsURL == nil { + return "" + } + return *m.LabelsURL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (m *Milestone) GetNodeID() string { + if m == nil || m.NodeID == nil { + return "" + } + return *m.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (m *Milestone) GetNumber() int { + if m == nil || m.Number == nil { + return 0 + } + return *m.Number +} + +// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. +func (m *Milestone) GetOpenIssues() int { + if m == nil || m.OpenIssues == nil { + return 0 + } + return *m.OpenIssues +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *Milestone) GetState() string { + if m == nil || m.State == nil { + return "" + } + return *m.State +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (m *Milestone) GetTitle() string { + if m == nil || m.Title == nil { + return "" + } + return *m.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (m *Milestone) GetUpdatedAt() Timestamp { + if m == nil || m.UpdatedAt == nil { + return Timestamp{} + } + return *m.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (m *Milestone) GetURL() string { + if m == nil || m.URL == nil { + return "" + } + return *m.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (m *MilestoneEvent) GetAction() string { + if m == nil || m.Action == nil { + return "" + } + return *m.Action +} + +// GetChanges returns the Changes field. +func (m *MilestoneEvent) GetChanges() *EditChange { + if m == nil { + return nil + } + return m.Changes +} + +// GetInstallation returns the Installation field. +func (m *MilestoneEvent) GetInstallation() *Installation { + if m == nil { + return nil + } + return m.Installation +} + +// GetMilestone returns the Milestone field. +func (m *MilestoneEvent) GetMilestone() *Milestone { + if m == nil { + return nil + } + return m.Milestone +} + +// GetOrg returns the Org field. +func (m *MilestoneEvent) GetOrg() *Organization { + if m == nil { + return nil + } + return m.Org +} + +// GetRepo returns the Repo field. +func (m *MilestoneEvent) GetRepo() *Repository { + if m == nil { + return nil + } + return m.Repo +} + +// GetSender returns the Sender field. +func (m *MilestoneEvent) GetSender() *User { + if m == nil { + return nil + } + return m.Sender +} + +// GetClosedMilestones returns the ClosedMilestones field if it's non-nil, zero value otherwise. +func (m *MilestoneStats) GetClosedMilestones() int { + if m == nil || m.ClosedMilestones == nil { + return 0 + } + return *m.ClosedMilestones +} + +// GetOpenMilestones returns the OpenMilestones field if it's non-nil, zero value otherwise. 
+func (m *MilestoneStats) GetOpenMilestones() int { + if m == nil || m.OpenMilestones == nil { + return 0 + } + return *m.OpenMilestones +} + +// GetTotalMilestones returns the TotalMilestones field if it's non-nil, zero value otherwise. +func (m *MilestoneStats) GetTotalMilestones() int { + if m == nil || m.TotalMilestones == nil { + return 0 + } + return *m.TotalMilestones +} + +// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetAnalysisKey() string { + if m == nil || m.AnalysisKey == nil { + return "" + } + return *m.AnalysisKey +} + +// GetCategory returns the Category field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetCategory() string { + if m == nil || m.Category == nil { + return "" + } + return *m.Category +} + +// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetCommitSHA() string { + if m == nil || m.CommitSHA == nil { + return "" + } + return *m.CommitSHA +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetEnvironment() string { + if m == nil || m.Environment == nil { + return "" + } + return *m.Environment +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetHTMLURL() string { + if m == nil || m.HTMLURL == nil { + return "" + } + return *m.HTMLURL +} + +// GetLocation returns the Location field. +func (m *MostRecentInstance) GetLocation() *Location { + if m == nil { + return nil + } + return m.Location +} + +// GetMessage returns the Message field. +func (m *MostRecentInstance) GetMessage() *Message { + if m == nil { + return nil + } + return m.Message +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetRef() string { + if m == nil || m.Ref == nil { + return "" + } + return *m.Ref +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (m *MostRecentInstance) GetState() string { + if m == nil || m.State == nil { + return "" + } + return *m.State +} + +// GetBase returns the Base field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetBase() string { + if n == nil || n.Base == nil { + return "" + } + return *n.Base +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetBody() string { + if n == nil || n.Body == nil { + return "" + } + return *n.Body +} + +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetDraft() bool { + if n == nil || n.Draft == nil { + return false + } + return *n.Draft +} + +// GetHead returns the Head field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetHead() string { + if n == nil || n.Head == nil { + return "" + } + return *n.Head +} + +// GetHeadRepo returns the HeadRepo field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetHeadRepo() string { + if n == nil || n.HeadRepo == nil { + return "" + } + return *n.HeadRepo +} + +// GetIssue returns the Issue field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetIssue() int { + if n == nil || n.Issue == nil { + return 0 + } + return *n.Issue +} + +// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. 
+func (n *NewPullRequest) GetMaintainerCanModify() bool { + if n == nil || n.MaintainerCanModify == nil { + return false + } + return *n.MaintainerCanModify +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (n *NewPullRequest) GetTitle() string { + if n == nil || n.Title == nil { + return "" + } + return *n.Title +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetDescription() string { + if n == nil || n.Description == nil { + return "" + } + return *n.Description +} + +// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetLDAPDN() string { + if n == nil || n.LDAPDN == nil { + return "" + } + return *n.LDAPDN +} + +// GetParentTeamID returns the ParentTeamID field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetParentTeamID() int64 { + if n == nil || n.ParentTeamID == nil { + return 0 + } + return *n.ParentTeamID +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetPermission() string { + if n == nil || n.Permission == nil { + return "" + } + return *n.Permission +} + +// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. +func (n *NewTeam) GetPrivacy() string { + if n == nil || n.Privacy == nil { + return "" + } + return *n.Privacy +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (n *Notification) GetID() string { + if n == nil || n.ID == nil { + return "" + } + return *n.ID +} + +// GetLastReadAt returns the LastReadAt field if it's non-nil, zero value otherwise. +func (n *Notification) GetLastReadAt() Timestamp { + if n == nil || n.LastReadAt == nil { + return Timestamp{} + } + return *n.LastReadAt +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (n *Notification) GetReason() string { + if n == nil || n.Reason == nil { + return "" + } + return *n.Reason +} + +// GetRepository returns the Repository field. +func (n *Notification) GetRepository() *Repository { + if n == nil { + return nil + } + return n.Repository +} + +// GetSubject returns the Subject field. +func (n *Notification) GetSubject() *NotificationSubject { + if n == nil { + return nil + } + return n.Subject +} + +// GetUnread returns the Unread field if it's non-nil, zero value otherwise. +func (n *Notification) GetUnread() bool { + if n == nil || n.Unread == nil { + return false + } + return *n.Unread +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (n *Notification) GetUpdatedAt() Timestamp { + if n == nil || n.UpdatedAt == nil { + return Timestamp{} + } + return *n.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (n *Notification) GetURL() string { + if n == nil || n.URL == nil { + return "" + } + return *n.URL +} + +// GetLatestCommentURL returns the LatestCommentURL field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetLatestCommentURL() string { + if n == nil || n.LatestCommentURL == nil { + return "" + } + return *n.LatestCommentURL +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetTitle() string { + if n == nil || n.Title == nil { + return "" + } + return *n.Title +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. 
+func (n *NotificationSubject) GetType() string { + if n == nil || n.Type == nil { + return "" + } + return *n.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (n *NotificationSubject) GetURL() string { + if n == nil || n.URL == nil { + return "" + } + return *n.URL +} + +// GetClientID returns the ClientID field if it's non-nil, zero value otherwise. +func (o *OAuthAPP) GetClientID() string { + if o == nil || o.ClientID == nil { + return "" + } + return *o.ClientID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (o *OAuthAPP) GetName() string { + if o == nil || o.Name == nil { + return "" + } + return *o.Name +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (o *OAuthAPP) GetURL() string { + if o == nil || o.URL == nil { + return "" + } + return *o.URL +} + +// GetUseDefault returns the UseDefault field if it's non-nil, zero value otherwise. +func (o *OIDCSubjectClaimCustomTemplate) GetUseDefault() bool { + if o == nil || o.UseDefault == nil { + return false + } + return *o.UseDefault +} + +// GetAdvancedSecurityEnabledForNewRepos returns the AdvancedSecurityEnabledForNewRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetAdvancedSecurityEnabledForNewRepos() bool { + if o == nil || o.AdvancedSecurityEnabledForNewRepos == nil { + return false + } + return *o.AdvancedSecurityEnabledForNewRepos +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetAvatarURL() string { + if o == nil || o.AvatarURL == nil { + return "" + } + return *o.AvatarURL +} + +// GetBillingEmail returns the BillingEmail field if it's non-nil, zero value otherwise. +func (o *Organization) GetBillingEmail() string { + if o == nil || o.BillingEmail == nil { + return "" + } + return *o.BillingEmail +} + +// GetBlog returns the Blog field if it's non-nil, zero value otherwise. +func (o *Organization) GetBlog() string { + if o == nil || o.Blog == nil { + return "" + } + return *o.Blog +} + +// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. +func (o *Organization) GetCollaborators() int { + if o == nil || o.Collaborators == nil { + return 0 + } + return *o.Collaborators +} + +// GetCompany returns the Company field if it's non-nil, zero value otherwise. +func (o *Organization) GetCompany() string { + if o == nil || o.Company == nil { + return "" + } + return *o.Company +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (o *Organization) GetCreatedAt() Timestamp { + if o == nil || o.CreatedAt == nil { + return Timestamp{} + } + return *o.CreatedAt +} + +// GetDefaultRepoPermission returns the DefaultRepoPermission field if it's non-nil, zero value otherwise. +func (o *Organization) GetDefaultRepoPermission() string { + if o == nil || o.DefaultRepoPermission == nil { + return "" + } + return *o.DefaultRepoPermission +} + +// GetDefaultRepoSettings returns the DefaultRepoSettings field if it's non-nil, zero value otherwise. +func (o *Organization) GetDefaultRepoSettings() string { + if o == nil || o.DefaultRepoSettings == nil { + return "" + } + return *o.DefaultRepoSettings +} + +// GetDependabotAlertsEnabledForNewRepos returns the DependabotAlertsEnabledForNewRepos field if it's non-nil, zero value otherwise. 
+func (o *Organization) GetDependabotAlertsEnabledForNewRepos() bool { + if o == nil || o.DependabotAlertsEnabledForNewRepos == nil { + return false + } + return *o.DependabotAlertsEnabledForNewRepos +} + +// GetDependabotSecurityUpdatesEnabledForNewRepos returns the DependabotSecurityUpdatesEnabledForNewRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetDependabotSecurityUpdatesEnabledForNewRepos() bool { + if o == nil || o.DependabotSecurityUpdatesEnabledForNewRepos == nil { + return false + } + return *o.DependabotSecurityUpdatesEnabledForNewRepos +} + +// GetDependencyGraphEnabledForNewRepos returns the DependencyGraphEnabledForNewRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetDependencyGraphEnabledForNewRepos() bool { + if o == nil || o.DependencyGraphEnabledForNewRepos == nil { + return false + } + return *o.DependencyGraphEnabledForNewRepos +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (o *Organization) GetDescription() string { + if o == nil || o.Description == nil { + return "" + } + return *o.Description +} + +// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. +func (o *Organization) GetDiskUsage() int { + if o == nil || o.DiskUsage == nil { + return 0 + } + return *o.DiskUsage +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (o *Organization) GetEmail() string { + if o == nil || o.Email == nil { + return "" + } + return *o.Email +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetEventsURL() string { + if o == nil || o.EventsURL == nil { + return "" + } + return *o.EventsURL +} + +// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. +func (o *Organization) GetFollowers() int { + if o == nil || o.Followers == nil { + return 0 + } + return *o.Followers +} + +// GetFollowing returns the Following field if it's non-nil, zero value otherwise. +func (o *Organization) GetFollowing() int { + if o == nil || o.Following == nil { + return 0 + } + return *o.Following +} + +// GetHasOrganizationProjects returns the HasOrganizationProjects field if it's non-nil, zero value otherwise. +func (o *Organization) GetHasOrganizationProjects() bool { + if o == nil || o.HasOrganizationProjects == nil { + return false + } + return *o.HasOrganizationProjects +} + +// GetHasRepositoryProjects returns the HasRepositoryProjects field if it's non-nil, zero value otherwise. +func (o *Organization) GetHasRepositoryProjects() bool { + if o == nil || o.HasRepositoryProjects == nil { + return false + } + return *o.HasRepositoryProjects +} + +// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetHooksURL() string { + if o == nil || o.HooksURL == nil { + return "" + } + return *o.HooksURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetHTMLURL() string { + if o == nil || o.HTMLURL == nil { + return "" + } + return *o.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (o *Organization) GetID() int64 { + if o == nil || o.ID == nil { + return 0 + } + return *o.ID +} + +// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. 
+func (o *Organization) GetIssuesURL() string { + if o == nil || o.IssuesURL == nil { + return "" + } + return *o.IssuesURL +} + +// GetIsVerified returns the IsVerified field if it's non-nil, zero value otherwise. +func (o *Organization) GetIsVerified() bool { + if o == nil || o.IsVerified == nil { + return false + } + return *o.IsVerified +} + +// GetLocation returns the Location field if it's non-nil, zero value otherwise. +func (o *Organization) GetLocation() string { + if o == nil || o.Location == nil { + return "" + } + return *o.Location +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (o *Organization) GetLogin() string { + if o == nil || o.Login == nil { + return "" + } + return *o.Login +} + +// GetMembersAllowedRepositoryCreationType returns the MembersAllowedRepositoryCreationType field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersAllowedRepositoryCreationType() string { + if o == nil || o.MembersAllowedRepositoryCreationType == nil { + return "" + } + return *o.MembersAllowedRepositoryCreationType +} + +// GetMembersCanCreateInternalRepos returns the MembersCanCreateInternalRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreateInternalRepos() bool { + if o == nil || o.MembersCanCreateInternalRepos == nil { + return false + } + return *o.MembersCanCreateInternalRepos +} + +// GetMembersCanCreatePages returns the MembersCanCreatePages field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreatePages() bool { + if o == nil || o.MembersCanCreatePages == nil { + return false + } + return *o.MembersCanCreatePages +} + +// GetMembersCanCreatePrivatePages returns the MembersCanCreatePrivatePages field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreatePrivatePages() bool { + if o == nil || o.MembersCanCreatePrivatePages == nil { + return false + } + return *o.MembersCanCreatePrivatePages +} + +// GetMembersCanCreatePrivateRepos returns the MembersCanCreatePrivateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreatePrivateRepos() bool { + if o == nil || o.MembersCanCreatePrivateRepos == nil { + return false + } + return *o.MembersCanCreatePrivateRepos +} + +// GetMembersCanCreatePublicPages returns the MembersCanCreatePublicPages field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreatePublicPages() bool { + if o == nil || o.MembersCanCreatePublicPages == nil { + return false + } + return *o.MembersCanCreatePublicPages +} + +// GetMembersCanCreatePublicRepos returns the MembersCanCreatePublicRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreatePublicRepos() bool { + if o == nil || o.MembersCanCreatePublicRepos == nil { + return false + } + return *o.MembersCanCreatePublicRepos +} + +// GetMembersCanCreateRepos returns the MembersCanCreateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersCanCreateRepos() bool { + if o == nil || o.MembersCanCreateRepos == nil { + return false + } + return *o.MembersCanCreateRepos +} + +// GetMembersCanForkPrivateRepos returns the MembersCanForkPrivateRepos field if it's non-nil, zero value otherwise. 
+func (o *Organization) GetMembersCanForkPrivateRepos() bool { + if o == nil || o.MembersCanForkPrivateRepos == nil { + return false + } + return *o.MembersCanForkPrivateRepos +} + +// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetMembersURL() string { + if o == nil || o.MembersURL == nil { + return "" + } + return *o.MembersURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (o *Organization) GetName() string { + if o == nil || o.Name == nil { + return "" + } + return *o.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (o *Organization) GetNodeID() string { + if o == nil || o.NodeID == nil { + return "" + } + return *o.NodeID +} + +// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetOwnedPrivateRepos() int64 { + if o == nil || o.OwnedPrivateRepos == nil { + return 0 + } + return *o.OwnedPrivateRepos +} + +// GetPlan returns the Plan field. +func (o *Organization) GetPlan() *Plan { + if o == nil { + return nil + } + return o.Plan +} + +// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. +func (o *Organization) GetPrivateGists() int { + if o == nil || o.PrivateGists == nil { + return 0 + } + return *o.PrivateGists +} + +// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. +func (o *Organization) GetPublicGists() int { + if o == nil || o.PublicGists == nil { + return 0 + } + return *o.PublicGists +} + +// GetPublicMembersURL returns the PublicMembersURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetPublicMembersURL() string { + if o == nil || o.PublicMembersURL == nil { + return "" + } + return *o.PublicMembersURL +} + +// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetPublicRepos() int { + if o == nil || o.PublicRepos == nil { + return 0 + } + return *o.PublicRepos +} + +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (o *Organization) GetReposURL() string { + if o == nil || o.ReposURL == nil { + return "" + } + return *o.ReposURL +} + +// GetSecretScanningEnabledForNewRepos returns the SecretScanningEnabledForNewRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetSecretScanningEnabledForNewRepos() bool { + if o == nil || o.SecretScanningEnabledForNewRepos == nil { + return false + } + return *o.SecretScanningEnabledForNewRepos +} + +// GetSecretScanningPushProtectionEnabledForNewRepos returns the SecretScanningPushProtectionEnabledForNewRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetSecretScanningPushProtectionEnabledForNewRepos() bool { + if o == nil || o.SecretScanningPushProtectionEnabledForNewRepos == nil { + return false + } + return *o.SecretScanningPushProtectionEnabledForNewRepos +} + +// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. +func (o *Organization) GetTotalPrivateRepos() int64 { + if o == nil || o.TotalPrivateRepos == nil { + return 0 + } + return *o.TotalPrivateRepos +} + +// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. 
+func (o *Organization) GetTwitterUsername() string { + if o == nil || o.TwitterUsername == nil { + return "" + } + return *o.TwitterUsername +} + +// GetTwoFactorRequirementEnabled returns the TwoFactorRequirementEnabled field if it's non-nil, zero value otherwise. +func (o *Organization) GetTwoFactorRequirementEnabled() bool { + if o == nil || o.TwoFactorRequirementEnabled == nil { + return false + } + return *o.TwoFactorRequirementEnabled +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (o *Organization) GetType() string { + if o == nil || o.Type == nil { + return "" + } + return *o.Type +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (o *Organization) GetUpdatedAt() Timestamp { + if o == nil || o.UpdatedAt == nil { + return Timestamp{} + } + return *o.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (o *Organization) GetURL() string { + if o == nil || o.URL == nil { + return "" + } + return *o.URL +} + +// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. +func (o *Organization) GetWebCommitSignoffRequired() bool { + if o == nil || o.WebCommitSignoffRequired == nil { + return false + } + return *o.WebCommitSignoffRequired +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (o *OrganizationCustomRepoRoles) GetTotalCount() int { + if o == nil || o.TotalCount == nil { + return 0 + } + return *o.TotalCount +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (o *OrganizationEvent) GetAction() string { + if o == nil || o.Action == nil { + return "" + } + return *o.Action +} + +// GetInstallation returns the Installation field. +func (o *OrganizationEvent) GetInstallation() *Installation { + if o == nil { + return nil + } + return o.Installation +} + +// GetInvitation returns the Invitation field. +func (o *OrganizationEvent) GetInvitation() *Invitation { + if o == nil { + return nil + } + return o.Invitation +} + +// GetMembership returns the Membership field. +func (o *OrganizationEvent) GetMembership() *Membership { + if o == nil { + return nil + } + return o.Membership +} + +// GetOrganization returns the Organization field. +func (o *OrganizationEvent) GetOrganization() *Organization { + if o == nil { + return nil + } + return o.Organization +} + +// GetSender returns the Sender field. +func (o *OrganizationEvent) GetSender() *User { + if o == nil { + return nil + } + return o.Sender +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (o *OrganizationInstallations) GetTotalCount() int { + if o == nil || o.TotalCount == nil { + return 0 + } + return *o.TotalCount +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (o *OrgBlockEvent) GetAction() string { + if o == nil || o.Action == nil { + return "" + } + return *o.Action +} + +// GetBlockedUser returns the BlockedUser field. +func (o *OrgBlockEvent) GetBlockedUser() *User { + if o == nil { + return nil + } + return o.BlockedUser +} + +// GetInstallation returns the Installation field. +func (o *OrgBlockEvent) GetInstallation() *Installation { + if o == nil { + return nil + } + return o.Installation +} + +// GetOrganization returns the Organization field. 
+func (o *OrgBlockEvent) GetOrganization() *Organization { + if o == nil { + return nil + } + return o.Organization +} + +// GetSender returns the Sender field. +func (o *OrgBlockEvent) GetSender() *User { + if o == nil { + return nil + } + return o.Sender +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetCreatedAt() Timestamp { + if o == nil || o.CreatedAt == nil { + return Timestamp{} + } + return *o.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetID() int64 { + if o == nil || o.ID == nil { + return 0 + } + return *o.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetName() string { + if o == nil || o.Name == nil { + return "" + } + return *o.Name +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetPath() string { + if o == nil || o.Path == nil { + return "" + } + return *o.Path +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetRef() string { + if o == nil || o.Ref == nil { + return "" + } + return *o.Ref +} + +// GetRepository returns the Repository field. +func (o *OrgRequiredWorkflow) GetRepository() *Repository { + if o == nil { + return nil + } + return o.Repository +} + +// GetScope returns the Scope field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetScope() string { + if o == nil || o.Scope == nil { + return "" + } + return *o.Scope +} + +// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetSelectedRepositoriesURL() string { + if o == nil || o.SelectedRepositoriesURL == nil { + return "" + } + return *o.SelectedRepositoriesURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetState() string { + if o == nil || o.State == nil { + return "" + } + return *o.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflow) GetUpdatedAt() Timestamp { + if o == nil || o.UpdatedAt == nil { + return Timestamp{} + } + return *o.UpdatedAt +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (o *OrgRequiredWorkflows) GetTotalCount() int { + if o == nil || o.TotalCount == nil { + return 0 + } + return *o.TotalCount +} + +// GetDisabledOrgs returns the DisabledOrgs field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetDisabledOrgs() int { + if o == nil || o.DisabledOrgs == nil { + return 0 + } + return *o.DisabledOrgs +} + +// GetTotalOrgs returns the TotalOrgs field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalOrgs() int { + if o == nil || o.TotalOrgs == nil { + return 0 + } + return *o.TotalOrgs +} + +// GetTotalTeamMembers returns the TotalTeamMembers field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalTeamMembers() int { + if o == nil || o.TotalTeamMembers == nil { + return 0 + } + return *o.TotalTeamMembers +} + +// GetTotalTeams returns the TotalTeams field if it's non-nil, zero value otherwise. +func (o *OrgStats) GetTotalTeams() int { + if o == nil || o.TotalTeams == nil { + return 0 + } + return *o.TotalTeams +} + +// GetOrg returns the Org field. 
+func (o *OwnerInfo) GetOrg() *User { + if o == nil { + return nil + } + return o.Org +} + +// GetUser returns the User field. +func (o *OwnerInfo) GetUser() *User { + if o == nil { + return nil + } + return o.User +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *Package) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Package) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *Package) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *Package) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetOwner returns the Owner field. +func (p *Package) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. +func (p *Package) GetPackageType() string { + if p == nil || p.PackageType == nil { + return "" + } + return *p.PackageType +} + +// GetPackageVersion returns the PackageVersion field. +func (p *Package) GetPackageVersion() *PackageVersion { + if p == nil { + return nil + } + return p.PackageVersion +} + +// GetRegistry returns the Registry field. +func (p *Package) GetRegistry() *PackageRegistry { + if p == nil { + return nil + } + return p.Registry +} + +// GetRepository returns the Repository field. +func (p *Package) GetRepository() *Repository { + if p == nil { + return nil + } + return p.Repository +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *Package) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *Package) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetVersionCount returns the VersionCount field if it's non-nil, zero value otherwise. +func (p *Package) GetVersionCount() int64 { + if p == nil || p.VersionCount == nil { + return 0 + } + return *p.VersionCount +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (p *Package) GetVisibility() string { + if p == nil || p.Visibility == nil { + return "" + } + return *p.Visibility +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PackageEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *PackageEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PackageEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetPackage returns the Package field. +func (p *PackageEvent) GetPackage() *Package { + if p == nil { + return nil + } + return p.Package +} + +// GetRepo returns the Repo field. +func (p *PackageEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. 
+func (p *PackageEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetAuthor returns the Author field. +func (p *PackageFile) GetAuthor() *User { + if p == nil { + return nil + } + return p.Author +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetContentType() string { + if p == nil || p.ContentType == nil { + return "" + } + return *p.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetDownloadURL() string { + if p == nil || p.DownloadURL == nil { + return "" + } + return *p.DownloadURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetMD5 returns the MD5 field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetMD5() string { + if p == nil || p.MD5 == nil { + return "" + } + return *p.MD5 +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetSHA1 returns the SHA1 field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetSHA1() string { + if p == nil || p.SHA1 == nil { + return "" + } + return *p.SHA1 +} + +// GetSHA256 returns the SHA256 field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetSHA256() string { + if p == nil || p.SHA256 == nil { + return "" + } + return *p.SHA256 +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetSize() int64 { + if p == nil || p.Size == nil { + return 0 + } + return *p.Size +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PackageFile) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. +func (p *PackageListOptions) GetPackageType() string { + if p == nil || p.PackageType == nil { + return "" + } + return *p.PackageType +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *PackageListOptions) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (p *PackageListOptions) GetVisibility() string { + if p == nil || p.Visibility == nil { + return "" + } + return *p.Visibility +} + +// GetContainer returns the Container field. +func (p *PackageMetadata) GetContainer() *PackageContainerMetadata { + if p == nil { + return nil + } + return p.Container +} + +// GetPackageType returns the PackageType field if it's non-nil, zero value otherwise. 
+func (p *PackageMetadata) GetPackageType() string { + if p == nil || p.PackageType == nil { + return "" + } + return *p.PackageType +} + +// GetAboutURL returns the AboutURL field if it's non-nil, zero value otherwise. +func (p *PackageRegistry) GetAboutURL() string { + if p == nil || p.AboutURL == nil { + return "" + } + return *p.AboutURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PackageRegistry) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (p *PackageRegistry) GetType() string { + if p == nil || p.Type == nil { + return "" + } + return *p.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PackageRegistry) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetVendor returns the Vendor field if it's non-nil, zero value otherwise. +func (p *PackageRegistry) GetVendor() string { + if p == nil || p.Vendor == nil { + return "" + } + return *p.Vendor +} + +// GetAuthor returns the Author field. +func (p *PackageRelease) GetAuthor() *User { + if p == nil { + return nil + } + return p.Author +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetDraft() bool { + if p == nil || p.Draft == nil { + return false + } + return *p.Draft +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetPrerelease() bool { + if p == nil || p.Prerelease == nil { + return false + } + return *p.Prerelease +} + +// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetPublishedAt() Timestamp { + if p == nil || p.PublishedAt == nil { + return Timestamp{} + } + return *p.PublishedAt +} + +// GetTagName returns the TagName field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetTagName() string { + if p == nil || p.TagName == nil { + return "" + } + return *p.TagName +} + +// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetTargetCommitish() string { + if p == nil || p.TargetCommitish == nil { + return "" + } + return *p.TargetCommitish +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PackageRelease) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetAuthor returns the Author field. 
+func (p *PackageVersion) GetAuthor() *User { + if p == nil { + return nil + } + return p.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetBodyHTML() string { + if p == nil || p.BodyHTML == nil { + return "" + } + return *p.BodyHTML +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetDraft() bool { + if p == nil || p.Draft == nil { + return false + } + return *p.Draft +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetInstallationCommand returns the InstallationCommand field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetInstallationCommand() string { + if p == nil || p.InstallationCommand == nil { + return "" + } + return *p.InstallationCommand +} + +// GetManifest returns the Manifest field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetManifest() string { + if p == nil || p.Manifest == nil { + return "" + } + return *p.Manifest +} + +// GetMetadata returns the Metadata field. +func (p *PackageVersion) GetMetadata() *PackageMetadata { + if p == nil { + return nil + } + return p.Metadata +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetPackageHTMLURL returns the PackageHTMLURL field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetPackageHTMLURL() string { + if p == nil || p.PackageHTMLURL == nil { + return "" + } + return *p.PackageHTMLURL +} + +// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetPrerelease() bool { + if p == nil || p.Prerelease == nil { + return false + } + return *p.Prerelease +} + +// GetRelease returns the Release field. +func (p *PackageVersion) GetRelease() *PackageRelease { + if p == nil { + return nil + } + return p.Release +} + +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetSummary() string { + if p == nil || p.Summary == nil { + return "" + } + return *p.Summary +} + +// GetTagName returns the TagName field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetTagName() string { + if p == nil || p.TagName == nil { + return "" + } + return *p.TagName +} + +// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetTargetCommitish() string { + if p == nil || p.TargetCommitish == nil { + return "" + } + return *p.TargetCommitish +} + +// GetTargetOID returns the TargetOID field if it's non-nil, zero value otherwise. 
+func (p *PackageVersion) GetTargetOID() string { + if p == nil || p.TargetOID == nil { + return "" + } + return *p.TargetOID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetVersion returns the Version field if it's non-nil, zero value otherwise. +func (p *PackageVersion) GetVersion() string { + if p == nil || p.Version == nil { + return "" + } + return *p.Version +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *Page) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Page) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetPageName returns the PageName field if it's non-nil, zero value otherwise. +func (p *Page) GetPageName() string { + if p == nil || p.PageName == nil { + return "" + } + return *p.PageName +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (p *Page) GetSHA() string { + if p == nil || p.SHA == nil { + return "" + } + return *p.SHA +} + +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (p *Page) GetSummary() string { + if p == nil || p.Summary == nil { + return "" + } + return *p.Summary +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (p *Page) GetTitle() string { + if p == nil || p.Title == nil { + return "" + } + return *p.Title +} + +// GetBuild returns the Build field. +func (p *PageBuildEvent) GetBuild() *PagesBuild { + if p == nil { + return nil + } + return p.Build +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PageBuildEvent) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetInstallation returns the Installation field. +func (p *PageBuildEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PageBuildEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetRepo returns the Repo field. +func (p *PageBuildEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PageBuildEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. +func (p *Pages) GetBuildType() string { + if p == nil || p.BuildType == nil { + return "" + } + return *p.BuildType +} + +// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. +func (p *Pages) GetCNAME() string { + if p == nil || p.CNAME == nil { + return "" + } + return *p.CNAME +} + +// GetCustom404 returns the Custom404 field if it's non-nil, zero value otherwise. +func (p *Pages) GetCustom404() bool { + if p == nil || p.Custom404 == nil { + return false + } + return *p.Custom404 +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. 
+func (p *Pages) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetHTTPSCertificate returns the HTTPSCertificate field. +func (p *Pages) GetHTTPSCertificate() *PagesHTTPSCertificate { + if p == nil { + return nil + } + return p.HTTPSCertificate +} + +// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. +func (p *Pages) GetHTTPSEnforced() bool { + if p == nil || p.HTTPSEnforced == nil { + return false + } + return *p.HTTPSEnforced +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *Pages) GetPublic() bool { + if p == nil || p.Public == nil { + return false + } + return *p.Public +} + +// GetSource returns the Source field. +func (p *Pages) GetSource() *PagesSource { + if p == nil { + return nil + } + return p.Source +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (p *Pages) GetStatus() string { + if p == nil || p.Status == nil { + return "" + } + return *p.Status +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *Pages) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetCommit returns the Commit field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetCommit() string { + if p == nil || p.Commit == nil { + return "" + } + return *p.Commit +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDuration returns the Duration field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetDuration() int { + if p == nil || p.Duration == nil { + return 0 + } + return *p.Duration +} + +// GetError returns the Error field. +func (p *PagesBuild) GetError() *PagesError { + if p == nil { + return nil + } + return p.Error +} + +// GetPusher returns the Pusher field. +func (p *PagesBuild) GetPusher() *User { + if p == nil { + return nil + } + return p.Pusher +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetStatus() string { + if p == nil || p.Status == nil { + return "" + } + return *p.Status +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PagesBuild) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetCAAError returns the CAAError field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetCAAError() string { + if p == nil || p.CAAError == nil { + return "" + } + return *p.CAAError +} + +// GetDNSResolves returns the DNSResolves field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetDNSResolves() bool { + if p == nil || p.DNSResolves == nil { + return false + } + return *p.DNSResolves +} + +// GetEnforcesHTTPS returns the EnforcesHTTPS field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetEnforcesHTTPS() bool { + if p == nil || p.EnforcesHTTPS == nil { + return false + } + return *p.EnforcesHTTPS +} + +// GetHasCNAMERecord returns the HasCNAMERecord field if it's non-nil, zero value otherwise. 
+func (p *PagesDomain) GetHasCNAMERecord() bool { + if p == nil || p.HasCNAMERecord == nil { + return false + } + return *p.HasCNAMERecord +} + +// GetHasMXRecordsPresent returns the HasMXRecordsPresent field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetHasMXRecordsPresent() bool { + if p == nil || p.HasMXRecordsPresent == nil { + return false + } + return *p.HasMXRecordsPresent +} + +// GetHost returns the Host field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetHost() string { + if p == nil || p.Host == nil { + return "" + } + return *p.Host +} + +// GetHTTPSError returns the HTTPSError field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetHTTPSError() string { + if p == nil || p.HTTPSError == nil { + return "" + } + return *p.HTTPSError +} + +// GetIsApexDomain returns the IsApexDomain field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsApexDomain() bool { + if p == nil || p.IsApexDomain == nil { + return false + } + return *p.IsApexDomain +} + +// GetIsARecord returns the IsARecord field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsARecord() bool { + if p == nil || p.IsARecord == nil { + return false + } + return *p.IsARecord +} + +// GetIsCloudflareIP returns the IsCloudflareIP field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsCloudflareIP() bool { + if p == nil || p.IsCloudflareIP == nil { + return false + } + return *p.IsCloudflareIP +} + +// GetIsCNAMEToFastly returns the IsCNAMEToFastly field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsCNAMEToFastly() bool { + if p == nil || p.IsCNAMEToFastly == nil { + return false + } + return *p.IsCNAMEToFastly +} + +// GetIsCNAMEToGithubUserDomain returns the IsCNAMEToGithubUserDomain field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsCNAMEToGithubUserDomain() bool { + if p == nil || p.IsCNAMEToGithubUserDomain == nil { + return false + } + return *p.IsCNAMEToGithubUserDomain +} + +// GetIsCNAMEToPagesDotGithubDotCom returns the IsCNAMEToPagesDotGithubDotCom field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsCNAMEToPagesDotGithubDotCom() bool { + if p == nil || p.IsCNAMEToPagesDotGithubDotCom == nil { + return false + } + return *p.IsCNAMEToPagesDotGithubDotCom +} + +// GetIsFastlyIP returns the IsFastlyIP field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsFastlyIP() bool { + if p == nil || p.IsFastlyIP == nil { + return false + } + return *p.IsFastlyIP +} + +// GetIsHTTPSEligible returns the IsHTTPSEligible field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsHTTPSEligible() bool { + if p == nil || p.IsHTTPSEligible == nil { + return false + } + return *p.IsHTTPSEligible +} + +// GetIsNonGithubPagesIPPresent returns the IsNonGithubPagesIPPresent field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsNonGithubPagesIPPresent() bool { + if p == nil || p.IsNonGithubPagesIPPresent == nil { + return false + } + return *p.IsNonGithubPagesIPPresent +} + +// GetIsOldIPAddress returns the IsOldIPAddress field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsOldIPAddress() bool { + if p == nil || p.IsOldIPAddress == nil { + return false + } + return *p.IsOldIPAddress +} + +// GetIsPagesDomain returns the IsPagesDomain field if it's non-nil, zero value otherwise. 
+func (p *PagesDomain) GetIsPagesDomain() bool { + if p == nil || p.IsPagesDomain == nil { + return false + } + return *p.IsPagesDomain +} + +// GetIsPointedToGithubPagesIP returns the IsPointedToGithubPagesIP field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsPointedToGithubPagesIP() bool { + if p == nil || p.IsPointedToGithubPagesIP == nil { + return false + } + return *p.IsPointedToGithubPagesIP +} + +// GetIsProxied returns the IsProxied field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsProxied() bool { + if p == nil || p.IsProxied == nil { + return false + } + return *p.IsProxied +} + +// GetIsServedByPages returns the IsServedByPages field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsServedByPages() bool { + if p == nil || p.IsServedByPages == nil { + return false + } + return *p.IsServedByPages +} + +// GetIsValid returns the IsValid field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsValid() bool { + if p == nil || p.IsValid == nil { + return false + } + return *p.IsValid +} + +// GetIsValidDomain returns the IsValidDomain field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetIsValidDomain() bool { + if p == nil || p.IsValidDomain == nil { + return false + } + return *p.IsValidDomain +} + +// GetNameservers returns the Nameservers field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetNameservers() string { + if p == nil || p.Nameservers == nil { + return "" + } + return *p.Nameservers +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetReason() string { + if p == nil || p.Reason == nil { + return "" + } + return *p.Reason +} + +// GetRespondsToHTTPS returns the RespondsToHTTPS field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetRespondsToHTTPS() bool { + if p == nil || p.RespondsToHTTPS == nil { + return false + } + return *p.RespondsToHTTPS +} + +// GetShouldBeARecord returns the ShouldBeARecord field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetShouldBeARecord() bool { + if p == nil || p.ShouldBeARecord == nil { + return false + } + return *p.ShouldBeARecord +} + +// GetURI returns the URI field if it's non-nil, zero value otherwise. +func (p *PagesDomain) GetURI() string { + if p == nil || p.URI == nil { + return "" + } + return *p.URI +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PagesError) GetMessage() string { + if p == nil || p.Message == nil { + return "" + } + return *p.Message +} + +// GetAltDomain returns the AltDomain field. +func (p *PagesHealthCheckResponse) GetAltDomain() *PagesDomain { + if p == nil { + return nil + } + return p.AltDomain +} + +// GetDomain returns the Domain field. +func (p *PagesHealthCheckResponse) GetDomain() *PagesDomain { + if p == nil { + return nil + } + return p.Domain +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (p *PagesHTTPSCertificate) GetDescription() string { + if p == nil || p.Description == nil { + return "" + } + return *p.Description +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (p *PagesHTTPSCertificate) GetExpiresAt() string { + if p == nil || p.ExpiresAt == nil { + return "" + } + return *p.ExpiresAt +} + +// GetState returns the State field if it's non-nil, zero value otherwise. 
+func (p *PagesHTTPSCertificate) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetBranch returns the Branch field if it's non-nil, zero value otherwise. +func (p *PagesSource) GetBranch() string { + if p == nil || p.Branch == nil { + return "" + } + return *p.Branch +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (p *PagesSource) GetPath() string { + if p == nil || p.Path == nil { + return "" + } + return *p.Path +} + +// GetTotalPages returns the TotalPages field if it's non-nil, zero value otherwise. +func (p *PageStats) GetTotalPages() int { + if p == nil || p.TotalPages == nil { + return 0 + } + return *p.TotalPages +} + +// GetBuildType returns the BuildType field if it's non-nil, zero value otherwise. +func (p *PagesUpdate) GetBuildType() string { + if p == nil || p.BuildType == nil { + return "" + } + return *p.BuildType +} + +// GetCNAME returns the CNAME field if it's non-nil, zero value otherwise. +func (p *PagesUpdate) GetCNAME() string { + if p == nil || p.CNAME == nil { + return "" + } + return *p.CNAME +} + +// GetHTTPSEnforced returns the HTTPSEnforced field if it's non-nil, zero value otherwise. +func (p *PagesUpdate) GetHTTPSEnforced() bool { + if p == nil || p.HTTPSEnforced == nil { + return false + } + return *p.HTTPSEnforced +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *PagesUpdate) GetPublic() bool { + if p == nil || p.Public == nil { + return false + } + return *p.Public +} + +// GetSource returns the Source field. +func (p *PagesUpdate) GetSource() *PagesSource { + if p == nil { + return nil + } + return p.Source +} + +// GetOrg returns the Org map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetOrg() map[string]string { + if p == nil || p.Org == nil { + return map[string]string{} + } + return p.Org +} + +// GetOther returns the Other map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetOther() map[string]string { + if p == nil || p.Other == nil { + return map[string]string{} + } + return p.Other +} + +// GetRepo returns the Repo map if it's non-nil, an empty map otherwise. +func (p *PersonalAccessTokenPermissions) GetRepo() map[string]string { + if p == nil || p.Repo == nil { + return map[string]string{} + } + return p.Repo +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetOrg returns the Org field. +func (p *PersonalAccessTokenRequest) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetOwner returns the Owner field. +func (p *PersonalAccessTokenRequest) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPermissionsAdded returns the PermissionsAdded field. +func (p *PersonalAccessTokenRequest) GetPermissionsAdded() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsAdded +} + +// GetPermissionsResult returns the PermissionsResult field. 
+func (p *PersonalAccessTokenRequest) GetPermissionsResult() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsResult +} + +// GetPermissionsUpgraded returns the PermissionsUpgraded field. +func (p *PersonalAccessTokenRequest) GetPermissionsUpgraded() *PersonalAccessTokenPermissions { + if p == nil { + return nil + } + return p.PermissionsUpgraded +} + +// GetRepositoryCount returns the RepositoryCount field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetRepositoryCount() int64 { + if p == nil || p.RepositoryCount == nil { + return 0 + } + return *p.RepositoryCount +} + +// GetRepositorySelection returns the RepositorySelection field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetRepositorySelection() string { + if p == nil || p.RepositorySelection == nil { + return "" + } + return *p.RepositorySelection +} + +// GetTokenExpired returns the TokenExpired field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenExpired() bool { + if p == nil || p.TokenExpired == nil { + return false + } + return *p.TokenExpired +} + +// GetTokenExpiresAt returns the TokenExpiresAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenExpiresAt() Timestamp { + if p == nil || p.TokenExpiresAt == nil { + return Timestamp{} + } + return *p.TokenExpiresAt +} + +// GetTokenLastUsedAt returns the TokenLastUsedAt field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequest) GetTokenLastUsedAt() Timestamp { + if p == nil || p.TokenLastUsedAt == nil { + return Timestamp{} + } + return *p.TokenLastUsedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PersonalAccessTokenRequestEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *PersonalAccessTokenRequestEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PersonalAccessTokenRequestEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetPersonalAccessTokenRequest returns the PersonalAccessTokenRequest field. +func (p *PersonalAccessTokenRequestEvent) GetPersonalAccessTokenRequest() *PersonalAccessTokenRequest { + if p == nil { + return nil + } + return p.PersonalAccessTokenRequest +} + +// GetSender returns the Sender field. +func (p *PersonalAccessTokenRequestEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetHook returns the Hook field. +func (p *PingEvent) GetHook() *Hook { + if p == nil { + return nil + } + return p.Hook +} + +// GetHookID returns the HookID field if it's non-nil, zero value otherwise. +func (p *PingEvent) GetHookID() int64 { + if p == nil || p.HookID == nil { + return 0 + } + return *p.HookID +} + +// GetInstallation returns the Installation field. +func (p *PingEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PingEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetRepo returns the Repo field. +func (p *PingEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. 
+func (p *PingEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetZen returns the Zen field if it's non-nil, zero value otherwise. +func (p *PingEvent) GetZen() string { + if p == nil || p.Zen == nil { + return "" + } + return *p.Zen +} + +// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. +func (p *Plan) GetCollaborators() int { + if p == nil || p.Collaborators == nil { + return 0 + } + return *p.Collaborators +} + +// GetFilledSeats returns the FilledSeats field if it's non-nil, zero value otherwise. +func (p *Plan) GetFilledSeats() int { + if p == nil || p.FilledSeats == nil { + return 0 + } + return *p.FilledSeats +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *Plan) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetPrivateRepos returns the PrivateRepos field if it's non-nil, zero value otherwise. +func (p *Plan) GetPrivateRepos() int64 { + if p == nil || p.PrivateRepos == nil { + return 0 + } + return *p.PrivateRepos +} + +// GetSeats returns the Seats field if it's non-nil, zero value otherwise. +func (p *Plan) GetSeats() int { + if p == nil || p.Seats == nil { + return 0 + } + return *p.Seats +} + +// GetSpace returns the Space field if it's non-nil, zero value otherwise. +func (p *Plan) GetSpace() int { + if p == nil || p.Space == nil { + return 0 + } + return *p.Space +} + +// GetCode returns the Code field if it's non-nil, zero value otherwise. +func (p *PolicyOverrideReason) GetCode() string { + if p == nil || p.Code == nil { + return "" + } + return *p.Code +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PolicyOverrideReason) GetMessage() string { + if p == nil || p.Message == nil { + return "" + } + return *p.Message +} + +// GetConfigURL returns the ConfigURL field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetConfigURL() string { + if p == nil || p.ConfigURL == nil { + return "" + } + return *p.ConfigURL +} + +// GetEnforcement returns the Enforcement field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetEnforcement() string { + if p == nil || p.Enforcement == nil { + return "" + } + return *p.Enforcement +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetHRef returns the HRef field if it's non-nil, zero value otherwise. +func (p *PRLink) GetHRef() string { + if p == nil || p.HRef == nil { + return "" + } + return *p.HRef +} + +// GetComments returns the Comments field. +func (p *PRLinks) GetComments() *PRLink { + if p == nil { + return nil + } + return p.Comments +} + +// GetCommits returns the Commits field. +func (p *PRLinks) GetCommits() *PRLink { + if p == nil { + return nil + } + return p.Commits +} + +// GetHTML returns the HTML field. +func (p *PRLinks) GetHTML() *PRLink { + if p == nil { + return nil + } + return p.HTML +} + +// GetIssue returns the Issue field. +func (p *PRLinks) GetIssue() *PRLink { + if p == nil { + return nil + } + return p.Issue +} + +// GetReviewComment returns the ReviewComment field. 
+func (p *PRLinks) GetReviewComment() *PRLink { + if p == nil { + return nil + } + return p.ReviewComment +} + +// GetReviewComments returns the ReviewComments field. +func (p *PRLinks) GetReviewComments() *PRLink { + if p == nil { + return nil + } + return p.ReviewComments +} + +// GetSelf returns the Self field. +func (p *PRLinks) GetSelf() *PRLink { + if p == nil { + return nil + } + return p.Self +} + +// GetStatuses returns the Statuses field. +func (p *PRLinks) GetStatuses() *PRLink { + if p == nil { + return nil + } + return p.Statuses +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *Project) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetColumnsURL returns the ColumnsURL field if it's non-nil, zero value otherwise. +func (p *Project) GetColumnsURL() string { + if p == nil || p.ColumnsURL == nil { + return "" + } + return *p.ColumnsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *Project) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *Project) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Project) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *Project) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *Project) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *Project) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *Project) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. +func (p *Project) GetOrganizationPermission() string { + if p == nil || p.OrganizationPermission == nil { + return "" + } + return *p.OrganizationPermission +} + +// GetOwnerURL returns the OwnerURL field if it's non-nil, zero value otherwise. +func (p *Project) GetOwnerURL() string { + if p == nil || p.OwnerURL == nil { + return "" + } + return *p.OwnerURL +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (p *Project) GetPrivate() bool { + if p == nil || p.Private == nil { + return false + } + return *p.Private +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *Project) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *Project) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. 
+func (p *Project) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (p *ProjectBody) GetFrom() string { + if p == nil || p.From == nil { + return "" + } + return *p.From +} + +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetArchived() bool { + if p == nil || p.Archived == nil { + return false + } + return *p.Archived +} + +// GetColumnID returns the ColumnID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetColumnID() int64 { + if p == nil || p.ColumnID == nil { + return 0 + } + return *p.ColumnID +} + +// GetColumnName returns the ColumnName field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetColumnName() string { + if p == nil || p.ColumnName == nil { + return "" + } + return *p.ColumnName +} + +// GetColumnURL returns the ColumnURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetColumnURL() string { + if p == nil || p.ColumnURL == nil { + return "" + } + return *p.ColumnURL +} + +// GetContentURL returns the ContentURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetContentURL() string { + if p == nil || p.ContentURL == nil { + return "" + } + return *p.ContentURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *ProjectCard) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetNote returns the Note field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetNote() string { + if p == nil || p.Note == nil { + return "" + } + return *p.Note +} + +// GetPreviousColumnName returns the PreviousColumnName field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetPreviousColumnName() string { + if p == nil || p.PreviousColumnName == nil { + return "" + } + return *p.PreviousColumnName +} + +// GetProjectID returns the ProjectID field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetProjectID() int64 { + if p == nil || p.ProjectID == nil { + return 0 + } + return *p.ProjectID +} + +// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetProjectURL() string { + if p == nil || p.ProjectURL == nil { + return "" + } + return *p.ProjectURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *ProjectCard) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetNote returns the Note field. 
+func (p *ProjectCardChange) GetNote() *ProjectCardNote { + if p == nil { + return nil + } + return p.Note +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectCardEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. +func (p *ProjectCardEvent) GetAfterID() int64 { + if p == nil || p.AfterID == nil { + return 0 + } + return *p.AfterID +} + +// GetChanges returns the Changes field. +func (p *ProjectCardEvent) GetChanges() *ProjectCardChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *ProjectCardEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectCardEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProjectCard returns the ProjectCard field. +func (p *ProjectCardEvent) GetProjectCard() *ProjectCard { + if p == nil { + return nil + } + return p.ProjectCard +} + +// GetRepo returns the Repo field. +func (p *ProjectCardEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *ProjectCardEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetArchivedState returns the ArchivedState field if it's non-nil, zero value otherwise. +func (p *ProjectCardListOptions) GetArchivedState() string { + if p == nil || p.ArchivedState == nil { + return "" + } + return *p.ArchivedState +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (p *ProjectCardNote) GetFrom() string { + if p == nil || p.From == nil { + return "" + } + return *p.From +} + +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (p *ProjectCardOptions) GetArchived() bool { + if p == nil || p.Archived == nil { + return false + } + return *p.Archived +} + +// GetBody returns the Body field. +func (p *ProjectChange) GetBody() *ProjectBody { + if p == nil { + return nil + } + return p.Body +} + +// GetName returns the Name field. +func (p *ProjectChange) GetName() *ProjectName { + if p == nil { + return nil + } + return p.Name +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (p *ProjectCollaboratorOptions) GetPermission() string { + if p == nil || p.Permission == nil { + return "" + } + return *p.Permission +} + +// GetCardsURL returns the CardsURL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetCardsURL() string { + if p == nil || p.CardsURL == nil { + return "" + } + return *p.CardsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
+func (p *ProjectColumn) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetProjectURL returns the ProjectURL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetProjectURL() string { + if p == nil || p.ProjectURL == nil { + return "" + } + return *p.ProjectURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetName returns the Name field. +func (p *ProjectColumnChange) GetName() *ProjectColumnName { + if p == nil { + return nil + } + return p.Name +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectColumnEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetAfterID returns the AfterID field if it's non-nil, zero value otherwise. +func (p *ProjectColumnEvent) GetAfterID() int64 { + if p == nil || p.AfterID == nil { + return 0 + } + return *p.AfterID +} + +// GetChanges returns the Changes field. +func (p *ProjectColumnEvent) GetChanges() *ProjectColumnChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *ProjectColumnEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectColumnEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProjectColumn returns the ProjectColumn field. +func (p *ProjectColumnEvent) GetProjectColumn() *ProjectColumn { + if p == nil { + return nil + } + return p.ProjectColumn +} + +// GetRepo returns the Repo field. +func (p *ProjectColumnEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *ProjectColumnEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (p *ProjectColumnName) GetFrom() string { + if p == nil || p.From == nil { + return "" + } + return *p.From +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetChanges returns the Changes field. +func (p *ProjectEvent) GetChanges() *ProjectChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *ProjectEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProject returns the Project field. +func (p *ProjectEvent) GetProject() *Project { + if p == nil { + return nil + } + return p.Project +} + +// GetRepo returns the Repo field. +func (p *ProjectEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. 
+func (p *ProjectEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (p *ProjectName) GetFrom() string { + if p == nil || p.From == nil { + return "" + } + return *p.From +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetOrganizationPermission() string { + if p == nil || p.OrganizationPermission == nil { + return "" + } + return *p.OrganizationPermission +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetPrivate() bool { + if p == nil || p.Private == nil { + return false + } + return *p.Private +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (p *ProjectPermissionLevel) GetPermission() string { + if p == nil || p.Permission == nil { + return "" + } + return *p.Permission +} + +// GetUser returns the User field. +func (p *ProjectPermissionLevel) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetClosedAt() Timestamp { + if p == nil || p.ClosedAt == nil { + return Timestamp{} + } + return *p.ClosedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *ProjectsV2) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetDeletedAt returns the DeletedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetDeletedAt() Timestamp { + if p == nil || p.DeletedAt == nil { + return Timestamp{} + } + return *p.DeletedAt +} + +// GetDeletedBy returns the DeletedBy field. +func (p *ProjectsV2) GetDeletedBy() *User { + if p == nil { + return nil + } + return p.DeletedBy +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetDescription() string { + if p == nil || p.Description == nil { + return "" + } + return *p.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetOwner returns the Owner field. 
+func (p *ProjectsV2) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetPublic() bool { + if p == nil || p.Public == nil { + return false + } + return *p.Public +} + +// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetShortDescription() string { + if p == nil || p.ShortDescription == nil { + return "" + } + return *p.ShortDescription +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetTitle() string { + if p == nil || p.Title == nil { + return "" + } + return *p.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectsV2) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectV2Event) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *ProjectV2Event) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectV2Event) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProjectsV2 returns the ProjectsV2 field. +func (p *ProjectV2Event) GetProjectsV2() *ProjectsV2 { + if p == nil { + return nil + } + return p.ProjectsV2 +} + +// GetSender returns the Sender field. +func (p *ProjectV2Event) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetArchivedAt returns the ArchivedAt field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetArchivedAt() Timestamp { + if p == nil || p.ArchivedAt == nil { + return Timestamp{} + } + return *p.ArchivedAt +} + +// GetContentNodeID returns the ContentNodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetContentNodeID() string { + if p == nil || p.ContentNodeID == nil { + return "" + } + return *p.ContentNodeID +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetContentType() string { + if p == nil || p.ContentType == nil { + return "" + } + return *p.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetCreator returns the Creator field. +func (p *ProjectV2Item) GetCreator() *User { + if p == nil { + return nil + } + return p.Creator +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetProjectNodeID returns the ProjectNodeID field if it's non-nil, zero value otherwise. +func (p *ProjectV2Item) GetProjectNodeID() string { + if p == nil || p.ProjectNodeID == nil { + return "" + } + return *p.ProjectNodeID +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (p *ProjectV2Item) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetArchivedAt returns the ArchivedAt field. +func (p *ProjectV2ItemChange) GetArchivedAt() *ArchivedAt { + if p == nil { + return nil + } + return p.ArchivedAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *ProjectV2ItemEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetChanges returns the Changes field. +func (p *ProjectV2ItemEvent) GetChanges() *ProjectV2ItemChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *ProjectV2ItemEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *ProjectV2ItemEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetProjectV2Item returns the ProjectV2Item field. +func (p *ProjectV2ItemEvent) GetProjectV2Item() *ProjectV2Item { + if p == nil { + return nil + } + return p.ProjectV2Item +} + +// GetSender returns the Sender field. +func (p *ProjectV2ItemEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetAllowDeletions returns the AllowDeletions field. +func (p *Protection) GetAllowDeletions() *AllowDeletions { + if p == nil { + return nil + } + return p.AllowDeletions +} + +// GetAllowForcePushes returns the AllowForcePushes field. +func (p *Protection) GetAllowForcePushes() *AllowForcePushes { + if p == nil { + return nil + } + return p.AllowForcePushes +} + +// GetAllowForkSyncing returns the AllowForkSyncing field. +func (p *Protection) GetAllowForkSyncing() *AllowForkSyncing { + if p == nil { + return nil + } + return p.AllowForkSyncing +} + +// GetBlockCreations returns the BlockCreations field. +func (p *Protection) GetBlockCreations() *BlockCreations { + if p == nil { + return nil + } + return p.BlockCreations +} + +// GetEnforceAdmins returns the EnforceAdmins field. +func (p *Protection) GetEnforceAdmins() *AdminEnforcement { + if p == nil { + return nil + } + return p.EnforceAdmins +} + +// GetLockBranch returns the LockBranch field. +func (p *Protection) GetLockBranch() *LockBranch { + if p == nil { + return nil + } + return p.LockBranch +} + +// GetRequiredConversationResolution returns the RequiredConversationResolution field. +func (p *Protection) GetRequiredConversationResolution() *RequiredConversationResolution { + if p == nil { + return nil + } + return p.RequiredConversationResolution +} + +// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. +func (p *Protection) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcement { + if p == nil { + return nil + } + return p.RequiredPullRequestReviews +} + +// GetRequiredSignatures returns the RequiredSignatures field. +func (p *Protection) GetRequiredSignatures() *SignaturesProtectedBranch { + if p == nil { + return nil + } + return p.RequiredSignatures +} + +// GetRequiredStatusChecks returns the RequiredStatusChecks field. +func (p *Protection) GetRequiredStatusChecks() *RequiredStatusChecks { + if p == nil { + return nil + } + return p.RequiredStatusChecks +} + +// GetRequireLinearHistory returns the RequireLinearHistory field. 
+func (p *Protection) GetRequireLinearHistory() *RequireLinearHistory { + if p == nil { + return nil + } + return p.RequireLinearHistory +} + +// GetRestrictions returns the Restrictions field. +func (p *Protection) GetRestrictions() *BranchRestrictions { + if p == nil { + return nil + } + return p.Restrictions +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *Protection) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetAdminEnforced returns the AdminEnforced field. +func (p *ProtectionChanges) GetAdminEnforced() *AdminEnforcedChanges { + if p == nil { + return nil + } + return p.AdminEnforced +} + +// GetAllowDeletionsEnforcementLevel returns the AllowDeletionsEnforcementLevel field. +func (p *ProtectionChanges) GetAllowDeletionsEnforcementLevel() *AllowDeletionsEnforcementLevelChanges { + if p == nil { + return nil + } + return p.AllowDeletionsEnforcementLevel +} + +// GetAuthorizedActorNames returns the AuthorizedActorNames field. +func (p *ProtectionChanges) GetAuthorizedActorNames() *AuthorizedActorNames { + if p == nil { + return nil + } + return p.AuthorizedActorNames +} + +// GetAuthorizedActorsOnly returns the AuthorizedActorsOnly field. +func (p *ProtectionChanges) GetAuthorizedActorsOnly() *AuthorizedActorsOnly { + if p == nil { + return nil + } + return p.AuthorizedActorsOnly +} + +// GetAuthorizedDismissalActorsOnly returns the AuthorizedDismissalActorsOnly field. +func (p *ProtectionChanges) GetAuthorizedDismissalActorsOnly() *AuthorizedDismissalActorsOnlyChanges { + if p == nil { + return nil + } + return p.AuthorizedDismissalActorsOnly +} + +// GetCreateProtected returns the CreateProtected field. +func (p *ProtectionChanges) GetCreateProtected() *CreateProtectedChanges { + if p == nil { + return nil + } + return p.CreateProtected +} + +// GetDismissStaleReviewsOnPush returns the DismissStaleReviewsOnPush field. +func (p *ProtectionChanges) GetDismissStaleReviewsOnPush() *DismissStaleReviewsOnPushChanges { + if p == nil { + return nil + } + return p.DismissStaleReviewsOnPush +} + +// GetLinearHistoryRequirementEnforcementLevel returns the LinearHistoryRequirementEnforcementLevel field. +func (p *ProtectionChanges) GetLinearHistoryRequirementEnforcementLevel() *LinearHistoryRequirementEnforcementLevelChanges { + if p == nil { + return nil + } + return p.LinearHistoryRequirementEnforcementLevel +} + +// GetPullRequestReviewsEnforcementLevel returns the PullRequestReviewsEnforcementLevel field. +func (p *ProtectionChanges) GetPullRequestReviewsEnforcementLevel() *PullRequestReviewsEnforcementLevelChanges { + if p == nil { + return nil + } + return p.PullRequestReviewsEnforcementLevel +} + +// GetRequireCodeOwnerReview returns the RequireCodeOwnerReview field. +func (p *ProtectionChanges) GetRequireCodeOwnerReview() *RequireCodeOwnerReviewChanges { + if p == nil { + return nil + } + return p.RequireCodeOwnerReview +} + +// GetRequiredConversationResolutionLevel returns the RequiredConversationResolutionLevel field. +func (p *ProtectionChanges) GetRequiredConversationResolutionLevel() *RequiredConversationResolutionLevelChanges { + if p == nil { + return nil + } + return p.RequiredConversationResolutionLevel +} + +// GetRequiredDeploymentsEnforcementLevel returns the RequiredDeploymentsEnforcementLevel field. 
+func (p *ProtectionChanges) GetRequiredDeploymentsEnforcementLevel() *RequiredDeploymentsEnforcementLevelChanges { + if p == nil { + return nil + } + return p.RequiredDeploymentsEnforcementLevel +} + +// GetRequiredStatusChecks returns the RequiredStatusChecks field. +func (p *ProtectionChanges) GetRequiredStatusChecks() *RequiredStatusChecksChanges { + if p == nil { + return nil + } + return p.RequiredStatusChecks +} + +// GetRequiredStatusChecksEnforcementLevel returns the RequiredStatusChecksEnforcementLevel field. +func (p *ProtectionChanges) GetRequiredStatusChecksEnforcementLevel() *RequiredStatusChecksEnforcementLevelChanges { + if p == nil { + return nil + } + return p.RequiredStatusChecksEnforcementLevel +} + +// GetSignatureRequirementEnforcementLevel returns the SignatureRequirementEnforcementLevel field. +func (p *ProtectionChanges) GetSignatureRequirementEnforcementLevel() *SignatureRequirementEnforcementLevelChanges { + if p == nil { + return nil + } + return p.SignatureRequirementEnforcementLevel +} + +// GetAllowDeletions returns the AllowDeletions field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetAllowDeletions() bool { + if p == nil || p.AllowDeletions == nil { + return false + } + return *p.AllowDeletions +} + +// GetAllowForcePushes returns the AllowForcePushes field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetAllowForcePushes() bool { + if p == nil || p.AllowForcePushes == nil { + return false + } + return *p.AllowForcePushes +} + +// GetAllowForkSyncing returns the AllowForkSyncing field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetAllowForkSyncing() bool { + if p == nil || p.AllowForkSyncing == nil { + return false + } + return *p.AllowForkSyncing +} + +// GetBlockCreations returns the BlockCreations field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetBlockCreations() bool { + if p == nil || p.BlockCreations == nil { + return false + } + return *p.BlockCreations +} + +// GetLockBranch returns the LockBranch field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetLockBranch() bool { + if p == nil || p.LockBranch == nil { + return false + } + return *p.LockBranch +} + +// GetRequiredConversationResolution returns the RequiredConversationResolution field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetRequiredConversationResolution() bool { + if p == nil || p.RequiredConversationResolution == nil { + return false + } + return *p.RequiredConversationResolution +} + +// GetRequiredPullRequestReviews returns the RequiredPullRequestReviews field. +func (p *ProtectionRequest) GetRequiredPullRequestReviews() *PullRequestReviewsEnforcementRequest { + if p == nil { + return nil + } + return p.RequiredPullRequestReviews +} + +// GetRequiredStatusChecks returns the RequiredStatusChecks field. +func (p *ProtectionRequest) GetRequiredStatusChecks() *RequiredStatusChecks { + if p == nil { + return nil + } + return p.RequiredStatusChecks +} + +// GetRequireLinearHistory returns the RequireLinearHistory field if it's non-nil, zero value otherwise. +func (p *ProtectionRequest) GetRequireLinearHistory() bool { + if p == nil || p.RequireLinearHistory == nil { + return false + } + return *p.RequireLinearHistory +} + +// GetRestrictions returns the Restrictions field. 
+func (p *ProtectionRequest) GetRestrictions() *BranchRestrictionsRequest { + if p == nil { + return nil + } + return p.Restrictions +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *ProtectionRule) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *ProtectionRule) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetPreventSelfReview returns the PreventSelfReview field if it's non-nil, zero value otherwise. +func (p *ProtectionRule) GetPreventSelfReview() bool { + if p == nil || p.PreventSelfReview == nil { + return false + } + return *p.PreventSelfReview +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (p *ProtectionRule) GetType() string { + if p == nil || p.Type == nil { + return "" + } + return *p.Type +} + +// GetWaitTimer returns the WaitTimer field if it's non-nil, zero value otherwise. +func (p *ProtectionRule) GetWaitTimer() int { + if p == nil || p.WaitTimer == nil { + return 0 + } + return *p.WaitTimer +} + +// GetInstallation returns the Installation field. +func (p *PublicEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PublicEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetRepo returns the Repo field. +func (p *PublicEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PublicEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (p *PublicKey) GetKey() string { + if p == nil || p.Key == nil { + return "" + } + return *p.Key +} + +// GetKeyID returns the KeyID field if it's non-nil, zero value otherwise. +func (p *PublicKey) GetKeyID() string { + if p == nil || p.KeyID == nil { + return "" + } + return *p.KeyID +} + +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetActiveLockReason() string { + if p == nil || p.ActiveLockReason == nil { + return "" + } + return *p.ActiveLockReason +} + +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetAdditions() int { + if p == nil || p.Additions == nil { + return 0 + } + return *p.Additions +} + +// GetAssignee returns the Assignee field. +func (p *PullRequest) GetAssignee() *User { + if p == nil { + return nil + } + return p.Assignee +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetAuthorAssociation() string { + if p == nil || p.AuthorAssociation == nil { + return "" + } + return *p.AuthorAssociation +} + +// GetAutoMerge returns the AutoMerge field. +func (p *PullRequest) GetAutoMerge() *PullRequestAutoMerge { + if p == nil { + return nil + } + return p.AutoMerge +} + +// GetBase returns the Base field. +func (p *PullRequest) GetBase() *PullRequestBranch { + if p == nil { + return nil + } + return p.Base +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. 
+func (p *PullRequest) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetChangedFiles returns the ChangedFiles field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetChangedFiles() int { + if p == nil || p.ChangedFiles == nil { + return 0 + } + return *p.ChangedFiles +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetClosedAt() Timestamp { + if p == nil || p.ClosedAt == nil { + return Timestamp{} + } + return *p.ClosedAt +} + +// GetComments returns the Comments field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetComments() int { + if p == nil || p.Comments == nil { + return 0 + } + return *p.Comments +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCommentsURL() string { + if p == nil || p.CommentsURL == nil { + return "" + } + return *p.CommentsURL +} + +// GetCommits returns the Commits field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCommits() int { + if p == nil || p.Commits == nil { + return 0 + } + return *p.Commits +} + +// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCommitsURL() string { + if p == nil || p.CommitsURL == nil { + return "" + } + return *p.CommitsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetDeletions() int { + if p == nil || p.Deletions == nil { + return 0 + } + return *p.Deletions +} + +// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetDiffURL() string { + if p == nil || p.DiffURL == nil { + return "" + } + return *p.DiffURL +} + +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetDraft() bool { + if p == nil || p.Draft == nil { + return false + } + return *p.Draft +} + +// GetHead returns the Head field. +func (p *PullRequest) GetHead() *PullRequestBranch { + if p == nil { + return nil + } + return p.Head +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetIssueURL returns the IssueURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetIssueURL() string { + if p == nil || p.IssueURL == nil { + return "" + } + return *p.IssueURL +} + +// GetLinks returns the Links field. +func (p *PullRequest) GetLinks() *PRLinks { + if p == nil { + return nil + } + return p.Links +} + +// GetLocked returns the Locked field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetLocked() bool { + if p == nil || p.Locked == nil { + return false + } + return *p.Locked +} + +// GetMaintainerCanModify returns the MaintainerCanModify field if it's non-nil, zero value otherwise. 
+func (p *PullRequest) GetMaintainerCanModify() bool { + if p == nil || p.MaintainerCanModify == nil { + return false + } + return *p.MaintainerCanModify +} + +// GetMergeable returns the Mergeable field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMergeable() bool { + if p == nil || p.Mergeable == nil { + return false + } + return *p.Mergeable +} + +// GetMergeableState returns the MergeableState field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMergeableState() string { + if p == nil || p.MergeableState == nil { + return "" + } + return *p.MergeableState +} + +// GetMergeCommitSHA returns the MergeCommitSHA field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMergeCommitSHA() string { + if p == nil || p.MergeCommitSHA == nil { + return "" + } + return *p.MergeCommitSHA +} + +// GetMerged returns the Merged field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMerged() bool { + if p == nil || p.Merged == nil { + return false + } + return *p.Merged +} + +// GetMergedAt returns the MergedAt field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetMergedAt() Timestamp { + if p == nil || p.MergedAt == nil { + return Timestamp{} + } + return *p.MergedAt +} + +// GetMergedBy returns the MergedBy field. +func (p *PullRequest) GetMergedBy() *User { + if p == nil { + return nil + } + return p.MergedBy +} + +// GetMilestone returns the Milestone field. +func (p *PullRequest) GetMilestone() *Milestone { + if p == nil { + return nil + } + return p.Milestone +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetPatchURL() string { + if p == nil || p.PatchURL == nil { + return "" + } + return *p.PatchURL +} + +// GetRebaseable returns the Rebaseable field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetRebaseable() bool { + if p == nil || p.Rebaseable == nil { + return false + } + return *p.Rebaseable +} + +// GetReviewComments returns the ReviewComments field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetReviewComments() int { + if p == nil || p.ReviewComments == nil { + return 0 + } + return *p.ReviewComments +} + +// GetReviewCommentsURL returns the ReviewCommentsURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetReviewCommentsURL() string { + if p == nil || p.ReviewCommentsURL == nil { + return "" + } + return *p.ReviewCommentsURL +} + +// GetReviewCommentURL returns the ReviewCommentURL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetReviewCommentURL() string { + if p == nil || p.ReviewCommentURL == nil { + return "" + } + return *p.ReviewCommentURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. 
+func (p *PullRequest) GetStatusesURL() string { + if p == nil || p.StatusesURL == nil { + return "" + } + return *p.StatusesURL +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetTitle() string { + if p == nil || p.Title == nil { + return "" + } + return *p.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetUser returns the User field. +func (p *PullRequest) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. +func (p *PullRequestAutoMerge) GetCommitMessage() string { + if p == nil || p.CommitMessage == nil { + return "" + } + return *p.CommitMessage +} + +// GetCommitTitle returns the CommitTitle field if it's non-nil, zero value otherwise. +func (p *PullRequestAutoMerge) GetCommitTitle() string { + if p == nil || p.CommitTitle == nil { + return "" + } + return *p.CommitTitle +} + +// GetEnabledBy returns the EnabledBy field. +func (p *PullRequestAutoMerge) GetEnabledBy() *User { + if p == nil { + return nil + } + return p.EnabledBy +} + +// GetMergeMethod returns the MergeMethod field if it's non-nil, zero value otherwise. +func (p *PullRequestAutoMerge) GetMergeMethod() string { + if p == nil || p.MergeMethod == nil { + return "" + } + return *p.MergeMethod +} + +// GetLabel returns the Label field if it's non-nil, zero value otherwise. +func (p *PullRequestBranch) GetLabel() string { + if p == nil || p.Label == nil { + return "" + } + return *p.Label +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (p *PullRequestBranch) GetRef() string { + if p == nil || p.Ref == nil { + return "" + } + return *p.Ref +} + +// GetRepo returns the Repo field. +func (p *PullRequestBranch) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (p *PullRequestBranch) GetSHA() string { + if p == nil || p.SHA == nil { + return "" + } + return *p.SHA +} + +// GetUser returns the User field. +func (p *PullRequestBranch) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetExpectedHeadSHA returns the ExpectedHeadSHA field if it's non-nil, zero value otherwise. +func (p *PullRequestBranchUpdateOptions) GetExpectedHeadSHA() string { + if p == nil || p.ExpectedHeadSHA == nil { + return "" + } + return *p.ExpectedHeadSHA +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PullRequestBranchUpdateResponse) GetMessage() string { + if p == nil || p.Message == nil { + return "" + } + return *p.Message +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PullRequestBranchUpdateResponse) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. 
+func (p *PullRequestComment) GetAuthorAssociation() string { + if p == nil || p.AuthorAssociation == nil { + return "" + } + return *p.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetCommitID() string { + if p == nil || p.CommitID == nil { + return "" + } + return *p.CommitID +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDiffHunk returns the DiffHunk field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetDiffHunk() string { + if p == nil || p.DiffHunk == nil { + return "" + } + return *p.DiffHunk +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetInReplyTo returns the InReplyTo field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetInReplyTo() int64 { + if p == nil || p.InReplyTo == nil { + return 0 + } + return *p.InReplyTo +} + +// GetLine returns the Line field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetLine() int { + if p == nil || p.Line == nil { + return 0 + } + return *p.Line +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetOriginalCommitID returns the OriginalCommitID field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetOriginalCommitID() string { + if p == nil || p.OriginalCommitID == nil { + return "" + } + return *p.OriginalCommitID +} + +// GetOriginalLine returns the OriginalLine field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetOriginalLine() int { + if p == nil || p.OriginalLine == nil { + return 0 + } + return *p.OriginalLine +} + +// GetOriginalPosition returns the OriginalPosition field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetOriginalPosition() int { + if p == nil || p.OriginalPosition == nil { + return 0 + } + return *p.OriginalPosition +} + +// GetOriginalStartLine returns the OriginalStartLine field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetOriginalStartLine() int { + if p == nil || p.OriginalStartLine == nil { + return 0 + } + return *p.OriginalStartLine +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetPath() string { + if p == nil || p.Path == nil { + return "" + } + return *p.Path +} + +// GetPosition returns the Position field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetPosition() int { + if p == nil || p.Position == nil { + return 0 + } + return *p.Position +} + +// GetPullRequestReviewID returns the PullRequestReviewID field if it's non-nil, zero value otherwise. 
+func (p *PullRequestComment) GetPullRequestReviewID() int64 { + if p == nil || p.PullRequestReviewID == nil { + return 0 + } + return *p.PullRequestReviewID +} + +// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetPullRequestURL() string { + if p == nil || p.PullRequestURL == nil { + return "" + } + return *p.PullRequestURL +} + +// GetReactions returns the Reactions field. +func (p *PullRequestComment) GetReactions() *Reactions { + if p == nil { + return nil + } + return p.Reactions +} + +// GetSide returns the Side field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetSide() string { + if p == nil || p.Side == nil { + return "" + } + return *p.Side +} + +// GetStartLine returns the StartLine field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetStartLine() int { + if p == nil || p.StartLine == nil { + return 0 + } + return *p.StartLine +} + +// GetStartSide returns the StartSide field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetStartSide() string { + if p == nil || p.StartSide == nil { + return "" + } + return *p.StartSide +} + +// GetSubjectType returns the SubjectType field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetSubjectType() string { + if p == nil || p.SubjectType == nil { + return "" + } + return *p.SubjectType +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PullRequestComment) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetUser returns the User field. +func (p *PullRequestComment) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PullRequestEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetAfter returns the After field if it's non-nil, zero value otherwise. +func (p *PullRequestEvent) GetAfter() string { + if p == nil || p.After == nil { + return "" + } + return *p.After +} + +// GetAssignee returns the Assignee field. +func (p *PullRequestEvent) GetAssignee() *User { + if p == nil { + return nil + } + return p.Assignee +} + +// GetBefore returns the Before field if it's non-nil, zero value otherwise. +func (p *PullRequestEvent) GetBefore() string { + if p == nil || p.Before == nil { + return "" + } + return *p.Before +} + +// GetChanges returns the Changes field. +func (p *PullRequestEvent) GetChanges() *EditChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *PullRequestEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetLabel returns the Label field. +func (p *PullRequestEvent) GetLabel() *Label { + if p == nil { + return nil + } + return p.Label +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *PullRequestEvent) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetOrganization returns the Organization field. 
+func (p *PullRequestEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. +func (p *PullRequestEvent) GetPerformedViaGithubApp() *App { + if p == nil { + return nil + } + return p.PerformedViaGithubApp +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetRequestedReviewer returns the RequestedReviewer field. +func (p *PullRequestEvent) GetRequestedReviewer() *User { + if p == nil { + return nil + } + return p.RequestedReviewer +} + +// GetRequestedTeam returns the RequestedTeam field. +func (p *PullRequestEvent) GetRequestedTeam() *Team { + if p == nil { + return nil + } + return p.RequestedTeam +} + +// GetSender returns the Sender field. +func (p *PullRequestEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetDiffURL returns the DiffURL field if it's non-nil, zero value otherwise. +func (p *PullRequestLinks) GetDiffURL() string { + if p == nil || p.DiffURL == nil { + return "" + } + return *p.DiffURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PullRequestLinks) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetPatchURL returns the PatchURL field if it's non-nil, zero value otherwise. +func (p *PullRequestLinks) GetPatchURL() string { + if p == nil || p.PatchURL == nil { + return "" + } + return *p.PatchURL +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PullRequestLinks) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetMerged returns the Merged field if it's non-nil, zero value otherwise. +func (p *PullRequestMergeResult) GetMerged() bool { + if p == nil || p.Merged == nil { + return false + } + return *p.Merged +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PullRequestMergeResult) GetMessage() string { + if p == nil || p.Message == nil { + return "" + } + return *p.Message +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (p *PullRequestMergeResult) GetSHA() string { + if p == nil || p.SHA == nil { + return "" + } + return *p.SHA +} + +// GetAuthorAssociation returns the AuthorAssociation field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetAuthorAssociation() string { + if p == nil || p.AuthorAssociation == nil { + return "" + } + return *p.AuthorAssociation +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetCommitID() string { + if p == nil || p.CommitID == nil { + return "" + } + return *p.CommitID +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. 
+func (p *PullRequestReview) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetPullRequestURL returns the PullRequestURL field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetPullRequestURL() string { + if p == nil || p.PullRequestURL == nil { + return "" + } + return *p.PullRequestURL +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + +// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. +func (p *PullRequestReview) GetSubmittedAt() Timestamp { + if p == nil || p.SubmittedAt == nil { + return Timestamp{} + } + return *p.SubmittedAt +} + +// GetUser returns the User field. +func (p *PullRequestReview) GetUser() *User { + if p == nil { + return nil + } + return p.User +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewCommentEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetChanges returns the Changes field. +func (p *PullRequestReviewCommentEvent) GetChanges() *EditChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetComment returns the Comment field. +func (p *PullRequestReviewCommentEvent) GetComment() *PullRequestComment { + if p == nil { + return nil + } + return p.Comment +} + +// GetInstallation returns the Installation field. +func (p *PullRequestReviewCommentEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PullRequestReviewCommentEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestReviewCommentEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestReviewCommentEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PullRequestReviewCommentEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewDismissalRequest) GetMessage() string { + if p == nil || p.Message == nil { + return "" + } + return *p.Message +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *PullRequestReviewEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrganization returns the Organization field. +func (p *PullRequestReviewEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestReviewEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. 
+func (p *PullRequestReviewEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetReview returns the Review field. +func (p *PullRequestReviewEvent) GetReview() *PullRequestReview { + if p == nil { + return nil + } + return p.Review +} + +// GetSender returns the Sender field. +func (p *PullRequestReviewEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewRequest) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewRequest) GetCommitID() string { + if p == nil || p.CommitID == nil { + return "" + } + return *p.CommitID +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewRequest) GetEvent() string { + if p == nil || p.Event == nil { + return "" + } + return *p.Event +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewRequest) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetBypassPullRequestAllowances returns the BypassPullRequestAllowances field. +func (p *PullRequestReviewsEnforcement) GetBypassPullRequestAllowances() *BypassPullRequestAllowances { + if p == nil { + return nil + } + return p.BypassPullRequestAllowances +} + +// GetDismissalRestrictions returns the DismissalRestrictions field. +func (p *PullRequestReviewsEnforcement) GetDismissalRestrictions() *DismissalRestrictions { + if p == nil { + return nil + } + return p.DismissalRestrictions +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewsEnforcementLevelChanges) GetFrom() string { + if p == nil || p.From == nil { + return "" + } + return *p.From +} + +// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. +func (p *PullRequestReviewsEnforcementRequest) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { + if p == nil { + return nil + } + return p.BypassPullRequestAllowancesRequest +} + +// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. +func (p *PullRequestReviewsEnforcementRequest) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { + if p == nil { + return nil + } + return p.DismissalRestrictionsRequest +} + +// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewsEnforcementRequest) GetRequireLastPushApproval() bool { + if p == nil || p.RequireLastPushApproval == nil { + return false + } + return *p.RequireLastPushApproval +} + +// GetBypassPullRequestAllowancesRequest returns the BypassPullRequestAllowancesRequest field. +func (p *PullRequestReviewsEnforcementUpdate) GetBypassPullRequestAllowancesRequest() *BypassPullRequestAllowancesRequest { + if p == nil { + return nil + } + return p.BypassPullRequestAllowancesRequest +} + +// GetDismissalRestrictionsRequest returns the DismissalRestrictionsRequest field. +func (p *PullRequestReviewsEnforcementUpdate) GetDismissalRestrictionsRequest() *DismissalRestrictionsRequest { + if p == nil { + return nil + } + return p.DismissalRestrictionsRequest +} + +// GetDismissStaleReviews returns the DismissStaleReviews field if it's non-nil, zero value otherwise. 
+func (p *PullRequestReviewsEnforcementUpdate) GetDismissStaleReviews() bool { + if p == nil || p.DismissStaleReviews == nil { + return false + } + return *p.DismissStaleReviews +} + +// GetRequireCodeOwnerReviews returns the RequireCodeOwnerReviews field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewsEnforcementUpdate) GetRequireCodeOwnerReviews() bool { + if p == nil || p.RequireCodeOwnerReviews == nil { + return false + } + return *p.RequireCodeOwnerReviews +} + +// GetRequireLastPushApproval returns the RequireLastPushApproval field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewsEnforcementUpdate) GetRequireLastPushApproval() bool { + if p == nil || p.RequireLastPushApproval == nil { + return false + } + return *p.RequireLastPushApproval +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PullRequestReviewThreadEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetInstallation returns the Installation field. +func (p *PullRequestReviewThreadEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrg returns the Org field. +func (p *PullRequestReviewThreadEvent) GetOrg() *Organization { + if p == nil { + return nil + } + return p.Org +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestReviewThreadEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestReviewThreadEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PullRequestReviewThreadEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetThread returns the Thread field. +func (p *PullRequestReviewThreadEvent) GetThread() *PullRequestThread { + if p == nil { + return nil + } + return p.Thread +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PullRequestTargetEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetAfter returns the After field if it's non-nil, zero value otherwise. +func (p *PullRequestTargetEvent) GetAfter() string { + if p == nil || p.After == nil { + return "" + } + return *p.After +} + +// GetAssignee returns the Assignee field. +func (p *PullRequestTargetEvent) GetAssignee() *User { + if p == nil { + return nil + } + return p.Assignee +} + +// GetBefore returns the Before field if it's non-nil, zero value otherwise. +func (p *PullRequestTargetEvent) GetBefore() string { + if p == nil || p.Before == nil { + return "" + } + return *p.Before +} + +// GetChanges returns the Changes field. +func (p *PullRequestTargetEvent) GetChanges() *EditChange { + if p == nil { + return nil + } + return p.Changes +} + +// GetInstallation returns the Installation field. +func (p *PullRequestTargetEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetLabel returns the Label field. +func (p *PullRequestTargetEvent) GetLabel() *Label { + if p == nil { + return nil + } + return p.Label +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (p *PullRequestTargetEvent) GetNumber() int { + if p == nil || p.Number == nil { + return 0 + } + return *p.Number +} + +// GetOrganization returns the Organization field. 
+func (p *PullRequestTargetEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. +func (p *PullRequestTargetEvent) GetPerformedViaGithubApp() *App { + if p == nil { + return nil + } + return p.PerformedViaGithubApp +} + +// GetPullRequest returns the PullRequest field. +func (p *PullRequestTargetEvent) GetPullRequest() *PullRequest { + if p == nil { + return nil + } + return p.PullRequest +} + +// GetRepo returns the Repo field. +func (p *PullRequestTargetEvent) GetRepo() *Repository { + if p == nil { + return nil + } + return p.Repo +} + +// GetRequestedReviewer returns the RequestedReviewer field. +func (p *PullRequestTargetEvent) GetRequestedReviewer() *User { + if p == nil { + return nil + } + return p.RequestedReviewer +} + +// GetRequestedTeam returns the RequestedTeam field. +func (p *PullRequestTargetEvent) GetRequestedTeam() *Team { + if p == nil { + return nil + } + return p.RequestedTeam +} + +// GetSender returns the Sender field. +func (p *PullRequestTargetEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PullRequestThread) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PullRequestThread) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetMergablePulls returns the MergablePulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetMergablePulls() int { + if p == nil || p.MergablePulls == nil { + return 0 + } + return *p.MergablePulls +} + +// GetMergedPulls returns the MergedPulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetMergedPulls() int { + if p == nil || p.MergedPulls == nil { + return 0 + } + return *p.MergedPulls +} + +// GetTotalPulls returns the TotalPulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetTotalPulls() int { + if p == nil || p.TotalPulls == nil { + return 0 + } + return *p.TotalPulls +} + +// GetUnmergablePulls returns the UnmergablePulls field if it's non-nil, zero value otherwise. +func (p *PullStats) GetUnmergablePulls() int { + if p == nil || p.UnmergablePulls == nil { + return 0 + } + return *p.UnmergablePulls +} + +// GetCommits returns the Commits field if it's non-nil, zero value otherwise. +func (p *PunchCard) GetCommits() int { + if p == nil || p.Commits == nil { + return 0 + } + return *p.Commits +} + +// GetDay returns the Day field if it's non-nil, zero value otherwise. +func (p *PunchCard) GetDay() int { + if p == nil || p.Day == nil { + return 0 + } + return *p.Day +} + +// GetHour returns the Hour field if it's non-nil, zero value otherwise. +func (p *PunchCard) GetHour() int { + if p == nil || p.Hour == nil { + return 0 + } + return *p.Hour +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetAction() string { + if p == nil || p.Action == nil { + return "" + } + return *p.Action +} + +// GetAfter returns the After field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetAfter() string { + if p == nil || p.After == nil { + return "" + } + return *p.After +} + +// GetBaseRef returns the BaseRef field if it's non-nil, zero value otherwise. 
+func (p *PushEvent) GetBaseRef() string { + if p == nil || p.BaseRef == nil { + return "" + } + return *p.BaseRef +} + +// GetBefore returns the Before field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetBefore() string { + if p == nil || p.Before == nil { + return "" + } + return *p.Before +} + +// GetCommits returns the Commits slice if it's non-nil, nil otherwise. +func (p *PushEvent) GetCommits() []*HeadCommit { + if p == nil || p.Commits == nil { + return nil + } + return p.Commits +} + +// GetCompare returns the Compare field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetCompare() string { + if p == nil || p.Compare == nil { + return "" + } + return *p.Compare +} + +// GetCreated returns the Created field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetCreated() bool { + if p == nil || p.Created == nil { + return false + } + return *p.Created +} + +// GetDeleted returns the Deleted field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetDeleted() bool { + if p == nil || p.Deleted == nil { + return false + } + return *p.Deleted +} + +// GetDistinctSize returns the DistinctSize field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetDistinctSize() int { + if p == nil || p.DistinctSize == nil { + return 0 + } + return *p.DistinctSize +} + +// GetForced returns the Forced field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetForced() bool { + if p == nil || p.Forced == nil { + return false + } + return *p.Forced +} + +// GetHead returns the Head field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetHead() string { + if p == nil || p.Head == nil { + return "" + } + return *p.Head +} + +// GetHeadCommit returns the HeadCommit field. +func (p *PushEvent) GetHeadCommit() *HeadCommit { + if p == nil { + return nil + } + return p.HeadCommit +} + +// GetInstallation returns the Installation field. +func (p *PushEvent) GetInstallation() *Installation { + if p == nil { + return nil + } + return p.Installation +} + +// GetOrganization returns the Organization field. +func (p *PushEvent) GetOrganization() *Organization { + if p == nil { + return nil + } + return p.Organization +} + +// GetPusher returns the Pusher field. +func (p *PushEvent) GetPusher() *User { + if p == nil { + return nil + } + return p.Pusher +} + +// GetPushID returns the PushID field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetPushID() int64 { + if p == nil || p.PushID == nil { + return 0 + } + return *p.PushID +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetRef() string { + if p == nil || p.Ref == nil { + return "" + } + return *p.Ref +} + +// GetRepo returns the Repo field. +func (p *PushEvent) GetRepo() *PushEventRepository { + if p == nil { + return nil + } + return p.Repo +} + +// GetSender returns the Sender field. +func (p *PushEvent) GetSender() *User { + if p == nil { + return nil + } + return p.Sender +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (p *PushEvent) GetSize() int { + if p == nil || p.Size == nil { + return 0 + } + return *p.Size +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (p *PushEventRepoOwner) GetEmail() string { + if p == nil || p.Email == nil { + return "" + } + return *p.Email +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. 
+func (p *PushEventRepoOwner) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetArchived() bool { + if p == nil || p.Archived == nil { + return false + } + return *p.Archived +} + +// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetArchiveURL() string { + if p == nil || p.ArchiveURL == nil { + return "" + } + return *p.ArchiveURL +} + +// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetCloneURL() string { + if p == nil || p.CloneURL == nil { + return "" + } + return *p.CloneURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetCreatedAt() Timestamp { + if p == nil || p.CreatedAt == nil { + return Timestamp{} + } + return *p.CreatedAt +} + +// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetDefaultBranch() string { + if p == nil || p.DefaultBranch == nil { + return "" + } + return *p.DefaultBranch +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetDescription() string { + if p == nil || p.Description == nil { + return "" + } + return *p.Description +} + +// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetDisabled() bool { + if p == nil || p.Disabled == nil { + return false + } + return *p.Disabled +} + +// GetFork returns the Fork field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetFork() bool { + if p == nil || p.Fork == nil { + return false + } + return *p.Fork +} + +// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetForksCount() int { + if p == nil || p.ForksCount == nil { + return 0 + } + return *p.ForksCount +} + +// GetFullName returns the FullName field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetFullName() string { + if p == nil || p.FullName == nil { + return "" + } + return *p.FullName +} + +// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetGitURL() string { + if p == nil || p.GitURL == nil { + return "" + } + return *p.GitURL +} + +// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetHasDownloads() bool { + if p == nil || p.HasDownloads == nil { + return false + } + return *p.HasDownloads +} + +// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetHasIssues() bool { + if p == nil || p.HasIssues == nil { + return false + } + return *p.HasIssues +} + +// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetHasPages() bool { + if p == nil || p.HasPages == nil { + return false + } + return *p.HasPages +} + +// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetHasWiki() bool { + if p == nil || p.HasWiki == nil { + return false + } + return *p.HasWiki +} + +// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. 
+func (p *PushEventRepository) GetHomepage() string { + if p == nil || p.Homepage == nil { + return "" + } + return *p.Homepage +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetLanguage() string { + if p == nil || p.Language == nil { + return "" + } + return *p.Language +} + +// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetMasterBranch() string { + if p == nil || p.MasterBranch == nil { + return "" + } + return *p.MasterBranch +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + +// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetOpenIssuesCount() int { + if p == nil || p.OpenIssuesCount == nil { + return 0 + } + return *p.OpenIssuesCount +} + +// GetOrganization returns the Organization field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetOrganization() string { + if p == nil || p.Organization == nil { + return "" + } + return *p.Organization +} + +// GetOwner returns the Owner field. +func (p *PushEventRepository) GetOwner() *User { + if p == nil { + return nil + } + return p.Owner +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetPrivate() bool { + if p == nil || p.Private == nil { + return false + } + return *p.Private +} + +// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetPullsURL() string { + if p == nil || p.PullsURL == nil { + return "" + } + return *p.PullsURL +} + +// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetPushedAt() Timestamp { + if p == nil || p.PushedAt == nil { + return Timestamp{} + } + return *p.PushedAt +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetSize() int { + if p == nil || p.Size == nil { + return 0 + } + return *p.Size +} + +// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetSSHURL() string { + if p == nil || p.SSHURL == nil { + return "" + } + return *p.SSHURL +} + +// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetStargazersCount() int { + if p == nil || p.StargazersCount == nil { + return 0 + } + return *p.StargazersCount +} + +// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. 
+func (p *PushEventRepository) GetStatusesURL() string { + if p == nil || p.StatusesURL == nil { + return "" + } + return *p.StatusesURL +} + +// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetSVNURL() string { + if p == nil || p.SVNURL == nil { + return "" + } + return *p.SVNURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetUpdatedAt() Timestamp { + if p == nil || p.UpdatedAt == nil { + return Timestamp{} + } + return *p.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + +// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. +func (p *PushEventRepository) GetWatchersCount() int { + if p == nil || p.WatchersCount == nil { + return 0 + } + return *p.WatchersCount +} + +// GetActionsRunnerRegistration returns the ActionsRunnerRegistration field. +func (r *RateLimits) GetActionsRunnerRegistration() *Rate { + if r == nil { + return nil + } + return r.ActionsRunnerRegistration +} + +// GetCodeScanningUpload returns the CodeScanningUpload field. +func (r *RateLimits) GetCodeScanningUpload() *Rate { + if r == nil { + return nil + } + return r.CodeScanningUpload +} + +// GetCore returns the Core field. +func (r *RateLimits) GetCore() *Rate { + if r == nil { + return nil + } + return r.Core +} + +// GetGraphQL returns the GraphQL field. +func (r *RateLimits) GetGraphQL() *Rate { + if r == nil { + return nil + } + return r.GraphQL +} + +// GetIntegrationManifest returns the IntegrationManifest field. +func (r *RateLimits) GetIntegrationManifest() *Rate { + if r == nil { + return nil + } + return r.IntegrationManifest +} + +// GetSCIM returns the SCIM field. +func (r *RateLimits) GetSCIM() *Rate { + if r == nil { + return nil + } + return r.SCIM +} + +// GetSearch returns the Search field. +func (r *RateLimits) GetSearch() *Rate { + if r == nil { + return nil + } + return r.Search +} + +// GetSourceImport returns the SourceImport field. +func (r *RateLimits) GetSourceImport() *Rate { + if r == nil { + return nil + } + return r.SourceImport +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. +func (r *Reaction) GetContent() string { + if r == nil || r.Content == nil { + return "" + } + return *r.Content +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Reaction) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Reaction) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetUser returns the User field. +func (r *Reaction) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +// GetConfused returns the Confused field if it's non-nil, zero value otherwise. +func (r *Reactions) GetConfused() int { + if r == nil || r.Confused == nil { + return 0 + } + return *r.Confused +} + +// GetEyes returns the Eyes field if it's non-nil, zero value otherwise. +func (r *Reactions) GetEyes() int { + if r == nil || r.Eyes == nil { + return 0 + } + return *r.Eyes +} + +// GetHeart returns the Heart field if it's non-nil, zero value otherwise. 
+func (r *Reactions) GetHeart() int { + if r == nil || r.Heart == nil { + return 0 + } + return *r.Heart +} + +// GetHooray returns the Hooray field if it's non-nil, zero value otherwise. +func (r *Reactions) GetHooray() int { + if r == nil || r.Hooray == nil { + return 0 + } + return *r.Hooray +} + +// GetLaugh returns the Laugh field if it's non-nil, zero value otherwise. +func (r *Reactions) GetLaugh() int { + if r == nil || r.Laugh == nil { + return 0 + } + return *r.Laugh +} + +// GetMinusOne returns the MinusOne field if it's non-nil, zero value otherwise. +func (r *Reactions) GetMinusOne() int { + if r == nil || r.MinusOne == nil { + return 0 + } + return *r.MinusOne +} + +// GetPlusOne returns the PlusOne field if it's non-nil, zero value otherwise. +func (r *Reactions) GetPlusOne() int { + if r == nil || r.PlusOne == nil { + return 0 + } + return *r.PlusOne +} + +// GetRocket returns the Rocket field if it's non-nil, zero value otherwise. +func (r *Reactions) GetRocket() int { + if r == nil || r.Rocket == nil { + return 0 + } + return *r.Rocket +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (r *Reactions) GetTotalCount() int { + if r == nil || r.TotalCount == nil { + return 0 + } + return *r.TotalCount +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *Reactions) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Reference) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetObject returns the Object field. +func (r *Reference) GetObject() *GitObject { + if r == nil { + return nil + } + return r.Object +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (r *Reference) GetRef() string { + if r == nil || r.Ref == nil { + return "" + } + return *r.Ref +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *Reference) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (r *RegistrationToken) GetExpiresAt() Timestamp { + if r == nil || r.ExpiresAt == nil { + return Timestamp{} + } + return *r.ExpiresAt +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (r *RegistrationToken) GetToken() string { + if r == nil || r.Token == nil { + return "" + } + return *r.Token +} + +// GetBrowserDownloadURL returns the BrowserDownloadURL field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetBrowserDownloadURL() string { + if r == nil || r.BrowserDownloadURL == nil { + return "" + } + return *r.BrowserDownloadURL +} + +// GetContentType returns the ContentType field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetContentType() string { + if r == nil || r.ContentType == nil { + return "" + } + return *r.ContentType +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetDownloadCount returns the DownloadCount field if it's non-nil, zero value otherwise. 
+func (r *ReleaseAsset) GetDownloadCount() int { + if r == nil || r.DownloadCount == nil { + return 0 + } + return *r.DownloadCount +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetLabel returns the Label field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetLabel() string { + if r == nil || r.Label == nil { + return "" + } + return *r.Label +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetSize() int { + if r == nil || r.Size == nil { + return 0 + } + return *r.Size +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetState() string { + if r == nil || r.State == nil { + return "" + } + return *r.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetUpdatedAt() Timestamp { + if r == nil || r.UpdatedAt == nil { + return Timestamp{} + } + return *r.UpdatedAt +} + +// GetUploader returns the Uploader field. +func (r *ReleaseAsset) GetUploader() *User { + if r == nil { + return nil + } + return r.Uploader +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *ReleaseAsset) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *ReleaseEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetInstallation returns the Installation field. +func (r *ReleaseEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrg returns the Org field. +func (r *ReleaseEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRelease returns the Release field. +func (r *ReleaseEvent) GetRelease() *RepositoryRelease { + if r == nil { + return nil + } + return r.Release +} + +// GetRepo returns the Repo field. +func (r *ReleaseEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *ReleaseEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + +// GetExpiresAt returns the ExpiresAt field if it's non-nil, zero value otherwise. +func (r *RemoveToken) GetExpiresAt() Timestamp { + if r == nil || r.ExpiresAt == nil { + return Timestamp{} + } + return *r.ExpiresAt +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (r *RemoveToken) GetToken() string { + if r == nil || r.Token == nil { + return "" + } + return *r.Token +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (r *Rename) GetFrom() string { + if r == nil || r.From == nil { + return "" + } + return *r.From +} + +// GetTo returns the To field if it's non-nil, zero value otherwise. 
+func (r *Rename) GetTo() string { + if r == nil || r.To == nil { + return "" + } + return *r.To +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (r *RenameOrgResponse) GetMessage() string { + if r == nil || r.Message == nil { + return "" + } + return *r.Message +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RenameOrgResponse) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (r *RepoAdvisoryCredit) GetLogin() string { + if r == nil || r.Login == nil { + return "" + } + return *r.Login +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RepoAdvisoryCredit) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (r *RepoAdvisoryCreditDetailed) GetState() string { + if r == nil || r.State == nil { + return "" + } + return *r.State +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RepoAdvisoryCreditDetailed) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetUser returns the User field. +func (r *RepoAdvisoryCreditDetailed) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +// GetDownloadLocation returns the DownloadLocation field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetDownloadLocation() string { + if r == nil || r.DownloadLocation == nil { + return "" + } + return *r.DownloadLocation +} + +// GetFilesAnalyzed returns the FilesAnalyzed field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetFilesAnalyzed() bool { + if r == nil || r.FilesAnalyzed == nil { + return false + } + return *r.FilesAnalyzed +} + +// GetLicenseConcluded returns the LicenseConcluded field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetLicenseConcluded() string { + if r == nil || r.LicenseConcluded == nil { + return "" + } + return *r.LicenseConcluded +} + +// GetLicenseDeclared returns the LicenseDeclared field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetLicenseDeclared() string { + if r == nil || r.LicenseDeclared == nil { + return "" + } + return *r.LicenseDeclared +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetSPDXID() string { + if r == nil || r.SPDXID == nil { + return "" + } + return *r.SPDXID +} + +// GetVersionInfo returns the VersionInfo field if it's non-nil, zero value otherwise. +func (r *RepoDependencies) GetVersionInfo() string { + if r == nil || r.VersionInfo == nil { + return "" + } + return *r.VersionInfo +} + +// GetBranch returns the Branch field if it's non-nil, zero value otherwise. +func (r *RepoMergeUpstreamRequest) GetBranch() string { + if r == nil || r.Branch == nil { + return "" + } + return *r.Branch +} + +// GetBaseBranch returns the BaseBranch field if it's non-nil, zero value otherwise. 
+func (r *RepoMergeUpstreamResult) GetBaseBranch() string { + if r == nil || r.BaseBranch == nil { + return "" + } + return *r.BaseBranch +} + +// GetMergeType returns the MergeType field if it's non-nil, zero value otherwise. +func (r *RepoMergeUpstreamResult) GetMergeType() string { + if r == nil || r.MergeType == nil { + return "" + } + return *r.MergeType +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (r *RepoMergeUpstreamResult) GetMessage() string { + if r == nil || r.Message == nil { + return "" + } + return *r.Message +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (r *RepoName) GetFrom() string { + if r == nil || r.From == nil { + return "" + } + return *r.From +} + +// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetBadgeURL() string { + if r == nil || r.BadgeURL == nil { + return "" + } + return *r.BadgeURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetPath() string { + if r == nil || r.Path == nil { + return "" + } + return *r.Path +} + +// GetSourceRepository returns the SourceRepository field. +func (r *RepoRequiredWorkflow) GetSourceRepository() *Repository { + if r == nil { + return nil + } + return r.SourceRepository +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetState() string { + if r == nil || r.State == nil { + return "" + } + return *r.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetUpdatedAt() Timestamp { + if r == nil || r.UpdatedAt == nil { + return Timestamp{} + } + return *r.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflow) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (r *RepoRequiredWorkflows) GetTotalCount() int { + if r == nil || r.TotalCount == nil { + return 0 + } + return *r.TotalCount +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. 
+func (r *RepositoriesSearchResult) GetIncompleteResults() bool { + if r == nil || r.IncompleteResults == nil { + return false + } + return *r.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (r *RepositoriesSearchResult) GetTotal() int { + if r == nil || r.Total == nil { + return 0 + } + return *r.Total +} + +// GetAllowAutoMerge returns the AllowAutoMerge field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowAutoMerge() bool { + if r == nil || r.AllowAutoMerge == nil { + return false + } + return *r.AllowAutoMerge +} + +// GetAllowForking returns the AllowForking field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowForking() bool { + if r == nil || r.AllowForking == nil { + return false + } + return *r.AllowForking +} + +// GetAllowMergeCommit returns the AllowMergeCommit field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowMergeCommit() bool { + if r == nil || r.AllowMergeCommit == nil { + return false + } + return *r.AllowMergeCommit +} + +// GetAllowRebaseMerge returns the AllowRebaseMerge field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowRebaseMerge() bool { + if r == nil || r.AllowRebaseMerge == nil { + return false + } + return *r.AllowRebaseMerge +} + +// GetAllowSquashMerge returns the AllowSquashMerge field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowSquashMerge() bool { + if r == nil || r.AllowSquashMerge == nil { + return false + } + return *r.AllowSquashMerge +} + +// GetAllowUpdateBranch returns the AllowUpdateBranch field if it's non-nil, zero value otherwise. +func (r *Repository) GetAllowUpdateBranch() bool { + if r == nil || r.AllowUpdateBranch == nil { + return false + } + return *r.AllowUpdateBranch +} + +// GetArchived returns the Archived field if it's non-nil, zero value otherwise. +func (r *Repository) GetArchived() bool { + if r == nil || r.Archived == nil { + return false + } + return *r.Archived +} + +// GetArchiveURL returns the ArchiveURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetArchiveURL() string { + if r == nil || r.ArchiveURL == nil { + return "" + } + return *r.ArchiveURL +} + +// GetAssigneesURL returns the AssigneesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetAssigneesURL() string { + if r == nil || r.AssigneesURL == nil { + return "" + } + return *r.AssigneesURL +} + +// GetAutoInit returns the AutoInit field if it's non-nil, zero value otherwise. +func (r *Repository) GetAutoInit() bool { + if r == nil || r.AutoInit == nil { + return false + } + return *r.AutoInit +} + +// GetBlobsURL returns the BlobsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetBlobsURL() string { + if r == nil || r.BlobsURL == nil { + return "" + } + return *r.BlobsURL +} + +// GetBranchesURL returns the BranchesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetBranchesURL() string { + if r == nil || r.BranchesURL == nil { + return "" + } + return *r.BranchesURL +} + +// GetCloneURL returns the CloneURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetCloneURL() string { + if r == nil || r.CloneURL == nil { + return "" + } + return *r.CloneURL +} + +// GetCodeOfConduct returns the CodeOfConduct field. 
+func (r *Repository) GetCodeOfConduct() *CodeOfConduct { + if r == nil { + return nil + } + return r.CodeOfConduct +} + +// GetCollaboratorsURL returns the CollaboratorsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetCollaboratorsURL() string { + if r == nil || r.CollaboratorsURL == nil { + return "" + } + return *r.CollaboratorsURL +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetCommentsURL() string { + if r == nil || r.CommentsURL == nil { + return "" + } + return *r.CommentsURL +} + +// GetCommitsURL returns the CommitsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetCommitsURL() string { + if r == nil || r.CommitsURL == nil { + return "" + } + return *r.CommitsURL +} + +// GetCompareURL returns the CompareURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetCompareURL() string { + if r == nil || r.CompareURL == nil { + return "" + } + return *r.CompareURL +} + +// GetContentsURL returns the ContentsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetContentsURL() string { + if r == nil || r.ContentsURL == nil { + return "" + } + return *r.ContentsURL +} + +// GetContributorsURL returns the ContributorsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetContributorsURL() string { + if r == nil || r.ContributorsURL == nil { + return "" + } + return *r.ContributorsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *Repository) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetDefaultBranch returns the DefaultBranch field if it's non-nil, zero value otherwise. +func (r *Repository) GetDefaultBranch() string { + if r == nil || r.DefaultBranch == nil { + return "" + } + return *r.DefaultBranch +} + +// GetDeleteBranchOnMerge returns the DeleteBranchOnMerge field if it's non-nil, zero value otherwise. +func (r *Repository) GetDeleteBranchOnMerge() bool { + if r == nil || r.DeleteBranchOnMerge == nil { + return false + } + return *r.DeleteBranchOnMerge +} + +// GetDeploymentsURL returns the DeploymentsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetDeploymentsURL() string { + if r == nil || r.DeploymentsURL == nil { + return "" + } + return *r.DeploymentsURL +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (r *Repository) GetDescription() string { + if r == nil || r.Description == nil { + return "" + } + return *r.Description +} + +// GetDisabled returns the Disabled field if it's non-nil, zero value otherwise. +func (r *Repository) GetDisabled() bool { + if r == nil || r.Disabled == nil { + return false + } + return *r.Disabled +} + +// GetDownloadsURL returns the DownloadsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetDownloadsURL() string { + if r == nil || r.DownloadsURL == nil { + return "" + } + return *r.DownloadsURL +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetEventsURL() string { + if r == nil || r.EventsURL == nil { + return "" + } + return *r.EventsURL +} + +// GetFork returns the Fork field if it's non-nil, zero value otherwise. 
+func (r *Repository) GetFork() bool { + if r == nil || r.Fork == nil { + return false + } + return *r.Fork +} + +// GetForksCount returns the ForksCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetForksCount() int { + if r == nil || r.ForksCount == nil { + return 0 + } + return *r.ForksCount +} + +// GetForksURL returns the ForksURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetForksURL() string { + if r == nil || r.ForksURL == nil { + return "" + } + return *r.ForksURL +} + +// GetFullName returns the FullName field if it's non-nil, zero value otherwise. +func (r *Repository) GetFullName() string { + if r == nil || r.FullName == nil { + return "" + } + return *r.FullName +} + +// GetGitCommitsURL returns the GitCommitsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetGitCommitsURL() string { + if r == nil || r.GitCommitsURL == nil { + return "" + } + return *r.GitCommitsURL +} + +// GetGitignoreTemplate returns the GitignoreTemplate field if it's non-nil, zero value otherwise. +func (r *Repository) GetGitignoreTemplate() string { + if r == nil || r.GitignoreTemplate == nil { + return "" + } + return *r.GitignoreTemplate +} + +// GetGitRefsURL returns the GitRefsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetGitRefsURL() string { + if r == nil || r.GitRefsURL == nil { + return "" + } + return *r.GitRefsURL +} + +// GetGitTagsURL returns the GitTagsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetGitTagsURL() string { + if r == nil || r.GitTagsURL == nil { + return "" + } + return *r.GitTagsURL +} + +// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetGitURL() string { + if r == nil || r.GitURL == nil { + return "" + } + return *r.GitURL +} + +// GetHasDiscussions returns the HasDiscussions field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasDiscussions() bool { + if r == nil || r.HasDiscussions == nil { + return false + } + return *r.HasDiscussions +} + +// GetHasDownloads returns the HasDownloads field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasDownloads() bool { + if r == nil || r.HasDownloads == nil { + return false + } + return *r.HasDownloads +} + +// GetHasIssues returns the HasIssues field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasIssues() bool { + if r == nil || r.HasIssues == nil { + return false + } + return *r.HasIssues +} + +// GetHasPages returns the HasPages field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasPages() bool { + if r == nil || r.HasPages == nil { + return false + } + return *r.HasPages +} + +// GetHasProjects returns the HasProjects field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasProjects() bool { + if r == nil || r.HasProjects == nil { + return false + } + return *r.HasProjects +} + +// GetHasWiki returns the HasWiki field if it's non-nil, zero value otherwise. +func (r *Repository) GetHasWiki() bool { + if r == nil || r.HasWiki == nil { + return false + } + return *r.HasWiki +} + +// GetHomepage returns the Homepage field if it's non-nil, zero value otherwise. +func (r *Repository) GetHomepage() string { + if r == nil || r.Homepage == nil { + return "" + } + return *r.Homepage +} + +// GetHooksURL returns the HooksURL field if it's non-nil, zero value otherwise. 
+func (r *Repository) GetHooksURL() string { + if r == nil || r.HooksURL == nil { + return "" + } + return *r.HooksURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Repository) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetIssueCommentURL returns the IssueCommentURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetIssueCommentURL() string { + if r == nil || r.IssueCommentURL == nil { + return "" + } + return *r.IssueCommentURL +} + +// GetIssueEventsURL returns the IssueEventsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetIssueEventsURL() string { + if r == nil || r.IssueEventsURL == nil { + return "" + } + return *r.IssueEventsURL +} + +// GetIssuesURL returns the IssuesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetIssuesURL() string { + if r == nil || r.IssuesURL == nil { + return "" + } + return *r.IssuesURL +} + +// GetIsTemplate returns the IsTemplate field if it's non-nil, zero value otherwise. +func (r *Repository) GetIsTemplate() bool { + if r == nil || r.IsTemplate == nil { + return false + } + return *r.IsTemplate +} + +// GetKeysURL returns the KeysURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetKeysURL() string { + if r == nil || r.KeysURL == nil { + return "" + } + return *r.KeysURL +} + +// GetLabelsURL returns the LabelsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetLabelsURL() string { + if r == nil || r.LabelsURL == nil { + return "" + } + return *r.LabelsURL +} + +// GetLanguage returns the Language field if it's non-nil, zero value otherwise. +func (r *Repository) GetLanguage() string { + if r == nil || r.Language == nil { + return "" + } + return *r.Language +} + +// GetLanguagesURL returns the LanguagesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetLanguagesURL() string { + if r == nil || r.LanguagesURL == nil { + return "" + } + return *r.LanguagesURL +} + +// GetLicense returns the License field. +func (r *Repository) GetLicense() *License { + if r == nil { + return nil + } + return r.License +} + +// GetLicenseTemplate returns the LicenseTemplate field if it's non-nil, zero value otherwise. +func (r *Repository) GetLicenseTemplate() string { + if r == nil || r.LicenseTemplate == nil { + return "" + } + return *r.LicenseTemplate +} + +// GetMasterBranch returns the MasterBranch field if it's non-nil, zero value otherwise. +func (r *Repository) GetMasterBranch() string { + if r == nil || r.MasterBranch == nil { + return "" + } + return *r.MasterBranch +} + +// GetMergeCommitMessage returns the MergeCommitMessage field if it's non-nil, zero value otherwise. +func (r *Repository) GetMergeCommitMessage() string { + if r == nil || r.MergeCommitMessage == nil { + return "" + } + return *r.MergeCommitMessage +} + +// GetMergeCommitTitle returns the MergeCommitTitle field if it's non-nil, zero value otherwise. +func (r *Repository) GetMergeCommitTitle() string { + if r == nil || r.MergeCommitTitle == nil { + return "" + } + return *r.MergeCommitTitle +} + +// GetMergesURL returns the MergesURL field if it's non-nil, zero value otherwise. 
+func (r *Repository) GetMergesURL() string { + if r == nil || r.MergesURL == nil { + return "" + } + return *r.MergesURL +} + +// GetMilestonesURL returns the MilestonesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetMilestonesURL() string { + if r == nil || r.MilestonesURL == nil { + return "" + } + return *r.MilestonesURL +} + +// GetMirrorURL returns the MirrorURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetMirrorURL() string { + if r == nil || r.MirrorURL == nil { + return "" + } + return *r.MirrorURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *Repository) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetNetworkCount returns the NetworkCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetNetworkCount() int { + if r == nil || r.NetworkCount == nil { + return 0 + } + return *r.NetworkCount +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Repository) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetNotificationsURL returns the NotificationsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetNotificationsURL() string { + if r == nil || r.NotificationsURL == nil { + return "" + } + return *r.NotificationsURL +} + +// GetOpenIssues returns the OpenIssues field if it's non-nil, zero value otherwise. +func (r *Repository) GetOpenIssues() int { + if r == nil || r.OpenIssues == nil { + return 0 + } + return *r.OpenIssues +} + +// GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetOpenIssuesCount() int { + if r == nil || r.OpenIssuesCount == nil { + return 0 + } + return *r.OpenIssuesCount +} + +// GetOrganization returns the Organization field. +func (r *Repository) GetOrganization() *Organization { + if r == nil { + return nil + } + return r.Organization +} + +// GetOwner returns the Owner field. +func (r *Repository) GetOwner() *User { + if r == nil { + return nil + } + return r.Owner +} + +// GetParent returns the Parent field. +func (r *Repository) GetParent() *Repository { + if r == nil { + return nil + } + return r.Parent +} + +// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. +func (r *Repository) GetPermissions() map[string]bool { + if r == nil || r.Permissions == nil { + return map[string]bool{} + } + return r.Permissions +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (r *Repository) GetPrivate() bool { + if r == nil || r.Private == nil { + return false + } + return *r.Private +} + +// GetPullsURL returns the PullsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetPullsURL() string { + if r == nil || r.PullsURL == nil { + return "" + } + return *r.PullsURL +} + +// GetPushedAt returns the PushedAt field if it's non-nil, zero value otherwise. +func (r *Repository) GetPushedAt() Timestamp { + if r == nil || r.PushedAt == nil { + return Timestamp{} + } + return *r.PushedAt +} + +// GetReleasesURL returns the ReleasesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetReleasesURL() string { + if r == nil || r.ReleasesURL == nil { + return "" + } + return *r.ReleasesURL +} + +// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. 
+func (r *Repository) GetRoleName() string { + if r == nil || r.RoleName == nil { + return "" + } + return *r.RoleName +} + +// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. +func (r *Repository) GetSecurityAndAnalysis() *SecurityAndAnalysis { + if r == nil { + return nil + } + return r.SecurityAndAnalysis +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (r *Repository) GetSize() int { + if r == nil || r.Size == nil { + return 0 + } + return *r.Size +} + +// GetSource returns the Source field. +func (r *Repository) GetSource() *Repository { + if r == nil { + return nil + } + return r.Source +} + +// GetSquashMergeCommitMessage returns the SquashMergeCommitMessage field if it's non-nil, zero value otherwise. +func (r *Repository) GetSquashMergeCommitMessage() string { + if r == nil || r.SquashMergeCommitMessage == nil { + return "" + } + return *r.SquashMergeCommitMessage +} + +// GetSquashMergeCommitTitle returns the SquashMergeCommitTitle field if it's non-nil, zero value otherwise. +func (r *Repository) GetSquashMergeCommitTitle() string { + if r == nil || r.SquashMergeCommitTitle == nil { + return "" + } + return *r.SquashMergeCommitTitle +} + +// GetSSHURL returns the SSHURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetSSHURL() string { + if r == nil || r.SSHURL == nil { + return "" + } + return *r.SSHURL +} + +// GetStargazersCount returns the StargazersCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetStargazersCount() int { + if r == nil || r.StargazersCount == nil { + return 0 + } + return *r.StargazersCount +} + +// GetStargazersURL returns the StargazersURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetStargazersURL() string { + if r == nil || r.StargazersURL == nil { + return "" + } + return *r.StargazersURL +} + +// GetStatusesURL returns the StatusesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetStatusesURL() string { + if r == nil || r.StatusesURL == nil { + return "" + } + return *r.StatusesURL +} + +// GetSubscribersCount returns the SubscribersCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetSubscribersCount() int { + if r == nil || r.SubscribersCount == nil { + return 0 + } + return *r.SubscribersCount +} + +// GetSubscribersURL returns the SubscribersURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetSubscribersURL() string { + if r == nil || r.SubscribersURL == nil { + return "" + } + return *r.SubscribersURL +} + +// GetSubscriptionURL returns the SubscriptionURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetSubscriptionURL() string { + if r == nil || r.SubscriptionURL == nil { + return "" + } + return *r.SubscriptionURL +} + +// GetSVNURL returns the SVNURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetSVNURL() string { + if r == nil || r.SVNURL == nil { + return "" + } + return *r.SVNURL +} + +// GetTagsURL returns the TagsURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetTagsURL() string { + if r == nil || r.TagsURL == nil { + return "" + } + return *r.TagsURL +} + +// GetTeamID returns the TeamID field if it's non-nil, zero value otherwise. +func (r *Repository) GetTeamID() int64 { + if r == nil || r.TeamID == nil { + return 0 + } + return *r.TeamID +} + +// GetTeamsURL returns the TeamsURL field if it's non-nil, zero value otherwise. 
+func (r *Repository) GetTeamsURL() string { + if r == nil || r.TeamsURL == nil { + return "" + } + return *r.TeamsURL +} + +// GetTemplateRepository returns the TemplateRepository field. +func (r *Repository) GetTemplateRepository() *Repository { + if r == nil { + return nil + } + return r.TemplateRepository +} + +// GetTreesURL returns the TreesURL field if it's non-nil, zero value otherwise. +func (r *Repository) GetTreesURL() string { + if r == nil || r.TreesURL == nil { + return "" + } + return *r.TreesURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (r *Repository) GetUpdatedAt() Timestamp { + if r == nil || r.UpdatedAt == nil { + return Timestamp{} + } + return *r.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *Repository) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetUseSquashPRTitleAsDefault returns the UseSquashPRTitleAsDefault field if it's non-nil, zero value otherwise. +func (r *Repository) GetUseSquashPRTitleAsDefault() bool { + if r == nil || r.UseSquashPRTitleAsDefault == nil { + return false + } + return *r.UseSquashPRTitleAsDefault +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (r *Repository) GetVisibility() string { + if r == nil || r.Visibility == nil { + return "" + } + return *r.Visibility +} + +// GetWatchers returns the Watchers field if it's non-nil, zero value otherwise. +func (r *Repository) GetWatchers() int { + if r == nil || r.Watchers == nil { + return 0 + } + return *r.Watchers +} + +// GetWatchersCount returns the WatchersCount field if it's non-nil, zero value otherwise. +func (r *Repository) GetWatchersCount() int { + if r == nil || r.WatchersCount == nil { + return 0 + } + return *r.WatchersCount +} + +// GetWebCommitSignoffRequired returns the WebCommitSignoffRequired field if it's non-nil, zero value otherwise. +func (r *Repository) GetWebCommitSignoffRequired() bool { + if r == nil || r.WebCommitSignoffRequired == nil { + return false + } + return *r.WebCommitSignoffRequired +} + +// GetAccessLevel returns the AccessLevel field if it's non-nil, zero value otherwise. +func (r *RepositoryActionsAccessLevel) GetAccessLevel() string { + if r == nil || r.AccessLevel == nil { + return "" + } + return *r.AccessLevel +} + +// GetAdvancedSecurityCommitters returns the AdvancedSecurityCommitters field if it's non-nil, zero value otherwise. +func (r *RepositoryActiveCommitters) GetAdvancedSecurityCommitters() int { + if r == nil || r.AdvancedSecurityCommitters == nil { + return 0 + } + return *r.AdvancedSecurityCommitters +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepositoryActiveCommitters) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetBody() string { + if r == nil || r.Body == nil { + return "" + } + return *r.Body +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetCommitID() string { + if r == nil || r.CommitID == nil { + return "" + } + return *r.CommitID +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
+func (r *RepositoryComment) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetPath() string { + if r == nil || r.Path == nil { + return "" + } + return *r.Path +} + +// GetPosition returns the Position field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetPosition() int { + if r == nil || r.Position == nil { + return 0 + } + return *r.Position +} + +// GetReactions returns the Reactions field. +func (r *RepositoryComment) GetReactions() *Reactions { + if r == nil { + return nil + } + return r.Reactions +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetUpdatedAt() Timestamp { + if r == nil || r.UpdatedAt == nil { + return Timestamp{} + } + return *r.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryComment) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetUser returns the User field. +func (r *RepositoryComment) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +// GetAuthor returns the Author field. +func (r *RepositoryCommit) GetAuthor() *User { + if r == nil { + return nil + } + return r.Author +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (r *RepositoryCommit) GetCommentsURL() string { + if r == nil || r.CommentsURL == nil { + return "" + } + return *r.CommentsURL +} + +// GetCommit returns the Commit field. +func (r *RepositoryCommit) GetCommit() *Commit { + if r == nil { + return nil + } + return r.Commit +} + +// GetCommitter returns the Committer field. +func (r *RepositoryCommit) GetCommitter() *User { + if r == nil { + return nil + } + return r.Committer +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryCommit) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepositoryCommit) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (r *RepositoryCommit) GetSHA() string { + if r == nil || r.SHA == nil { + return "" + } + return *r.SHA +} + +// GetStats returns the Stats field. +func (r *RepositoryCommit) GetStats() *CommitStats { + if r == nil { + return nil + } + return r.Stats +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryCommit) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. 
+func (r *RepositoryContent) GetDownloadURL() string { + if r == nil || r.DownloadURL == nil { + return "" + } + return *r.DownloadURL +} + +// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetEncoding() string { + if r == nil || r.Encoding == nil { + return "" + } + return *r.Encoding +} + +// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetGitURL() string { + if r == nil || r.GitURL == nil { + return "" + } + return *r.GitURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetPath() string { + if r == nil || r.Path == nil { + return "" + } + return *r.Path +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetSHA() string { + if r == nil || r.SHA == nil { + return "" + } + return *r.SHA +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetSize() int { + if r == nil || r.Size == nil { + return 0 + } + return *r.Size +} + +// GetSubmoduleGitURL returns the SubmoduleGitURL field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetSubmoduleGitURL() string { + if r == nil || r.SubmoduleGitURL == nil { + return "" + } + return *r.SubmoduleGitURL +} + +// GetTarget returns the Target field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetTarget() string { + if r == nil || r.Target == nil { + return "" + } + return *r.Target +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryContent) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetAuthor returns the Author field. +func (r *RepositoryContentFileOptions) GetAuthor() *CommitAuthor { + if r == nil { + return nil + } + return r.Author +} + +// GetBranch returns the Branch field if it's non-nil, zero value otherwise. +func (r *RepositoryContentFileOptions) GetBranch() string { + if r == nil || r.Branch == nil { + return "" + } + return *r.Branch +} + +// GetCommitter returns the Committer field. +func (r *RepositoryContentFileOptions) GetCommitter() *CommitAuthor { + if r == nil { + return nil + } + return r.Committer +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (r *RepositoryContentFileOptions) GetMessage() string { + if r == nil || r.Message == nil { + return "" + } + return *r.Message +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (r *RepositoryContentFileOptions) GetSHA() string { + if r == nil || r.SHA == nil { + return "" + } + return *r.SHA +} + +// GetContent returns the Content field. 
+func (r *RepositoryContentResponse) GetContent() *RepositoryContent { + if r == nil { + return nil + } + return r.Content +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RepositoryDispatchEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetBranch returns the Branch field if it's non-nil, zero value otherwise. +func (r *RepositoryDispatchEvent) GetBranch() string { + if r == nil || r.Branch == nil { + return "" + } + return *r.Branch +} + +// GetInstallation returns the Installation field. +func (r *RepositoryDispatchEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrg returns the Org field. +func (r *RepositoryDispatchEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRepo returns the Repo field. +func (r *RepositoryDispatchEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *RepositoryDispatchEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RepositoryEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetChanges returns the Changes field. +func (r *RepositoryEvent) GetChanges() *EditChange { + if r == nil { + return nil + } + return r.Changes +} + +// GetInstallation returns the Installation field. +func (r *RepositoryEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrg returns the Org field. +func (r *RepositoryEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRepo returns the Repo field. +func (r *RepositoryEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *RepositoryEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + +// GetOrg returns the Org field. +func (r *RepositoryImportEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRepo returns the Repo field. +func (r *RepositoryImportEvent) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetSender returns the Sender field. +func (r *RepositoryImportEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (r *RepositoryImportEvent) GetStatus() string { + if r == nil || r.Status == nil { + return "" + } + return *r.Status +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *RepositoryInvitation) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryInvitation) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepositoryInvitation) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetInvitee returns the Invitee field. 
+func (r *RepositoryInvitation) GetInvitee() *User { + if r == nil { + return nil + } + return r.Invitee +} + +// GetInviter returns the Inviter field. +func (r *RepositoryInvitation) GetInviter() *User { + if r == nil { + return nil + } + return r.Inviter +} + +// GetPermissions returns the Permissions field if it's non-nil, zero value otherwise. +func (r *RepositoryInvitation) GetPermissions() string { + if r == nil || r.Permissions == nil { + return "" + } + return *r.Permissions +} + +// GetRepo returns the Repo field. +func (r *RepositoryInvitation) GetRepo() *Repository { + if r == nil { + return nil + } + return r.Repo +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryInvitation) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetContent() string { + if r == nil || r.Content == nil { + return "" + } + return *r.Content +} + +// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetDownloadURL() string { + if r == nil || r.DownloadURL == nil { + return "" + } + return *r.DownloadURL +} + +// GetEncoding returns the Encoding field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetEncoding() string { + if r == nil || r.Encoding == nil { + return "" + } + return *r.Encoding +} + +// GetGitURL returns the GitURL field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetGitURL() string { + if r == nil || r.GitURL == nil { + return "" + } + return *r.GitURL +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetLicense returns the License field. +func (r *RepositoryLicense) GetLicense() *License { + if r == nil { + return nil + } + return r.License +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetPath() string { + if r == nil || r.Path == nil { + return "" + } + return *r.Path +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetSHA() string { + if r == nil || r.SHA == nil { + return "" + } + return *r.SHA +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetSize() int { + if r == nil || r.Size == nil { + return 0 + } + return *r.Size +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryLicense) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetBase returns the Base field if it's non-nil, zero value otherwise. +func (r *RepositoryMergeRequest) GetBase() string { + if r == nil || r.Base == nil { + return "" + } + return *r.Base +} + +// GetCommitMessage returns the CommitMessage field if it's non-nil, zero value otherwise. 
+func (r *RepositoryMergeRequest) GetCommitMessage() string { + if r == nil || r.CommitMessage == nil { + return "" + } + return *r.CommitMessage +} + +// GetHead returns the Head field if it's non-nil, zero value otherwise. +func (r *RepositoryMergeRequest) GetHead() string { + if r == nil || r.Head == nil { + return "" + } + return *r.Head +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (r *RepositoryPermissionLevel) GetPermission() string { + if r == nil || r.Permission == nil { + return "" + } + return *r.Permission +} + +// GetUser returns the User field. +func (r *RepositoryPermissionLevel) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +// GetAssetsURL returns the AssetsURL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetAssetsURL() string { + if r == nil || r.AssetsURL == nil { + return "" + } + return *r.AssetsURL +} + +// GetAuthor returns the Author field. +func (r *RepositoryRelease) GetAuthor() *User { + if r == nil { + return nil + } + return r.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetBody() string { + if r == nil || r.Body == nil { + return "" + } + return *r.Body +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetDiscussionCategoryName returns the DiscussionCategoryName field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetDiscussionCategoryName() string { + if r == nil || r.DiscussionCategoryName == nil { + return "" + } + return *r.DiscussionCategoryName +} + +// GetDraft returns the Draft field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetDraft() bool { + if r == nil || r.Draft == nil { + return false + } + return *r.Draft +} + +// GetGenerateReleaseNotes returns the GenerateReleaseNotes field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetGenerateReleaseNotes() bool { + if r == nil || r.GenerateReleaseNotes == nil { + return false + } + return *r.GenerateReleaseNotes +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetHTMLURL() string { + if r == nil || r.HTMLURL == nil { + return "" + } + return *r.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetMakeLatest returns the MakeLatest field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetMakeLatest() string { + if r == nil || r.MakeLatest == nil { + return "" + } + return *r.MakeLatest +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetPrerelease returns the Prerelease field if it's non-nil, zero value otherwise. 
+func (r *RepositoryRelease) GetPrerelease() bool { + if r == nil || r.Prerelease == nil { + return false + } + return *r.Prerelease +} + +// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetPublishedAt() Timestamp { + if r == nil || r.PublishedAt == nil { + return Timestamp{} + } + return *r.PublishedAt +} + +// GetTagName returns the TagName field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetTagName() string { + if r == nil || r.TagName == nil { + return "" + } + return *r.TagName +} + +// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetTarballURL() string { + if r == nil || r.TarballURL == nil { + return "" + } + return *r.TarballURL +} + +// GetTargetCommitish returns the TargetCommitish field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetTargetCommitish() string { + if r == nil || r.TargetCommitish == nil { + return "" + } + return *r.TargetCommitish +} + +// GetUploadURL returns the UploadURL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetUploadURL() string { + if r == nil || r.UploadURL == nil { + return "" + } + return *r.UploadURL +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. +func (r *RepositoryRelease) GetZipballURL() string { + if r == nil || r.ZipballURL == nil { + return "" + } + return *r.ZipballURL +} + +// GetParameters returns the Parameters field if it's non-nil, zero value otherwise. +func (r *RepositoryRule) GetParameters() json.RawMessage { + if r == nil || r.Parameters == nil { + return json.RawMessage{} + } + return *r.Parameters +} + +// GetCommit returns the Commit field. +func (r *RepositoryTag) GetCommit() *Commit { + if r == nil { + return nil + } + return r.Commit +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RepositoryTag) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetTarballURL returns the TarballURL field if it's non-nil, zero value otherwise. +func (r *RepositoryTag) GetTarballURL() string { + if r == nil || r.TarballURL == nil { + return "" + } + return *r.TarballURL +} + +// GetZipballURL returns the ZipballURL field if it's non-nil, zero value otherwise. +func (r *RepositoryTag) GetZipballURL() string { + if r == nil || r.ZipballURL == nil { + return "" + } + return *r.ZipballURL +} + +// GetAffectedPackageName returns the AffectedPackageName field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetAffectedPackageName() string { + if r == nil || r.AffectedPackageName == nil { + return "" + } + return *r.AffectedPackageName +} + +// GetAffectedRange returns the AffectedRange field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetAffectedRange() string { + if r == nil || r.AffectedRange == nil { + return "" + } + return *r.AffectedRange +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
+func (r *RepositoryVulnerabilityAlert) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetDismissedAt returns the DismissedAt field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetDismissedAt() Timestamp { + if r == nil || r.DismissedAt == nil { + return Timestamp{} + } + return *r.DismissedAt +} + +// GetDismisser returns the Dismisser field. +func (r *RepositoryVulnerabilityAlert) GetDismisser() *User { + if r == nil { + return nil + } + return r.Dismisser +} + +// GetDismissReason returns the DismissReason field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetDismissReason() string { + if r == nil || r.DismissReason == nil { + return "" + } + return *r.DismissReason +} + +// GetExternalIdentifier returns the ExternalIdentifier field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetExternalIdentifier() string { + if r == nil || r.ExternalIdentifier == nil { + return "" + } + return *r.ExternalIdentifier +} + +// GetExternalReference returns the ExternalReference field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetExternalReference() string { + if r == nil || r.ExternalReference == nil { + return "" + } + return *r.ExternalReference +} + +// GetFixedIn returns the FixedIn field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetFixedIn() string { + if r == nil || r.FixedIn == nil { + return "" + } + return *r.FixedIn +} + +// GetGitHubSecurityAdvisoryID returns the GitHubSecurityAdvisoryID field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetGitHubSecurityAdvisoryID() string { + if r == nil || r.GitHubSecurityAdvisoryID == nil { + return "" + } + return *r.GitHubSecurityAdvisoryID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlert) GetSeverity() string { + if r == nil || r.Severity == nil { + return "" + } + return *r.Severity +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (r *RepositoryVulnerabilityAlertEvent) GetAction() string { + if r == nil || r.Action == nil { + return "" + } + return *r.Action +} + +// GetAlert returns the Alert field. +func (r *RepositoryVulnerabilityAlertEvent) GetAlert() *RepositoryVulnerabilityAlert { + if r == nil { + return nil + } + return r.Alert +} + +// GetInstallation returns the Installation field. +func (r *RepositoryVulnerabilityAlertEvent) GetInstallation() *Installation { + if r == nil { + return nil + } + return r.Installation +} + +// GetOrg returns the Org field. +func (r *RepositoryVulnerabilityAlertEvent) GetOrg() *Organization { + if r == nil { + return nil + } + return r.Org +} + +// GetRepository returns the Repository field. +func (r *RepositoryVulnerabilityAlertEvent) GetRepository() *Repository { + if r == nil { + return nil + } + return r.Repository +} + +// GetSender returns the Sender field. +func (r *RepositoryVulnerabilityAlertEvent) GetSender() *User { + if r == nil { + return nil + } + return r.Sender +} + +// GetForkRepos returns the ForkRepos field if it's non-nil, zero value otherwise. 
+func (r *RepoStats) GetForkRepos() int { + if r == nil || r.ForkRepos == nil { + return 0 + } + return *r.ForkRepos +} + +// GetOrgRepos returns the OrgRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetOrgRepos() int { + if r == nil || r.OrgRepos == nil { + return 0 + } + return *r.OrgRepos +} + +// GetRootRepos returns the RootRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetRootRepos() int { + if r == nil || r.RootRepos == nil { + return 0 + } + return *r.RootRepos +} + +// GetTotalPushes returns the TotalPushes field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalPushes() int { + if r == nil || r.TotalPushes == nil { + return 0 + } + return *r.TotalPushes +} + +// GetTotalRepos returns the TotalRepos field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalRepos() int { + if r == nil || r.TotalRepos == nil { + return 0 + } + return *r.TotalRepos +} + +// GetTotalWikis returns the TotalWikis field if it's non-nil, zero value otherwise. +func (r *RepoStats) GetTotalWikis() int { + if r == nil || r.TotalWikis == nil { + return 0 + } + return *r.TotalWikis +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetAvatarURL() string { + if r == nil || r.AvatarURL == nil { + return "" + } + return *r.AvatarURL +} + +// GetContext returns the Context field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetContext() string { + if r == nil || r.Context == nil { + return "" + } + return *r.Context +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetCreatedAt() Timestamp { + if r == nil || r.CreatedAt == nil { + return Timestamp{} + } + return *r.CreatedAt +} + +// GetCreator returns the Creator field. +func (r *RepoStatus) GetCreator() *User { + if r == nil { + return nil + } + return r.Creator +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetDescription() string { + if r == nil || r.Description == nil { + return "" + } + return *r.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetState() string { + if r == nil || r.State == nil { + return "" + } + return *r.State +} + +// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetTargetURL() string { + if r == nil || r.TargetURL == nil { + return "" + } + return *r.TargetURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetUpdatedAt() Timestamp { + if r == nil || r.UpdatedAt == nil { + return Timestamp{} + } + return *r.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RepoStatus) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. 
+func (r *RequireCodeOwnerReviewChanges) GetFrom() bool { + if r == nil || r.From == nil { + return false + } + return *r.From +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (r *RequiredConversationResolutionLevelChanges) GetFrom() string { + if r == nil || r.From == nil { + return "" + } + return *r.From +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (r *RequiredDeploymentsEnforcementLevelChanges) GetFrom() string { + if r == nil || r.From == nil { + return "" + } + return *r.From +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RequiredReviewer) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetAppID returns the AppID field if it's non-nil, zero value otherwise. +func (r *RequiredStatusCheck) GetAppID() int64 { + if r == nil || r.AppID == nil { + return 0 + } + return *r.AppID +} + +// GetContextsURL returns the ContextsURL field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecks) GetContextsURL() string { + if r == nil || r.ContextsURL == nil { + return "" + } + return *r.ContextsURL +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecks) GetURL() string { + if r == nil || r.URL == nil { + return "" + } + return *r.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecksEnforcementLevelChanges) GetFrom() string { + if r == nil || r.From == nil { + return "" + } + return *r.From +} + +// GetStrict returns the Strict field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecksRequest) GetStrict() bool { + if r == nil || r.Strict == nil { + return false + } + return *r.Strict +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (r *RequiredWorkflowSelectedRepos) GetTotalCount() int { + if r == nil || r.TotalCount == nil { + return 0 + } + return *r.TotalCount +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *ReviewersRequest) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (r *ReviewPersonalAccessTokenRequestOptions) GetReason() string { + if r == nil || r.Reason == nil { + return "" + } + return *r.Reason +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (r *Rule) GetDescription() string { + if r == nil || r.Description == nil { + return "" + } + return *r.Description +} + +// GetFullDescription returns the FullDescription field if it's non-nil, zero value otherwise. +func (r *Rule) GetFullDescription() string { + if r == nil || r.FullDescription == nil { + return "" + } + return *r.FullDescription +} + +// GetHelp returns the Help field if it's non-nil, zero value otherwise. +func (r *Rule) GetHelp() string { + if r == nil || r.Help == nil { + return "" + } + return *r.Help +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Rule) GetID() string { + if r == nil || r.ID == nil { + return "" + } + return *r.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. 
+func (r *Rule) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetSecuritySeverityLevel returns the SecuritySeverityLevel field if it's non-nil, zero value otherwise. +func (r *Rule) GetSecuritySeverityLevel() string { + if r == nil || r.SecuritySeverityLevel == nil { + return "" + } + return *r.SecuritySeverityLevel +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (r *Rule) GetSeverity() string { + if r == nil || r.Severity == nil { + return "" + } + return *r.Severity +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RulePatternParameters) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetNegate returns the Negate field if it's non-nil, zero value otherwise. +func (r *RulePatternParameters) GetNegate() bool { + if r == nil || r.Negate == nil { + return false + } + return *r.Negate +} + +// GetIntegrationID returns the IntegrationID field if it's non-nil, zero value otherwise. +func (r *RuleRequiredStatusChecks) GetIntegrationID() int64 { + if r == nil || r.IntegrationID == nil { + return 0 + } + return *r.IntegrationID +} + +// GetConditions returns the Conditions field. +func (r *Ruleset) GetConditions() *RulesetConditions { + if r == nil { + return nil + } + return r.Conditions +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Ruleset) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetLinks returns the Links field. +func (r *Ruleset) GetLinks() *RulesetLinks { + if r == nil { + return nil + } + return r.Links +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Ruleset) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + +// GetSourceType returns the SourceType field if it's non-nil, zero value otherwise. +func (r *Ruleset) GetSourceType() string { + if r == nil || r.SourceType == nil { + return "" + } + return *r.SourceType +} + +// GetTarget returns the Target field if it's non-nil, zero value otherwise. +func (r *Ruleset) GetTarget() string { + if r == nil || r.Target == nil { + return "" + } + return *r.Target +} + +// GetRefName returns the RefName field. +func (r *RulesetConditions) GetRefName() *RulesetRefConditionParameters { + if r == nil { + return nil + } + return r.RefName +} + +// GetRepositoryID returns the RepositoryID field. +func (r *RulesetConditions) GetRepositoryID() *RulesetRepositoryIDsConditionParameters { + if r == nil { + return nil + } + return r.RepositoryID +} + +// GetRepositoryName returns the RepositoryName field. +func (r *RulesetConditions) GetRepositoryName() *RulesetRepositoryNamesConditionParameters { + if r == nil { + return nil + } + return r.RepositoryName +} + +// GetHRef returns the HRef field if it's non-nil, zero value otherwise. +func (r *RulesetLink) GetHRef() string { + if r == nil || r.HRef == nil { + return "" + } + return *r.HRef +} + +// GetSelf returns the Self field. +func (r *RulesetLinks) GetSelf() *RulesetLink { + if r == nil { + return nil + } + return r.Self +} + +// GetProtected returns the Protected field if it's non-nil, zero value otherwise. +func (r *RulesetRepositoryNamesConditionParameters) GetProtected() bool { + if r == nil || r.Protected == nil { + return false + } + return *r.Protected +} + +// GetBusy returns the Busy field if it's non-nil, zero value otherwise. 
+func (r *Runner) GetBusy() bool { + if r == nil || r.Busy == nil { + return false + } + return *r.Busy +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *Runner) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *Runner) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetOS returns the OS field if it's non-nil, zero value otherwise. +func (r *Runner) GetOS() string { + if r == nil || r.OS == nil { + return "" + } + return *r.OS +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (r *Runner) GetStatus() string { + if r == nil || r.Status == nil { + return "" + } + return *r.Status +} + +// GetArchitecture returns the Architecture field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetArchitecture() string { + if r == nil || r.Architecture == nil { + return "" + } + return *r.Architecture +} + +// GetDownloadURL returns the DownloadURL field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetDownloadURL() string { + if r == nil || r.DownloadURL == nil { + return "" + } + return *r.DownloadURL +} + +// GetFilename returns the Filename field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetFilename() string { + if r == nil || r.Filename == nil { + return "" + } + return *r.Filename +} + +// GetOS returns the OS field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetOS() string { + if r == nil || r.OS == nil { + return "" + } + return *r.OS +} + +// GetSHA256Checksum returns the SHA256Checksum field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetSHA256Checksum() string { + if r == nil || r.SHA256Checksum == nil { + return "" + } + return *r.SHA256Checksum +} + +// GetTempDownloadToken returns the TempDownloadToken field if it's non-nil, zero value otherwise. +func (r *RunnerApplicationDownload) GetTempDownloadToken() string { + if r == nil || r.TempDownloadToken == nil { + return "" + } + return *r.TempDownloadToken +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetAllowsPublicRepositories() bool { + if r == nil || r.AllowsPublicRepositories == nil { + return false + } + return *r.AllowsPublicRepositories +} + +// GetDefault returns the Default field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetDefault() bool { + if r == nil || r.Default == nil { + return false + } + return *r.Default +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetInherited returns the Inherited field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetInherited() bool { + if r == nil || r.Inherited == nil { + return false + } + return *r.Inherited +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. 
+func (r *RunnerGroup) GetRestrictedToWorkflows() bool { + if r == nil || r.RestrictedToWorkflows == nil { + return false + } + return *r.RestrictedToWorkflows +} + +// GetRunnersURL returns the RunnersURL field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetRunnersURL() string { + if r == nil || r.RunnersURL == nil { + return "" + } + return *r.RunnersURL +} + +// GetSelectedRepositoriesURL returns the SelectedRepositoriesURL field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetSelectedRepositoriesURL() string { + if r == nil || r.SelectedRepositoriesURL == nil { + return "" + } + return *r.SelectedRepositoriesURL +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetVisibility() string { + if r == nil || r.Visibility == nil { + return "" + } + return *r.Visibility +} + +// GetWorkflowRestrictionsReadOnly returns the WorkflowRestrictionsReadOnly field if it's non-nil, zero value otherwise. +func (r *RunnerGroup) GetWorkflowRestrictionsReadOnly() bool { + if r == nil || r.WorkflowRestrictionsReadOnly == nil { + return false + } + return *r.WorkflowRestrictionsReadOnly +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (r *RunnerLabels) GetID() int64 { + if r == nil || r.ID == nil { + return 0 + } + return *r.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (r *RunnerLabels) GetName() string { + if r == nil || r.Name == nil { + return "" + } + return *r.Name +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (r *RunnerLabels) GetType() string { + if r == nil || r.Type == nil { + return "" + } + return *r.Type +} + +// GetCheckoutURI returns the CheckoutURI field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetCheckoutURI() string { + if s == nil || s.CheckoutURI == nil { + return "" + } + return *s.CheckoutURI +} + +// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetCommitSHA() string { + if s == nil || s.CommitSHA == nil { + return "" + } + return *s.CommitSHA +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetRef() string { + if s == nil || s.Ref == nil { + return "" + } + return *s.Ref +} + +// GetSarif returns the Sarif field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetSarif() string { + if s == nil || s.Sarif == nil { + return "" + } + return *s.Sarif +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetStartedAt() Timestamp { + if s == nil || s.StartedAt == nil { + return Timestamp{} + } + return *s.StartedAt +} + +// GetToolName returns the ToolName field if it's non-nil, zero value otherwise. +func (s *SarifAnalysis) GetToolName() string { + if s == nil || s.ToolName == nil { + return "" + } + return *s.ToolName +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *SarifID) GetID() string { + if s == nil || s.ID == nil { + return "" + } + return *s.ID +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SarifID) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetAnalysesURL returns the AnalysesURL field if it's non-nil, zero value otherwise. 
+func (s *SARIFUpload) GetAnalysesURL() string { + if s == nil || s.AnalysesURL == nil { + return "" + } + return *s.AnalysesURL +} + +// GetProcessingStatus returns the ProcessingStatus field if it's non-nil, zero value otherwise. +func (s *SARIFUpload) GetProcessingStatus() string { + if s == nil || s.ProcessingStatus == nil { + return "" + } + return *s.ProcessingStatus +} + +// GetSBOM returns the SBOM field. +func (s *SBOM) GetSBOM() *SBOMInfo { + if s == nil { + return nil + } + return s.SBOM +} + +// GetCreationInfo returns the CreationInfo field. +func (s *SBOMInfo) GetCreationInfo() *CreationInfo { + if s == nil { + return nil + } + return s.CreationInfo +} + +// GetDataLicense returns the DataLicense field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetDataLicense() string { + if s == nil || s.DataLicense == nil { + return "" + } + return *s.DataLicense +} + +// GetDocumentNamespace returns the DocumentNamespace field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetDocumentNamespace() string { + if s == nil || s.DocumentNamespace == nil { + return "" + } + return *s.DocumentNamespace +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetName() string { + if s == nil || s.Name == nil { + return "" + } + return *s.Name +} + +// GetSPDXID returns the SPDXID field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetSPDXID() string { + if s == nil || s.SPDXID == nil { + return "" + } + return *s.SPDXID +} + +// GetSPDXVersion returns the SPDXVersion field if it's non-nil, zero value otherwise. +func (s *SBOMInfo) GetSPDXVersion() string { + if s == nil || s.SPDXVersion == nil { + return "" + } + return *s.SPDXVersion +} + +// GetAnalysisKey returns the AnalysisKey field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetAnalysisKey() string { + if s == nil || s.AnalysisKey == nil { + return "" + } + return *s.AnalysisKey +} + +// GetCategory returns the Category field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetCategory() string { + if s == nil || s.Category == nil { + return "" + } + return *s.Category +} + +// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetCommitSHA() string { + if s == nil || s.CommitSHA == nil { + return "" + } + return *s.CommitSHA +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetDeletable returns the Deletable field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetDeletable() bool { + if s == nil || s.Deletable == nil { + return false + } + return *s.Deletable +} + +// GetEnvironment returns the Environment field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetEnvironment() string { + if s == nil || s.Environment == nil { + return "" + } + return *s.Environment +} + +// GetError returns the Error field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetError() string { + if s == nil || s.Error == nil { + return "" + } + return *s.Error +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetID() int64 { + if s == nil || s.ID == nil { + return 0 + } + return *s.ID +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. 
+func (s *ScanningAnalysis) GetRef() string { + if s == nil || s.Ref == nil { + return "" + } + return *s.Ref +} + +// GetResultsCount returns the ResultsCount field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetResultsCount() int { + if s == nil || s.ResultsCount == nil { + return 0 + } + return *s.ResultsCount +} + +// GetRulesCount returns the RulesCount field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetRulesCount() int { + if s == nil || s.RulesCount == nil { + return 0 + } + return *s.RulesCount +} + +// GetSarifID returns the SarifID field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetSarifID() string { + if s == nil || s.SarifID == nil { + return "" + } + return *s.SarifID +} + +// GetTool returns the Tool field. +func (s *ScanningAnalysis) GetTool() *Tool { + if s == nil { + return nil + } + return s.Tool +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetWarning returns the Warning field if it's non-nil, zero value otherwise. +func (s *ScanningAnalysis) GetWarning() string { + if s == nil || s.Warning == nil { + return "" + } + return *s.Warning +} + +// GetCreated returns the Created field if it's non-nil, zero value otherwise. +func (s *SCIMMeta) GetCreated() Timestamp { + if s == nil || s.Created == nil { + return Timestamp{} + } + return *s.Created +} + +// GetLastModified returns the LastModified field if it's non-nil, zero value otherwise. +func (s *SCIMMeta) GetLastModified() Timestamp { + if s == nil || s.LastModified == nil { + return Timestamp{} + } + return *s.LastModified +} + +// GetLocation returns the Location field if it's non-nil, zero value otherwise. +func (s *SCIMMeta) GetLocation() string { + if s == nil || s.Location == nil { + return "" + } + return *s.Location +} + +// GetResourceType returns the ResourceType field if it's non-nil, zero value otherwise. +func (s *SCIMMeta) GetResourceType() string { + if s == nil || s.ResourceType == nil { + return "" + } + return *s.ResourceType +} + +// GetItemsPerPage returns the ItemsPerPage field if it's non-nil, zero value otherwise. +func (s *SCIMProvisionedIdentities) GetItemsPerPage() int { + if s == nil || s.ItemsPerPage == nil { + return 0 + } + return *s.ItemsPerPage +} + +// GetStartIndex returns the StartIndex field if it's non-nil, zero value otherwise. +func (s *SCIMProvisionedIdentities) GetStartIndex() int { + if s == nil || s.StartIndex == nil { + return 0 + } + return *s.StartIndex +} + +// GetTotalResults returns the TotalResults field if it's non-nil, zero value otherwise. +func (s *SCIMProvisionedIdentities) GetTotalResults() int { + if s == nil || s.TotalResults == nil { + return 0 + } + return *s.TotalResults +} + +// GetActive returns the Active field if it's non-nil, zero value otherwise. +func (s *SCIMUserAttributes) GetActive() bool { + if s == nil || s.Active == nil { + return false + } + return *s.Active +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (s *SCIMUserAttributes) GetDisplayName() string { + if s == nil || s.DisplayName == nil { + return "" + } + return *s.DisplayName +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. 
+func (s *SCIMUserAttributes) GetExternalID() string { + if s == nil || s.ExternalID == nil { + return "" + } + return *s.ExternalID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *SCIMUserAttributes) GetID() string { + if s == nil || s.ID == nil { + return "" + } + return *s.ID +} + +// GetMeta returns the Meta field. +func (s *SCIMUserAttributes) GetMeta() *SCIMMeta { + if s == nil { + return nil + } + return s.Meta +} + +// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. +func (s *SCIMUserEmail) GetPrimary() bool { + if s == nil || s.Primary == nil { + return false + } + return *s.Primary +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (s *SCIMUserEmail) GetType() string { + if s == nil || s.Type == nil { + return "" + } + return *s.Type +} + +// GetFormatted returns the Formatted field if it's non-nil, zero value otherwise. +func (s *SCIMUserName) GetFormatted() string { + if s == nil || s.Formatted == nil { + return "" + } + return *s.Formatted +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (s *SecretScanning) GetStatus() string { + if s == nil || s.Status == nil { + return "" + } + return *s.Status +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetHTMLURL() string { + if s == nil || s.HTMLURL == nil { + return "" + } + return *s.HTMLURL +} + +// GetLocationsURL returns the LocationsURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetLocationsURL() string { + if s == nil || s.LocationsURL == nil { + return "" + } + return *s.LocationsURL +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetNumber() int { + if s == nil || s.Number == nil { + return 0 + } + return *s.Number +} + +// GetPushProtectionBypassed returns the PushProtectionBypassed field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassed() bool { + if s == nil || s.PushProtectionBypassed == nil { + return false + } + return *s.PushProtectionBypassed +} + +// GetPushProtectionBypassedAt returns the PushProtectionBypassedAt field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetPushProtectionBypassedAt() Timestamp { + if s == nil || s.PushProtectionBypassedAt == nil { + return Timestamp{} + } + return *s.PushProtectionBypassedAt +} + +// GetPushProtectionBypassedBy returns the PushProtectionBypassedBy field. +func (s *SecretScanningAlert) GetPushProtectionBypassedBy() *User { + if s == nil { + return nil + } + return s.PushProtectionBypassedBy +} + +// GetRepository returns the Repository field. +func (s *SecretScanningAlert) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + +// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetResolution() string { + if s == nil || s.Resolution == nil { + return "" + } + return *s.Resolution +} + +// GetResolutionComment returns the ResolutionComment field if it's non-nil, zero value otherwise. 
+func (s *SecretScanningAlert) GetResolutionComment() string { + if s == nil || s.ResolutionComment == nil { + return "" + } + return *s.ResolutionComment +} + +// GetResolvedAt returns the ResolvedAt field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetResolvedAt() Timestamp { + if s == nil || s.ResolvedAt == nil { + return Timestamp{} + } + return *s.ResolvedAt +} + +// GetResolvedBy returns the ResolvedBy field. +func (s *SecretScanningAlert) GetResolvedBy() *User { + if s == nil { + return nil + } + return s.ResolvedBy +} + +// GetSecret returns the Secret field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetSecret() string { + if s == nil || s.Secret == nil { + return "" + } + return *s.Secret +} + +// GetSecretType returns the SecretType field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetSecretType() string { + if s == nil || s.SecretType == nil { + return "" + } + return *s.SecretType +} + +// GetSecretTypeDisplayName returns the SecretTypeDisplayName field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetSecretTypeDisplayName() string { + if s == nil || s.SecretTypeDisplayName == nil { + return "" + } + return *s.SecretTypeDisplayName +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetState() string { + if s == nil || s.State == nil { + return "" + } + return *s.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetUpdatedAt() Timestamp { + if s == nil || s.UpdatedAt == nil { + return Timestamp{} + } + return *s.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlert) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertEvent) GetAction() string { + if s == nil || s.Action == nil { + return "" + } + return *s.Action +} + +// GetAlert returns the Alert field. +func (s *SecretScanningAlertEvent) GetAlert() *SecretScanningAlert { + if s == nil { + return nil + } + return s.Alert +} + +// GetEnterprise returns the Enterprise field. +func (s *SecretScanningAlertEvent) GetEnterprise() *Enterprise { + if s == nil { + return nil + } + return s.Enterprise +} + +// GetInstallation returns the Installation field. +func (s *SecretScanningAlertEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrganization returns the Organization field. +func (s *SecretScanningAlertEvent) GetOrganization() *Organization { + if s == nil { + return nil + } + return s.Organization +} + +// GetRepo returns the Repo field. +func (s *SecretScanningAlertEvent) GetRepo() *Repository { + if s == nil { + return nil + } + return s.Repo +} + +// GetSender returns the Sender field. +func (s *SecretScanningAlertEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + +// GetDetails returns the Details field. +func (s *SecretScanningAlertLocation) GetDetails() *SecretScanningAlertLocationDetails { + if s == nil { + return nil + } + return s.Details +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. 
+func (s *SecretScanningAlertLocation) GetType() string { + if s == nil || s.Type == nil { + return "" + } + return *s.Type +} + +// GetBlobSHA returns the BlobSHA field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetBlobSHA() string { + if s == nil || s.BlobSHA == nil { + return "" + } + return *s.BlobSHA +} + +// GetBlobURL returns the BlobURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetBlobURL() string { + if s == nil || s.BlobURL == nil { + return "" + } + return *s.BlobURL +} + +// GetCommitSHA returns the CommitSHA field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetCommitSHA() string { + if s == nil || s.CommitSHA == nil { + return "" + } + return *s.CommitSHA +} + +// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetCommitURL() string { + if s == nil || s.CommitURL == nil { + return "" + } + return *s.CommitURL +} + +// GetEndColumn returns the EndColumn field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetEndColumn() int { + if s == nil || s.EndColumn == nil { + return 0 + } + return *s.EndColumn +} + +// GetEndLine returns the EndLine field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetEndLine() int { + if s == nil || s.EndLine == nil { + return 0 + } + return *s.EndLine +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetPath() string { + if s == nil || s.Path == nil { + return "" + } + return *s.Path +} + +// GetStartColumn returns the StartColumn field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetStartColumn() int { + if s == nil || s.StartColumn == nil { + return 0 + } + return *s.StartColumn +} + +// GetStartline returns the Startline field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertLocationDetails) GetStartline() int { + if s == nil || s.Startline == nil { + return 0 + } + return *s.Startline +} + +// GetResolution returns the Resolution field if it's non-nil, zero value otherwise. +func (s *SecretScanningAlertUpdateOptions) GetResolution() string { + if s == nil || s.Resolution == nil { + return "" + } + return *s.Resolution +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (s *SecretScanningPushProtection) GetStatus() string { + if s == nil || s.Status == nil { + return "" + } + return *s.Status +} + +// GetAuthor returns the Author field. +func (s *SecurityAdvisory) GetAuthor() *User { + if s == nil { + return nil + } + return s.Author +} + +// GetClosedAt returns the ClosedAt field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetClosedAt() Timestamp { + if s == nil || s.ClosedAt == nil { + return Timestamp{} + } + return *s.ClosedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetCVEID returns the CVEID field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetCVEID() string { + if s == nil || s.CVEID == nil { + return "" + } + return *s.CVEID +} + +// GetCVSS returns the CVSS field. 
+func (s *SecurityAdvisory) GetCVSS() *AdvisoryCVSS { + if s == nil { + return nil + } + return s.CVSS +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetDescription() string { + if s == nil || s.Description == nil { + return "" + } + return *s.Description +} + +// GetGHSAID returns the GHSAID field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetGHSAID() string { + if s == nil || s.GHSAID == nil { + return "" + } + return *s.GHSAID +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetHTMLURL() string { + if s == nil || s.HTMLURL == nil { + return "" + } + return *s.HTMLURL +} + +// GetPrivateFork returns the PrivateFork field. +func (s *SecurityAdvisory) GetPrivateFork() *Repository { + if s == nil { + return nil + } + return s.PrivateFork +} + +// GetPublishedAt returns the PublishedAt field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetPublishedAt() Timestamp { + if s == nil || s.PublishedAt == nil { + return Timestamp{} + } + return *s.PublishedAt +} + +// GetPublisher returns the Publisher field. +func (s *SecurityAdvisory) GetPublisher() *User { + if s == nil { + return nil + } + return s.Publisher +} + +// GetSeverity returns the Severity field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetSeverity() string { + if s == nil || s.Severity == nil { + return "" + } + return *s.Severity +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetState() string { + if s == nil || s.State == nil { + return "" + } + return *s.State +} + +// GetSubmission returns the Submission field. +func (s *SecurityAdvisory) GetSubmission() *SecurityAdvisorySubmission { + if s == nil { + return nil + } + return s.Submission +} + +// GetSummary returns the Summary field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetSummary() string { + if s == nil || s.Summary == nil { + return "" + } + return *s.Summary +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetUpdatedAt() Timestamp { + if s == nil || s.UpdatedAt == nil { + return Timestamp{} + } + return *s.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetWithdrawnAt returns the WithdrawnAt field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisory) GetWithdrawnAt() Timestamp { + if s == nil || s.WithdrawnAt == nil { + return Timestamp{} + } + return *s.WithdrawnAt +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisoryEvent) GetAction() string { + if s == nil || s.Action == nil { + return "" + } + return *s.Action +} + +// GetEnterprise returns the Enterprise field. +func (s *SecurityAdvisoryEvent) GetEnterprise() *Enterprise { + if s == nil { + return nil + } + return s.Enterprise +} + +// GetInstallation returns the Installation field. +func (s *SecurityAdvisoryEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrganization returns the Organization field. +func (s *SecurityAdvisoryEvent) GetOrganization() *Organization { + if s == nil { + return nil + } + return s.Organization +} + +// GetRepository returns the Repository field. 
+func (s *SecurityAdvisoryEvent) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + +// GetSecurityAdvisory returns the SecurityAdvisory field. +func (s *SecurityAdvisoryEvent) GetSecurityAdvisory() *SecurityAdvisory { + if s == nil { + return nil + } + return s.SecurityAdvisory +} + +// GetSender returns the Sender field. +func (s *SecurityAdvisoryEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + +// GetAccepted returns the Accepted field if it's non-nil, zero value otherwise. +func (s *SecurityAdvisorySubmission) GetAccepted() bool { + if s == nil || s.Accepted == nil { + return false + } + return *s.Accepted +} + +// GetAdvancedSecurity returns the AdvancedSecurity field. +func (s *SecurityAndAnalysis) GetAdvancedSecurity() *AdvancedSecurity { + if s == nil { + return nil + } + return s.AdvancedSecurity +} + +// GetDependabotSecurityUpdates returns the DependabotSecurityUpdates field. +func (s *SecurityAndAnalysis) GetDependabotSecurityUpdates() *DependabotSecurityUpdates { + if s == nil { + return nil + } + return s.DependabotSecurityUpdates +} + +// GetSecretScanning returns the SecretScanning field. +func (s *SecurityAndAnalysis) GetSecretScanning() *SecretScanning { + if s == nil { + return nil + } + return s.SecretScanning +} + +// GetSecretScanningPushProtection returns the SecretScanningPushProtection field. +func (s *SecurityAndAnalysis) GetSecretScanningPushProtection() *SecretScanningPushProtection { + if s == nil { + return nil + } + return s.SecretScanningPushProtection +} + +// GetFrom returns the From field. +func (s *SecurityAndAnalysisChange) GetFrom() *SecurityAndAnalysisChangeFrom { + if s == nil { + return nil + } + return s.From +} + +// GetSecurityAndAnalysis returns the SecurityAndAnalysis field. +func (s *SecurityAndAnalysisChangeFrom) GetSecurityAndAnalysis() *SecurityAndAnalysis { + if s == nil { + return nil + } + return s.SecurityAndAnalysis +} + +// GetChanges returns the Changes field. +func (s *SecurityAndAnalysisEvent) GetChanges() *SecurityAndAnalysisChange { + if s == nil { + return nil + } + return s.Changes +} + +// GetEnterprise returns the Enterprise field. +func (s *SecurityAndAnalysisEvent) GetEnterprise() *Enterprise { + if s == nil { + return nil + } + return s.Enterprise +} + +// GetInstallation returns the Installation field. +func (s *SecurityAndAnalysisEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrganization returns the Organization field. +func (s *SecurityAndAnalysisEvent) GetOrganization() *Organization { + if s == nil { + return nil + } + return s.Organization +} + +// GetRepository returns the Repository field. +func (s *SecurityAndAnalysisEvent) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + +// GetSender returns the Sender field. +func (s *SecurityAndAnalysisEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (s *SelectedReposList) GetTotalCount() int { + if s == nil || s.TotalCount == nil { + return 0 + } + return *s.TotalCount +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (s *SignatureRequirementEnforcementLevelChanges) GetFrom() string { + if s == nil || s.From == nil { + return "" + } + return *s.From +} + +// GetEnabled returns the Enabled field if it's non-nil, zero value otherwise. 
+func (s *SignaturesProtectedBranch) GetEnabled() bool { + if s == nil || s.Enabled == nil { + return false + } + return *s.Enabled +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SignaturesProtectedBranch) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetPayload returns the Payload field if it's non-nil, zero value otherwise. +func (s *SignatureVerification) GetPayload() string { + if s == nil || s.Payload == nil { + return "" + } + return *s.Payload +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (s *SignatureVerification) GetReason() string { + if s == nil || s.Reason == nil { + return "" + } + return *s.Reason +} + +// GetSignature returns the Signature field if it's non-nil, zero value otherwise. +func (s *SignatureVerification) GetSignature() string { + if s == nil || s.Signature == nil { + return "" + } + return *s.Signature +} + +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (s *SignatureVerification) GetVerified() bool { + if s == nil || s.Verified == nil { + return false + } + return *s.Verified +} + +// GetActor returns the Actor field. +func (s *Source) GetActor() *User { + if s == nil { + return nil + } + return s.Actor +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *Source) GetID() int64 { + if s == nil || s.ID == nil { + return 0 + } + return *s.ID +} + +// GetIssue returns the Issue field. +func (s *Source) GetIssue() *Issue { + if s == nil { + return nil + } + return s.Issue +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (s *Source) GetType() string { + if s == nil || s.Type == nil { + return "" + } + return *s.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *Source) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetEmail() string { + if s == nil || s.Email == nil { + return "" + } + return *s.Email +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetID() int64 { + if s == nil || s.ID == nil { + return 0 + } + return *s.ID +} + +// GetImportURL returns the ImportURL field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetImportURL() string { + if s == nil || s.ImportURL == nil { + return "" + } + return *s.ImportURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetName() string { + if s == nil || s.Name == nil { + return "" + } + return *s.Name +} + +// GetRemoteID returns the RemoteID field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetRemoteID() string { + if s == nil || s.RemoteID == nil { + return "" + } + return *s.RemoteID +} + +// GetRemoteName returns the RemoteName field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetRemoteName() string { + if s == nil || s.RemoteName == nil { + return "" + } + return *s.RemoteName +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *SourceImportAuthor) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. 
+func (s *SSHSigningKey) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *SSHSigningKey) GetID() int64 { + if s == nil || s.ID == nil { + return 0 + } + return *s.ID +} + +// GetKey returns the Key field if it's non-nil, zero value otherwise. +func (s *SSHSigningKey) GetKey() string { + if s == nil || s.Key == nil { + return "" + } + return *s.Key +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (s *SSHSigningKey) GetTitle() string { + if s == nil || s.Title == nil { + return "" + } + return *s.Title +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (s *StarEvent) GetAction() string { + if s == nil || s.Action == nil { + return "" + } + return *s.Action +} + +// GetInstallation returns the Installation field. +func (s *StarEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetOrg returns the Org field. +func (s *StarEvent) GetOrg() *Organization { + if s == nil { + return nil + } + return s.Org +} + +// GetRepo returns the Repo field. +func (s *StarEvent) GetRepo() *Repository { + if s == nil { + return nil + } + return s.Repo +} + +// GetSender returns the Sender field. +func (s *StarEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + +// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. +func (s *StarEvent) GetStarredAt() Timestamp { + if s == nil || s.StarredAt == nil { + return Timestamp{} + } + return *s.StarredAt +} + +// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. +func (s *Stargazer) GetStarredAt() Timestamp { + if s == nil || s.StarredAt == nil { + return Timestamp{} + } + return *s.StarredAt +} + +// GetUser returns the User field. +func (s *Stargazer) GetUser() *User { + if s == nil { + return nil + } + return s.User +} + +// GetRepository returns the Repository field. +func (s *StarredRepository) GetRepository() *Repository { + if s == nil { + return nil + } + return s.Repository +} + +// GetStarredAt returns the StarredAt field if it's non-nil, zero value otherwise. +func (s *StarredRepository) GetStarredAt() Timestamp { + if s == nil || s.StarredAt == nil { + return Timestamp{} + } + return *s.StarredAt +} + +// GetCommit returns the Commit field. +func (s *StatusEvent) GetCommit() *RepositoryCommit { + if s == nil { + return nil + } + return s.Commit +} + +// GetContext returns the Context field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetContext() string { + if s == nil || s.Context == nil { + return "" + } + return *s.Context +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetDescription() string { + if s == nil || s.Description == nil { + return "" + } + return *s.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetID() int64 { + if s == nil || s.ID == nil { + return 0 + } + return *s.ID +} + +// GetInstallation returns the Installation field. 
+func (s *StatusEvent) GetInstallation() *Installation { + if s == nil { + return nil + } + return s.Installation +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetName() string { + if s == nil || s.Name == nil { + return "" + } + return *s.Name +} + +// GetOrg returns the Org field. +func (s *StatusEvent) GetOrg() *Organization { + if s == nil { + return nil + } + return s.Org +} + +// GetRepo returns the Repo field. +func (s *StatusEvent) GetRepo() *Repository { + if s == nil { + return nil + } + return s.Repo +} + +// GetSender returns the Sender field. +func (s *StatusEvent) GetSender() *User { + if s == nil { + return nil + } + return s.Sender +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetSHA() string { + if s == nil || s.SHA == nil { + return "" + } + return *s.SHA +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetState() string { + if s == nil || s.State == nil { + return "" + } + return *s.State +} + +// GetTargetURL returns the TargetURL field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetTargetURL() string { + if s == nil || s.TargetURL == nil { + return "" + } + return *s.TargetURL +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (s *StatusEvent) GetUpdatedAt() Timestamp { + if s == nil || s.UpdatedAt == nil { + return Timestamp{} + } + return *s.UpdatedAt +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (s *Subscription) GetCreatedAt() Timestamp { + if s == nil || s.CreatedAt == nil { + return Timestamp{} + } + return *s.CreatedAt +} + +// GetIgnored returns the Ignored field if it's non-nil, zero value otherwise. +func (s *Subscription) GetIgnored() bool { + if s == nil || s.Ignored == nil { + return false + } + return *s.Ignored +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (s *Subscription) GetReason() string { + if s == nil || s.Reason == nil { + return "" + } + return *s.Reason +} + +// GetRepositoryURL returns the RepositoryURL field if it's non-nil, zero value otherwise. +func (s *Subscription) GetRepositoryURL() string { + if s == nil || s.RepositoryURL == nil { + return "" + } + return *s.RepositoryURL +} + +// GetSubscribed returns the Subscribed field if it's non-nil, zero value otherwise. +func (s *Subscription) GetSubscribed() bool { + if s == nil || s.Subscribed == nil { + return false + } + return *s.Subscribed +} + +// GetThreadURL returns the ThreadURL field if it's non-nil, zero value otherwise. +func (s *Subscription) GetThreadURL() string { + if s == nil || s.ThreadURL == nil { + return "" + } + return *s.ThreadURL +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (s *Subscription) GetURL() string { + if s == nil || s.URL == nil { + return "" + } + return *s.URL +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (t *Tag) GetMessage() string { + if t == nil || t.Message == nil { + return "" + } + return *t.Message +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (t *Tag) GetNodeID() string { + if t == nil || t.NodeID == nil { + return "" + } + return *t.NodeID +} + +// GetObject returns the Object field. 
+func (t *Tag) GetObject() *GitObject { + if t == nil { + return nil + } + return t.Object +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (t *Tag) GetSHA() string { + if t == nil || t.SHA == nil { + return "" + } + return *t.SHA +} + +// GetTag returns the Tag field if it's non-nil, zero value otherwise. +func (t *Tag) GetTag() string { + if t == nil || t.Tag == nil { + return "" + } + return *t.Tag +} + +// GetTagger returns the Tagger field. +func (t *Tag) GetTagger() *CommitAuthor { + if t == nil { + return nil + } + return t.Tagger +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *Tag) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetVerification returns the Verification field. +func (t *Tag) GetVerification() *SignatureVerification { + if t == nil { + return nil + } + return t.Verification +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (t *TagProtection) GetID() int64 { + if t == nil || t.ID == nil { + return 0 + } + return *t.ID +} + +// GetPattern returns the Pattern field if it's non-nil, zero value otherwise. +func (t *TagProtection) GetPattern() string { + if t == nil || t.Pattern == nil { + return "" + } + return *t.Pattern +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetCompletedAt() Timestamp { + if t == nil || t.CompletedAt == nil { + return Timestamp{} + } + return *t.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetConclusion() string { + if t == nil || t.Conclusion == nil { + return "" + } + return *t.Conclusion +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetNumber() int64 { + if t == nil || t.Number == nil { + return 0 + } + return *t.Number +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetStartedAt() Timestamp { + if t == nil || t.StartedAt == nil { + return Timestamp{} + } + return *t.StartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (t *TaskStep) GetStatus() string { + if t == nil || t.Status == nil { + return "" + } + return *t.Status +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (t *Team) GetDescription() string { + if t == nil || t.Description == nil { + return "" + } + return *t.Description +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (t *Team) GetHTMLURL() string { + if t == nil || t.HTMLURL == nil { + return "" + } + return *t.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (t *Team) GetID() int64 { + if t == nil || t.ID == nil { + return 0 + } + return *t.ID +} + +// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. +func (t *Team) GetLDAPDN() string { + if t == nil || t.LDAPDN == nil { + return "" + } + return *t.LDAPDN +} + +// GetMembersCount returns the MembersCount field if it's non-nil, zero value otherwise. 
+func (t *Team) GetMembersCount() int { + if t == nil || t.MembersCount == nil { + return 0 + } + return *t.MembersCount +} + +// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. +func (t *Team) GetMembersURL() string { + if t == nil || t.MembersURL == nil { + return "" + } + return *t.MembersURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *Team) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (t *Team) GetNodeID() string { + if t == nil || t.NodeID == nil { + return "" + } + return *t.NodeID +} + +// GetOrganization returns the Organization field. +func (t *Team) GetOrganization() *Organization { + if t == nil { + return nil + } + return t.Organization +} + +// GetParent returns the Parent field. +func (t *Team) GetParent() *Team { + if t == nil { + return nil + } + return t.Parent +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (t *Team) GetPermission() string { + if t == nil || t.Permission == nil { + return "" + } + return *t.Permission +} + +// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. +func (t *Team) GetPermissions() map[string]bool { + if t == nil || t.Permissions == nil { + return map[string]bool{} + } + return t.Permissions +} + +// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. +func (t *Team) GetPrivacy() string { + if t == nil || t.Privacy == nil { + return "" + } + return *t.Privacy +} + +// GetReposCount returns the ReposCount field if it's non-nil, zero value otherwise. +func (t *Team) GetReposCount() int { + if t == nil || t.ReposCount == nil { + return 0 + } + return *t.ReposCount +} + +// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. +func (t *Team) GetRepositoriesURL() string { + if t == nil || t.RepositoriesURL == nil { + return "" + } + return *t.RepositoriesURL +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. +func (t *Team) GetSlug() string { + if t == nil || t.Slug == nil { + return "" + } + return *t.Slug +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *Team) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetInstallation returns the Installation field. +func (t *TeamAddEvent) GetInstallation() *Installation { + if t == nil { + return nil + } + return t.Installation +} + +// GetOrg returns the Org field. +func (t *TeamAddEvent) GetOrg() *Organization { + if t == nil { + return nil + } + return t.Org +} + +// GetRepo returns the Repo field. +func (t *TeamAddEvent) GetRepo() *Repository { + if t == nil { + return nil + } + return t.Repo +} + +// GetSender returns the Sender field. +func (t *TeamAddEvent) GetSender() *User { + if t == nil { + return nil + } + return t.Sender +} + +// GetTeam returns the Team field. +func (t *TeamAddEvent) GetTeam() *Team { + if t == nil { + return nil + } + return t.Team +} + +// GetDescription returns the Description field. +func (t *TeamChange) GetDescription() *TeamDescription { + if t == nil { + return nil + } + return t.Description +} + +// GetName returns the Name field. +func (t *TeamChange) GetName() *TeamName { + if t == nil { + return nil + } + return t.Name +} + +// GetPrivacy returns the Privacy field. 
+func (t *TeamChange) GetPrivacy() *TeamPrivacy { + if t == nil { + return nil + } + return t.Privacy +} + +// GetRepository returns the Repository field. +func (t *TeamChange) GetRepository() *TeamRepository { + if t == nil { + return nil + } + return t.Repository +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (t *TeamDescription) GetFrom() string { + if t == nil || t.From == nil { + return "" + } + return *t.From +} + +// GetAuthor returns the Author field. +func (t *TeamDiscussion) GetAuthor() *User { + if t == nil { + return nil + } + return t.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBody() string { + if t == nil || t.Body == nil { + return "" + } + return *t.Body +} + +// GetBodyHTML returns the BodyHTML field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBodyHTML() string { + if t == nil || t.BodyHTML == nil { + return "" + } + return *t.BodyHTML +} + +// GetBodyVersion returns the BodyVersion field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetBodyVersion() string { + if t == nil || t.BodyVersion == nil { + return "" + } + return *t.BodyVersion +} + +// GetCommentsCount returns the CommentsCount field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCommentsCount() int { + if t == nil || t.CommentsCount == nil { + return 0 + } + return *t.CommentsCount +} + +// GetCommentsURL returns the CommentsURL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCommentsURL() string { + if t == nil || t.CommentsURL == nil { + return "" + } + return *t.CommentsURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetCreatedAt() Timestamp { + if t == nil || t.CreatedAt == nil { + return Timestamp{} + } + return *t.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetHTMLURL() string { + if t == nil || t.HTMLURL == nil { + return "" + } + return *t.HTMLURL +} + +// GetLastEditedAt returns the LastEditedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetLastEditedAt() Timestamp { + if t == nil || t.LastEditedAt == nil { + return Timestamp{} + } + return *t.LastEditedAt +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetNodeID() string { + if t == nil || t.NodeID == nil { + return "" + } + return *t.NodeID +} + +// GetNumber returns the Number field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetNumber() int { + if t == nil || t.Number == nil { + return 0 + } + return *t.Number +} + +// GetPinned returns the Pinned field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetPinned() bool { + if t == nil || t.Pinned == nil { + return false + } + return *t.Pinned +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetPrivate() bool { + if t == nil || t.Private == nil { + return false + } + return *t.Private +} + +// GetReactions returns the Reactions field. +func (t *TeamDiscussion) GetReactions() *Reactions { + if t == nil { + return nil + } + return t.Reactions +} + +// GetTeamURL returns the TeamURL field if it's non-nil, zero value otherwise. 
+func (t *TeamDiscussion) GetTeamURL() string { + if t == nil || t.TeamURL == nil { + return "" + } + return *t.TeamURL +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetTitle() string { + if t == nil || t.Title == nil { + return "" + } + return *t.Title +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetUpdatedAt() Timestamp { + if t == nil || t.UpdatedAt == nil { + return Timestamp{} + } + return *t.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *TeamDiscussion) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (t *TeamEvent) GetAction() string { + if t == nil || t.Action == nil { + return "" + } + return *t.Action +} + +// GetChanges returns the Changes field. +func (t *TeamEvent) GetChanges() *TeamChange { + if t == nil { + return nil + } + return t.Changes +} + +// GetInstallation returns the Installation field. +func (t *TeamEvent) GetInstallation() *Installation { + if t == nil { + return nil + } + return t.Installation +} + +// GetOrg returns the Org field. +func (t *TeamEvent) GetOrg() *Organization { + if t == nil { + return nil + } + return t.Org +} + +// GetRepo returns the Repo field. +func (t *TeamEvent) GetRepo() *Repository { + if t == nil { + return nil + } + return t.Repo +} + +// GetSender returns the Sender field. +func (t *TeamEvent) GetSender() *User { + if t == nil { + return nil + } + return t.Sender +} + +// GetTeam returns the Team field. +func (t *TeamEvent) GetTeam() *Team { + if t == nil { + return nil + } + return t.Team +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetDescription() string { + if t == nil || t.Description == nil { + return "" + } + return *t.Description +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetID() int64 { + if t == nil || t.ID == nil { + return 0 + } + return *t.ID +} + +// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetLDAPDN() string { + if t == nil || t.LDAPDN == nil { + return "" + } + return *t.LDAPDN +} + +// GetMembersURL returns the MembersURL field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetMembersURL() string { + if t == nil || t.MembersURL == nil { + return "" + } + return *t.MembersURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetPermission() string { + if t == nil || t.Permission == nil { + return "" + } + return *t.Permission +} + +// GetPrivacy returns the Privacy field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetPrivacy() string { + if t == nil || t.Privacy == nil { + return "" + } + return *t.Privacy +} + +// GetRepositoriesURL returns the RepositoriesURL field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetRepositoriesURL() string { + if t == nil || t.RepositoriesURL == nil { + return "" + } + return *t.RepositoriesURL +} + +// GetSlug returns the Slug field if it's non-nil, zero value otherwise. 
+func (t *TeamLDAPMapping) GetSlug() string { + if t == nil || t.Slug == nil { + return "" + } + return *t.Slug +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *TeamLDAPMapping) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (t *TeamName) GetFrom() string { + if t == nil || t.From == nil { + return "" + } + return *t.From +} + +// GetFrom returns the From field. +func (t *TeamPermissions) GetFrom() *TeamPermissionsFrom { + if t == nil { + return nil + } + return t.From +} + +// GetAdmin returns the Admin field if it's non-nil, zero value otherwise. +func (t *TeamPermissionsFrom) GetAdmin() bool { + if t == nil || t.Admin == nil { + return false + } + return *t.Admin +} + +// GetPull returns the Pull field if it's non-nil, zero value otherwise. +func (t *TeamPermissionsFrom) GetPull() bool { + if t == nil || t.Pull == nil { + return false + } + return *t.Pull +} + +// GetPush returns the Push field if it's non-nil, zero value otherwise. +func (t *TeamPermissionsFrom) GetPush() bool { + if t == nil || t.Push == nil { + return false + } + return *t.Push +} + +// GetFrom returns the From field if it's non-nil, zero value otherwise. +func (t *TeamPrivacy) GetFrom() string { + if t == nil || t.From == nil { + return "" + } + return *t.From +} + +// GetPermission returns the Permission field if it's non-nil, zero value otherwise. +func (t *TeamProjectOptions) GetPermission() string { + if t == nil || t.Permission == nil { + return "" + } + return *t.Permission +} + +// GetPermissions returns the Permissions field. +func (t *TeamRepository) GetPermissions() *TeamPermissions { + if t == nil { + return nil + } + return t.Permissions +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (t *TemplateRepoRequest) GetDescription() string { + if t == nil || t.Description == nil { + return "" + } + return *t.Description +} + +// GetIncludeAllBranches returns the IncludeAllBranches field if it's non-nil, zero value otherwise. +func (t *TemplateRepoRequest) GetIncludeAllBranches() bool { + if t == nil || t.IncludeAllBranches == nil { + return false + } + return *t.IncludeAllBranches +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *TemplateRepoRequest) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetOwner returns the Owner field if it's non-nil, zero value otherwise. +func (t *TemplateRepoRequest) GetOwner() string { + if t == nil || t.Owner == nil { + return "" + } + return *t.Owner +} + +// GetPrivate returns the Private field if it's non-nil, zero value otherwise. +func (t *TemplateRepoRequest) GetPrivate() bool { + if t == nil || t.Private == nil { + return false + } + return *t.Private +} + +// GetFragment returns the Fragment field if it's non-nil, zero value otherwise. +func (t *TextMatch) GetFragment() string { + if t == nil || t.Fragment == nil { + return "" + } + return *t.Fragment +} + +// GetObjectType returns the ObjectType field if it's non-nil, zero value otherwise. +func (t *TextMatch) GetObjectType() string { + if t == nil || t.ObjectType == nil { + return "" + } + return *t.ObjectType +} + +// GetObjectURL returns the ObjectURL field if it's non-nil, zero value otherwise. 
+func (t *TextMatch) GetObjectURL() string { + if t == nil || t.ObjectURL == nil { + return "" + } + return *t.ObjectURL +} + +// GetProperty returns the Property field if it's non-nil, zero value otherwise. +func (t *TextMatch) GetProperty() string { + if t == nil || t.Property == nil { + return "" + } + return *t.Property +} + +// GetActor returns the Actor field. +func (t *Timeline) GetActor() *User { + if t == nil { + return nil + } + return t.Actor +} + +// GetAssignee returns the Assignee field. +func (t *Timeline) GetAssignee() *User { + if t == nil { + return nil + } + return t.Assignee +} + +// GetAssigner returns the Assigner field. +func (t *Timeline) GetAssigner() *User { + if t == nil { + return nil + } + return t.Assigner +} + +// GetAuthor returns the Author field. +func (t *Timeline) GetAuthor() *CommitAuthor { + if t == nil { + return nil + } + return t.Author +} + +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (t *Timeline) GetBody() string { + if t == nil || t.Body == nil { + return "" + } + return *t.Body +} + +// GetCommitID returns the CommitID field if it's non-nil, zero value otherwise. +func (t *Timeline) GetCommitID() string { + if t == nil || t.CommitID == nil { + return "" + } + return *t.CommitID +} + +// GetCommitter returns the Committer field. +func (t *Timeline) GetCommitter() *CommitAuthor { + if t == nil { + return nil + } + return t.Committer +} + +// GetCommitURL returns the CommitURL field if it's non-nil, zero value otherwise. +func (t *Timeline) GetCommitURL() string { + if t == nil || t.CommitURL == nil { + return "" + } + return *t.CommitURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (t *Timeline) GetCreatedAt() Timestamp { + if t == nil || t.CreatedAt == nil { + return Timestamp{} + } + return *t.CreatedAt +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (t *Timeline) GetEvent() string { + if t == nil || t.Event == nil { + return "" + } + return *t.Event +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (t *Timeline) GetID() int64 { + if t == nil || t.ID == nil { + return 0 + } + return *t.ID +} + +// GetLabel returns the Label field. +func (t *Timeline) GetLabel() *Label { + if t == nil { + return nil + } + return t.Label +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (t *Timeline) GetMessage() string { + if t == nil || t.Message == nil { + return "" + } + return *t.Message +} + +// GetMilestone returns the Milestone field. +func (t *Timeline) GetMilestone() *Milestone { + if t == nil { + return nil + } + return t.Milestone +} + +// GetPerformedViaGithubApp returns the PerformedViaGithubApp field. +func (t *Timeline) GetPerformedViaGithubApp() *App { + if t == nil { + return nil + } + return t.PerformedViaGithubApp +} + +// GetProjectCard returns the ProjectCard field. +func (t *Timeline) GetProjectCard() *ProjectCard { + if t == nil { + return nil + } + return t.ProjectCard +} + +// GetRename returns the Rename field. +func (t *Timeline) GetRename() *Rename { + if t == nil { + return nil + } + return t.Rename +} + +// GetRequestedTeam returns the RequestedTeam field. +func (t *Timeline) GetRequestedTeam() *Team { + if t == nil { + return nil + } + return t.RequestedTeam +} + +// GetRequester returns the Requester field. 
+func (t *Timeline) GetRequester() *User { + if t == nil { + return nil + } + return t.Requester +} + +// GetReviewer returns the Reviewer field. +func (t *Timeline) GetReviewer() *User { + if t == nil { + return nil + } + return t.Reviewer +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (t *Timeline) GetSHA() string { + if t == nil || t.SHA == nil { + return "" + } + return *t.SHA +} + +// GetSource returns the Source field. +func (t *Timeline) GetSource() *Source { + if t == nil { + return nil + } + return t.Source +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (t *Timeline) GetState() string { + if t == nil || t.State == nil { + return "" + } + return *t.State +} + +// GetSubmittedAt returns the SubmittedAt field if it's non-nil, zero value otherwise. +func (t *Timeline) GetSubmittedAt() Timestamp { + if t == nil || t.SubmittedAt == nil { + return Timestamp{} + } + return *t.SubmittedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *Timeline) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetUser returns the User field. +func (t *Timeline) GetUser() *User { + if t == nil { + return nil + } + return t.User +} + +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (t *Tool) GetGUID() string { + if t == nil || t.GUID == nil { + return "" + } + return *t.GUID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *Tool) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetVersion returns the Version field if it's non-nil, zero value otherwise. +func (t *Tool) GetVersion() string { + if t == nil || t.Version == nil { + return "" + } + return *t.Version +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetCreatedAt() Timestamp { + if t == nil || t.CreatedAt == nil { + return Timestamp{} + } + return *t.CreatedAt +} + +// GetCreatedBy returns the CreatedBy field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetCreatedBy() string { + if t == nil || t.CreatedBy == nil { + return "" + } + return *t.CreatedBy +} + +// GetCurated returns the Curated field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetCurated() bool { + if t == nil || t.Curated == nil { + return false + } + return *t.Curated +} + +// GetDescription returns the Description field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetDescription() string { + if t == nil || t.Description == nil { + return "" + } + return *t.Description +} + +// GetDisplayName returns the DisplayName field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetDisplayName() string { + if t == nil || t.DisplayName == nil { + return "" + } + return *t.DisplayName +} + +// GetFeatured returns the Featured field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetFeatured() bool { + if t == nil || t.Featured == nil { + return false + } + return *t.Featured +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetName() string { + if t == nil || t.Name == nil { + return "" + } + return *t.Name +} + +// GetScore returns the Score field. 
+func (t *TopicResult) GetScore() *float64 { + if t == nil { + return nil + } + return t.Score +} + +// GetShortDescription returns the ShortDescription field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetShortDescription() string { + if t == nil || t.ShortDescription == nil { + return "" + } + return *t.ShortDescription +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (t *TopicResult) GetUpdatedAt() string { + if t == nil || t.UpdatedAt == nil { + return "" + } + return *t.UpdatedAt +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (t *TopicsSearchResult) GetIncompleteResults() bool { + if t == nil || t.IncompleteResults == nil { + return false + } + return *t.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (t *TopicsSearchResult) GetTotal() int { + if t == nil || t.Total == nil { + return 0 + } + return *t.Total +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. +func (t *TrafficClones) GetCount() int { + if t == nil || t.Count == nil { + return 0 + } + return *t.Count +} + +// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. +func (t *TrafficClones) GetUniques() int { + if t == nil || t.Uniques == nil { + return 0 + } + return *t.Uniques +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. +func (t *TrafficData) GetCount() int { + if t == nil || t.Count == nil { + return 0 + } + return *t.Count +} + +// GetTimestamp returns the Timestamp field if it's non-nil, zero value otherwise. +func (t *TrafficData) GetTimestamp() Timestamp { + if t == nil || t.Timestamp == nil { + return Timestamp{} + } + return *t.Timestamp +} + +// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. +func (t *TrafficData) GetUniques() int { + if t == nil || t.Uniques == nil { + return 0 + } + return *t.Uniques +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. +func (t *TrafficPath) GetCount() int { + if t == nil || t.Count == nil { + return 0 + } + return *t.Count +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (t *TrafficPath) GetPath() string { + if t == nil || t.Path == nil { + return "" + } + return *t.Path +} + +// GetTitle returns the Title field if it's non-nil, zero value otherwise. +func (t *TrafficPath) GetTitle() string { + if t == nil || t.Title == nil { + return "" + } + return *t.Title +} + +// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. +func (t *TrafficPath) GetUniques() int { + if t == nil || t.Uniques == nil { + return 0 + } + return *t.Uniques +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. +func (t *TrafficReferrer) GetCount() int { + if t == nil || t.Count == nil { + return 0 + } + return *t.Count +} + +// GetReferrer returns the Referrer field if it's non-nil, zero value otherwise. +func (t *TrafficReferrer) GetReferrer() string { + if t == nil || t.Referrer == nil { + return "" + } + return *t.Referrer +} + +// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. +func (t *TrafficReferrer) GetUniques() int { + if t == nil || t.Uniques == nil { + return 0 + } + return *t.Uniques +} + +// GetCount returns the Count field if it's non-nil, zero value otherwise. 
+func (t *TrafficViews) GetCount() int { + if t == nil || t.Count == nil { + return 0 + } + return *t.Count +} + +// GetUniques returns the Uniques field if it's non-nil, zero value otherwise. +func (t *TrafficViews) GetUniques() int { + if t == nil || t.Uniques == nil { + return 0 + } + return *t.Uniques +} + +// GetNewName returns the NewName field if it's non-nil, zero value otherwise. +func (t *TransferRequest) GetNewName() string { + if t == nil || t.NewName == nil { + return "" + } + return *t.NewName +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (t *Tree) GetSHA() string { + if t == nil || t.SHA == nil { + return "" + } + return *t.SHA +} + +// GetTruncated returns the Truncated field if it's non-nil, zero value otherwise. +func (t *Tree) GetTruncated() bool { + if t == nil || t.Truncated == nil { + return false + } + return *t.Truncated +} + +// GetContent returns the Content field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetContent() string { + if t == nil || t.Content == nil { + return "" + } + return *t.Content +} + +// GetMode returns the Mode field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetMode() string { + if t == nil || t.Mode == nil { + return "" + } + return *t.Mode +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetPath() string { + if t == nil || t.Path == nil { + return "" + } + return *t.Path +} + +// GetSHA returns the SHA field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetSHA() string { + if t == nil || t.SHA == nil { + return "" + } + return *t.SHA +} + +// GetSize returns the Size field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetSize() int { + if t == nil || t.Size == nil { + return 0 + } + return *t.Size +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetType() string { + if t == nil || t.Type == nil { + return "" + } + return *t.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (t *TreeEntry) GetURL() string { + if t == nil || t.URL == nil { + return "" + } + return *t.URL +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (u *UpdateAttributeForSCIMUserOperations) GetPath() string { + if u == nil || u.Path == nil { + return "" + } + return *u.Path +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetCompletedAt() Timestamp { + if u == nil || u.CompletedAt == nil { + return Timestamp{} + } + return *u.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetConclusion() string { + if u == nil || u.Conclusion == nil { + return "" + } + return *u.Conclusion +} + +// GetDetailsURL returns the DetailsURL field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetDetailsURL() string { + if u == nil || u.DetailsURL == nil { + return "" + } + return *u.DetailsURL +} + +// GetExternalID returns the ExternalID field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetExternalID() string { + if u == nil || u.ExternalID == nil { + return "" + } + return *u.ExternalID +} + +// GetOutput returns the Output field. 
+func (u *UpdateCheckRunOptions) GetOutput() *CheckRunOutput { + if u == nil { + return nil + } + return u.Output +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (u *UpdateCheckRunOptions) GetStatus() string { + if u == nil || u.Status == nil { + return "" + } + return *u.Status +} + +// GetQuerySuite returns the QuerySuite field if it's non-nil, zero value otherwise. +func (u *UpdateDefaultSetupConfigurationOptions) GetQuerySuite() string { + if u == nil || u.QuerySuite == nil { + return "" + } + return *u.QuerySuite +} + +// GetRunID returns the RunID field if it's non-nil, zero value otherwise. +func (u *UpdateDefaultSetupConfigurationResponse) GetRunID() int64 { + if u == nil || u.RunID == nil { + return 0 + } + return *u.RunID +} + +// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. +func (u *UpdateDefaultSetupConfigurationResponse) GetRunURL() string { + if u == nil || u.RunURL == nil { + return "" + } + return *u.RunURL +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. +func (u *UpdateEnterpriseRunnerGroupRequest) GetAllowsPublicRepositories() bool { + if u == nil || u.AllowsPublicRepositories == nil { + return false + } + return *u.AllowsPublicRepositories +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (u *UpdateEnterpriseRunnerGroupRequest) GetName() string { + if u == nil || u.Name == nil { + return "" + } + return *u.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. +func (u *UpdateEnterpriseRunnerGroupRequest) GetRestrictedToWorkflows() bool { + if u == nil || u.RestrictedToWorkflows == nil { + return false + } + return *u.RestrictedToWorkflows +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (u *UpdateEnterpriseRunnerGroupRequest) GetVisibility() string { + if u == nil || u.Visibility == nil { + return "" + } + return *u.Visibility +} + +// GetAllowsPublicRepositories returns the AllowsPublicRepositories field if it's non-nil, zero value otherwise. +func (u *UpdateRunnerGroupRequest) GetAllowsPublicRepositories() bool { + if u == nil || u.AllowsPublicRepositories == nil { + return false + } + return *u.AllowsPublicRepositories +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (u *UpdateRunnerGroupRequest) GetName() string { + if u == nil || u.Name == nil { + return "" + } + return *u.Name +} + +// GetRestrictedToWorkflows returns the RestrictedToWorkflows field if it's non-nil, zero value otherwise. +func (u *UpdateRunnerGroupRequest) GetRestrictedToWorkflows() bool { + if u == nil || u.RestrictedToWorkflows == nil { + return false + } + return *u.RestrictedToWorkflows +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (u *UpdateRunnerGroupRequest) GetVisibility() string { + if u == nil || u.Visibility == nil { + return "" + } + return *u.Visibility +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (u *User) GetAvatarURL() string { + if u == nil || u.AvatarURL == nil { + return "" + } + return *u.AvatarURL +} + +// GetBio returns the Bio field if it's non-nil, zero value otherwise. +func (u *User) GetBio() string { + if u == nil || u.Bio == nil { + return "" + } + return *u.Bio +} + +// GetBlog returns the Blog field if it's non-nil, zero value otherwise. 
+func (u *User) GetBlog() string { + if u == nil || u.Blog == nil { + return "" + } + return *u.Blog +} + +// GetCollaborators returns the Collaborators field if it's non-nil, zero value otherwise. +func (u *User) GetCollaborators() int { + if u == nil || u.Collaborators == nil { + return 0 + } + return *u.Collaborators +} + +// GetCompany returns the Company field if it's non-nil, zero value otherwise. +func (u *User) GetCompany() string { + if u == nil || u.Company == nil { + return "" + } + return *u.Company +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (u *User) GetCreatedAt() Timestamp { + if u == nil || u.CreatedAt == nil { + return Timestamp{} + } + return *u.CreatedAt +} + +// GetDiskUsage returns the DiskUsage field if it's non-nil, zero value otherwise. +func (u *User) GetDiskUsage() int { + if u == nil || u.DiskUsage == nil { + return 0 + } + return *u.DiskUsage +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (u *User) GetEmail() string { + if u == nil || u.Email == nil { + return "" + } + return *u.Email +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (u *User) GetEventsURL() string { + if u == nil || u.EventsURL == nil { + return "" + } + return *u.EventsURL +} + +// GetFollowers returns the Followers field if it's non-nil, zero value otherwise. +func (u *User) GetFollowers() int { + if u == nil || u.Followers == nil { + return 0 + } + return *u.Followers +} + +// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. +func (u *User) GetFollowersURL() string { + if u == nil || u.FollowersURL == nil { + return "" + } + return *u.FollowersURL +} + +// GetFollowing returns the Following field if it's non-nil, zero value otherwise. +func (u *User) GetFollowing() int { + if u == nil || u.Following == nil { + return 0 + } + return *u.Following +} + +// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. +func (u *User) GetFollowingURL() string { + if u == nil || u.FollowingURL == nil { + return "" + } + return *u.FollowingURL +} + +// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. +func (u *User) GetGistsURL() string { + if u == nil || u.GistsURL == nil { + return "" + } + return *u.GistsURL +} + +// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. +func (u *User) GetGravatarID() string { + if u == nil || u.GravatarID == nil { + return "" + } + return *u.GravatarID +} + +// GetHireable returns the Hireable field if it's non-nil, zero value otherwise. +func (u *User) GetHireable() bool { + if u == nil || u.Hireable == nil { + return false + } + return *u.Hireable +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (u *User) GetHTMLURL() string { + if u == nil || u.HTMLURL == nil { + return "" + } + return *u.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (u *User) GetID() int64 { + if u == nil || u.ID == nil { + return 0 + } + return *u.ID +} + +// GetLdapDn returns the LdapDn field if it's non-nil, zero value otherwise. +func (u *User) GetLdapDn() string { + if u == nil || u.LdapDn == nil { + return "" + } + return *u.LdapDn +} + +// GetLocation returns the Location field if it's non-nil, zero value otherwise. 
+func (u *User) GetLocation() string { + if u == nil || u.Location == nil { + return "" + } + return *u.Location +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (u *User) GetLogin() string { + if u == nil || u.Login == nil { + return "" + } + return *u.Login +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (u *User) GetName() string { + if u == nil || u.Name == nil { + return "" + } + return *u.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (u *User) GetNodeID() string { + if u == nil || u.NodeID == nil { + return "" + } + return *u.NodeID +} + +// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. +func (u *User) GetOrganizationsURL() string { + if u == nil || u.OrganizationsURL == nil { + return "" + } + return *u.OrganizationsURL +} + +// GetOwnedPrivateRepos returns the OwnedPrivateRepos field if it's non-nil, zero value otherwise. +func (u *User) GetOwnedPrivateRepos() int64 { + if u == nil || u.OwnedPrivateRepos == nil { + return 0 + } + return *u.OwnedPrivateRepos +} + +// GetPermissions returns the Permissions map if it's non-nil, an empty map otherwise. +func (u *User) GetPermissions() map[string]bool { + if u == nil || u.Permissions == nil { + return map[string]bool{} + } + return u.Permissions +} + +// GetPlan returns the Plan field. +func (u *User) GetPlan() *Plan { + if u == nil { + return nil + } + return u.Plan +} + +// GetPrivateGists returns the PrivateGists field if it's non-nil, zero value otherwise. +func (u *User) GetPrivateGists() int { + if u == nil || u.PrivateGists == nil { + return 0 + } + return *u.PrivateGists +} + +// GetPublicGists returns the PublicGists field if it's non-nil, zero value otherwise. +func (u *User) GetPublicGists() int { + if u == nil || u.PublicGists == nil { + return 0 + } + return *u.PublicGists +} + +// GetPublicRepos returns the PublicRepos field if it's non-nil, zero value otherwise. +func (u *User) GetPublicRepos() int { + if u == nil || u.PublicRepos == nil { + return 0 + } + return *u.PublicRepos +} + +// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. +func (u *User) GetReceivedEventsURL() string { + if u == nil || u.ReceivedEventsURL == nil { + return "" + } + return *u.ReceivedEventsURL +} + +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (u *User) GetReposURL() string { + if u == nil || u.ReposURL == nil { + return "" + } + return *u.ReposURL +} + +// GetRoleName returns the RoleName field if it's non-nil, zero value otherwise. +func (u *User) GetRoleName() string { + if u == nil || u.RoleName == nil { + return "" + } + return *u.RoleName +} + +// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. +func (u *User) GetSiteAdmin() bool { + if u == nil || u.SiteAdmin == nil { + return false + } + return *u.SiteAdmin +} + +// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. +func (u *User) GetStarredURL() string { + if u == nil || u.StarredURL == nil { + return "" + } + return *u.StarredURL +} + +// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. +func (u *User) GetSubscriptionsURL() string { + if u == nil || u.SubscriptionsURL == nil { + return "" + } + return *u.SubscriptionsURL +} + +// GetSuspendedAt returns the SuspendedAt field if it's non-nil, zero value otherwise. 
+func (u *User) GetSuspendedAt() Timestamp { + if u == nil || u.SuspendedAt == nil { + return Timestamp{} + } + return *u.SuspendedAt +} + +// GetTotalPrivateRepos returns the TotalPrivateRepos field if it's non-nil, zero value otherwise. +func (u *User) GetTotalPrivateRepos() int64 { + if u == nil || u.TotalPrivateRepos == nil { + return 0 + } + return *u.TotalPrivateRepos +} + +// GetTwitterUsername returns the TwitterUsername field if it's non-nil, zero value otherwise. +func (u *User) GetTwitterUsername() string { + if u == nil || u.TwitterUsername == nil { + return "" + } + return *u.TwitterUsername +} + +// GetTwoFactorAuthentication returns the TwoFactorAuthentication field if it's non-nil, zero value otherwise. +func (u *User) GetTwoFactorAuthentication() bool { + if u == nil || u.TwoFactorAuthentication == nil { + return false + } + return *u.TwoFactorAuthentication +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (u *User) GetType() string { + if u == nil || u.Type == nil { + return "" + } + return *u.Type +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (u *User) GetUpdatedAt() Timestamp { + if u == nil || u.UpdatedAt == nil { + return Timestamp{} + } + return *u.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (u *User) GetURL() string { + if u == nil || u.URL == nil { + return "" + } + return *u.URL +} + +// GetApp returns the App field. +func (u *UserAuthorization) GetApp() *OAuthAPP { + if u == nil { + return nil + } + return u.App +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetCreatedAt() Timestamp { + if u == nil || u.CreatedAt == nil { + return Timestamp{} + } + return *u.CreatedAt +} + +// GetFingerprint returns the Fingerprint field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetFingerprint() string { + if u == nil || u.Fingerprint == nil { + return "" + } + return *u.Fingerprint +} + +// GetHashedToken returns the HashedToken field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetHashedToken() string { + if u == nil || u.HashedToken == nil { + return "" + } + return *u.HashedToken +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetID() int64 { + if u == nil || u.ID == nil { + return 0 + } + return *u.ID +} + +// GetNote returns the Note field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetNote() string { + if u == nil || u.Note == nil { + return "" + } + return *u.Note +} + +// GetNoteURL returns the NoteURL field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetNoteURL() string { + if u == nil || u.NoteURL == nil { + return "" + } + return *u.NoteURL +} + +// GetToken returns the Token field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetToken() string { + if u == nil || u.Token == nil { + return "" + } + return *u.Token +} + +// GetTokenLastEight returns the TokenLastEight field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetTokenLastEight() string { + if u == nil || u.TokenLastEight == nil { + return "" + } + return *u.TokenLastEight +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. 
+func (u *UserAuthorization) GetUpdatedAt() Timestamp { + if u == nil || u.UpdatedAt == nil { + return Timestamp{} + } + return *u.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (u *UserAuthorization) GetURL() string { + if u == nil || u.URL == nil { + return "" + } + return *u.URL +} + +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (u *UserContext) GetMessage() string { + if u == nil || u.Message == nil { + return "" + } + return *u.Message +} + +// GetOcticon returns the Octicon field if it's non-nil, zero value otherwise. +func (u *UserContext) GetOcticon() string { + if u == nil || u.Octicon == nil { + return "" + } + return *u.Octicon +} + +// GetEmail returns the Email field if it's non-nil, zero value otherwise. +func (u *UserEmail) GetEmail() string { + if u == nil || u.Email == nil { + return "" + } + return *u.Email +} + +// GetPrimary returns the Primary field if it's non-nil, zero value otherwise. +func (u *UserEmail) GetPrimary() bool { + if u == nil || u.Primary == nil { + return false + } + return *u.Primary +} + +// GetVerified returns the Verified field if it's non-nil, zero value otherwise. +func (u *UserEmail) GetVerified() bool { + if u == nil || u.Verified == nil { + return false + } + return *u.Verified +} + +// GetVisibility returns the Visibility field if it's non-nil, zero value otherwise. +func (u *UserEmail) GetVisibility() string { + if u == nil || u.Visibility == nil { + return "" + } + return *u.Visibility +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (u *UserEvent) GetAction() string { + if u == nil || u.Action == nil { + return "" + } + return *u.Action +} + +// GetEnterprise returns the Enterprise field. +func (u *UserEvent) GetEnterprise() *Enterprise { + if u == nil { + return nil + } + return u.Enterprise +} + +// GetInstallation returns the Installation field. +func (u *UserEvent) GetInstallation() *Installation { + if u == nil { + return nil + } + return u.Installation +} + +// GetSender returns the Sender field. +func (u *UserEvent) GetSender() *User { + if u == nil { + return nil + } + return u.Sender +} + +// GetUser returns the User field. +func (u *UserEvent) GetUser() *User { + if u == nil { + return nil + } + return u.User +} + +// GetAvatarURL returns the AvatarURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetAvatarURL() string { + if u == nil || u.AvatarURL == nil { + return "" + } + return *u.AvatarURL +} + +// GetEventsURL returns the EventsURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetEventsURL() string { + if u == nil || u.EventsURL == nil { + return "" + } + return *u.EventsURL +} + +// GetFollowersURL returns the FollowersURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetFollowersURL() string { + if u == nil || u.FollowersURL == nil { + return "" + } + return *u.FollowersURL +} + +// GetFollowingURL returns the FollowingURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetFollowingURL() string { + if u == nil || u.FollowingURL == nil { + return "" + } + return *u.FollowingURL +} + +// GetGistsURL returns the GistsURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetGistsURL() string { + if u == nil || u.GistsURL == nil { + return "" + } + return *u.GistsURL +} + +// GetGravatarID returns the GravatarID field if it's non-nil, zero value otherwise. 
+func (u *UserLDAPMapping) GetGravatarID() string { + if u == nil || u.GravatarID == nil { + return "" + } + return *u.GravatarID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetID() int64 { + if u == nil || u.ID == nil { + return 0 + } + return *u.ID +} + +// GetLDAPDN returns the LDAPDN field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetLDAPDN() string { + if u == nil || u.LDAPDN == nil { + return "" + } + return *u.LDAPDN +} + +// GetLogin returns the Login field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetLogin() string { + if u == nil || u.Login == nil { + return "" + } + return *u.Login +} + +// GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetOrganizationsURL() string { + if u == nil || u.OrganizationsURL == nil { + return "" + } + return *u.OrganizationsURL +} + +// GetReceivedEventsURL returns the ReceivedEventsURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetReceivedEventsURL() string { + if u == nil || u.ReceivedEventsURL == nil { + return "" + } + return *u.ReceivedEventsURL +} + +// GetReposURL returns the ReposURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetReposURL() string { + if u == nil || u.ReposURL == nil { + return "" + } + return *u.ReposURL +} + +// GetSiteAdmin returns the SiteAdmin field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetSiteAdmin() bool { + if u == nil || u.SiteAdmin == nil { + return false + } + return *u.SiteAdmin +} + +// GetStarredURL returns the StarredURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetStarredURL() string { + if u == nil || u.StarredURL == nil { + return "" + } + return *u.StarredURL +} + +// GetSubscriptionsURL returns the SubscriptionsURL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetSubscriptionsURL() string { + if u == nil || u.SubscriptionsURL == nil { + return "" + } + return *u.SubscriptionsURL +} + +// GetType returns the Type field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetType() string { + if u == nil || u.Type == nil { + return "" + } + return *u.Type +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (u *UserLDAPMapping) GetURL() string { + if u == nil || u.URL == nil { + return "" + } + return *u.URL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetCreatedAt() string { + if u == nil || u.CreatedAt == nil { + return "" + } + return *u.CreatedAt +} + +// GetExcludeAttachments returns the ExcludeAttachments field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetExcludeAttachments() bool { + if u == nil || u.ExcludeAttachments == nil { + return false + } + return *u.ExcludeAttachments +} + +// GetGUID returns the GUID field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetGUID() string { + if u == nil || u.GUID == nil { + return "" + } + return *u.GUID +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetID() int64 { + if u == nil || u.ID == nil { + return 0 + } + return *u.ID +} + +// GetLockRepositories returns the LockRepositories field if it's non-nil, zero value otherwise. 
+func (u *UserMigration) GetLockRepositories() bool { + if u == nil || u.LockRepositories == nil { + return false + } + return *u.LockRepositories +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetState() string { + if u == nil || u.State == nil { + return "" + } + return *u.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetUpdatedAt() string { + if u == nil || u.UpdatedAt == nil { + return "" + } + return *u.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (u *UserMigration) GetURL() string { + if u == nil || u.URL == nil { + return "" + } + return *u.URL +} + +// GetIncompleteResults returns the IncompleteResults field if it's non-nil, zero value otherwise. +func (u *UsersSearchResult) GetIncompleteResults() bool { + if u == nil || u.IncompleteResults == nil { + return false + } + return *u.IncompleteResults +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (u *UsersSearchResult) GetTotal() int { + if u == nil || u.Total == nil { + return 0 + } + return *u.Total +} + +// GetAdminUsers returns the AdminUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetAdminUsers() int { + if u == nil || u.AdminUsers == nil { + return 0 + } + return *u.AdminUsers +} + +// GetSuspendedUsers returns the SuspendedUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetSuspendedUsers() int { + if u == nil || u.SuspendedUsers == nil { + return 0 + } + return *u.SuspendedUsers +} + +// GetTotalUsers returns the TotalUsers field if it's non-nil, zero value otherwise. +func (u *UserStats) GetTotalUsers() int { + if u == nil || u.TotalUsers == nil { + return 0 + } + return *u.TotalUsers +} + +// GetReason returns the Reason field if it's non-nil, zero value otherwise. +func (u *UserSuspendOptions) GetReason() string { + if u == nil || u.Reason == nil { + return "" + } + return *u.Reason +} + +// GetEcosystem returns the Ecosystem field if it's non-nil, zero value otherwise. +func (v *VulnerabilityPackage) GetEcosystem() string { + if v == nil || v.Ecosystem == nil { + return "" + } + return *v.Ecosystem +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (v *VulnerabilityPackage) GetName() string { + if v == nil || v.Name == nil { + return "" + } + return *v.Name +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (w *WatchEvent) GetAction() string { + if w == nil || w.Action == nil { + return "" + } + return *w.Action +} + +// GetInstallation returns the Installation field. +func (w *WatchEvent) GetInstallation() *Installation { + if w == nil { + return nil + } + return w.Installation +} + +// GetOrg returns the Org field. +func (w *WatchEvent) GetOrg() *Organization { + if w == nil { + return nil + } + return w.Org +} + +// GetRepo returns the Repo field. +func (w *WatchEvent) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WatchEvent) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + +// GetTotal returns the Total field if it's non-nil, zero value otherwise. +func (w *WeeklyCommitActivity) GetTotal() int { + if w == nil || w.Total == nil { + return 0 + } + return *w.Total +} + +// GetWeek returns the Week field if it's non-nil, zero value otherwise. 
+func (w *WeeklyCommitActivity) GetWeek() Timestamp { + if w == nil || w.Week == nil { + return Timestamp{} + } + return *w.Week +} + +// GetAdditions returns the Additions field if it's non-nil, zero value otherwise. +func (w *WeeklyStats) GetAdditions() int { + if w == nil || w.Additions == nil { + return 0 + } + return *w.Additions +} + +// GetCommits returns the Commits field if it's non-nil, zero value otherwise. +func (w *WeeklyStats) GetCommits() int { + if w == nil || w.Commits == nil { + return 0 + } + return *w.Commits +} + +// GetDeletions returns the Deletions field if it's non-nil, zero value otherwise. +func (w *WeeklyStats) GetDeletions() int { + if w == nil || w.Deletions == nil { + return 0 + } + return *w.Deletions +} + +// GetWeek returns the Week field if it's non-nil, zero value otherwise. +func (w *WeeklyStats) GetWeek() Timestamp { + if w == nil || w.Week == nil { + return Timestamp{} + } + return *w.Week +} + +// GetBadgeURL returns the BadgeURL field if it's non-nil, zero value otherwise. +func (w *Workflow) GetBadgeURL() string { + if w == nil || w.BadgeURL == nil { + return "" + } + return *w.BadgeURL +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (w *Workflow) GetCreatedAt() Timestamp { + if w == nil || w.CreatedAt == nil { + return Timestamp{} + } + return *w.CreatedAt +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (w *Workflow) GetHTMLURL() string { + if w == nil || w.HTMLURL == nil { + return "" + } + return *w.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (w *Workflow) GetID() int64 { + if w == nil || w.ID == nil { + return 0 + } + return *w.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (w *Workflow) GetName() string { + if w == nil || w.Name == nil { + return "" + } + return *w.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (w *Workflow) GetNodeID() string { + if w == nil || w.NodeID == nil { + return "" + } + return *w.NodeID +} + +// GetPath returns the Path field if it's non-nil, zero value otherwise. +func (w *Workflow) GetPath() string { + if w == nil || w.Path == nil { + return "" + } + return *w.Path +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (w *Workflow) GetState() string { + if w == nil || w.State == nil { + return "" + } + return *w.State +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (w *Workflow) GetUpdatedAt() Timestamp { + if w == nil || w.UpdatedAt == nil { + return Timestamp{} + } + return *w.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (w *Workflow) GetURL() string { + if w == nil || w.URL == nil { + return "" + } + return *w.URL +} + +// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. +func (w *WorkflowBill) GetTotalMS() int64 { + if w == nil || w.TotalMS == nil { + return 0 + } + return *w.TotalMS +} + +// GetInstallation returns the Installation field. +func (w *WorkflowDispatchEvent) GetInstallation() *Installation { + if w == nil { + return nil + } + return w.Installation +} + +// GetOrg returns the Org field. +func (w *WorkflowDispatchEvent) GetOrg() *Organization { + if w == nil { + return nil + } + return w.Org +} + +// GetRef returns the Ref field if it's non-nil, zero value otherwise. 
+func (w *WorkflowDispatchEvent) GetRef() string { + if w == nil || w.Ref == nil { + return "" + } + return *w.Ref +} + +// GetRepo returns the Repo field. +func (w *WorkflowDispatchEvent) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WorkflowDispatchEvent) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + +// GetWorkflow returns the Workflow field if it's non-nil, zero value otherwise. +func (w *WorkflowDispatchEvent) GetWorkflow() string { + if w == nil || w.Workflow == nil { + return "" + } + return *w.Workflow +} + +// GetCheckRunURL returns the CheckRunURL field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetCheckRunURL() string { + if w == nil || w.CheckRunURL == nil { + return "" + } + return *w.CheckRunURL +} + +// GetCompletedAt returns the CompletedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetCompletedAt() Timestamp { + if w == nil || w.CompletedAt == nil { + return Timestamp{} + } + return *w.CompletedAt +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetConclusion() string { + if w == nil || w.Conclusion == nil { + return "" + } + return *w.Conclusion +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetCreatedAt() Timestamp { + if w == nil || w.CreatedAt == nil { + return Timestamp{} + } + return *w.CreatedAt +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetHeadBranch() string { + if w == nil || w.HeadBranch == nil { + return "" + } + return *w.HeadBranch +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetHeadSHA() string { + if w == nil || w.HeadSHA == nil { + return "" + } + return *w.HeadSHA +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetHTMLURL() string { + if w == nil || w.HTMLURL == nil { + return "" + } + return *w.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetID() int64 { + if w == nil || w.ID == nil { + return 0 + } + return *w.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetName() string { + if w == nil || w.Name == nil { + return "" + } + return *w.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetNodeID() string { + if w == nil || w.NodeID == nil { + return "" + } + return *w.NodeID +} + +// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunAttempt() int64 { + if w == nil || w.RunAttempt == nil { + return 0 + } + return *w.RunAttempt +} + +// GetRunID returns the RunID field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunID() int64 { + if w == nil || w.RunID == nil { + return 0 + } + return *w.RunID +} + +// GetRunnerGroupID returns the RunnerGroupID field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunnerGroupID() int64 { + if w == nil || w.RunnerGroupID == nil { + return 0 + } + return *w.RunnerGroupID +} + +// GetRunnerGroupName returns the RunnerGroupName field if it's non-nil, zero value otherwise. 
+func (w *WorkflowJob) GetRunnerGroupName() string { + if w == nil || w.RunnerGroupName == nil { + return "" + } + return *w.RunnerGroupName +} + +// GetRunnerID returns the RunnerID field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunnerID() int64 { + if w == nil || w.RunnerID == nil { + return 0 + } + return *w.RunnerID +} + +// GetRunnerName returns the RunnerName field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunnerName() string { + if w == nil || w.RunnerName == nil { + return "" + } + return *w.RunnerName +} + +// GetRunURL returns the RunURL field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetRunURL() string { + if w == nil || w.RunURL == nil { + return "" + } + return *w.RunURL +} + +// GetStartedAt returns the StartedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetStartedAt() Timestamp { + if w == nil || w.StartedAt == nil { + return Timestamp{} + } + return *w.StartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetStatus() string { + if w == nil || w.Status == nil { + return "" + } + return *w.Status +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetURL() string { + if w == nil || w.URL == nil { + return "" + } + return *w.URL +} + +// GetWorkflowName returns the WorkflowName field if it's non-nil, zero value otherwise. +func (w *WorkflowJob) GetWorkflowName() string { + if w == nil || w.WorkflowName == nil { + return "" + } + return *w.WorkflowName +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (w *WorkflowJobEvent) GetAction() string { + if w == nil || w.Action == nil { + return "" + } + return *w.Action +} + +// GetInstallation returns the Installation field. +func (w *WorkflowJobEvent) GetInstallation() *Installation { + if w == nil { + return nil + } + return w.Installation +} + +// GetOrg returns the Org field. +func (w *WorkflowJobEvent) GetOrg() *Organization { + if w == nil { + return nil + } + return w.Org +} + +// GetRepo returns the Repo field. +func (w *WorkflowJobEvent) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WorkflowJobEvent) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + +// GetWorkflowJob returns the WorkflowJob field. +func (w *WorkflowJobEvent) GetWorkflowJob() *WorkflowJob { + if w == nil { + return nil + } + return w.WorkflowJob +} + +// GetActor returns the Actor field. +func (w *WorkflowRun) GetActor() *User { + if w == nil { + return nil + } + return w.Actor +} + +// GetArtifactsURL returns the ArtifactsURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetArtifactsURL() string { + if w == nil || w.ArtifactsURL == nil { + return "" + } + return *w.ArtifactsURL +} + +// GetCancelURL returns the CancelURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetCancelURL() string { + if w == nil || w.CancelURL == nil { + return "" + } + return *w.CancelURL +} + +// GetCheckSuiteID returns the CheckSuiteID field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetCheckSuiteID() int64 { + if w == nil || w.CheckSuiteID == nil { + return 0 + } + return *w.CheckSuiteID +} + +// GetCheckSuiteNodeID returns the CheckSuiteNodeID field if it's non-nil, zero value otherwise. 
+func (w *WorkflowRun) GetCheckSuiteNodeID() string { + if w == nil || w.CheckSuiteNodeID == nil { + return "" + } + return *w.CheckSuiteNodeID +} + +// GetCheckSuiteURL returns the CheckSuiteURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetCheckSuiteURL() string { + if w == nil || w.CheckSuiteURL == nil { + return "" + } + return *w.CheckSuiteURL +} + +// GetConclusion returns the Conclusion field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetConclusion() string { + if w == nil || w.Conclusion == nil { + return "" + } + return *w.Conclusion +} + +// GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetCreatedAt() Timestamp { + if w == nil || w.CreatedAt == nil { + return Timestamp{} + } + return *w.CreatedAt +} + +// GetDisplayTitle returns the DisplayTitle field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetDisplayTitle() string { + if w == nil || w.DisplayTitle == nil { + return "" + } + return *w.DisplayTitle +} + +// GetEvent returns the Event field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetEvent() string { + if w == nil || w.Event == nil { + return "" + } + return *w.Event +} + +// GetHeadBranch returns the HeadBranch field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetHeadBranch() string { + if w == nil || w.HeadBranch == nil { + return "" + } + return *w.HeadBranch +} + +// GetHeadCommit returns the HeadCommit field. +func (w *WorkflowRun) GetHeadCommit() *HeadCommit { + if w == nil { + return nil + } + return w.HeadCommit +} + +// GetHeadRepository returns the HeadRepository field. +func (w *WorkflowRun) GetHeadRepository() *Repository { + if w == nil { + return nil + } + return w.HeadRepository +} + +// GetHeadSHA returns the HeadSHA field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetHeadSHA() string { + if w == nil || w.HeadSHA == nil { + return "" + } + return *w.HeadSHA +} + +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetHTMLURL() string { + if w == nil || w.HTMLURL == nil { + return "" + } + return *w.HTMLURL +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetID() int64 { + if w == nil || w.ID == nil { + return 0 + } + return *w.ID +} + +// GetJobsURL returns the JobsURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetJobsURL() string { + if w == nil || w.JobsURL == nil { + return "" + } + return *w.JobsURL +} + +// GetLogsURL returns the LogsURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetLogsURL() string { + if w == nil || w.LogsURL == nil { + return "" + } + return *w.LogsURL +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetName() string { + if w == nil || w.Name == nil { + return "" + } + return *w.Name +} + +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetNodeID() string { + if w == nil || w.NodeID == nil { + return "" + } + return *w.NodeID +} + +// GetPreviousAttemptURL returns the PreviousAttemptURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetPreviousAttemptURL() string { + if w == nil || w.PreviousAttemptURL == nil { + return "" + } + return *w.PreviousAttemptURL +} + +// GetRepository returns the Repository field. 
+func (w *WorkflowRun) GetRepository() *Repository { + if w == nil { + return nil + } + return w.Repository +} + +// GetRerunURL returns the RerunURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetRerunURL() string { + if w == nil || w.RerunURL == nil { + return "" + } + return *w.RerunURL +} + +// GetRunAttempt returns the RunAttempt field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetRunAttempt() int { + if w == nil || w.RunAttempt == nil { + return 0 + } + return *w.RunAttempt +} + +// GetRunNumber returns the RunNumber field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetRunNumber() int { + if w == nil || w.RunNumber == nil { + return 0 + } + return *w.RunNumber +} + +// GetRunStartedAt returns the RunStartedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetRunStartedAt() Timestamp { + if w == nil || w.RunStartedAt == nil { + return Timestamp{} + } + return *w.RunStartedAt +} + +// GetStatus returns the Status field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetStatus() string { + if w == nil || w.Status == nil { + return "" + } + return *w.Status +} + +// GetTriggeringActor returns the TriggeringActor field. +func (w *WorkflowRun) GetTriggeringActor() *User { + if w == nil { + return nil + } + return w.TriggeringActor +} + +// GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetUpdatedAt() Timestamp { + if w == nil || w.UpdatedAt == nil { + return Timestamp{} + } + return *w.UpdatedAt +} + +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetURL() string { + if w == nil || w.URL == nil { + return "" + } + return *w.URL +} + +// GetWorkflowID returns the WorkflowID field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetWorkflowID() int64 { + if w == nil || w.WorkflowID == nil { + return 0 + } + return *w.WorkflowID +} + +// GetWorkflowURL returns the WorkflowURL field if it's non-nil, zero value otherwise. +func (w *WorkflowRun) GetWorkflowURL() string { + if w == nil || w.WorkflowURL == nil { + return "" + } + return *w.WorkflowURL +} + +// GetExcludePullRequests returns the ExcludePullRequests field if it's non-nil, zero value otherwise. +func (w *WorkflowRunAttemptOptions) GetExcludePullRequests() bool { + if w == nil || w.ExcludePullRequests == nil { + return false + } + return *w.ExcludePullRequests +} + +// GetJobs returns the Jobs field if it's non-nil, zero value otherwise. +func (w *WorkflowRunBill) GetJobs() int { + if w == nil || w.Jobs == nil { + return 0 + } + return *w.Jobs +} + +// GetTotalMS returns the TotalMS field if it's non-nil, zero value otherwise. +func (w *WorkflowRunBill) GetTotalMS() int64 { + if w == nil || w.TotalMS == nil { + return 0 + } + return *w.TotalMS +} + +// GetAction returns the Action field if it's non-nil, zero value otherwise. +func (w *WorkflowRunEvent) GetAction() string { + if w == nil || w.Action == nil { + return "" + } + return *w.Action +} + +// GetInstallation returns the Installation field. +func (w *WorkflowRunEvent) GetInstallation() *Installation { + if w == nil { + return nil + } + return w.Installation +} + +// GetOrg returns the Org field. +func (w *WorkflowRunEvent) GetOrg() *Organization { + if w == nil { + return nil + } + return w.Org +} + +// GetRepo returns the Repo field. 
+func (w *WorkflowRunEvent) GetRepo() *Repository { + if w == nil { + return nil + } + return w.Repo +} + +// GetSender returns the Sender field. +func (w *WorkflowRunEvent) GetSender() *User { + if w == nil { + return nil + } + return w.Sender +} + +// GetWorkflow returns the Workflow field. +func (w *WorkflowRunEvent) GetWorkflow() *Workflow { + if w == nil { + return nil + } + return w.Workflow +} + +// GetWorkflowRun returns the WorkflowRun field. +func (w *WorkflowRunEvent) GetWorkflowRun() *WorkflowRun { + if w == nil { + return nil + } + return w.WorkflowRun +} + +// GetDurationMS returns the DurationMS field if it's non-nil, zero value otherwise. +func (w *WorkflowRunJobRun) GetDurationMS() int64 { + if w == nil || w.DurationMS == nil { + return 0 + } + return *w.DurationMS +} + +// GetJobID returns the JobID field if it's non-nil, zero value otherwise. +func (w *WorkflowRunJobRun) GetJobID() int { + if w == nil || w.JobID == nil { + return 0 + } + return *w.JobID +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (w *WorkflowRuns) GetTotalCount() int { + if w == nil || w.TotalCount == nil { + return 0 + } + return *w.TotalCount +} + +// GetBillable returns the Billable field. +func (w *WorkflowRunUsage) GetBillable() *WorkflowRunBillMap { + if w == nil { + return nil + } + return w.Billable +} + +// GetRunDurationMS returns the RunDurationMS field if it's non-nil, zero value otherwise. +func (w *WorkflowRunUsage) GetRunDurationMS() int64 { + if w == nil || w.RunDurationMS == nil { + return 0 + } + return *w.RunDurationMS +} + +// GetTotalCount returns the TotalCount field if it's non-nil, zero value otherwise. +func (w *Workflows) GetTotalCount() int { + if w == nil || w.TotalCount == nil { + return 0 + } + return *w.TotalCount +} + +// GetBillable returns the Billable field. +func (w *WorkflowUsage) GetBillable() *WorkflowBillMap { + if w == nil { + return nil + } + return w.Billable +} diff --git a/vendor/github.com/google/go-github/v56/github/github.go b/vendor/github.com/google/go-github/v56/github/github.go new file mode 100644 index 000000000..d71236ef0 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/github.go @@ -0,0 +1,1617 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
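The Get* methods in the hunk above are produced by gen-accessors.go and tolerate nil receivers and nil pointer fields anywhere along a chain, which is what keeps webhook-handling code free of repetitive nil checks. A minimal sketch of that pattern, assuming a decoded *github.WorkflowRunEvent; the summarize helper and its output are purely illustrative:

    package main

    import (
        "fmt"

        "github.com/google/go-github/v56/github"
    )

    // summarize never panics, even when the event, its WorkflowRun, or any
    // individual field is nil; missing values come back as zero values.
    func summarize(event *github.WorkflowRunEvent) string {
        run := event.GetWorkflowRun()
        return fmt.Sprintf("%s on %s: %s",
            event.GetRepo().GetFullName(), // "" if Repo or FullName is nil
            run.GetHeadBranch(),           // "" if the run or HeadBranch is nil
            run.GetConclusion(),           // "" while the run has no conclusion yet
        )
    }

    func main() {
        fmt.Println(summarize(&github.WorkflowRunEvent{}))
    }
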
+ +//go:generate go run gen-accessors.go +//go:generate go run gen-stringify-test.go + +package github + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/google/go-querystring/query" +) + +const ( + Version = "v56.0.0" + + defaultAPIVersion = "2022-11-28" + defaultBaseURL = "https://api.github.com/" + defaultUserAgent = "go-github" + "/" + Version + uploadBaseURL = "https://uploads.github.com/" + + headerAPIVersion = "X-GitHub-Api-Version" + headerRateLimit = "X-RateLimit-Limit" + headerRateRemaining = "X-RateLimit-Remaining" + headerRateReset = "X-RateLimit-Reset" + headerOTP = "X-GitHub-OTP" + headerRetryAfter = "Retry-After" + + headerTokenExpiration = "GitHub-Authentication-Token-Expiration" + + mediaTypeV3 = "application/vnd.github.v3+json" + defaultMediaType = "application/octet-stream" + mediaTypeV3SHA = "application/vnd.github.v3.sha" + mediaTypeV3Diff = "application/vnd.github.v3.diff" + mediaTypeV3Patch = "application/vnd.github.v3.patch" + mediaTypeOrgPermissionRepo = "application/vnd.github.v3.repository+json" + mediaTypeIssueImportAPI = "application/vnd.github.golden-comet-preview+json" + + // Media Type values to access preview APIs + // These media types will be added to the API request as headers + // and used to enable particular features on GitHub API that are still in preview. + // After some time, specific media types will be promoted (to a "stable" state). + // From then on, the preview headers are not required anymore to activate the additional + // feature on GitHub.com's API. However, this API header might still be needed for users + // to run a GitHub Enterprise Server on-premise. + // It's not uncommon for GitHub Enterprise Server customers to run older versions which + // would probably rely on the preview headers for some time. + // While the header promotion is going out for GitHub.com, it may be some time before it + // even arrives in GitHub Enterprise Server. + // We keep those preview headers around to avoid breaking older GitHub Enterprise Server + // versions. Additionally, non-functional (preview) headers don't create any side effects + // on GitHub Cloud version. + // + // See https://github.com/google/go-github/pull/2125 for full context. 
+ + // https://developer.github.com/changes/2014-12-09-new-attributes-for-stars-api/ + mediaTypeStarringPreview = "application/vnd.github.v3.star+json" + + // https://help.github.com/enterprise/2.4/admin/guides/migrations/exporting-the-github-com-organization-s-repositories/ + mediaTypeMigrationsPreview = "application/vnd.github.wyandotte-preview+json" + + // https://developer.github.com/changes/2016-04-06-deployment-and-deployment-status-enhancements/ + mediaTypeDeploymentStatusPreview = "application/vnd.github.ant-man-preview+json" + + // https://developer.github.com/changes/2018-10-16-deployments-environments-states-and-auto-inactive-updates/ + mediaTypeExpandDeploymentStatusPreview = "application/vnd.github.flash-preview+json" + + // https://developer.github.com/changes/2016-05-12-reactions-api-preview/ + mediaTypeReactionsPreview = "application/vnd.github.squirrel-girl-preview" + + // https://developer.github.com/changes/2016-05-23-timeline-preview-api/ + mediaTypeTimelinePreview = "application/vnd.github.mockingbird-preview+json" + + // https://developer.github.com/changes/2016-09-14-projects-api/ + mediaTypeProjectsPreview = "application/vnd.github.inertia-preview+json" + + // https://developer.github.com/changes/2017-01-05-commit-search-api/ + mediaTypeCommitSearchPreview = "application/vnd.github.cloak-preview+json" + + // https://developer.github.com/changes/2017-02-28-user-blocking-apis-and-webhook/ + mediaTypeBlockUsersPreview = "application/vnd.github.giant-sentry-fist-preview+json" + + // https://developer.github.com/changes/2017-05-23-coc-api/ + mediaTypeCodesOfConductPreview = "application/vnd.github.scarlet-witch-preview+json" + + // https://developer.github.com/changes/2017-07-17-update-topics-on-repositories/ + mediaTypeTopicsPreview = "application/vnd.github.mercy-preview+json" + + // https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ + mediaTypeRequiredApprovingReviewsPreview = "application/vnd.github.luke-cage-preview+json" + + // https://developer.github.com/changes/2018-05-07-new-checks-api-public-beta/ + mediaTypeCheckRunsPreview = "application/vnd.github.antiope-preview+json" + + // https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/ + mediaTypePreReceiveHooksPreview = "application/vnd.github.eye-scream-preview" + + // https://developer.github.com/changes/2018-02-22-protected-branches-required-signatures/ + mediaTypeSignaturePreview = "application/vnd.github.zzzax-preview+json" + + // https://developer.github.com/changes/2018-09-05-project-card-events/ + mediaTypeProjectCardDetailsPreview = "application/vnd.github.starfox-preview+json" + + // https://developer.github.com/changes/2018-12-18-interactions-preview/ + mediaTypeInteractionRestrictionsPreview = "application/vnd.github.sombra-preview+json" + + // https://developer.github.com/changes/2019-03-14-enabling-disabling-pages/ + mediaTypeEnablePagesAPIPreview = "application/vnd.github.switcheroo-preview+json" + + // https://developer.github.com/changes/2019-04-24-vulnerability-alerts/ + mediaTypeRequiredVulnerabilityAlertsPreview = "application/vnd.github.dorian-preview+json" + + // https://developer.github.com/changes/2019-05-29-update-branch-api/ + mediaTypeUpdatePullRequestBranchPreview = "application/vnd.github.lydian-preview+json" + + // https://developer.github.com/changes/2019-04-11-pulls-branches-for-commit/ + mediaTypeListPullsOrBranchesForCommitPreview = "application/vnd.github.groot-preview+json" + + // 
https://docs.github.com/en/rest/previews/#repository-creation-permissions + mediaTypeMemberAllowedRepoCreationTypePreview = "application/vnd.github.surtur-preview+json" + + // https://docs.github.com/en/rest/previews/#create-and-use-repository-templates + mediaTypeRepositoryTemplatePreview = "application/vnd.github.baptiste-preview+json" + + // https://developer.github.com/changes/2019-10-03-multi-line-comments/ + mediaTypeMultiLineCommentsPreview = "application/vnd.github.comfort-fade-preview+json" + + // https://developer.github.com/changes/2019-11-05-deprecated-passwords-and-authorizations-api/ + mediaTypeOAuthAppPreview = "application/vnd.github.doctor-strange-preview+json" + + // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/ + mediaTypeRepositoryVisibilityPreview = "application/vnd.github.nebula-preview+json" + + // https://developer.github.com/changes/2018-12-10-content-attachments-api/ + mediaTypeContentAttachmentsPreview = "application/vnd.github.corsair-preview+json" +) + +var errNonNilContext = errors.New("context must be non-nil") + +// A Client manages communication with the GitHub API. +type Client struct { + clientMu sync.Mutex // clientMu protects the client during calls that modify the CheckRedirect func. + client *http.Client // HTTP client used to communicate with the API. + + // Base URL for API requests. Defaults to the public GitHub API, but can be + // set to a domain endpoint to use with GitHub Enterprise. BaseURL should + // always be specified with a trailing slash. + BaseURL *url.URL + + // Base URL for uploading files. + UploadURL *url.URL + + // User agent used when communicating with the GitHub API. + UserAgent string + + rateMu sync.Mutex + rateLimits [categories]Rate // Rate limits for the client as determined by the most recent API calls. + secondaryRateLimitReset time.Time // Secondary rate limit reset for the client as determined by the most recent API calls. + + common service // Reuse a single struct instead of allocating one for each service on the heap. + + // Services used for talking to different parts of the GitHub API. + Actions *ActionsService + Activity *ActivityService + Admin *AdminService + Apps *AppsService + Authorizations *AuthorizationsService + Billing *BillingService + Checks *ChecksService + CodeScanning *CodeScanningService + CodesOfConduct *CodesOfConductService + Codespaces *CodespacesService + Dependabot *DependabotService + DependencyGraph *DependencyGraphService + Emojis *EmojisService + Enterprise *EnterpriseService + Gists *GistsService + Git *GitService + Gitignores *GitignoresService + Interactions *InteractionsService + IssueImport *IssueImportService + Issues *IssuesService + Licenses *LicensesService + Markdown *MarkdownService + Marketplace *MarketplaceService + Meta *MetaService + Migrations *MigrationService + Organizations *OrganizationsService + Projects *ProjectsService + PullRequests *PullRequestsService + Reactions *ReactionsService + Repositories *RepositoriesService + SCIM *SCIMService + Search *SearchService + SecretScanning *SecretScanningService + SecurityAdvisories *SecurityAdvisoriesService + Teams *TeamsService + Users *UsersService +} + +type service struct { + client *Client +} + +// Client returns the http.Client used by this GitHub client. 
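Each exported field on the Client struct above is a thin per-area service sharing the single common value, so calls are namespaced by API area (client.Actions, client.Issues, client.Repositories, and so on). A minimal sketch of calling through one of those fields; the owner and repository names are only examples:

    package main

    import (
        "context"
        "fmt"
        "log"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        client := github.NewClient(nil) // unauthenticated, low anonymous rate limit

        repo, _, err := client.Repositories.Get(context.Background(), "google", "go-github")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(repo.GetFullName(), "-", repo.GetDescription())
    }
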
+func (c *Client) Client() *http.Client { + c.clientMu.Lock() + defer c.clientMu.Unlock() + clientCopy := *c.client + return &clientCopy +} + +// ListOptions specifies the optional parameters to various List methods that +// support offset pagination. +type ListOptions struct { + // For paginated result sets, page of results to retrieve. + Page int `url:"page,omitempty"` + + // For paginated result sets, the number of results to include per page. + PerPage int `url:"per_page,omitempty"` +} + +// ListCursorOptions specifies the optional parameters to various List methods that +// support cursor pagination. +type ListCursorOptions struct { + // For paginated result sets, page of results to retrieve. + Page string `url:"page,omitempty"` + + // For paginated result sets, the number of results to include per page. + PerPage int `url:"per_page,omitempty"` + + // For paginated result sets, the number of results per page (max 100), starting from the first matching result. + // This parameter must not be used in combination with last. + First int `url:"first,omitempty"` + + // For paginated result sets, the number of results per page (max 100), starting from the last matching result. + // This parameter must not be used in combination with first. + Last int `url:"last,omitempty"` + + // A cursor, as given in the Link header. If specified, the query only searches for events after this cursor. + After string `url:"after,omitempty"` + + // A cursor, as given in the Link header. If specified, the query only searches for events before this cursor. + Before string `url:"before,omitempty"` + + // A cursor, as given in the Link header. If specified, the query continues the search using this cursor. + Cursor string `url:"cursor,omitempty"` +} + +// UploadOptions specifies the parameters to methods that support uploads. +type UploadOptions struct { + Name string `url:"name,omitempty"` + Label string `url:"label,omitempty"` + MediaType string `url:"-"` +} + +// RawType represents type of raw format of a request instead of JSON. +type RawType uint8 + +const ( + // Diff format. + Diff RawType = 1 + iota + // Patch format. + Patch +) + +// RawOptions specifies parameters when user wants to get raw format of +// a response instead of JSON. +type RawOptions struct { + Type RawType +} + +// addOptions adds the parameters in opts as URL query parameters to s. opts +// must be a struct whose fields may contain "url" tags. +func addOptions(s string, opts interface{}) (string, error) { + v := reflect.ValueOf(opts) + if v.Kind() == reflect.Ptr && v.IsNil() { + return s, nil + } + + u, err := url.Parse(s) + if err != nil { + return s, err + } + + qs, err := query.Values(opts) + if err != nil { + return s, err + } + + u.RawQuery = qs.Encode() + return u.String(), nil +} + +// NewClient returns a new GitHub API client. If a nil httpClient is +// provided, a new http.Client will be used. To use API methods which require +// authentication, either use Client.WithAuthToken or provide NewClient with +// an http.Client that will perform the authentication for you (such as that +// provided by the golang.org/x/oauth2 library). +func NewClient(httpClient *http.Client) *Client { + c := &Client{client: httpClient} + c.initialize() + return c +} + +// WithAuthToken returns a copy of the client configured to use the provided token for the Authorization header. 
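A minimal sketch of building an authenticated client with NewClient plus the WithAuthToken helper implemented just below; reading the token from a GITHUB_TOKEN environment variable is only a placeholder for wherever the token actually lives:

    package main

    import (
        "context"
        "fmt"
        "log"
        "os"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        // The environment variable is an assumption made for this sketch.
        client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

        // An empty login means "the currently authenticated user".
        user, _, err := client.Users.Get(context.Background(), "")
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("authenticated as", user.GetLogin())
    }
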
+func (c *Client) WithAuthToken(token string) *Client { + c2 := c.copy() + defer c2.initialize() + transport := c2.client.Transport + if transport == nil { + transport = http.DefaultTransport + } + c2.client.Transport = roundTripperFunc( + func(req *http.Request) (*http.Response, error) { + req = req.Clone(req.Context()) + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) + return transport.RoundTrip(req) + }, + ) + return c2 +} + +// WithEnterpriseURLs returns a copy of the client configured to use the provided base and +// upload URLs. If the base URL does not have the suffix "/api/v3/", it will be added +// automatically. If the upload URL does not have the suffix "/api/uploads", it will be +// added automatically. +// +// Note that WithEnterpriseURLs is a convenience helper only; +// its behavior is equivalent to setting the BaseURL and UploadURL fields. +// +// Another important thing is that by default, the GitHub Enterprise URL format +// should be http(s)://[hostname]/api/v3/ or you will always receive the 406 status code. +// The upload URL format should be http(s)://[hostname]/api/uploads/. +func (c *Client) WithEnterpriseURLs(baseURL, uploadURL string) (*Client, error) { + c2 := c.copy() + defer c2.initialize() + var err error + c2.BaseURL, err = url.Parse(baseURL) + if err != nil { + return nil, err + } + + if !strings.HasSuffix(c2.BaseURL.Path, "/") { + c2.BaseURL.Path += "/" + } + if !strings.HasSuffix(c2.BaseURL.Path, "/api/v3/") && + !strings.HasPrefix(c2.BaseURL.Host, "api.") && + !strings.Contains(c2.BaseURL.Host, ".api.") { + c2.BaseURL.Path += "api/v3/" + } + + c2.UploadURL, err = url.Parse(uploadURL) + if err != nil { + return nil, err + } + + if !strings.HasSuffix(c2.UploadURL.Path, "/") { + c2.UploadURL.Path += "/" + } + if !strings.HasSuffix(c2.UploadURL.Path, "/api/uploads/") && + !strings.HasPrefix(c2.UploadURL.Host, "api.") && + !strings.Contains(c2.UploadURL.Host, ".api.") { + c2.UploadURL.Path += "api/uploads/" + } + return c2, nil +} + +// initialize sets default values and initializes services. 
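A minimal sketch of pointing a client at a GitHub Enterprise Server instance with the WithEnterpriseURLs helper above; ghe.example.com is a placeholder hostname, and the /api/v3/ and /api/uploads/ suffixes are appended automatically when they are missing:

    package main

    import (
        "log"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        client, err := github.NewClient(nil).
            WithAuthToken("placeholder-token").
            WithEnterpriseURLs("https://ghe.example.com/", "https://ghe.example.com/")
        if err != nil {
            log.Fatal(err)
        }

        log.Println(client.BaseURL)   // https://ghe.example.com/api/v3/
        log.Println(client.UploadURL) // https://ghe.example.com/api/uploads/
    }
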
+func (c *Client) initialize() { + if c.client == nil { + c.client = &http.Client{} + } + if c.BaseURL == nil { + c.BaseURL, _ = url.Parse(defaultBaseURL) + } + if c.UploadURL == nil { + c.UploadURL, _ = url.Parse(uploadBaseURL) + } + if c.UserAgent == "" { + c.UserAgent = defaultUserAgent + } + c.common.client = c + c.Actions = (*ActionsService)(&c.common) + c.Activity = (*ActivityService)(&c.common) + c.Admin = (*AdminService)(&c.common) + c.Apps = (*AppsService)(&c.common) + c.Authorizations = (*AuthorizationsService)(&c.common) + c.Billing = (*BillingService)(&c.common) + c.Checks = (*ChecksService)(&c.common) + c.CodeScanning = (*CodeScanningService)(&c.common) + c.Codespaces = (*CodespacesService)(&c.common) + c.CodesOfConduct = (*CodesOfConductService)(&c.common) + c.Dependabot = (*DependabotService)(&c.common) + c.DependencyGraph = (*DependencyGraphService)(&c.common) + c.Emojis = (*EmojisService)(&c.common) + c.Enterprise = (*EnterpriseService)(&c.common) + c.Gists = (*GistsService)(&c.common) + c.Git = (*GitService)(&c.common) + c.Gitignores = (*GitignoresService)(&c.common) + c.Interactions = (*InteractionsService)(&c.common) + c.IssueImport = (*IssueImportService)(&c.common) + c.Issues = (*IssuesService)(&c.common) + c.Licenses = (*LicensesService)(&c.common) + c.Markdown = (*MarkdownService)(&c.common) + c.Marketplace = &MarketplaceService{client: c} + c.Meta = (*MetaService)(&c.common) + c.Migrations = (*MigrationService)(&c.common) + c.Organizations = (*OrganizationsService)(&c.common) + c.Projects = (*ProjectsService)(&c.common) + c.PullRequests = (*PullRequestsService)(&c.common) + c.Reactions = (*ReactionsService)(&c.common) + c.Repositories = (*RepositoriesService)(&c.common) + c.SCIM = (*SCIMService)(&c.common) + c.Search = (*SearchService)(&c.common) + c.SecretScanning = (*SecretScanningService)(&c.common) + c.SecurityAdvisories = (*SecurityAdvisoriesService)(&c.common) + c.Teams = (*TeamsService)(&c.common) + c.Users = (*UsersService)(&c.common) +} + +// copy returns a copy of the current client. It must be initialized before use. +func (c *Client) copy() *Client { + c.clientMu.Lock() + // can't use *c here because that would copy mutexes by value. + clone := Client{ + client: c.client, + UserAgent: c.UserAgent, + BaseURL: c.BaseURL, + UploadURL: c.UploadURL, + secondaryRateLimitReset: c.secondaryRateLimitReset, + } + c.clientMu.Unlock() + if clone.client == nil { + clone.client = &http.Client{} + } + c.rateMu.Lock() + copy(clone.rateLimits[:], c.rateLimits[:]) + c.rateMu.Unlock() + return &clone +} + +// NewClientWithEnvProxy enhances NewClient with the HttpProxy env. +func NewClientWithEnvProxy() *Client { + return NewClient(&http.Client{Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}}) +} + +// NewTokenClient returns a new GitHub API client authenticated with the provided token. +// Deprecated: Use NewClient(nil).WithAuthToken(token) instead. +func NewTokenClient(_ context.Context, token string) *Client { + // This always returns a nil error. + return NewClient(nil).WithAuthToken(token) +} + +// NewEnterpriseClient returns a new GitHub API client with provided +// base URL and upload URL (often is your GitHub Enterprise hostname). +// +// Deprecated: Use NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) instead. 
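NewClientWithEnvProxy above is a small convenience for honoring the standard proxy environment variables; a sketch of the equivalent explicit construction, useful when a custom transport already needs to be threaded in (the extra timeout setting is hypothetical):

    package main

    import (
        "net/http"
        "time"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        // Equivalent to github.NewClientWithEnvProxy(), but with room for
        // additional transport/client settings of your own.
        httpClient := &http.Client{
            Transport: &http.Transport{Proxy: http.ProxyFromEnvironment},
            Timeout:   30 * time.Second, // hypothetical extra setting
        }
        _ = github.NewClient(httpClient)
    }
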
+func NewEnterpriseClient(baseURL, uploadURL string, httpClient *http.Client) (*Client, error) { + return NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) +} + +// RequestOption represents an option that can modify an http.Request. +type RequestOption func(req *http.Request) + +// WithVersion overrides the GitHub v3 API version for this individual request. +// For more information, see: +// https://github.blog/2022-11-28-to-infinity-and-beyond-enabling-the-future-of-githubs-rest-api-with-api-versioning/ +func WithVersion(version string) RequestOption { + return func(req *http.Request) { + req.Header.Set(headerAPIVersion, version) + } +} + +// NewRequest creates an API request. A relative URL can be provided in urlStr, +// in which case it is resolved relative to the BaseURL of the Client. +// Relative URLs should always be specified without a preceding slash. If +// specified, the value pointed to by body is JSON encoded and included as the +// request body. +func (c *Client) NewRequest(method, urlStr string, body interface{}, opts ...RequestOption) (*http.Request, error) { + if !strings.HasSuffix(c.BaseURL.Path, "/") { + return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) + } + + u, err := c.BaseURL.Parse(urlStr) + if err != nil { + return nil, err + } + + var buf io.ReadWriter + if body != nil { + buf = &bytes.Buffer{} + enc := json.NewEncoder(buf) + enc.SetEscapeHTML(false) + err := enc.Encode(body) + if err != nil { + return nil, err + } + } + + req, err := http.NewRequest(method, u.String(), buf) + if err != nil { + return nil, err + } + + if body != nil { + req.Header.Set("Content-Type", "application/json") + } + req.Header.Set("Accept", mediaTypeV3) + if c.UserAgent != "" { + req.Header.Set("User-Agent", c.UserAgent) + } + req.Header.Set(headerAPIVersion, defaultAPIVersion) + + for _, opt := range opts { + opt(req) + } + + return req, nil +} + +// NewFormRequest creates an API request. A relative URL can be provided in urlStr, +// in which case it is resolved relative to the BaseURL of the Client. +// Relative URLs should always be specified without a preceding slash. +// Body is sent with Content-Type: application/x-www-form-urlencoded. +func (c *Client) NewFormRequest(urlStr string, body io.Reader, opts ...RequestOption) (*http.Request, error) { + if !strings.HasSuffix(c.BaseURL.Path, "/") { + return nil, fmt.Errorf("BaseURL must have a trailing slash, but %q does not", c.BaseURL) + } + + u, err := c.BaseURL.Parse(urlStr) + if err != nil { + return nil, err + } + + req, err := http.NewRequest(http.MethodPost, u.String(), body) + if err != nil { + return nil, err + } + + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set("Accept", mediaTypeV3) + if c.UserAgent != "" { + req.Header.Set("User-Agent", c.UserAgent) + } + req.Header.Set(headerAPIVersion, defaultAPIVersion) + + for _, opt := range opts { + opt(req) + } + + return req, nil +} + +// NewUploadRequest creates an upload request. A relative URL can be provided in +// urlStr, in which case it is resolved relative to the UploadURL of the Client. +// Relative URLs should always be specified without a preceding slash. 
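Most callers go through the typed services, but NewRequest and the RequestOption helpers above can also be used directly; a minimal sketch that pins the REST API version for a single request, using the plain-text zen endpoint purely as an example target:

    package main

    import (
        "bytes"
        "context"
        "fmt"
        "log"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        client := github.NewClient(nil)

        // Relative URL with no leading slash; WithVersion overrides the
        // X-GitHub-Api-Version header for this request only.
        req, err := client.NewRequest("GET", "zen", nil, github.WithVersion("2022-11-28"))
        if err != nil {
            log.Fatal(err)
        }

        // Passing an io.Writer to Do copies the raw response body into it.
        var body bytes.Buffer
        if _, err := client.Do(context.Background(), req, &body); err != nil {
            log.Fatal(err)
        }
        fmt.Println(body.String())
    }
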
+func (c *Client) NewUploadRequest(urlStr string, reader io.Reader, size int64, mediaType string, opts ...RequestOption) (*http.Request, error) { + if !strings.HasSuffix(c.UploadURL.Path, "/") { + return nil, fmt.Errorf("UploadURL must have a trailing slash, but %q does not", c.UploadURL) + } + u, err := c.UploadURL.Parse(urlStr) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", u.String(), reader) + if err != nil { + return nil, err + } + + req.ContentLength = size + + if mediaType == "" { + mediaType = defaultMediaType + } + req.Header.Set("Content-Type", mediaType) + req.Header.Set("Accept", mediaTypeV3) + req.Header.Set("User-Agent", c.UserAgent) + req.Header.Set(headerAPIVersion, defaultAPIVersion) + + for _, opt := range opts { + opt(req) + } + + return req, nil +} + +// Response is a GitHub API response. This wraps the standard http.Response +// returned from GitHub and provides convenient access to things like +// pagination links. +type Response struct { + *http.Response + + // These fields provide the page values for paginating through a set of + // results. Any or all of these may be set to the zero value for + // responses that are not part of a paginated set, or for which there + // are no additional pages. + // + // These fields support what is called "offset pagination" and should + // be used with the ListOptions struct. + NextPage int + PrevPage int + FirstPage int + LastPage int + + // Additionally, some APIs support "cursor pagination" instead of offset. + // This means that a token points directly to the next record which + // can lead to O(1) performance compared to O(n) performance provided + // by offset pagination. + // + // For APIs that support cursor pagination (such as + // TeamsService.ListIDPGroupsInOrganization), the following field + // will be populated to point to the next page. + // + // To use this token, set ListCursorOptions.Page to this value before + // calling the endpoint again. + NextPageToken string + + // For APIs that support cursor pagination, such as RepositoriesService.ListHookDeliveries, + // the following field will be populated to point to the next page. + // Set ListCursorOptions.Cursor to this value when calling the endpoint again. + Cursor string + + // For APIs that support before/after pagination, such as OrganizationsService.AuditLog. + Before string + After string + + // Explicitly specify the Rate type so Rate's String() receiver doesn't + // propagate to Response. + Rate Rate + + // token's expiration date. Timestamp is 0001-01-01 when token doesn't expire. + // So it is valid for TokenExpiration.Equal(Timestamp{}) or TokenExpiration.Time.After(time.Now()) + TokenExpiration Timestamp +} + +// newResponse creates a new Response for the provided http.Response. +// r must not be nil. +func newResponse(r *http.Response) *Response { + response := &Response{Response: r} + response.populatePageValues() + response.Rate = parseRate(r) + response.TokenExpiration = parseTokenExpiration(r) + return response +} + +// populatePageValues parses the HTTP Link response headers and populates the +// various pagination link values in the Response. 
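The NextPage and related fields above are what drive the usual offset-pagination loop over ListOptions-based endpoints; a minimal sketch using Repositories.ListByOrg as the example call, with the organization name as a placeholder:

    package main

    import (
        "context"
        "fmt"
        "log"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        client := github.NewClient(nil)
        ctx := context.Background()

        opts := &github.RepositoryListByOrgOptions{
            ListOptions: github.ListOptions{PerPage: 100},
        }
        var all []*github.Repository
        for {
            repos, resp, err := client.Repositories.ListByOrg(ctx, "golang", opts)
            if err != nil {
                log.Fatal(err)
            }
            all = append(all, repos...)
            if resp.NextPage == 0 { // zero value: no further pages
                break
            }
            opts.Page = resp.NextPage
        }
        fmt.Println("repositories:", len(all))
    }
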
+func (r *Response) populatePageValues() { + if links, ok := r.Response.Header["Link"]; ok && len(links) > 0 { + for _, link := range strings.Split(links[0], ",") { + segments := strings.Split(strings.TrimSpace(link), ";") + + // link must at least have href and rel + if len(segments) < 2 { + continue + } + + // ensure href is properly formatted + if !strings.HasPrefix(segments[0], "<") || !strings.HasSuffix(segments[0], ">") { + continue + } + + // try to pull out page parameter + url, err := url.Parse(segments[0][1 : len(segments[0])-1]) + if err != nil { + continue + } + + q := url.Query() + + if cursor := q.Get("cursor"); cursor != "" { + for _, segment := range segments[1:] { + switch strings.TrimSpace(segment) { + case `rel="next"`: + r.Cursor = cursor + } + } + + continue + } + + page := q.Get("page") + since := q.Get("since") + before := q.Get("before") + after := q.Get("after") + + if page == "" && before == "" && after == "" && since == "" { + continue + } + + if since != "" && page == "" { + page = since + } + + for _, segment := range segments[1:] { + switch strings.TrimSpace(segment) { + case `rel="next"`: + if r.NextPage, err = strconv.Atoi(page); err != nil { + r.NextPageToken = page + } + r.After = after + case `rel="prev"`: + r.PrevPage, _ = strconv.Atoi(page) + r.Before = before + case `rel="first"`: + r.FirstPage, _ = strconv.Atoi(page) + case `rel="last"`: + r.LastPage, _ = strconv.Atoi(page) + } + } + } + } +} + +// parseRate parses the rate related headers. +func parseRate(r *http.Response) Rate { + var rate Rate + if limit := r.Header.Get(headerRateLimit); limit != "" { + rate.Limit, _ = strconv.Atoi(limit) + } + if remaining := r.Header.Get(headerRateRemaining); remaining != "" { + rate.Remaining, _ = strconv.Atoi(remaining) + } + if reset := r.Header.Get(headerRateReset); reset != "" { + if v, _ := strconv.ParseInt(reset, 10, 64); v != 0 { + rate.Reset = Timestamp{time.Unix(v, 0)} + } + } + return rate +} + +// parseSecondaryRate parses the secondary rate related headers, +// and returns the time to retry after. +func parseSecondaryRate(r *http.Response) *time.Duration { + // According to GitHub support, the "Retry-After" header value will be + // an integer which represents the number of seconds that one should + // wait before resuming making requests. + if v := r.Header.Get(headerRetryAfter); v != "" { + retryAfterSeconds, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. + retryAfter := time.Duration(retryAfterSeconds) * time.Second + return &retryAfter + } + + // According to GitHub support, endpoints might return x-ratelimit-reset instead, + // as an integer which represents the number of seconds since epoch UTC, + // represting the time to resume making requests. + if v := r.Header.Get(headerRateReset); v != "" { + secondsSinceEpoch, _ := strconv.ParseInt(v, 10, 64) // Error handling is noop. + retryAfter := time.Until(time.Unix(secondsSinceEpoch, 0)) + return &retryAfter + } + + return nil +} + +// parseTokenExpiration parses the TokenExpiration related headers. +// Returns 0001-01-01 if the header is not defined or could not be parsed. +func parseTokenExpiration(r *http.Response) Timestamp { + if v := r.Header.Get(headerTokenExpiration); v != "" { + if t, err := time.Parse("2006-01-02 15:04:05 MST", v); err == nil { + return Timestamp{t.Local()} + } + // Some tokens include the timezone offset instead of the timezone. 
+ // https://github.com/google/go-github/issues/2649 + if t, err := time.Parse("2006-01-02 15:04:05 -0700", v); err == nil { + return Timestamp{t.Local()} + } + } + return Timestamp{} // 0001-01-01 00:00:00 +} + +type requestContext uint8 + +const ( + bypassRateLimitCheck requestContext = iota +) + +// BareDo sends an API request and lets you handle the api response. If an error +// or API Error occurs, the error will contain more information. Otherwise you +// are supposed to read and close the response's Body. If rate limit is exceeded +// and reset time is in the future, BareDo returns *RateLimitError immediately +// without making a network API call. +// +// The provided ctx must be non-nil, if it is nil an error is returned. If it is +// canceled or times out, ctx.Err() will be returned. +func (c *Client) BareDo(ctx context.Context, req *http.Request) (*Response, error) { + if ctx == nil { + return nil, errNonNilContext + } + + req = withContext(ctx, req) + + rateLimitCategory := category(req.Method, req.URL.Path) + + if bypass := ctx.Value(bypassRateLimitCheck); bypass == nil { + // If we've hit rate limit, don't make further requests before Reset time. + if err := c.checkRateLimitBeforeDo(req, rateLimitCategory); err != nil { + return &Response{ + Response: err.Response, + Rate: err.Rate, + }, err + } + // If we've hit a secondary rate limit, don't make further requests before Retry After. + if err := c.checkSecondaryRateLimitBeforeDo(req); err != nil { + return &Response{ + Response: err.Response, + }, err + } + } + + resp, err := c.client.Do(req) + if err != nil { + // If we got an error, and the context has been canceled, + // the context's error is probably more useful. + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + // If the error type is *url.Error, sanitize its URL before returning. + if e, ok := err.(*url.Error); ok { + if url, err := url.Parse(e.URL); err == nil { + e.URL = sanitizeURL(url).String() + return nil, e + } + } + + return nil, err + } + + response := newResponse(resp) + + // Don't update the rate limits if this was a cached response. + // X-From-Cache is set by https://github.com/gregjones/httpcache + if response.Header.Get("X-From-Cache") == "" { + c.rateMu.Lock() + c.rateLimits[rateLimitCategory] = response.Rate + c.rateMu.Unlock() + } + + err = CheckResponse(resp) + if err != nil { + defer resp.Body.Close() + // Special case for AcceptedErrors. If an AcceptedError + // has been encountered, the response's payload will be + // added to the AcceptedError and returned. + // + // Issue #1022 + aerr, ok := err.(*AcceptedError) + if ok { + b, readErr := io.ReadAll(resp.Body) + if readErr != nil { + return response, readErr + } + + aerr.Raw = b + err = aerr + } + + // Update the secondary rate limit if we hit it. + rerr, ok := err.(*AbuseRateLimitError) + if ok && rerr.RetryAfter != nil { + c.rateMu.Lock() + c.secondaryRateLimitReset = time.Now().Add(*rerr.RetryAfter) + c.rateMu.Unlock() + } + } + return response, err +} + +// Do sends an API request and returns the API response. The API response is +// JSON decoded and stored in the value pointed to by v, or returned as an +// error if an API error has occurred. If v implements the io.Writer interface, +// the raw response body will be written to v, without attempting to first +// decode it. If v is nil, and no error hapens, the response is returned as is. 
+// If rate limit is exceeded and reset time is in the future, Do returns +// *RateLimitError immediately without making a network API call. +// +// The provided ctx must be non-nil, if it is nil an error is returned. If it +// is canceled or times out, ctx.Err() will be returned. +func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) { + resp, err := c.BareDo(ctx, req) + if err != nil { + return resp, err + } + defer resp.Body.Close() + + switch v := v.(type) { + case nil: + case io.Writer: + _, err = io.Copy(v, resp.Body) + default: + decErr := json.NewDecoder(resp.Body).Decode(v) + if decErr == io.EOF { + decErr = nil // ignore EOF errors caused by empty response body + } + if decErr != nil { + err = decErr + } + } + return resp, err +} + +// checkRateLimitBeforeDo does not make any network calls, but uses existing knowledge from +// current client state in order to quickly check if *RateLimitError can be immediately returned +// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. +// Otherwise it returns nil, and Client.Do should proceed normally. +func (c *Client) checkRateLimitBeforeDo(req *http.Request, rateLimitCategory rateLimitCategory) *RateLimitError { + c.rateMu.Lock() + rate := c.rateLimits[rateLimitCategory] + c.rateMu.Unlock() + if !rate.Reset.Time.IsZero() && rate.Remaining == 0 && time.Now().Before(rate.Reset.Time) { + // Create a fake response. + resp := &http.Response{ + Status: http.StatusText(http.StatusForbidden), + StatusCode: http.StatusForbidden, + Request: req, + Header: make(http.Header), + Body: io.NopCloser(strings.NewReader("")), + } + return &RateLimitError{ + Rate: rate, + Response: resp, + Message: fmt.Sprintf("API rate limit of %v still exceeded until %v, not making remote request.", rate.Limit, rate.Reset.Time), + } + } + + return nil +} + +// checkSecondaryRateLimitBeforeDo does not make any network calls, but uses existing knowledge from +// current client state in order to quickly check if *AbuseRateLimitError can be immediately returned +// from Client.Do, and if so, returns it so that Client.Do can skip making a network API call unnecessarily. +// Otherwise it returns nil, and Client.Do should proceed normally. +func (c *Client) checkSecondaryRateLimitBeforeDo(req *http.Request) *AbuseRateLimitError { + c.rateMu.Lock() + secondary := c.secondaryRateLimitReset + c.rateMu.Unlock() + if !secondary.IsZero() && time.Now().Before(secondary) { + // Create a fake response. + resp := &http.Response{ + Status: http.StatusText(http.StatusForbidden), + StatusCode: http.StatusForbidden, + Request: req, + Header: make(http.Header), + Body: io.NopCloser(strings.NewReader("")), + } + + retryAfter := time.Until(secondary) + return &AbuseRateLimitError{ + Response: resp, + Message: fmt.Sprintf("API secondary rate limit exceeded until %v, not making remote request.", secondary), + RetryAfter: &retryAfter, + } + } + + return nil +} + +// compareHTTPResponse returns whether two http.Response objects are equal or not. +// Currently, only StatusCode is checked. This function is used when implementing the +// Is(error) bool interface for the custom error types in this package. +func compareHTTPResponse(r1, r2 *http.Response) bool { + if r1 == nil && r2 == nil { + return true + } + + if r1 != nil && r2 != nil { + return r1.StatusCode == r2.StatusCode + } + return false +} + +/* +An ErrorResponse reports one or more errors caused by an API request. 
+ +GitHub API docs: https://docs.github.com/en/rest/#client-errors +*/ +type ErrorResponse struct { + Response *http.Response `json:"-"` // HTTP response that caused this error + Message string `json:"message"` // error message + Errors []Error `json:"errors"` // more detail on individual errors + // Block is only populated on certain types of errors such as code 451. + Block *ErrorBlock `json:"block,omitempty"` + // Most errors will also include a documentation_url field pointing + // to some content that might help you resolve the error, see + // https://docs.github.com/en/rest/#client-errors + DocumentationURL string `json:"documentation_url,omitempty"` +} + +// ErrorBlock contains a further explanation for the reason of an error. +// See https://developer.github.com/changes/2016-03-17-the-451-status-code-is-now-supported/ +// for more information. +type ErrorBlock struct { + Reason string `json:"reason,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` +} + +func (r *ErrorResponse) Error() string { + return fmt.Sprintf("%v %v: %d %v %+v", + r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), + r.Response.StatusCode, r.Message, r.Errors) +} + +// Is returns whether the provided error equals this error. +func (r *ErrorResponse) Is(target error) bool { + v, ok := target.(*ErrorResponse) + if !ok { + return false + } + + if r.Message != v.Message || (r.DocumentationURL != v.DocumentationURL) || + !compareHTTPResponse(r.Response, v.Response) { + return false + } + + // Compare Errors. + if len(r.Errors) != len(v.Errors) { + return false + } + for idx := range r.Errors { + if r.Errors[idx] != v.Errors[idx] { + return false + } + } + + // Compare Block. + if (r.Block != nil && v.Block == nil) || (r.Block == nil && v.Block != nil) { + return false + } + if r.Block != nil && v.Block != nil { + if r.Block.Reason != v.Block.Reason { + return false + } + if (r.Block.CreatedAt != nil && v.Block.CreatedAt == nil) || (r.Block.CreatedAt == + nil && v.Block.CreatedAt != nil) { + return false + } + if r.Block.CreatedAt != nil && v.Block.CreatedAt != nil { + if *(r.Block.CreatedAt) != *(v.Block.CreatedAt) { + return false + } + } + } + + return true +} + +// TwoFactorAuthError occurs when using HTTP Basic Authentication for a user +// that has two-factor authentication enabled. The request can be reattempted +// by providing a one-time password in the request. +type TwoFactorAuthError ErrorResponse + +func (r *TwoFactorAuthError) Error() string { return (*ErrorResponse)(r).Error() } + +// RateLimitError occurs when GitHub returns 403 Forbidden response with a rate limit +// remaining value of 0. +type RateLimitError struct { + Rate Rate // Rate specifies last known rate limit for the client + Response *http.Response // HTTP response that caused this error + Message string `json:"message"` // error message +} + +func (r *RateLimitError) Error() string { + return fmt.Sprintf("%v %v: %d %v %v", + r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), + r.Response.StatusCode, r.Message, formatRateReset(time.Until(r.Rate.Reset.Time))) +} + +// Is returns whether the provided error equals this error. 
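Because Do surfaces these concrete error types (ErrorResponse above, plus the rate-limit errors defined just below), callers can branch on them with errors.As; a minimal sketch, with the Users.Get call standing in for any request:

    package main

    import (
        "context"
        "errors"
        "fmt"
        "log"

        "github.com/google/go-github/v56/github"
    )

    func main() {
        client := github.NewClient(nil)
        _, _, err := client.Users.Get(context.Background(), "octocat")

        var rateErr *github.RateLimitError
        var abuseErr *github.AbuseRateLimitError
        var apiErr *github.ErrorResponse
        switch {
        case err == nil:
            fmt.Println("ok")
        case errors.As(err, &rateErr):
            log.Printf("primary rate limit hit, resets at %v", rateErr.Rate.Reset.Time)
        case errors.As(err, &abuseErr):
            log.Printf("secondary rate limit hit, retry after %v", abuseErr.RetryAfter)
        case errors.As(err, &apiErr):
            log.Printf("GitHub API error: %d %s", apiErr.Response.StatusCode, apiErr.Message)
        default:
            log.Printf("transport error: %v", err)
        }
    }
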
+func (r *RateLimitError) Is(target error) bool { + v, ok := target.(*RateLimitError) + if !ok { + return false + } + + return r.Rate == v.Rate && + r.Message == v.Message && + compareHTTPResponse(r.Response, v.Response) +} + +// AcceptedError occurs when GitHub returns 202 Accepted response with an +// empty body, which means a job was scheduled on the GitHub side to process +// the information needed and cache it. +// Technically, 202 Accepted is not a real error, it's just used to +// indicate that results are not ready yet, but should be available soon. +// The request can be repeated after some time. +type AcceptedError struct { + // Raw contains the response body. + Raw []byte +} + +func (*AcceptedError) Error() string { + return "job scheduled on GitHub side; try again later" +} + +// Is returns whether the provided error equals this error. +func (ae *AcceptedError) Is(target error) bool { + v, ok := target.(*AcceptedError) + if !ok { + return false + } + return bytes.Equal(ae.Raw, v.Raw) +} + +// AbuseRateLimitError occurs when GitHub returns 403 Forbidden response with the +// "documentation_url" field value equal to "https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits". +type AbuseRateLimitError struct { + Response *http.Response // HTTP response that caused this error + Message string `json:"message"` // error message + + // RetryAfter is provided with some abuse rate limit errors. If present, + // it is the amount of time that the client should wait before retrying. + // Otherwise, the client should try again later (after an unspecified amount of time). + RetryAfter *time.Duration +} + +func (r *AbuseRateLimitError) Error() string { + return fmt.Sprintf("%v %v: %d %v", + r.Response.Request.Method, sanitizeURL(r.Response.Request.URL), + r.Response.StatusCode, r.Message) +} + +// Is returns whether the provided error equals this error. +func (r *AbuseRateLimitError) Is(target error) bool { + v, ok := target.(*AbuseRateLimitError) + if !ok { + return false + } + + return r.Message == v.Message && + r.RetryAfter == v.RetryAfter && + compareHTTPResponse(r.Response, v.Response) +} + +// sanitizeURL redacts the client_secret parameter from the URL which may be +// exposed to the user. +func sanitizeURL(uri *url.URL) *url.URL { + if uri == nil { + return nil + } + params := uri.Query() + if len(params.Get("client_secret")) > 0 { + params.Set("client_secret", "REDACTED") + uri.RawQuery = params.Encode() + } + return uri +} + +/* +An Error reports more details on an individual error in an ErrorResponse. +These are the possible validation error codes: + + missing: + resource does not exist + missing_field: + a required field on a resource has not been set + invalid: + the formatting of a field is invalid + already_exists: + another resource has the same valid as this field + custom: + some resources return this (e.g. github.User.CreateKey()), additional + information is set in the Message field of the Error + +GitHub error responses structure are often undocumented and inconsistent. +Sometimes error is just a simple string (Issue #540). +In such cases, Message represents an error message as a workaround. + +GitHub API docs: https://docs.github.com/en/rest/#client-errors +*/ +type Error struct { + Resource string `json:"resource"` // resource on which the error occurred + Field string `json:"field"` // field on which the error occurred + Code string `json:"code"` // validation error code + Message string `json:"message"` // Message describing the error. 
Errors with Code == "custom" will always have this set. +} + +func (e *Error) Error() string { + return fmt.Sprintf("%v error caused by %v field on %v resource", + e.Code, e.Field, e.Resource) +} + +func (e *Error) UnmarshalJSON(data []byte) error { + type aliasError Error // avoid infinite recursion by using type alias. + if err := json.Unmarshal(data, (*aliasError)(e)); err != nil { + return json.Unmarshal(data, &e.Message) // data can be json string. + } + return nil +} + +// CheckResponse checks the API response for errors, and returns them if +// present. A response is considered an error if it has a status code outside +// the 200 range or equal to 202 Accepted. +// API error responses are expected to have response +// body, and a JSON response body that maps to ErrorResponse. +// +// The error type will be *RateLimitError for rate limit exceeded errors, +// *AcceptedError for 202 Accepted status codes, +// and *TwoFactorAuthError for two-factor authentication errors. +func CheckResponse(r *http.Response) error { + if r.StatusCode == http.StatusAccepted { + return &AcceptedError{} + } + if c := r.StatusCode; 200 <= c && c <= 299 { + return nil + } + + errorResponse := &ErrorResponse{Response: r} + data, err := io.ReadAll(r.Body) + if err == nil && data != nil { + err = json.Unmarshal(data, errorResponse) + if err != nil { + // reset the response as if this never happened + errorResponse = &ErrorResponse{Response: r} + } + } + // Re-populate error response body because GitHub error responses are often + // undocumented and inconsistent. + // Issue #1136, #540. + r.Body = io.NopCloser(bytes.NewBuffer(data)) + switch { + case r.StatusCode == http.StatusUnauthorized && strings.HasPrefix(r.Header.Get(headerOTP), "required"): + return (*TwoFactorAuthError)(errorResponse) + case r.StatusCode == http.StatusForbidden && r.Header.Get(headerRateRemaining) == "0": + return &RateLimitError{ + Rate: parseRate(r), + Response: errorResponse.Response, + Message: errorResponse.Message, + } + case r.StatusCode == http.StatusForbidden && + (strings.HasSuffix(errorResponse.DocumentationURL, "#abuse-rate-limits") || + strings.HasSuffix(errorResponse.DocumentationURL, "#secondary-rate-limits")): + abuseRateLimitError := &AbuseRateLimitError{ + Response: errorResponse.Response, + Message: errorResponse.Message, + } + if retryAfter := parseSecondaryRate(r); retryAfter != nil { + abuseRateLimitError.RetryAfter = retryAfter + } + return abuseRateLimitError + default: + return errorResponse + } +} + +// parseBoolResponse determines the boolean result from a GitHub API response. +// Several GitHub API methods return boolean responses indicated by the HTTP +// status code in the response (true indicated by a 204, false indicated by a +// 404). This helper function will determine that result and hide the 404 +// error if present. Any other error will be returned through as-is. +func parseBoolResponse(err error) (bool, error) { + if err == nil { + return true, nil + } + + if err, ok := err.(*ErrorResponse); ok && err.Response.StatusCode == http.StatusNotFound { + // Simply false. In this one case, we do not pass the error through. + return false, nil + } + + // some other real error occurred + return false, err +} + +// Rate represents the rate limit for the current client. +type Rate struct { + // The number of requests per hour the client is currently limited to. + Limit int `json:"limit"` + + // The number of remaining requests the client can make this hour. 
+ Remaining int `json:"remaining"` + + // The time at which the current rate limit will reset. + Reset Timestamp `json:"reset"` +} + +func (r Rate) String() string { + return Stringify(r) +} + +// RateLimits represents the rate limits for the current client. +type RateLimits struct { + // The rate limit for non-search API requests. Unauthenticated + // requests are limited to 60 per hour. Authenticated requests are + // limited to 5,000 per hour. + // + // GitHub API docs: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting + Core *Rate `json:"core"` + + // The rate limit for search API requests. Unauthenticated requests + // are limited to 10 requests per minutes. Authenticated requests are + // limited to 30 per minute. + // + // GitHub API docs: https://docs.github.com/en/rest/search#rate-limit + Search *Rate `json:"search"` + + // GitHub API docs: https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit + GraphQL *Rate `json:"graphql"` + + // GitHub API dos: https://docs.github.com/en/rest/rate-limit + IntegrationManifest *Rate `json:"integration_manifest"` + + SourceImport *Rate `json:"source_import"` + CodeScanningUpload *Rate `json:"code_scanning_upload"` + ActionsRunnerRegistration *Rate `json:"actions_runner_registration"` + SCIM *Rate `json:"scim"` +} + +func (r RateLimits) String() string { + return Stringify(r) +} + +type rateLimitCategory uint8 + +const ( + coreCategory rateLimitCategory = iota + searchCategory + graphqlCategory + integrationManifestCategory + sourceImportCategory + codeScanningUploadCategory + actionsRunnerRegistrationCategory + scimCategory + + categories // An array of this length will be able to contain all rate limit categories. +) + +// category returns the rate limit category of the endpoint, determined by HTTP method and Request.URL.Path. +func category(method, path string) rateLimitCategory { + switch { + // https://docs.github.com/en/rest/rate-limit#about-rate-limits + default: + // NOTE: coreCategory is returned for actionsRunnerRegistrationCategory too, + // because no API found for this category. + return coreCategory + case strings.HasPrefix(path, "/search/"): + return searchCategory + case path == "/graphql": + return graphqlCategory + case strings.HasPrefix(path, "/app-manifests/") && + strings.HasSuffix(path, "/conversions") && + method == http.MethodPost: + return integrationManifestCategory + + // https://docs.github.com/en/rest/migrations/source-imports#start-an-import + case strings.HasPrefix(path, "/repos/") && + strings.HasSuffix(path, "/import") && + method == http.MethodPut: + return sourceImportCategory + + // https://docs.github.com/en/rest/code-scanning#upload-an-analysis-as-sarif-data + case strings.HasSuffix(path, "/code-scanning/sarifs"): + return codeScanningUploadCategory + + // https://docs.github.com/en/enterprise-cloud@latest/rest/scim + case strings.HasPrefix(path, "/scim/"): + return scimCategory + } +} + +// RateLimits returns the rate limits for the current client. +func (c *Client) RateLimits(ctx context.Context) (*RateLimits, *Response, error) { + req, err := c.NewRequest("GET", "rate_limit", nil) + if err != nil { + return nil, nil, err + } + + response := new(struct { + Resources *RateLimits `json:"resources"` + }) + + // This resource is not subject to rate limits. 
+ ctx = context.WithValue(ctx, bypassRateLimitCheck, true) + resp, err := c.Do(ctx, req, response) + if err != nil { + return nil, resp, err + } + + if response.Resources != nil { + c.rateMu.Lock() + if response.Resources.Core != nil { + c.rateLimits[coreCategory] = *response.Resources.Core + } + if response.Resources.Search != nil { + c.rateLimits[searchCategory] = *response.Resources.Search + } + if response.Resources.GraphQL != nil { + c.rateLimits[graphqlCategory] = *response.Resources.GraphQL + } + if response.Resources.IntegrationManifest != nil { + c.rateLimits[integrationManifestCategory] = *response.Resources.IntegrationManifest + } + if response.Resources.SourceImport != nil { + c.rateLimits[sourceImportCategory] = *response.Resources.SourceImport + } + if response.Resources.CodeScanningUpload != nil { + c.rateLimits[codeScanningUploadCategory] = *response.Resources.CodeScanningUpload + } + if response.Resources.ActionsRunnerRegistration != nil { + c.rateLimits[actionsRunnerRegistrationCategory] = *response.Resources.ActionsRunnerRegistration + } + if response.Resources.SCIM != nil { + c.rateLimits[scimCategory] = *response.Resources.SCIM + } + c.rateMu.Unlock() + } + + return response.Resources, resp, nil +} + +func setCredentialsAsHeaders(req *http.Request, id, secret string) *http.Request { + // To set extra headers, we must make a copy of the Request so + // that we don't modify the Request we were given. This is required by the + // specification of http.RoundTripper. + // + // Since we are going to modify only req.Header here, we only need a deep copy + // of req.Header. + convertedRequest := new(http.Request) + *convertedRequest = *req + convertedRequest.Header = make(http.Header, len(req.Header)) + + for k, s := range req.Header { + convertedRequest.Header[k] = append([]string(nil), s...) + } + convertedRequest.SetBasicAuth(id, secret) + return convertedRequest +} + +/* +UnauthenticatedRateLimitedTransport allows you to make unauthenticated calls +that need to use a higher rate limit associated with your OAuth application. + + t := &github.UnauthenticatedRateLimitedTransport{ + ClientID: "your app's client ID", + ClientSecret: "your app's client secret", + } + client := github.NewClient(t.Client()) + +This will add the client id and secret as a base64-encoded string in the format +ClientID:ClientSecret and apply it as an "Authorization": "Basic" header. + +See https://docs.github.com/en/rest/#unauthenticated-rate-limited-requests for +more information. +*/ +type UnauthenticatedRateLimitedTransport struct { + // ClientID is the GitHub OAuth client ID of the current application, which + // can be found by selecting its entry in the list at + // https://github.com/settings/applications. + ClientID string + + // ClientSecret is the GitHub OAuth client secret of the current + // application. + ClientSecret string + + // Transport is the underlying HTTP transport to use when making requests. + // It will default to http.DefaultTransport if nil. + Transport http.RoundTripper +} + +// RoundTrip implements the RoundTripper interface. +func (t *UnauthenticatedRateLimitedTransport) RoundTrip(req *http.Request) (*http.Response, error) { + if t.ClientID == "" { + return nil, errors.New("t.ClientID is empty") + } + if t.ClientSecret == "" { + return nil, errors.New("t.ClientSecret is empty") + } + + req2 := setCredentialsAsHeaders(req, t.ClientID, t.ClientSecret) + // Make the HTTP request. 
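A short sketch, separate from the vendored code, of querying the rate_limit resource through the RateLimits method defined above; it assumes an unauthenticated client and only inspects the core bucket.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	client := github.NewClient(nil) // unauthenticated: 60 core requests per hour
	limits, _, err := client.RateLimits(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	if core := limits.Core; core != nil {
		fmt.Printf("core: %d of %d requests remaining, resets at %v\n",
			core.Remaining, core.Limit, core.Reset)
	}
}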
+ return t.transport().RoundTrip(req2) +} + +// Client returns an *http.Client that makes requests which are subject to the +// rate limit of your OAuth application. +func (t *UnauthenticatedRateLimitedTransport) Client() *http.Client { + return &http.Client{Transport: t} +} + +func (t *UnauthenticatedRateLimitedTransport) transport() http.RoundTripper { + if t.Transport != nil { + return t.Transport + } + return http.DefaultTransport +} + +// BasicAuthTransport is an http.RoundTripper that authenticates all requests +// using HTTP Basic Authentication with the provided username and password. It +// additionally supports users who have two-factor authentication enabled on +// their GitHub account. +type BasicAuthTransport struct { + Username string // GitHub username + Password string // GitHub password + OTP string // one-time password for users with two-factor auth enabled + + // Transport is the underlying HTTP transport to use when making requests. + // It will default to http.DefaultTransport if nil. + Transport http.RoundTripper +} + +// RoundTrip implements the RoundTripper interface. +func (t *BasicAuthTransport) RoundTrip(req *http.Request) (*http.Response, error) { + req2 := setCredentialsAsHeaders(req, t.Username, t.Password) + if t.OTP != "" { + req2.Header.Set(headerOTP, t.OTP) + } + return t.transport().RoundTrip(req2) +} + +// Client returns an *http.Client that makes requests that are authenticated +// using HTTP Basic Authentication. +func (t *BasicAuthTransport) Client() *http.Client { + return &http.Client{Transport: t} +} + +func (t *BasicAuthTransport) transport() http.RoundTripper { + if t.Transport != nil { + return t.Transport + } + return http.DefaultTransport +} + +// formatRateReset formats d to look like "[rate reset in 2s]" or +// "[rate reset in 87m02s]" for the positive durations. And like "[rate limit was reset 87m02s ago]" +// for the negative cases. +func formatRateReset(d time.Duration) string { + isNegative := d < 0 + if isNegative { + d *= -1 + } + secondsTotal := int(0.5 + d.Seconds()) + minutes := secondsTotal / 60 + seconds := secondsTotal - minutes*60 + + var timeString string + if minutes > 0 { + timeString = fmt.Sprintf("%dm%02ds", minutes, seconds) + } else { + timeString = fmt.Sprintf("%ds", seconds) + } + + if isNegative { + return fmt.Sprintf("[rate limit was reset %v ago]", timeString) + } + return fmt.Sprintf("[rate reset in %v]", timeString) +} + +// When using roundTripWithOptionalFollowRedirect, note that it +// is the responsibility of the caller to close the response body. +func (c *Client) roundTripWithOptionalFollowRedirect(ctx context.Context, u string, maxRedirects int, opts ...RequestOption) (*http.Response, error) { + req, err := c.NewRequest("GET", u, nil, opts...) + if err != nil { + return nil, err + } + + var resp *http.Response + // Use http.DefaultTransport if no custom Transport is configured + req = withContext(ctx, req) + if c.client.Transport == nil { + resp, err = http.DefaultTransport.RoundTrip(req) + } else { + resp, err = c.client.Transport.RoundTrip(req) + } + if err != nil { + return nil, err + } + + // If redirect response is returned, follow it + if maxRedirects > 0 && resp.StatusCode == http.StatusMovedPermanently { + _ = resp.Body.Close() + u = resp.Header.Get("Location") + resp, err = c.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects-1, opts...) + } + return resp, err +} + +// Bool is a helper routine that allocates a new bool value +// to store v and returns a pointer to it. 
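A minimal sketch, not part of the vendored file, of wiring BasicAuthTransport into a client; the credentials are placeholders, the Users.Get call comes from elsewhere in the same package, and GitHub has largely deprecated password-based API authentication in favour of tokens.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	tp := &github.BasicAuthTransport{
		Username: "some-user",   // placeholder
		Password: "some-secret", // placeholder
		OTP:      "123456",      // only needed when two-factor auth is enabled
	}
	client := github.NewClient(tp.Client())

	user, _, err := client.Users.Get(context.Background(), "") // "" means the authenticated user
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("authenticated as", user.GetLogin())
}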
+func Bool(v bool) *bool { return &v } + +// Int is a helper routine that allocates a new int value +// to store v and returns a pointer to it. +func Int(v int) *int { return &v } + +// Int64 is a helper routine that allocates a new int64 value +// to store v and returns a pointer to it. +func Int64(v int64) *int64 { return &v } + +// String is a helper routine that allocates a new string value +// to store v and returns a pointer to it. +func String(v string) *string { return &v } + +// roundTripperFunc creates a RoundTripper (transport) +type roundTripperFunc func(*http.Request) (*http.Response, error) + +func (fn roundTripperFunc) RoundTrip(r *http.Request) (*http.Response, error) { + return fn(r) +} diff --git a/vendor/github.com/google/go-github/v56/github/gitignore.go b/vendor/github.com/google/go-github/v56/github/gitignore.go new file mode 100644 index 000000000..a20a868b4 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/gitignore.go @@ -0,0 +1,64 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GitignoresService provides access to the gitignore related functions in the +// GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/gitignore/ +type GitignoresService service + +// Gitignore represents a .gitignore file as returned by the GitHub API. +type Gitignore struct { + Name *string `json:"name,omitempty"` + Source *string `json:"source,omitempty"` +} + +func (g Gitignore) String() string { + return Stringify(g) +} + +// List all available Gitignore templates. +// +// GitHub API docs: https://docs.github.com/en/rest/gitignore/#listing-available-templates +func (s *GitignoresService) List(ctx context.Context) ([]string, *Response, error) { + req, err := s.client.NewRequest("GET", "gitignore/templates", nil) + if err != nil { + return nil, nil, err + } + + var availableTemplates []string + resp, err := s.client.Do(ctx, req, &availableTemplates) + if err != nil { + return nil, resp, err + } + + return availableTemplates, resp, nil +} + +// Get a Gitignore by name. +// +// GitHub API docs: https://docs.github.com/en/rest/gitignore#get-a-gitignore-template +func (s *GitignoresService) Get(ctx context.Context, name string) (*Gitignore, *Response, error) { + u := fmt.Sprintf("gitignore/templates/%v", name) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + gitignore := new(Gitignore) + resp, err := s.client.Do(ctx, req, gitignore) + if err != nil { + return nil, resp, err + } + + return gitignore, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/interactions.go b/vendor/github.com/google/go-github/v56/github/interactions.go new file mode 100644 index 000000000..a690f6126 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/interactions.go @@ -0,0 +1,28 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// InteractionsService handles communication with the repository and organization related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/interactions/ +type InteractionsService service + +// InteractionRestriction represents the interaction restrictions for repository and organization. 
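A small sketch, outside the vendored file, exercising the GitignoresService added here; the template name "Go" is just an example and an unauthenticated client is enough for these read-only calls.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	names, _, err := client.Gitignores.List(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("available templates:", len(names))

	tmpl, _, err := client.Gitignores.Get(ctx, "Go")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(tmpl.GetSource()) // the raw .gitignore body
}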
+type InteractionRestriction struct { + // Specifies the group of GitHub users who can + // comment, open issues, or create pull requests for the given repository. + // Possible values are: "existing_users", "contributors_only" and "collaborators_only". + Limit *string `json:"limit,omitempty"` + + // Origin specifies the type of the resource to interact with. + // Possible values are: "repository" and "organization". + Origin *string `json:"origin,omitempty"` + + // ExpiresAt specifies the time after which the interaction restrictions expire. + // The default expiry time is 24 hours from the time restriction is created. + ExpiresAt *Timestamp `json:"expires_at,omitempty"` +} diff --git a/vendor/github.com/google/go-github/v56/github/interactions_orgs.go b/vendor/github.com/google/go-github/v56/github/interactions_orgs.go new file mode 100644 index 000000000..5c7663f58 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/interactions_orgs.go @@ -0,0 +1,80 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetRestrictionsForOrg fetches the interaction restrictions for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/interactions/orgs#get-interaction-restrictions-for-an-organization +func (s *InteractionsService) GetRestrictionsForOrg(ctx context.Context, organization string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + organizationInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, organizationInteractions) + if err != nil { + return nil, resp, err + } + + return organizationInteractions, resp, nil +} + +// UpdateRestrictionsForOrg adds or updates the interaction restrictions for an organization. +// +// limit specifies the group of GitHub users who can comment, open issues, or create pull requests +// in public repositories for the given organization. +// Possible values are: "existing_users", "contributors_only", "collaborators_only". +// +// GitHub API docs: https://docs.github.com/en/rest/interactions/orgs#set-interaction-restrictions-for-an-organization +func (s *InteractionsService) UpdateRestrictionsForOrg(ctx context.Context, organization, limit string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + + interaction := &InteractionRestriction{Limit: String(limit)} + + req, err := s.client.NewRequest("PUT", u, interaction) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + organizationInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, organizationInteractions) + if err != nil { + return nil, resp, err + } + + return organizationInteractions, resp, nil +} + +// RemoveRestrictionsFromOrg removes the interaction restrictions for an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/interactions/orgs#remove-interaction-restrictions-for-an-organization +func (s *InteractionsService) RemoveRestrictionsFromOrg(ctx context.Context, organization string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/interaction-limits", organization) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/interactions_repos.go b/vendor/github.com/google/go-github/v56/github/interactions_repos.go new file mode 100644 index 000000000..41e6c5319 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/interactions_repos.go @@ -0,0 +1,80 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetRestrictionsForRepo fetches the interaction restrictions for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/interactions/repos#get-interaction-restrictions-for-a-repository +func (s *InteractionsService) GetRestrictionsForRepo(ctx context.Context, owner, repo string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + repositoryInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, repositoryInteractions) + if err != nil { + return nil, resp, err + } + + return repositoryInteractions, resp, nil +} + +// UpdateRestrictionsForRepo adds or updates the interaction restrictions for a repository. +// +// limit specifies the group of GitHub users who can comment, open issues, or create pull requests +// for the given repository. +// Possible values are: "existing_users", "contributors_only", "collaborators_only". +// +// GitHub API docs: https://docs.github.com/en/rest/interactions/repos#set-interaction-restrictions-for-a-repository +func (s *InteractionsService) UpdateRestrictionsForRepo(ctx context.Context, owner, repo, limit string) (*InteractionRestriction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + + interaction := &InteractionRestriction{Limit: String(limit)} + + req, err := s.client.NewRequest("PUT", u, interaction) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + repositoryInteractions := new(InteractionRestriction) + + resp, err := s.client.Do(ctx, req, repositoryInteractions) + if err != nil { + return nil, resp, err + } + + return repositoryInteractions, resp, nil +} + +// RemoveRestrictionsFromRepo removes the interaction restrictions for a repository. 
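A hypothetical sketch, not part of the vendored file, of the repository-level interaction calls defined here; the org and repo names are placeholders and an authenticated client with admin access to the repository is assumed.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // in practice, built with an admin-scoped token

	// Temporarily limit interactions to collaborators only.
	res, _, err := client.Interactions.UpdateRestrictionsForRepo(ctx, "some-org", "some-repo", "collaborators_only")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("limit:", res.GetLimit(), "expires:", res.GetExpiresAt())
}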
+// +// GitHub API docs: https://docs.github.com/en/rest/interactions/repos#remove-interaction-restrictions-for-a-repository +func (s *InteractionsService) RemoveRestrictionsFromRepo(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/interaction-limits", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeInteractionRestrictionsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/issue_import.go b/vendor/github.com/google/go-github/v56/github/issue_import.go new file mode 100644 index 000000000..04899772b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issue_import.go @@ -0,0 +1,148 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "encoding/json" + "fmt" +) + +// IssueImportService handles communication with the issue import related +// methods of the Issue Import GitHub API. +type IssueImportService service + +// IssueImportRequest represents a request to create an issue. +// +// https://gist.github.com/jonmagic/5282384165e0f86ef105#supported-issue-and-comment-fields +type IssueImportRequest struct { + IssueImport IssueImport `json:"issue"` + Comments []*Comment `json:"comments,omitempty"` +} + +// IssueImport represents body of issue to import. +type IssueImport struct { + Title string `json:"title"` + Body string `json:"body"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Assignee *string `json:"assignee,omitempty"` + Milestone *int `json:"milestone,omitempty"` + Closed *bool `json:"closed,omitempty"` + Labels []string `json:"labels,omitempty"` +} + +// Comment represents comments of issue to import. +type Comment struct { + CreatedAt *Timestamp `json:"created_at,omitempty"` + Body string `json:"body"` +} + +// IssueImportResponse represents the response of an issue import create request. +// +// https://gist.github.com/jonmagic/5282384165e0f86ef105#import-issue-response +type IssueImportResponse struct { + ID *int `json:"id,omitempty"` + Status *string `json:"status,omitempty"` + URL *string `json:"url,omitempty"` + ImportIssuesURL *string `json:"import_issues_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Message *string `json:"message,omitempty"` + DocumentationURL *string `json:"documentation_url,omitempty"` + Errors []*IssueImportError `json:"errors,omitempty"` +} + +// IssueImportError represents errors of an issue import create request. +type IssueImportError struct { + Location *string `json:"location,omitempty"` + Resource *string `json:"resource,omitempty"` + Field *string `json:"field,omitempty"` + Value *string `json:"value,omitempty"` + Code *string `json:"code,omitempty"` +} + +// Create a new imported issue on the specified repository. 
+// +// https://gist.github.com/jonmagic/5282384165e0f86ef105#start-an-issue-import +func (s *IssueImportService) Create(ctx context.Context, owner, repo string, issue *IssueImportRequest) (*IssueImportResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/issues", owner, repo) + req, err := s.client.NewRequest("POST", u, issue) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeIssueImportAPI) + + i := new(IssueImportResponse) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + aerr, ok := err.(*AcceptedError) + if ok { + if err := json.Unmarshal(aerr.Raw, i); err != nil { + return i, resp, err + } + return i, resp, err + } + return nil, resp, err + } + + return i, resp, nil +} + +// CheckStatus checks the status of an imported issue. +// +// https://gist.github.com/jonmagic/5282384165e0f86ef105#import-status-request +func (s *IssueImportService) CheckStatus(ctx context.Context, owner, repo string, issueID int64) (*IssueImportResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/issues/%v", owner, repo, issueID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeIssueImportAPI) + + i := new(IssueImportResponse) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// CheckStatusSince checks the status of multiple imported issues since a given date. +// +// https://gist.github.com/jonmagic/5282384165e0f86ef105#check-status-of-multiple-issues +func (s *IssueImportService) CheckStatusSince(ctx context.Context, owner, repo string, since Timestamp) ([]*IssueImportResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/issues?since=%v", owner, repo, since.Format("2006-01-02")) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeIssueImportAPI) + + var b bytes.Buffer + resp, err := s.client.Do(ctx, req, &b) + if err != nil { + return nil, resp, err + } + + var i []*IssueImportResponse + err = json.Unmarshal(b.Bytes(), &i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/issues.go b/vendor/github.com/google/go-github/v56/github/issues.go new file mode 100644 index 000000000..42e58a17a --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues.go @@ -0,0 +1,362 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "time" +) + +// IssuesService handles communication with the issue related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/ +type IssuesService service + +// Issue represents a GitHub issue on a repository. +// +// Note: As far as the GitHub API is concerned, every pull request is an issue, +// but not every issue is a pull request. Some endpoints, events, and webhooks +// may also return pull requests via this struct. If PullRequestLinks is nil, +// this is an issue, and if PullRequestLinks is not nil, this is a pull request. +// The IsPullRequest helper method can be used to check that. 
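A sketch, separate from the vendored code, of the issue-import flow above; the repository, titles, and bodies are invented placeholders, a token with write access is assumed, and the IssueImport service field on the client comes from elsewhere in the package.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // a client authenticated with repo scope is assumed

	req := &github.IssueImportRequest{
		IssueImport: github.IssueImport{
			Title:  "Imported: build broken on main",
			Body:   "Migrated from the previous tracker.",
			Closed: github.Bool(false),
			Labels: []string{"migration"},
		},
		Comments: []*github.Comment{{Body: "Original discussion copied over."}},
	}

	res, _, err := client.IssueImport.Create(ctx, "some-org", "some-repo", req)
	if err != nil {
		log.Fatal(err)
	}
	// The import runs asynchronously; poll CheckStatus with the returned ID.
	fmt.Println("import", res.GetID(), "status:", res.GetStatus())
}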
+type Issue struct { + ID *int64 `json:"id,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + // StateReason can be one of: "completed", "not_planned", "reopened". + StateReason *string `json:"state_reason,omitempty"` + Locked *bool `json:"locked,omitempty"` + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` + AuthorAssociation *string `json:"author_association,omitempty"` + User *User `json:"user,omitempty"` + Labels []*Label `json:"labels,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Comments *int `json:"comments,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ClosedBy *User `json:"closed_by,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + LabelsURL *string `json:"labels_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + PullRequestLinks *PullRequestLinks `json:"pull_request,omitempty"` + Repository *Repository `json:"repository,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + Assignees []*User `json:"assignees,omitempty"` + NodeID *string `json:"node_id,omitempty"` + + // TextMatches is only populated from search results that request text matches + // See: search.go and https://docs.github.com/en/rest/search/#text-match-metadata + TextMatches []*TextMatch `json:"text_matches,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the issue. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". + ActiveLockReason *string `json:"active_lock_reason,omitempty"` +} + +func (i Issue) String() string { + return Stringify(i) +} + +// IsPullRequest reports whether the issue is also a pull request. It uses the +// method recommended by GitHub's API documentation, which is to check whether +// PullRequestLinks is non-nil. +func (i Issue) IsPullRequest() bool { + return i.PullRequestLinks != nil +} + +// IssueRequest represents a request to create/edit an issue. +// It is separate from Issue above because otherwise Labels +// and Assignee fail to serialize to the correct JSON. +type IssueRequest struct { + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` + Labels *[]string `json:"labels,omitempty"` + Assignee *string `json:"assignee,omitempty"` + State *string `json:"state,omitempty"` + // StateReason can be 'completed' or 'not_planned'. + StateReason *string `json:"state_reason,omitempty"` + Milestone *int `json:"milestone,omitempty"` + Assignees *[]string `json:"assignees,omitempty"` +} + +// IssueListOptions specifies the optional parameters to the IssuesService.List +// and IssuesService.ListByOrg methods. +type IssueListOptions struct { + // Filter specifies which issues to list. Possible values are: assigned, + // created, mentioned, subscribed, all. Default is "assigned". + Filter string `url:"filter,omitempty"` + + // State filters issues based on their state. Possible values are: open, + // closed, all. Default is "open". + State string `url:"state,omitempty"` + + // Labels filters issues based on their label. + Labels []string `url:"labels,comma,omitempty"` + + // Sort specifies how to sort issues. 
Possible values are: created, updated, + // and comments. Default value is "created". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort issues. Possible values are: asc, desc. + // Default is "desc". + Direction string `url:"direction,omitempty"` + + // Since filters issues by time. + Since time.Time `url:"since,omitempty"` + + ListOptions +} + +// PullRequestLinks object is added to the Issue object when it's an issue included +// in the IssueCommentEvent webhook payload, if the webhook is fired by a comment on a PR. +type PullRequestLinks struct { + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + DiffURL *string `json:"diff_url,omitempty"` + PatchURL *string `json:"patch_url,omitempty"` +} + +// List the issues for the authenticated user. If all is true, list issues +// across all the user's visible repositories including owned, member, and +// organization repositories; if false, list only owned and member +// repositories. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#list-user-account-issues-assigned-to-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#list-issues-assigned-to-the-authenticated-user +func (s *IssuesService) List(ctx context.Context, all bool, opts *IssueListOptions) ([]*Issue, *Response, error) { + var u string + if all { + u = "issues" + } else { + u = "user/issues" + } + return s.listIssues(ctx, u, opts) +} + +// ListByOrg fetches the issues in the specified organization for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#list-organization-issues-assigned-to-the-authenticated-user +func (s *IssuesService) ListByOrg(ctx context.Context, org string, opts *IssueListOptions) ([]*Issue, *Response, error) { + u := fmt.Sprintf("orgs/%v/issues", org) + return s.listIssues(ctx, u, opts) +} + +func (s *IssuesService) listIssues(ctx context.Context, u string, opts *IssueListOptions) ([]*Issue, *Response, error) { + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var issues []*Issue + resp, err := s.client.Do(ctx, req, &issues) + if err != nil { + return nil, resp, err + } + + return issues, resp, nil +} + +// IssueListByRepoOptions specifies the optional parameters to the +// IssuesService.ListByRepo method. +type IssueListByRepoOptions struct { + // Milestone limits issues for the specified milestone. Possible values are + // a milestone number, "none" for issues with no milestone, "*" for issues + // with any milestone. + Milestone string `url:"milestone,omitempty"` + + // State filters issues based on their state. Possible values are: open, + // closed, all. Default is "open". + State string `url:"state,omitempty"` + + // Assignee filters issues based on their assignee. Possible values are a + // user name, "none" for issues that are not assigned, "*" for issues with + // any assigned user. + Assignee string `url:"assignee,omitempty"` + + // Creator filters issues based on their creator. + Creator string `url:"creator,omitempty"` + + // Mentioned filters issues to those mentioned a specific user. + Mentioned string `url:"mentioned,omitempty"` + + // Labels filters issues based on their label. 
+ Labels []string `url:"labels,omitempty,comma"` + + // Sort specifies how to sort issues. Possible values are: created, updated, + // and comments. Default value is "created". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort issues. Possible values are: asc, desc. + // Default is "desc". + Direction string `url:"direction,omitempty"` + + // Since filters issues by time. + Since time.Time `url:"since,omitempty"` + + ListOptions +} + +// ListByRepo lists the issues for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#list-repository-issues +func (s *IssuesService) ListByRepo(ctx context.Context, owner string, repo string, opts *IssueListByRepoOptions) ([]*Issue, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var issues []*Issue + resp, err := s.client.Do(ctx, req, &issues) + if err != nil { + return nil, resp, err + } + + return issues, resp, nil +} + +// Get a single issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#get-an-issue +func (s *IssuesService) Get(ctx context.Context, owner string, repo string, number int) (*Issue, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + issue := new(Issue) + resp, err := s.client.Do(ctx, req, issue) + if err != nil { + return nil, resp, err + } + + return issue, resp, nil +} + +// Create a new issue on the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#create-an-issue +func (s *IssuesService) Create(ctx context.Context, owner string, repo string, issue *IssueRequest) (*Issue, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues", owner, repo) + req, err := s.client.NewRequest("POST", u, issue) + if err != nil { + return nil, nil, err + } + + i := new(Issue) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// Edit (update) an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#update-an-issue +func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, number int, issue *IssueRequest) (*Issue, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d", owner, repo, number) + req, err := s.client.NewRequest("PATCH", u, issue) + if err != nil { + return nil, nil, err + } + + i := new(Issue) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// Remove a milestone from an issue. +// +// This is a helper method to explicitly update an issue with a `null` milestone, thereby removing it. 
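A sketch, outside the vendored file, showing IssuesService.Create and ListByRepo as defined above; all names, labels, and assignees are placeholders and an authenticated client is assumed for the write call.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // authenticated in practice

	issue, _, err := client.Issues.Create(ctx, "some-org", "some-repo", &github.IssueRequest{
		Title:     github.String("Flaky e2e test"),
		Body:      github.String("Fails roughly one run in ten."),
		Labels:    &[]string{"kind/bug", "ci"},
		Assignees: &[]string{"some-user"},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created", issue.GetHTMLURL())

	bugs, _, err := client.Issues.ListByRepo(ctx, "some-org", "some-repo", &github.IssueListByRepoOptions{
		State:       "open",
		Labels:      []string{"kind/bug"},
		ListOptions: github.ListOptions{PerPage: 50},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(bugs), "open bugs")
}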
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#update-an-issue +func (s *IssuesService) RemoveMilestone(ctx context.Context, owner, repo string, issueNumber int) (*Issue, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%v", owner, repo, issueNumber) + req, err := s.client.NewRequest("PATCH", u, &struct { + Milestone *Milestone `json:"milestone"` + }{}) + if err != nil { + return nil, nil, err + } + + i := new(Issue) + resp, err := s.client.Do(ctx, req, i) + if err != nil { + return nil, resp, err + } + + return i, resp, nil +} + +// LockIssueOptions specifies the optional parameters to the +// IssuesService.Lock method. +type LockIssueOptions struct { + // LockReason specifies the reason to lock this issue. + // Providing a lock reason can help make it clearer to contributors why an issue + // was locked. Possible values are: "off-topic", "too heated", "resolved", and "spam". + LockReason string `json:"lock_reason,omitempty"` +} + +// Lock an issue's conversation. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#lock-an-issue +func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int, opts *LockIssueOptions) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unlock an issue's conversation. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/issues#unlock-an-issue +func (s *IssuesService) Unlock(ctx context.Context, owner string, repo string, number int) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_assignees.go b/vendor/github.com/google/go-github/v56/github/issues_assignees.go new file mode 100644 index 000000000..b7f2e8024 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_assignees.go @@ -0,0 +1,95 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListAssignees fetches all available assignees (owners and collaborators) to +// which issues may be assigned. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/assignees#list-assignees +func (s *IssuesService) ListAssignees(ctx context.Context, owner, repo string, opts *ListOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/assignees", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var assignees []*User + resp, err := s.client.Do(ctx, req, &assignees) + if err != nil { + return nil, resp, err + } + + return assignees, resp, nil +} + +// IsAssignee checks if a user is an assignee for the specified repository. 
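A brief sketch, not part of the vendored code, combining RemoveMilestone and Lock from the block above; the repository and issue number are placeholders and write access is assumed.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // write access assumed

	// Drop the milestone from issue #42, then lock its conversation.
	if _, _, err := client.Issues.RemoveMilestone(ctx, "some-org", "some-repo", 42); err != nil {
		log.Fatal(err)
	}
	if _, err := client.Issues.Lock(ctx, "some-org", "some-repo", 42, &github.LockIssueOptions{
		LockReason: "resolved",
	}); err != nil {
		log.Fatal(err)
	}
	fmt.Println("issue #42 unscheduled and locked")
}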
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/assignees#check-if-a-user-can-be-assigned +func (s *IssuesService) IsAssignee(ctx context.Context, owner, repo, user string) (bool, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/assignees/%v", owner, repo, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + assignee, err := parseBoolResponse(err) + return assignee, resp, err +} + +// AddAssignees adds the provided GitHub users as assignees to the issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/assignees#add-assignees-to-an-issue +func (s *IssuesService) AddAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { + users := &struct { + Assignees []string `json:"assignees,omitempty"` + }{Assignees: assignees} + u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) + req, err := s.client.NewRequest("POST", u, users) + if err != nil { + return nil, nil, err + } + + issue := &Issue{} + resp, err := s.client.Do(ctx, req, issue) + if err != nil { + return nil, resp, err + } + + return issue, resp, nil +} + +// RemoveAssignees removes the provided GitHub users as assignees from the issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/assignees#remove-assignees-from-an-issue +func (s *IssuesService) RemoveAssignees(ctx context.Context, owner, repo string, number int, assignees []string) (*Issue, *Response, error) { + users := &struct { + Assignees []string `json:"assignees,omitempty"` + }{Assignees: assignees} + u := fmt.Sprintf("repos/%v/%v/issues/%v/assignees", owner, repo, number) + req, err := s.client.NewRequest("DELETE", u, users) + if err != nil { + return nil, nil, err + } + + issue := &Issue{} + resp, err := s.client.Do(ctx, req, issue) + if err != nil { + return nil, resp, err + } + + return issue, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_comments.go b/vendor/github.com/google/go-github/v56/github/issues_comments.go new file mode 100644 index 000000000..17881c093 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_comments.go @@ -0,0 +1,154 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "time" +) + +// IssueComment represents a comment left on an issue. +type IssueComment struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Body *string `json:"body,omitempty"` + User *User `json:"user,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + // AuthorAssociation is the comment author's relationship to the issue's repository. + // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". + AuthorAssociation *string `json:"author_association,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + IssueURL *string `json:"issue_url,omitempty"` +} + +func (i IssueComment) String() string { + return Stringify(i) +} + +// IssueListCommentsOptions specifies the optional parameters to the +// IssuesService.ListComments method. 
+type IssueListCommentsOptions struct { + // Sort specifies how to sort comments. Possible values are: created, updated. + Sort *string `url:"sort,omitempty"` + + // Direction in which to sort comments. Possible values are: asc, desc. + Direction *string `url:"direction,omitempty"` + + // Since filters comments by time. + Since *time.Time `url:"since,omitempty"` + + ListOptions +} + +// ListComments lists all comments on the specified issue. Specifying an issue +// number of 0 will return all comments on all issues for the repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#list-issue-comments +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#list-issue-comments-for-a-repository +func (s *IssuesService) ListComments(ctx context.Context, owner string, repo string, number int, opts *IssueListCommentsOptions) ([]*IssueComment, *Response, error) { + var u string + if number == 0 { + u = fmt.Sprintf("repos/%v/%v/issues/comments", owner, repo) + } else { + u = fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var comments []*IssueComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// GetComment fetches the specified issue comment. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#get-an-issue-comment +func (s *IssuesService) GetComment(ctx context.Context, owner string, repo string, commentID int64) (*IssueComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + comment := new(IssueComment) + resp, err := s.client.Do(ctx, req, comment) + if err != nil { + return nil, resp, err + } + + return comment, resp, nil +} + +// CreateComment creates a new comment on the specified issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#create-an-issue-comment +func (s *IssuesService) CreateComment(ctx context.Context, owner string, repo string, number int, comment *IssueComment) (*IssueComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/comments", owner, repo, number) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + c := new(IssueComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// EditComment updates an issue comment. +// A non-nil comment.Body must be provided. Other comment fields should be left nil. 
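A sketch, outside the vendored file, of CreateComment and ListComments as declared above; the repository, issue number, and comment body are placeholders, and the write call needs an authenticated client.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // authenticated in practice

	c, _, err := client.Issues.CreateComment(ctx, "some-org", "some-repo", 42, &github.IssueComment{
		Body: github.String("Thanks, triaged for the next milestone."),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("commented:", c.GetHTMLURL())

	comments, _, err := client.Issues.ListComments(ctx, "some-org", "some-repo", 42, &github.IssueListCommentsOptions{
		Sort:      github.String("created"),
		Direction: github.String("asc"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(comments), "comments on #42")
}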
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#update-an-issue-comment +func (s *IssuesService) EditComment(ctx context.Context, owner string, repo string, commentID int64, comment *IssueComment) (*IssueComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + c := new(IssueComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// DeleteComment deletes an issue comment. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/comments#delete-an-issue-comment +func (s *IssuesService) DeleteComment(ctx context.Context, owner string, repo string, commentID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%d", owner, repo, commentID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_events.go b/vendor/github.com/google/go-github/v56/github/issues_events.go new file mode 100644 index 000000000..0305fca11 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_events.go @@ -0,0 +1,180 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// IssueEvent represents an event that occurred around an Issue or Pull Request. +type IssueEvent struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + + // The User that generated this event. + Actor *User `json:"actor,omitempty"` + + // Event identifies the actual type of Event that occurred. Possible + // values are: + // + // closed + // The Actor closed the issue. + // If the issue was closed by commit message, CommitID holds the SHA1 hash of the commit. + // + // merged + // The Actor merged into master a branch containing a commit mentioning the issue. + // CommitID holds the SHA1 of the merge commit. + // + // referenced + // The Actor committed to master a commit mentioning the issue in its commit message. + // CommitID holds the SHA1 of the commit. + // + // reopened, unlocked + // The Actor did that to the issue. + // + // locked + // The Actor locked the issue. + // LockReason holds the reason of locking the issue (if provided while locking). + // + // renamed + // The Actor changed the issue title from Rename.From to Rename.To. + // + // mentioned + // Someone unspecified @mentioned the Actor [sic] in an issue comment body. + // + // assigned, unassigned + // The Assigner assigned the issue to or removed the assignment from the Assignee. + // + // labeled, unlabeled + // The Actor added or removed the Label from the issue. + // + // milestoned, demilestoned + // The Actor added or removed the issue from the Milestone. + // + // subscribed, unsubscribed + // The Actor subscribed to or unsubscribed from notifications for an issue. + // + // head_ref_deleted, head_ref_restored + // The pull request’s branch was deleted or restored. + // + // review_dismissed + // The review was dismissed and `DismissedReview` will be populated below. + // + // review_requested, review_request_removed + // The Actor requested or removed the request for a review. 
+ // RequestedReviewer or RequestedTeam, and ReviewRequester will be populated below. + // + Event *string `json:"event,omitempty"` + + CreatedAt *Timestamp `json:"created_at,omitempty"` + Issue *Issue `json:"issue,omitempty"` + + // Only present on certain events; see above. + Assignee *User `json:"assignee,omitempty"` + Assigner *User `json:"assigner,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + Label *Label `json:"label,omitempty"` + Rename *Rename `json:"rename,omitempty"` + LockReason *string `json:"lock_reason,omitempty"` + ProjectCard *ProjectCard `json:"project_card,omitempty"` + DismissedReview *DismissedReview `json:"dismissed_review,omitempty"` + RequestedReviewer *User `json:"requested_reviewer,omitempty"` + RequestedTeam *Team `json:"requested_team,omitempty"` + ReviewRequester *User `json:"review_requester,omitempty"` + PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` +} + +// DismissedReview represents details for 'dismissed_review' events. +type DismissedReview struct { + // State represents the state of the dismissed review. + // Possible values are: "commented", "approved", and "changes_requested". + State *string `json:"state,omitempty"` + ReviewID *int64 `json:"review_id,omitempty"` + DismissalMessage *string `json:"dismissal_message,omitempty"` + DismissalCommitID *string `json:"dismissal_commit_id,omitempty"` +} + +// ListIssueEvents lists events for the specified issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/events#list-issue-events +func (s *IssuesService) ListIssueEvents(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*IssueEvent, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%v/events", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeProjectCardDetailsPreview) + + var events []*IssueEvent + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// ListRepositoryEvents lists events for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/events#list-issue-events-for-a-repository +func (s *IssuesService) ListRepositoryEvents(ctx context.Context, owner, repo string, opts *ListOptions) ([]*IssueEvent, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/events", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var events []*IssueEvent + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} + +// GetEvent returns the specified issue event. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/events#get-an-issue-event +func (s *IssuesService) GetEvent(ctx context.Context, owner, repo string, id int64) (*IssueEvent, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/events/%v", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + event := new(IssueEvent) + resp, err := s.client.Do(ctx, req, event) + if err != nil { + return nil, resp, err + } + + return event, resp, nil +} + +// Rename contains details for 'renamed' events. 
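A read-only sketch, separate from the vendored code, of ListIssueEvents from the block above; the repository and issue number are placeholders and the package's generated Get* accessors are assumed.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	events, _, err := client.Issues.ListIssueEvents(ctx, "some-org", "some-repo", 42, &github.ListOptions{PerPage: 100})
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range events {
		fmt.Printf("%s by %s at %v\n", e.GetEvent(), e.GetActor().GetLogin(), e.GetCreatedAt())
	}
}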
+type Rename struct { + From *string `json:"from,omitempty"` + To *string `json:"to,omitempty"` +} + +func (r Rename) String() string { + return Stringify(r) +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_labels.go b/vendor/github.com/google/go-github/v56/github/issues_labels.go new file mode 100644 index 000000000..d0f865c03 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_labels.go @@ -0,0 +1,231 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Label represents a GitHub label on an Issue +type Label struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Color *string `json:"color,omitempty"` + Description *string `json:"description,omitempty"` + Default *bool `json:"default,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +func (l Label) String() string { + return Stringify(l) +} + +// ListLabels lists all labels for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#list-labels-for-a-repository +func (s *IssuesService) ListLabels(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var labels []*Label + resp, err := s.client.Do(ctx, req, &labels) + if err != nil { + return nil, resp, err + } + + return labels, resp, nil +} + +// GetLabel gets a single label. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#get-a-label +func (s *IssuesService) GetLabel(ctx context.Context, owner string, repo string, name string) (*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + label := new(Label) + resp, err := s.client.Do(ctx, req, label) + if err != nil { + return nil, resp, err + } + + return label, resp, nil +} + +// CreateLabel creates a new label on the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#create-a-label +func (s *IssuesService) CreateLabel(ctx context.Context, owner string, repo string, label *Label) (*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/labels", owner, repo) + req, err := s.client.NewRequest("POST", u, label) + if err != nil { + return nil, nil, err + } + + l := new(Label) + resp, err := s.client.Do(ctx, req, l) + if err != nil { + return nil, resp, err + } + + return l, resp, nil +} + +// EditLabel edits a label. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#update-a-label +func (s *IssuesService) EditLabel(ctx context.Context, owner string, repo string, name string, label *Label) (*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) + req, err := s.client.NewRequest("PATCH", u, label) + if err != nil { + return nil, nil, err + } + + l := new(Label) + resp, err := s.client.Do(ctx, req, l) + if err != nil { + return nil, resp, err + } + + return l, resp, nil +} + +// DeleteLabel deletes a label. 
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#delete-a-label +func (s *IssuesService) DeleteLabel(ctx context.Context, owner string, repo string, name string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/labels/%v", owner, repo, name) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// ListLabelsByIssue lists all labels for an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#list-labels-for-an-issue +func (s *IssuesService) ListLabelsByIssue(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var labels []*Label + resp, err := s.client.Do(ctx, req, &labels) + if err != nil { + return nil, resp, err + } + + return labels, resp, nil +} + +// AddLabelsToIssue adds labels to an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#add-labels-to-an-issue +func (s *IssuesService) AddLabelsToIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) + req, err := s.client.NewRequest("POST", u, labels) + if err != nil { + return nil, nil, err + } + + var l []*Label + resp, err := s.client.Do(ctx, req, &l) + if err != nil { + return nil, resp, err + } + + return l, resp, nil +} + +// RemoveLabelForIssue removes a label for an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#remove-a-label-from-an-issue +func (s *IssuesService) RemoveLabelForIssue(ctx context.Context, owner string, repo string, number int, label string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/labels/%v", owner, repo, number, label) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ReplaceLabelsForIssue replaces all labels for an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#set-labels-for-an-issue +func (s *IssuesService) ReplaceLabelsForIssue(ctx context.Context, owner string, repo string, number int, labels []string) ([]*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) + req, err := s.client.NewRequest("PUT", u, labels) + if err != nil { + return nil, nil, err + } + + var l []*Label + resp, err := s.client.Do(ctx, req, &l) + if err != nil { + return nil, resp, err + } + + return l, resp, nil +} + +// RemoveLabelsForIssue removes all labels for an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#remove-all-labels-from-an-issue +func (s *IssuesService) RemoveLabelsForIssue(ctx context.Context, owner string, repo string, number int) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%d/labels", owner, repo, number) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListLabelsForMilestone lists labels for every issue in a milestone. 
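A sketch, not part of the vendored file, pairing CreateLabel with AddLabelsToIssue from the code above; the label name, color, and issue number are placeholders and write access is assumed.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // write access assumed

	// Create the label once per repository...
	if _, _, err := client.Issues.CreateLabel(ctx, "some-org", "some-repo", &github.Label{
		Name:  github.String("needs-triage"),
		Color: github.String("d93f0b"),
	}); err != nil {
		log.Fatal(err)
	}

	// ...then attach it to individual issues as they come in.
	labels, _, err := client.Issues.AddLabelsToIssue(ctx, "some-org", "some-repo", 42, []string{"needs-triage"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("issue #42 now carries", len(labels), "labels")
}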
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/labels#list-labels-for-issues-in-a-milestone +func (s *IssuesService) ListLabelsForMilestone(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*Label, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones/%d/labels", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var labels []*Label + resp, err := s.client.Do(ctx, req, &labels) + if err != nil { + return nil, resp, err + } + + return labels, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_milestones.go b/vendor/github.com/google/go-github/v56/github/issues_milestones.go new file mode 100644 index 000000000..897c7c0b6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_milestones.go @@ -0,0 +1,147 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Milestone represents a GitHub repository milestone. +type Milestone struct { + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + LabelsURL *string `json:"labels_url,omitempty"` + ID *int64 `json:"id,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + Title *string `json:"title,omitempty"` + Description *string `json:"description,omitempty"` + Creator *User `json:"creator,omitempty"` + OpenIssues *int `json:"open_issues,omitempty"` + ClosedIssues *int `json:"closed_issues,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + DueOn *Timestamp `json:"due_on,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +func (m Milestone) String() string { + return Stringify(m) +} + +// MilestoneListOptions specifies the optional parameters to the +// IssuesService.ListMilestones method. +type MilestoneListOptions struct { + // State filters milestones based on their state. Possible values are: + // open, closed, all. Default is "open". + State string `url:"state,omitempty"` + + // Sort specifies how to sort milestones. Possible values are: due_on, completeness. + // Default value is "due_on". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort milestones. Possible values are: asc, desc. + // Default is "asc". + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// ListMilestones lists all milestones for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/milestones#list-milestones +func (s *IssuesService) ListMilestones(ctx context.Context, owner string, repo string, opts *MilestoneListOptions) ([]*Milestone, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var milestones []*Milestone + resp, err := s.client.Do(ctx, req, &milestones) + if err != nil { + return nil, resp, err + } + + return milestones, resp, nil +} + +// GetMilestone gets a single milestone. 
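// Illustrative sketch (not part of the vendored files): listing open milestones
// with ListMilestones and tagging an issue via AddLabelsToIssue, both defined
// above. Assumes an authenticated *github.Client; owner, repo and the issue
// number 42 are placeholders.
func exampleMilestonesAndIssueLabels(ctx context.Context, client *github.Client) error {
	milestones, _, err := client.Issues.ListMilestones(ctx, "my-org", "my-repo",
		&github.MilestoneListOptions{State: "open", Sort: "due_on"})
	if err != nil {
		return err
	}
	for _, m := range milestones {
		fmt.Printf("#%d %s\n", m.GetNumber(), m.GetTitle())
	}
	// Add two labels to issue 42 in a single call.
	_, _, err = client.Issues.AddLabelsToIssue(ctx, "my-org", "my-repo", 42, []string{"bug", "triage"})
	return err
}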
+// +// GitHub API docs: https://docs.github.com/en/rest/issues/milestones#get-a-milestone +func (s *IssuesService) GetMilestone(ctx context.Context, owner string, repo string, number int) (*Milestone, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + milestone := new(Milestone) + resp, err := s.client.Do(ctx, req, milestone) + if err != nil { + return nil, resp, err + } + + return milestone, resp, nil +} + +// CreateMilestone creates a new milestone on the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/milestones#create-a-milestone +func (s *IssuesService) CreateMilestone(ctx context.Context, owner string, repo string, milestone *Milestone) (*Milestone, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones", owner, repo) + req, err := s.client.NewRequest("POST", u, milestone) + if err != nil { + return nil, nil, err + } + + m := new(Milestone) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// EditMilestone edits a milestone. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/milestones#update-a-milestone +func (s *IssuesService) EditMilestone(ctx context.Context, owner string, repo string, number int, milestone *Milestone) (*Milestone, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) + req, err := s.client.NewRequest("PATCH", u, milestone) + if err != nil { + return nil, nil, err + } + + m := new(Milestone) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteMilestone deletes a milestone. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/milestones#delete-a-milestone +func (s *IssuesService) DeleteMilestone(ctx context.Context, owner string, repo string, number int) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/milestones/%d", owner, repo, number) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/issues_timeline.go b/vendor/github.com/google/go-github/v56/github/issues_timeline.go new file mode 100644 index 000000000..8ac02ac14 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/issues_timeline.go @@ -0,0 +1,193 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strings" +) + +// Timeline represents an event that occurred around an Issue or Pull Request. +// +// It is similar to an IssueEvent but may contain more information. +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/events/issue-event-types +type Timeline struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + CommitURL *string `json:"commit_url,omitempty"` + + // The User object that generated the event. + Actor *User `json:"actor,omitempty"` + + // The person who commented on the issue. + User *User `json:"user,omitempty"` + + // The person who authored the commit. + Author *CommitAuthor `json:"author,omitempty"` + // The person who committed the commit on behalf of the author. 
+ Committer *CommitAuthor `json:"committer,omitempty"` + // The SHA of the commit in the pull request. + SHA *string `json:"sha,omitempty"` + // The commit message. + Message *string `json:"message,omitempty"` + // A list of parent commits. + Parents []*Commit `json:"parents,omitempty"` + + // Event identifies the actual type of Event that occurred. Possible values + // are: + // + // assigned + // The issue was assigned to the assignee. + // + // closed + // The issue was closed by the actor. When the commit_id is present, it + // identifies the commit that closed the issue using "closes / fixes #NN" + // syntax. + // + // commented + // A comment was added to the issue. + // + // committed + // A commit was added to the pull request's 'HEAD' branch. Only provided + // for pull requests. + // + // cross-referenced + // The issue was referenced from another issue. The 'source' attribute + // contains the 'id', 'actor', and 'url' of the reference's source. + // + // demilestoned + // The issue was removed from a milestone. + // + // head_ref_deleted + // The pull request's branch was deleted. + // + // head_ref_restored + // The pull request's branch was restored. + // + // labeled + // A label was added to the issue. + // + // locked + // The issue was locked by the actor. + // + // mentioned + // The actor was @mentioned in an issue body. + // + // merged + // The issue was merged by the actor. The 'commit_id' attribute is the + // SHA1 of the HEAD commit that was merged. + // + // milestoned + // The issue was added to a milestone. + // + // referenced + // The issue was referenced from a commit message. The 'commit_id' + // attribute is the commit SHA1 of where that happened. + // + // renamed + // The issue title was changed. + // + // reopened + // The issue was reopened by the actor. + // + // reviewed + // The pull request was reviewed. + // + // subscribed + // The actor subscribed to receive notifications for an issue. + // + // unassigned + // The assignee was unassigned from the issue. + // + // unlabeled + // A label was removed from the issue. + // + // unlocked + // The issue was unlocked by the actor. + // + // unsubscribed + // The actor unsubscribed to stop receiving notifications for an issue. + // + Event *string `json:"event,omitempty"` + + // The string SHA of a commit that referenced this Issue or Pull Request. + CommitID *string `json:"commit_id,omitempty"` + // The timestamp indicating when the event occurred. + CreatedAt *Timestamp `json:"created_at,omitempty"` + // The Label object including `name` and `color` attributes. Only provided for + // 'labeled' and 'unlabeled' events. + Label *Label `json:"label,omitempty"` + // The User object which was assigned to (or unassigned from) this Issue or + // Pull Request. Only provided for 'assigned' and 'unassigned' events. + Assignee *User `json:"assignee,omitempty"` + Assigner *User `json:"assigner,omitempty"` + + // The Milestone object including a 'title' attribute. + // Only provided for 'milestoned' and 'demilestoned' events. + Milestone *Milestone `json:"milestone,omitempty"` + // The 'id', 'actor', and 'url' for the source of a reference from another issue. + // Only provided for 'cross-referenced' events. + Source *Source `json:"source,omitempty"` + // An object containing rename details including 'from' and 'to' attributes. + // Only provided for 'renamed' events. + Rename *Rename `json:"rename,omitempty"` + ProjectCard *ProjectCard `json:"project_card,omitempty"` + // The state of a submitted review. 
Can be one of: 'commented', + // 'changes_requested' or 'approved'. + // Only provided for 'reviewed' events. + State *string `json:"state,omitempty"` + + // The person requested to review the pull request. + Reviewer *User `json:"requested_reviewer,omitempty"` + // RequestedTeam contains the team requested to review the pull request. + RequestedTeam *Team `json:"requested_team,omitempty"` + // The person who requested a review. + Requester *User `json:"review_requester,omitempty"` + + // The review summary text. + Body *string `json:"body,omitempty"` + SubmittedAt *Timestamp `json:"submitted_at,omitempty"` + + PerformedViaGithubApp *App `json:"performed_via_github_app,omitempty"` +} + +// Source represents a reference's source. +type Source struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Actor *User `json:"actor,omitempty"` + Type *string `json:"type,omitempty"` + Issue *Issue `json:"issue,omitempty"` +} + +// ListIssueTimeline lists events for the specified issue. +// +// GitHub API docs: https://docs.github.com/en/rest/issues/timeline#list-timeline-events-for-an-issue +func (s *IssuesService) ListIssueTimeline(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Timeline, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%v/timeline", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeTimelinePreview, mediaTypeProjectCardDetailsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var events []*Timeline + resp, err := s.client.Do(ctx, req, &events) + if err != nil { + return nil, resp, err + } + + return events, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/licenses.go b/vendor/github.com/google/go-github/v56/github/licenses.go new file mode 100644 index 000000000..0877b6d18 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/licenses.go @@ -0,0 +1,97 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// LicensesService handles communication with the license related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/licenses/ +type LicensesService service + +// RepositoryLicense represents the license for a repository. +type RepositoryLicense struct { + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + + SHA *string `json:"sha,omitempty"` + Size *int `json:"size,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + DownloadURL *string `json:"download_url,omitempty"` + Type *string `json:"type,omitempty"` + Content *string `json:"content,omitempty"` + Encoding *string `json:"encoding,omitempty"` + License *License `json:"license,omitempty"` +} + +func (l RepositoryLicense) String() string { + return Stringify(l) +} + +// License represents an open source license. 
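// Illustrative sketch (not part of the vendored files): walking an issue's
// timeline with ListIssueTimeline as defined above. Assumes an authenticated
// *github.Client; owner, repo and the issue number 42 are placeholders.
func exampleIssueTimeline(ctx context.Context, client *github.Client) error {
	events, _, err := client.Issues.ListIssueTimeline(ctx, "my-org", "my-repo", 42,
		&github.ListOptions{PerPage: 100})
	if err != nil {
		return err
	}
	for _, ev := range events {
		fmt.Printf("%v: %v\n", ev.GetCreatedAt(), ev.GetEvent())
	}
	return nil
}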
+type License struct { + Key *string `json:"key,omitempty"` + Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` + + SPDXID *string `json:"spdx_id,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Featured *bool `json:"featured,omitempty"` + Description *string `json:"description,omitempty"` + Implementation *string `json:"implementation,omitempty"` + Permissions *[]string `json:"permissions,omitempty"` + Conditions *[]string `json:"conditions,omitempty"` + Limitations *[]string `json:"limitations,omitempty"` + Body *string `json:"body,omitempty"` +} + +func (l License) String() string { + return Stringify(l) +} + +// List popular open source licenses. +// +// GitHub API docs: https://docs.github.com/en/rest/licenses/#list-all-licenses +func (s *LicensesService) List(ctx context.Context) ([]*License, *Response, error) { + req, err := s.client.NewRequest("GET", "licenses", nil) + if err != nil { + return nil, nil, err + } + + var licenses []*License + resp, err := s.client.Do(ctx, req, &licenses) + if err != nil { + return nil, resp, err + } + + return licenses, resp, nil +} + +// Get extended metadata for one license. +// +// GitHub API docs: https://docs.github.com/en/rest/licenses#get-a-license +func (s *LicensesService) Get(ctx context.Context, licenseName string) (*License, *Response, error) { + u := fmt.Sprintf("licenses/%s", licenseName) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + license := new(License) + resp, err := s.client.Do(ctx, req, license) + if err != nil { + return nil, resp, err + } + + return license, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/markdown.go b/vendor/github.com/google/go-github/v56/github/markdown.go new file mode 100644 index 000000000..48b445b3d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/markdown.go @@ -0,0 +1,67 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" +) + +// MarkdownService provides access to markdown-related functions in the GitHub API. +type MarkdownService service + +// MarkdownOptions specifies optional parameters to the Render method. +type MarkdownOptions struct { + // Mode identifies the rendering mode. Possible values are: + // markdown - render a document as plain Render, just like + // README files are rendered. + // + // gfm - to render a document as user-content, e.g. like user + // comments or issues are rendered. In GFM mode, hard line breaks are + // always taken into account, and issue and user mentions are linked + // accordingly. + // + // Default is "markdown". + Mode string + + // Context identifies the repository context. Only taken into account + // when rendering as "gfm". + Context string +} + +type markdownRenderRequest struct { + Text *string `json:"text,omitempty"` + Mode *string `json:"mode,omitempty"` + Context *string `json:"context,omitempty"` +} + +// Render renders an arbitrary Render document. 
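// Illustrative sketch (not part of the vendored files): fetching license
// metadata with the LicensesService defined above. Assumes a *github.Client;
// "mit" is an example license key.
func exampleLicenses(ctx context.Context, client *github.Client) error {
	licenses, _, err := client.Licenses.List(ctx)
	if err != nil {
		return err
	}
	for _, l := range licenses {
		fmt.Printf("%s: %s\n", l.GetKey(), l.GetName())
	}
	mit, _, err := client.Licenses.Get(ctx, "mit")
	if err != nil {
		return err
	}
	fmt.Println(mit.GetSPDXID())
	return nil
}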
+// +// GitHub API docs: https://docs.github.com/rest/markdown/markdown#render-a-markdown-document +func (s *MarkdownService) Render(ctx context.Context, text string, opts *MarkdownOptions) (string, *Response, error) { + request := &markdownRenderRequest{Text: String(text)} + if opts != nil { + if opts.Mode != "" { + request.Mode = String(opts.Mode) + } + if opts.Context != "" { + request.Context = String(opts.Context) + } + } + + req, err := s.client.NewRequest("POST", "markdown", request) + if err != nil { + return "", nil, err + } + + buf := new(bytes.Buffer) + resp, err := s.client.Do(ctx, req, buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/messages.go b/vendor/github.com/google/go-github/v56/github/messages.go new file mode 100644 index 000000000..c16b60152 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/messages.go @@ -0,0 +1,352 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file provides functions for validating payloads from GitHub Webhooks. +// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github + +package github + +import ( + "crypto/hmac" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "hash" + "io" + "mime" + "net/http" + "net/url" + "reflect" + "sort" + "strings" +) + +const ( + // sha1Prefix is the prefix used by GitHub before the HMAC hexdigest. + sha1Prefix = "sha1" + // sha256Prefix and sha512Prefix are provided for future compatibility. + sha256Prefix = "sha256" + sha512Prefix = "sha512" + // SHA1SignatureHeader is the GitHub header key used to pass the HMAC-SHA1 hexdigest. + SHA1SignatureHeader = "X-Hub-Signature" + // SHA256SignatureHeader is the GitHub header key used to pass the HMAC-SHA256 hexdigest. + SHA256SignatureHeader = "X-Hub-Signature-256" + // EventTypeHeader is the GitHub header key used to pass the event type. + EventTypeHeader = "X-Github-Event" + // DeliveryIDHeader is the GitHub header key used to pass the unique ID for the webhook event. + DeliveryIDHeader = "X-Github-Delivery" +) + +var ( + // eventTypeMapping maps webhooks types to their corresponding go-github struct types. 
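// Illustrative sketch (not part of the vendored files): rendering GitHub
// Flavored Markdown with MarkdownService.Render as defined above. Assumes the
// service is reachable as client.Markdown on a *github.Client; the repository
// context "my-org/my-repo" is a placeholder.
func exampleRenderMarkdown(ctx context.Context, client *github.Client) (string, error) {
	opts := &github.MarkdownOptions{
		Mode:    "gfm",            // render as user content, resolving #123 and @mentions
		Context: "my-org/my-repo", // repository used to resolve those references
	}
	html, _, err := client.Markdown.Render(ctx, "Fixes #123, thanks @octocat!", opts)
	if err != nil {
		return "", err
	}
	return html, nil
}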
+ eventTypeMapping = map[string]interface{}{ + "branch_protection_rule": &BranchProtectionRuleEvent{}, + "check_run": &CheckRunEvent{}, + "check_suite": &CheckSuiteEvent{}, + "code_scanning_alert": &CodeScanningAlertEvent{}, + "commit_comment": &CommitCommentEvent{}, + "content_reference": &ContentReferenceEvent{}, + "create": &CreateEvent{}, + "delete": &DeleteEvent{}, + "dependabot_alert": &DependabotAlertEvent{}, + "deploy_key": &DeployKeyEvent{}, + "deployment": &DeploymentEvent{}, + "deployment_status": &DeploymentStatusEvent{}, + "deployment_protection_rule": &DeploymentProtectionRuleEvent{}, + "discussion": &DiscussionEvent{}, + "discussion_comment": &DiscussionCommentEvent{}, + "fork": &ForkEvent{}, + "github_app_authorization": &GitHubAppAuthorizationEvent{}, + "gollum": &GollumEvent{}, + "installation": &InstallationEvent{}, + "installation_repositories": &InstallationRepositoriesEvent{}, + "installation_target": &InstallationTargetEvent{}, + "issue_comment": &IssueCommentEvent{}, + "issues": &IssuesEvent{}, + "label": &LabelEvent{}, + "marketplace_purchase": &MarketplacePurchaseEvent{}, + "member": &MemberEvent{}, + "membership": &MembershipEvent{}, + "merge_group": &MergeGroupEvent{}, + "meta": &MetaEvent{}, + "milestone": &MilestoneEvent{}, + "organization": &OrganizationEvent{}, + "org_block": &OrgBlockEvent{}, + "package": &PackageEvent{}, + "page_build": &PageBuildEvent{}, + "personal_access_token_request": &PersonalAccessTokenRequestEvent{}, + "ping": &PingEvent{}, + "project": &ProjectEvent{}, + "project_card": &ProjectCardEvent{}, + "project_column": &ProjectColumnEvent{}, + "projects_v2": &ProjectV2Event{}, + "projects_v2_item": &ProjectV2ItemEvent{}, + "public": &PublicEvent{}, + "pull_request": &PullRequestEvent{}, + "pull_request_review": &PullRequestReviewEvent{}, + "pull_request_review_comment": &PullRequestReviewCommentEvent{}, + "pull_request_review_thread": &PullRequestReviewThreadEvent{}, + "pull_request_target": &PullRequestTargetEvent{}, + "push": &PushEvent{}, + "repository": &RepositoryEvent{}, + "repository_dispatch": &RepositoryDispatchEvent{}, + "repository_import": &RepositoryImportEvent{}, + "repository_vulnerability_alert": &RepositoryVulnerabilityAlertEvent{}, + "release": &ReleaseEvent{}, + "secret_scanning_alert": &SecretScanningAlertEvent{}, + "security_advisory": &SecurityAdvisoryEvent{}, + "security_and_analysis": &SecurityAndAnalysisEvent{}, + "star": &StarEvent{}, + "status": &StatusEvent{}, + "team": &TeamEvent{}, + "team_add": &TeamAddEvent{}, + "user": &UserEvent{}, + "watch": &WatchEvent{}, + "workflow_dispatch": &WorkflowDispatchEvent{}, + "workflow_job": &WorkflowJobEvent{}, + "workflow_run": &WorkflowRunEvent{}, + } + // forward mapping of event types to the string names of the structs + messageToTypeName = make(map[string]string, len(eventTypeMapping)) + // Inverse map of the above + typeToMessageMapping = make(map[string]string, len(eventTypeMapping)) +) + +func init() { + for k, v := range eventTypeMapping { + typename := reflect.TypeOf(v).Elem().Name() + messageToTypeName[k] = typename + typeToMessageMapping[typename] = k + } +} + +// genMAC generates the HMAC signature for a message provided the secret key +// and hashFunc. +func genMAC(message, key []byte, hashFunc func() hash.Hash) []byte { + mac := hmac.New(hashFunc, key) + mac.Write(message) + return mac.Sum(nil) +} + +// checkMAC reports whether messageMAC is a valid HMAC tag for message. 
+func checkMAC(message, messageMAC, key []byte, hashFunc func() hash.Hash) bool { + expectedMAC := genMAC(message, key, hashFunc) + return hmac.Equal(messageMAC, expectedMAC) +} + +// messageMAC returns the hex-decoded HMAC tag from the signature and its +// corresponding hash function. +func messageMAC(signature string) ([]byte, func() hash.Hash, error) { + if signature == "" { + return nil, nil, errors.New("missing signature") + } + sigParts := strings.SplitN(signature, "=", 2) + if len(sigParts) != 2 { + return nil, nil, fmt.Errorf("error parsing signature %q", signature) + } + + var hashFunc func() hash.Hash + switch sigParts[0] { + case sha1Prefix: + hashFunc = sha1.New + case sha256Prefix: + hashFunc = sha256.New + case sha512Prefix: + hashFunc = sha512.New + default: + return nil, nil, fmt.Errorf("unknown hash type prefix: %q", sigParts[0]) + } + + buf, err := hex.DecodeString(sigParts[1]) + if err != nil { + return nil, nil, fmt.Errorf("error decoding signature %q: %v", signature, err) + } + return buf, hashFunc, nil +} + +// ValidatePayloadFromBody validates an incoming GitHub Webhook event request body +// and returns the (JSON) payload. +// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". +// If the Content-Type is neither then an error is returned. +// secretToken is the GitHub Webhook secret token. +// If your webhook does not contain a secret token, you can pass an empty secretToken. +// Webhooks without a secret token are not secure and should be avoided. +// +// Example usage: +// +// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { +// // read signature from request +// signature := "" +// payload, err := github.ValidatePayloadFromBody(r.Header.Get("Content-Type"), r.Body, signature, s.webhookSecretKey) +// if err != nil { ... } +// // Process payload... +// } +func ValidatePayloadFromBody(contentType string, readable io.Reader, signature string, secretToken []byte) (payload []byte, err error) { + var body []byte // Raw body that GitHub uses to calculate the signature. + + switch contentType { + case "application/json": + var err error + if body, err = io.ReadAll(readable); err != nil { + return nil, err + } + + // If the content type is application/json, + // the JSON payload is just the original body. + payload = body + + case "application/x-www-form-urlencoded": + // payloadFormParam is the name of the form parameter that the JSON payload + // will be in if a webhook has its content type set to application/x-www-form-urlencoded. + const payloadFormParam = "payload" + + var err error + if body, err = io.ReadAll(readable); err != nil { + return nil, err + } + + // If the content type is application/x-www-form-urlencoded, + // the JSON payload will be under the "payload" form param. + form, err := url.ParseQuery(string(body)) + if err != nil { + return nil, err + } + payload = []byte(form.Get(payloadFormParam)) + + default: + return nil, fmt.Errorf("webhook request has unsupported Content-Type %q", contentType) + } + + // Validate the signature if present or if one is expected (secretToken is non-empty). + if len(secretToken) > 0 || len(signature) > 0 { + if err := ValidateSignature(signature, body, secretToken); err != nil { + return nil, err + } + } + + return payload, nil +} + +// ValidatePayload validates an incoming GitHub Webhook event request +// and returns the (JSON) payload. 
+// The Content-Type header of the payload can be "application/json" or "application/x-www-form-urlencoded". +// If the Content-Type is neither then an error is returned. +// secretToken is the GitHub Webhook secret token. +// If your webhook does not contain a secret token, you can pass nil or an empty slice. +// This is intended for local development purposes only and all webhooks should ideally set up a secret token. +// +// Example usage: +// +// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { +// payload, err := github.ValidatePayload(r, s.webhookSecretKey) +// if err != nil { ... } +// // Process payload... +// } +func ValidatePayload(r *http.Request, secretToken []byte) (payload []byte, err error) { + signature := r.Header.Get(SHA256SignatureHeader) + if signature == "" { + signature = r.Header.Get(SHA1SignatureHeader) + } + + contentType, _, err := mime.ParseMediaType(r.Header.Get("Content-Type")) + if err != nil { + return nil, err + } + + return ValidatePayloadFromBody(contentType, r.Body, signature, secretToken) +} + +// ValidateSignature validates the signature for the given payload. +// signature is the GitHub hash signature delivered in the X-Hub-Signature header. +// payload is the JSON payload sent by GitHub Webhooks. +// secretToken is the GitHub Webhook secret token. +// +// GitHub API docs: https://developer.github.com/webhooks/securing/#validating-payloads-from-github +func ValidateSignature(signature string, payload, secretToken []byte) error { + messageMAC, hashFunc, err := messageMAC(signature) + if err != nil { + return err + } + if !checkMAC(payload, messageMAC, secretToken, hashFunc) { + return errors.New("payload signature check failed") + } + return nil +} + +// WebHookType returns the event type of webhook request r. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types +func WebHookType(r *http.Request) string { + return r.Header.Get(EventTypeHeader) +} + +// DeliveryID returns the unique delivery ID of webhook request r. +// +// GitHub API docs: https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types +func DeliveryID(r *http.Request) string { + return r.Header.Get(DeliveryIDHeader) +} + +// ParseWebHook parses the event payload. For recognized event types, a +// value of the corresponding struct type will be returned (as returned +// by Event.ParsePayload()). An error will be returned for unrecognized event +// types. +// +// Example usage: +// +// func (s *GitHubEventMonitor) ServeHTTP(w http.ResponseWriter, r *http.Request) { +// payload, err := github.ValidatePayload(r, s.webhookSecretKey) +// if err != nil { ... } +// event, err := github.ParseWebHook(github.WebHookType(r), payload) +// if err != nil { ... } +// switch event := event.(type) { +// case *github.CommitCommentEvent: +// processCommitCommentEvent(event) +// case *github.CreateEvent: +// processCreateEvent(event) +// ... +// } +// } +func ParseWebHook(messageType string, payload []byte) (interface{}, error) { + eventType, ok := messageToTypeName[messageType] + if !ok { + return nil, fmt.Errorf("unknown X-Github-Event in message: %v", messageType) + } + + event := Event{ + Type: &eventType, + RawPayload: (*json.RawMessage)(&payload), + } + return event.ParsePayload() +} + +// WebhookTypes returns a sorted list of all the known GitHub event type strings +// supported by go-github. 
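// Illustrative sketch (not part of the vendored files): a minimal HTTP webhook
// handler built from ValidatePayload, WebHookType and ParseWebHook as defined
// above. Assumes the standard "net/http" and "log" imports; the secret is
// supplied by the caller's own configuration.
func exampleWebhookHandler(secret []byte) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		payload, err := github.ValidatePayload(r, secret)
		if err != nil {
			http.Error(w, "invalid signature", http.StatusUnauthorized)
			return
		}
		event, err := github.ParseWebHook(github.WebHookType(r), payload)
		if err != nil {
			http.Error(w, "unsupported event type", http.StatusBadRequest)
			return
		}
		switch e := event.(type) {
		case *github.PushEvent:
			log.Printf("push to %s by %s", e.GetRepo().GetFullName(), e.GetSender().GetLogin())
		case *github.PullRequestEvent:
			log.Printf("pull request #%d %s", e.GetNumber(), e.GetAction())
		default:
			log.Printf("ignoring %T", e)
		}
		w.WriteHeader(http.StatusNoContent)
	})
}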
+func MessageTypes() []string { + types := make([]string, 0, len(eventTypeMapping)) + for t := range eventTypeMapping { + types = append(types, t) + } + sort.Strings(types) + return types +} + +// EventForType returns an empty struct matching the specified GitHub event type. +// If messageType does not match any known event types, it returns nil. +func EventForType(messageType string) interface{} { + prototype := eventTypeMapping[messageType] + if prototype == nil { + return nil + } + // return a _copy_ of the pointed-to-object. Unfortunately, for this we + // need to use reflection. If we store the actual objects in the map, + // we still need to use reflection to convert from `any` to the actual + // type, so this was deemed the lesser of two evils. (#2865) + return reflect.New(reflect.TypeOf(prototype).Elem()).Interface() +} diff --git a/vendor/github.com/google/go-github/v56/github/meta.go b/vendor/github.com/google/go-github/v56/github/meta.go new file mode 100644 index 000000000..a4d9bac77 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/meta.go @@ -0,0 +1,146 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "fmt" + "net/url" +) + +// MetaService provides access to functions in the GitHub API that GitHub categorizes as "meta". +type MetaService service + +// APIMeta represents metadata about the GitHub API. +type APIMeta struct { + // An Array of IP addresses in CIDR format specifying the addresses + // that incoming service hooks will originate from on GitHub.com. + Hooks []string `json:"hooks,omitempty"` + + // An Array of IP addresses in CIDR format specifying the Git servers + // for GitHub.com. + Git []string `json:"git,omitempty"` + + // Whether authentication with username and password is supported. + // (GitHub Enterprise instances using CAS or OAuth for authentication + // will return false. Features like Basic Authentication with a + // username and password, sudo mode, and two-factor authentication are + // not supported on these servers.) + VerifiablePasswordAuthentication *bool `json:"verifiable_password_authentication,omitempty"` + + // An array of IP addresses in CIDR format specifying the addresses + // which serve GitHub Pages websites. + Pages []string `json:"pages,omitempty"` + + // An Array of IP addresses specifying the addresses that source imports + // will originate from on GitHub.com. + Importer []string `json:"importer,omitempty"` + + // An array of IP addresses in CIDR format specifying the IP addresses + // GitHub Actions will originate from. + Actions []string `json:"actions,omitempty"` + + // An array of IP addresses in CIDR format specifying the IP addresses + // Dependabot will originate from. + Dependabot []string `json:"dependabot,omitempty"` + + // A map of algorithms to SSH key fingerprints. + SSHKeyFingerprints map[string]string `json:"ssh_key_fingerprints,omitempty"` + + // An array of SSH keys. + SSHKeys []string `json:"ssh_keys,omitempty"` + + // An array of IP addresses in CIDR format specifying the addresses + // which serve GitHub websites. + Web []string `json:"web,omitempty"` + + // An array of IP addresses in CIDR format specifying the addresses + // which serve GitHub APIs. + API []string `json:"api,omitempty"` +} + +// Get returns information about GitHub.com, the service. 
Or, if you access +// this endpoint on your organization’s GitHub Enterprise installation, this +// endpoint provides information about that installation. +// +// GitHub API docs: https://docs.github.com/rest/meta/meta#get-github-meta-information +func (s *MetaService) Get(ctx context.Context) (*APIMeta, *Response, error) { + req, err := s.client.NewRequest("GET", "meta", nil) + if err != nil { + return nil, nil, err + } + + meta := new(APIMeta) + resp, err := s.client.Do(ctx, req, meta) + if err != nil { + return nil, resp, err + } + + return meta, resp, nil +} + +// APIMeta +// Deprecated: Use MetaService.Get instead. +func (c *Client) APIMeta(ctx context.Context) (*APIMeta, *Response, error) { + return c.Meta.Get(ctx) +} + +// Octocat returns an ASCII art octocat with the specified message in a speech +// bubble. If message is empty, a random zen phrase is used. +// +// GitHub API docs: https://docs.github.com/rest/meta/meta#get-octocat +func (s *MetaService) Octocat(ctx context.Context, message string) (string, *Response, error) { + u := "octocat" + if message != "" { + u = fmt.Sprintf("%s?s=%s", u, url.QueryEscape(message)) + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + + buf := new(bytes.Buffer) + resp, err := s.client.Do(ctx, req, buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// Octocat +// Deprecated: Use MetaService.Octocat instead. +func (c *Client) Octocat(ctx context.Context, message string) (string, *Response, error) { + return c.Meta.Octocat(ctx, message) +} + +// Zen returns a random line from The Zen of GitHub. +// +// See also: http://warpspire.com/posts/taste/ +// +// GitHub API docs: https://docs.github.com/rest/meta/meta#get-the-zen-of-github +func (s *MetaService) Zen(ctx context.Context) (string, *Response, error) { + req, err := s.client.NewRequest("GET", "zen", nil) + if err != nil { + return "", nil, err + } + + buf := new(bytes.Buffer) + resp, err := s.client.Do(ctx, req, buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// Zen +// Deprecated: Use MetaService.Zen instead. +func (c *Client) Zen(ctx context.Context) (string, *Response, error) { + return c.Meta.Zen(ctx) +} diff --git a/vendor/github.com/google/go-github/v56/github/migrations.go b/vendor/github.com/google/go-github/v56/github/migrations.go new file mode 100644 index 000000000..67989c078 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/migrations.go @@ -0,0 +1,228 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" +) + +// MigrationService provides access to the migration related functions +// in the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/migration/ +type MigrationService service + +// Migration represents a GitHub migration (archival). +type Migration struct { + ID *int64 `json:"id,omitempty"` + GUID *string `json:"guid,omitempty"` + // State is the current state of a migration. + // Possible values are: + // "pending" which means the migration hasn't started yet, + // "exporting" which means the migration is in progress, + // "exported" which means the migration finished successfully, or + // "failed" which means the migration failed. 
+ State *string `json:"state,omitempty"` + // LockRepositories indicates whether repositories are locked (to prevent + // manipulation) while migrating data. + LockRepositories *bool `json:"lock_repositories,omitempty"` + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` + URL *string `json:"url,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + UpdatedAt *string `json:"updated_at,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +func (m Migration) String() string { + return Stringify(m) +} + +// MigrationOptions specifies the optional parameters to Migration methods. +type MigrationOptions struct { + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories bool + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments bool +} + +// startMigration represents the body of a StartMigration request. +type startMigration struct { + // Repositories is a slice of repository names to migrate. + Repositories []string `json:"repositories,omitempty"` + + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories *bool `json:"lock_repositories,omitempty"` + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` +} + +// StartMigration starts the generation of a migration archive. +// repos is a slice of repository names to migrate. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#start-an-organization-migration +func (s *MigrationService) StartMigration(ctx context.Context, org string, repos []string, opts *MigrationOptions) (*Migration, *Response, error) { + u := fmt.Sprintf("orgs/%v/migrations", org) + + body := &startMigration{Repositories: repos} + if opts != nil { + body.LockRepositories = Bool(opts.LockRepositories) + body.ExcludeAttachments = Bool(opts.ExcludeAttachments) + } + + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &Migration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// ListMigrations lists the most recent migrations. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#list-organization-migrations +func (s *MigrationService) ListMigrations(ctx context.Context, org string, opts *ListOptions) ([]*Migration, *Response, error) { + u := fmt.Sprintf("orgs/%v/migrations", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + var m []*Migration + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// MigrationStatus gets the status of a specific migration archive. 
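// Illustrative sketch (not part of the vendored files): starting an
// organization migration and listing recent ones with StartMigration and
// ListMigrations as defined above. Assumes an authenticated *github.Client;
// the organization and repository names are placeholders.
func exampleOrgMigration(ctx context.Context, client *github.Client) error {
	m, _, err := client.Migrations.StartMigration(ctx, "my-org",
		[]string{"my-org/service-a", "my-org/service-b"},
		&github.MigrationOptions{LockRepositories: false, ExcludeAttachments: true})
	if err != nil {
		return err
	}
	fmt.Printf("migration %d is %s\n", m.GetID(), m.GetState())

	recent, _, err := client.Migrations.ListMigrations(ctx, "my-org", &github.ListOptions{PerPage: 10})
	if err != nil {
		return err
	}
	fmt.Printf("%d recent migrations\n", len(recent))
	return nil
}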
+// id is the migration ID. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#get-an-organization-migration-status +func (s *MigrationService) MigrationStatus(ctx context.Context, org string, id int64) (*Migration, *Response, error) { + u := fmt.Sprintf("orgs/%v/migrations/%v", org, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &Migration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// MigrationArchiveURL fetches a migration archive URL. +// id is the migration ID. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#download-an-organization-migration-archive +func (s *MigrationService) MigrationArchiveURL(ctx context.Context, org string, id int64) (url string, err error) { + u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + s.client.clientMu.Lock() + defer s.client.clientMu.Unlock() + + // Disable the redirect mechanism because AWS fails if the GitHub auth token is provided. + var loc string + saveRedirect := s.client.client.CheckRedirect + s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + loc = req.URL.String() + return errors.New("disable redirect") + } + defer func() { s.client.client.CheckRedirect = saveRedirect }() + + _, err = s.client.Do(ctx, req, nil) // expect error from disable redirect + if err == nil { + return "", errors.New("expected redirect, none provided") + } + if !strings.Contains(err.Error(), "disable redirect") { + return "", err + } + return loc, nil +} + +// DeleteMigration deletes a previous migration archive. +// id is the migration ID. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#delete-an-organization-migration-archive +func (s *MigrationService) DeleteMigration(ctx context.Context, org string, id int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/migrations/%v/archive", org, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} + +// UnlockRepo unlocks a repository that was locked for migration. +// id is the migration ID. +// You should unlock each migrated repository and delete them when the migration +// is complete and you no longer need the source data. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/orgs#unlock-an-organization-repository +func (s *MigrationService) UnlockRepo(ctx context.Context, org string, id int64, repo string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/migrations/%v/repos/%v/lock", org, id, repo) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/migrations_source_import.go b/vendor/github.com/google/go-github/v56/github/migrations_source_import.go new file mode 100644 index 000000000..74a04b22a --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/migrations_source_import.go @@ -0,0 +1,305 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Import represents a repository import request. +type Import struct { + // The URL of the originating repository. + VCSURL *string `json:"vcs_url,omitempty"` + // The originating VCS type. Can be one of 'subversion', 'git', + // 'mercurial', or 'tfvc'. Without this parameter, the import job will + // take additional time to detect the VCS type before beginning the + // import. This detection step will be reflected in the response. + VCS *string `json:"vcs,omitempty"` + // VCSUsername and VCSPassword are only used for StartImport calls that + // are importing a password-protected repository. + VCSUsername *string `json:"vcs_username,omitempty"` + VCSPassword *string `json:"vcs_password,omitempty"` + // For a tfvc import, the name of the project that is being imported. + TFVCProject *string `json:"tfvc_project,omitempty"` + + // LFS related fields that may be preset in the Import Progress response + + // Describes whether the import has been opted in or out of using Git + // LFS. The value can be 'opt_in', 'opt_out', or 'undecided' if no + // action has been taken. + UseLFS *string `json:"use_lfs,omitempty"` + // Describes whether files larger than 100MB were found during the + // importing step. + HasLargeFiles *bool `json:"has_large_files,omitempty"` + // The total size in gigabytes of files larger than 100MB found in the + // originating repository. + LargeFilesSize *int `json:"large_files_size,omitempty"` + // The total number of files larger than 100MB found in the originating + // repository. To see a list of these files, call LargeFiles. + LargeFilesCount *int `json:"large_files_count,omitempty"` + + // Identifies the current status of an import. An import that does not + // have errors will progress through these steps: + // + // detecting - the "detection" step of the import is in progress + // because the request did not include a VCS parameter. The + // import is identifying the type of source control present at + // the URL. + // importing - the "raw" step of the import is in progress. This is + // where commit data is fetched from the original repository. + // The import progress response will include CommitCount (the + // total number of raw commits that will be imported) and + // Percent (0 - 100, the current progress through the import). + // mapping - the "rewrite" step of the import is in progress. This + // is where SVN branches are converted to Git branches, and + // where author updates are applied. The import progress + // response does not include progress information. + // pushing - the "push" step of the import is in progress. This is + // where the importer updates the repository on GitHub. The + // import progress response will include PushPercent, which is + // the percent value reported by git push when it is "Writing + // objects". + // complete - the import is complete, and the repository is ready + // on GitHub. 
+ // + // If there are problems, you will see one of these in the status field: + // + // auth_failed - the import requires authentication in order to + // connect to the original repository. Make an UpdateImport + // request, and include VCSUsername and VCSPassword. + // error - the import encountered an error. The import progress + // response will include the FailedStep and an error message. + // Contact GitHub support for more information. + // detection_needs_auth - the importer requires authentication for + // the originating repository to continue detection. Make an + // UpdatImport request, and include VCSUsername and + // VCSPassword. + // detection_found_nothing - the importer didn't recognize any + // source control at the URL. + // detection_found_multiple - the importer found several projects + // or repositories at the provided URL. When this is the case, + // the Import Progress response will also include a + // ProjectChoices field with the possible project choices as + // values. Make an UpdateImport request, and include VCS and + // (if applicable) TFVCProject. + Status *string `json:"status,omitempty"` + CommitCount *int `json:"commit_count,omitempty"` + StatusText *string `json:"status_text,omitempty"` + AuthorsCount *int `json:"authors_count,omitempty"` + Percent *int `json:"percent,omitempty"` + PushPercent *int `json:"push_percent,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + AuthorsURL *string `json:"authors_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + Message *string `json:"message,omitempty"` + FailedStep *string `json:"failed_step,omitempty"` + + // Human readable display name, provided when the Import appears as + // part of ProjectChoices. + HumanName *string `json:"human_name,omitempty"` + + // When the importer finds several projects or repositories at the + // provided URLs, this will identify the available choices. Call + // UpdateImport with the selected Import value. + ProjectChoices []*Import `json:"project_choices,omitempty"` +} + +func (i Import) String() string { + return Stringify(i) +} + +// SourceImportAuthor identifies an author imported from a source repository. +// +// GitHub API docs: https://docs.github.com/en/rest/migration/source_imports/#get-commit-authors +type SourceImportAuthor struct { + ID *int64 `json:"id,omitempty"` + RemoteID *string `json:"remote_id,omitempty"` + RemoteName *string `json:"remote_name,omitempty"` + Email *string `json:"email,omitempty"` + Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` + ImportURL *string `json:"import_url,omitempty"` +} + +func (a SourceImportAuthor) String() string { + return Stringify(a) +} + +// LargeFile identifies a file larger than 100MB found during a repository import. +// +// GitHub API docs: https://docs.github.com/en/rest/migration/source_imports/#get-large-files +type LargeFile struct { + RefName *string `json:"ref_name,omitempty"` + Path *string `json:"path,omitempty"` + OID *string `json:"oid,omitempty"` + Size *int `json:"size,omitempty"` +} + +func (f LargeFile) String() string { + return Stringify(f) +} + +// StartImport initiates a repository import. 
+// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#start-an-import +func (s *MigrationService) StartImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import", owner, repo) + req, err := s.client.NewRequest("PUT", u, in) + if err != nil { + return nil, nil, err + } + + out := new(Import) + resp, err := s.client.Do(ctx, req, out) + if err != nil { + return nil, resp, err + } + + return out, resp, nil +} + +// ImportProgress queries for the status and progress of an ongoing repository import. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#get-an-import-status +func (s *MigrationService) ImportProgress(ctx context.Context, owner, repo string) (*Import, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + out := new(Import) + resp, err := s.client.Do(ctx, req, out) + if err != nil { + return nil, resp, err + } + + return out, resp, nil +} + +// UpdateImport initiates a repository import. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#update-an-import +func (s *MigrationService) UpdateImport(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import", owner, repo) + req, err := s.client.NewRequest("PATCH", u, in) + if err != nil { + return nil, nil, err + } + + out := new(Import) + resp, err := s.client.Do(ctx, req, out) + if err != nil { + return nil, resp, err + } + + return out, resp, nil +} + +// CommitAuthors gets the authors mapped from the original repository. +// +// Each type of source control system represents authors in a different way. +// For example, a Git commit author has a display name and an email address, +// but a Subversion commit author just has a username. The GitHub Importer will +// make the author information valid, but the author might not be correct. For +// example, it will change the bare Subversion username "hubot" into something +// like "hubot ". +// +// This method and MapCommitAuthor allow you to provide correct Git author +// information. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#get-commit-authors +func (s *MigrationService) CommitAuthors(ctx context.Context, owner, repo string) ([]*SourceImportAuthor, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/authors", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var authors []*SourceImportAuthor + resp, err := s.client.Do(ctx, req, &authors) + if err != nil { + return nil, resp, err + } + + return authors, resp, nil +} + +// MapCommitAuthor updates an author's identity for the import. Your +// application can continue updating authors any time before you push new +// commits to the repository. 
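// Illustrative sketch (not part of the vendored files): starting a source
// import and checking its progress with StartImport and ImportProgress as
// defined above. Assumes an authenticated *github.Client; the VCS URL and
// repository names are placeholders.
func exampleSourceImport(ctx context.Context, client *github.Client) error {
	in := &github.Import{
		VCS:    github.String("git"),
		VCSURL: github.String("https://example.com/legacy/project.git"),
	}
	if _, _, err := client.Migrations.StartImport(ctx, "my-org", "project", in); err != nil {
		return err
	}
	progress, _, err := client.Migrations.ImportProgress(ctx, "my-org", "project")
	if err != nil {
		return err
	}
	fmt.Printf("import status: %s (%d%% done)\n", progress.GetStatus(), progress.GetPercent())
	return nil
}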
+// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#map-a-commit-author +func (s *MigrationService) MapCommitAuthor(ctx context.Context, owner, repo string, id int64, author *SourceImportAuthor) (*SourceImportAuthor, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/authors/%v", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, author) + if err != nil { + return nil, nil, err + } + + out := new(SourceImportAuthor) + resp, err := s.client.Do(ctx, req, out) + if err != nil { + return nil, resp, err + } + + return out, resp, nil +} + +// SetLFSPreference sets whether imported repositories should use Git LFS for +// files larger than 100MB. Only the UseLFS field on the provided Import is +// used. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#update-git-lfs-preference +func (s *MigrationService) SetLFSPreference(ctx context.Context, owner, repo string, in *Import) (*Import, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/lfs", owner, repo) + req, err := s.client.NewRequest("PATCH", u, in) + if err != nil { + return nil, nil, err + } + + out := new(Import) + resp, err := s.client.Do(ctx, req, out) + if err != nil { + return nil, resp, err + } + + return out, resp, nil +} + +// LargeFiles lists files larger than 100MB found during the import. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#get-large-files +func (s *MigrationService) LargeFiles(ctx context.Context, owner, repo string) ([]*LargeFile, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/import/large_files", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var files []*LargeFile + resp, err := s.client.Do(ctx, req, &files) + if err != nil { + return nil, resp, err + } + + return files, resp, nil +} + +// CancelImport stops an import for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/source-imports#cancel-an-import +func (s *MigrationService) CancelImport(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/import", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/migrations_user.go b/vendor/github.com/google/go-github/v56/github/migrations_user.go new file mode 100644 index 000000000..6586fdb2d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/migrations_user.go @@ -0,0 +1,218 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "errors" + "fmt" + "net/http" +) + +// UserMigration represents a GitHub migration (archival). +type UserMigration struct { + ID *int64 `json:"id,omitempty"` + GUID *string `json:"guid,omitempty"` + // State is the current state of a migration. + // Possible values are: + // "pending" which means the migration hasn't started yet, + // "exporting" which means the migration is in progress, + // "exported" which means the migration finished successfully, or + // "failed" which means the migration failed. + State *string `json:"state,omitempty"` + // LockRepositories indicates whether repositories are locked (to prevent + // manipulation) while migrating data. 
+ LockRepositories *bool `json:"lock_repositories,omitempty"` + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` + URL *string `json:"url,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + UpdatedAt *string `json:"updated_at,omitempty"` + Repositories []*Repository `json:"repositories,omitempty"` +} + +func (m UserMigration) String() string { + return Stringify(m) +} + +// UserMigrationOptions specifies the optional parameters to Migration methods. +type UserMigrationOptions struct { + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories bool + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments bool +} + +// startUserMigration represents the body of a StartMigration request. +type startUserMigration struct { + // Repositories is a slice of repository names to migrate. + Repositories []string `json:"repositories,omitempty"` + + // LockRepositories indicates whether repositories should be locked (to prevent + // manipulation) while migrating data. + LockRepositories *bool `json:"lock_repositories,omitempty"` + + // ExcludeAttachments indicates whether attachments should be excluded from + // the migration (to reduce migration archive file size). + ExcludeAttachments *bool `json:"exclude_attachments,omitempty"` +} + +// StartUserMigration starts the generation of a migration archive. +// repos is a slice of repository names to migrate. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#start-a-user-migration +func (s *MigrationService) StartUserMigration(ctx context.Context, repos []string, opts *UserMigrationOptions) (*UserMigration, *Response, error) { + u := "user/migrations" + + body := &startUserMigration{Repositories: repos} + if opts != nil { + body.LockRepositories = Bool(opts.LockRepositories) + body.ExcludeAttachments = Bool(opts.ExcludeAttachments) + } + + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// ListUserMigrations lists the most recent migrations. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#list-user-migrations +func (s *MigrationService) ListUserMigrations(ctx context.Context, opts *ListOptions) ([]*UserMigration, *Response, error) { + u := "user/migrations" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + var m []*UserMigration + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// UserMigrationStatus gets the status of a specific migration archive. +// id is the migration ID. 
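// Illustrative sketch (not part of the vendored files): starting a user
// migration and listing recent ones with StartUserMigration and
// ListUserMigrations as defined above. Assumes an authenticated
// *github.Client; repository names are placeholders.
func exampleUserMigration(ctx context.Context, client *github.Client) error {
	m, _, err := client.Migrations.StartUserMigration(ctx,
		[]string{"my-user/dotfiles", "my-user/notes"},
		&github.UserMigrationOptions{ExcludeAttachments: true})
	if err != nil {
		return err
	}
	fmt.Printf("user migration %d is %s\n", m.GetID(), m.GetState())

	recent, _, err := client.Migrations.ListUserMigrations(ctx, &github.ListOptions{PerPage: 10})
	if err != nil {
		return err
	}
	fmt.Printf("%d recent user migrations\n", len(recent))
	return nil
}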
+// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#get-a-user-migration-status +func (s *MigrationService) UserMigrationStatus(ctx context.Context, id int64) (*UserMigration, *Response, error) { + u := fmt.Sprintf("user/migrations/%v", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// UserMigrationArchiveURL gets the URL for a specific migration archive. +// id is the migration ID. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#download-a-user-migration-archive +func (s *MigrationService) UserMigrationArchiveURL(ctx context.Context, id int64) (string, error) { + url := fmt.Sprintf("user/migrations/%v/archive", id) + + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return "", err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + m := &UserMigration{} + + var loc string + originalRedirect := s.client.client.CheckRedirect + s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + loc = req.URL.String() + return http.ErrUseLastResponse + } + defer func() { + s.client.client.CheckRedirect = originalRedirect + }() + resp, err := s.client.Do(ctx, req, m) + if err == nil { + return "", errors.New("expected redirect, none provided") + } + loc = resp.Header.Get("Location") + return loc, nil +} + +// DeleteUserMigration will delete a previous migration archive. +// id is the migration ID. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#delete-a-user-migration-archive +func (s *MigrationService) DeleteUserMigration(ctx context.Context, id int64) (*Response, error) { + url := fmt.Sprintf("user/migrations/%v/archive", id) + + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} + +// UnlockUserRepo will unlock a repo that was locked for migration. +// id is migration ID. +// You should unlock each migrated repository and delete them when the migration +// is complete and you no longer need the source data. +// +// GitHub API docs: https://docs.github.com/en/rest/migrations/users#unlock-a-user-repository +func (s *MigrationService) UnlockUserRepo(ctx context.Context, id int64, repo string) (*Response, error) { + url := fmt.Sprintf("user/migrations/%v/repos/%v/lock", id, repo) + + req, err := s.client.NewRequest("DELETE", url, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMigrationsPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs.go b/vendor/github.com/google/go-github/v56/github/orgs.go new file mode 100644 index 000000000..0c7e361b3 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs.go @@ -0,0 +1,301 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package github + +import ( + "context" + "fmt" +) + +// OrganizationsService provides access to the organization related functions +// in the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/ +type OrganizationsService service + +// Organization represents a GitHub organization account. +type Organization struct { + Login *string `json:"login,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Name *string `json:"name,omitempty"` + Company *string `json:"company,omitempty"` + Blog *string `json:"blog,omitempty"` + Location *string `json:"location,omitempty"` + Email *string `json:"email,omitempty"` + TwitterUsername *string `json:"twitter_username,omitempty"` + Description *string `json:"description,omitempty"` + PublicRepos *int `json:"public_repos,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` + Followers *int `json:"followers,omitempty"` + Following *int `json:"following,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` + OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + DiskUsage *int `json:"disk_usage,omitempty"` + Collaborators *int `json:"collaborators,omitempty"` + BillingEmail *string `json:"billing_email,omitempty"` + Type *string `json:"type,omitempty"` + Plan *Plan `json:"plan,omitempty"` + TwoFactorRequirementEnabled *bool `json:"two_factor_requirement_enabled,omitempty"` + IsVerified *bool `json:"is_verified,omitempty"` + HasOrganizationProjects *bool `json:"has_organization_projects,omitempty"` + HasRepositoryProjects *bool `json:"has_repository_projects,omitempty"` + + // DefaultRepoPermission can be one of: "read", "write", "admin", or "none". (Default: "read"). + // It is only used in OrganizationsService.Edit. + DefaultRepoPermission *string `json:"default_repository_permission,omitempty"` + // DefaultRepoSettings can be one of: "read", "write", "admin", or "none". (Default: "read"). + // It is only used in OrganizationsService.Get. + DefaultRepoSettings *string `json:"default_repository_settings,omitempty"` + + // MembersCanCreateRepos default value is true and is only used in Organizations.Edit. + MembersCanCreateRepos *bool `json:"members_can_create_repositories,omitempty"` + + // https://developer.github.com/changes/2019-12-03-internal-visibility-changes/#rest-v3-api + MembersCanCreatePublicRepos *bool `json:"members_can_create_public_repositories,omitempty"` + MembersCanCreatePrivateRepos *bool `json:"members_can_create_private_repositories,omitempty"` + MembersCanCreateInternalRepos *bool `json:"members_can_create_internal_repositories,omitempty"` + + // MembersCanForkPrivateRepos toggles whether organization members can fork private organization repositories. + MembersCanForkPrivateRepos *bool `json:"members_can_fork_private_repositories,omitempty"` + + // MembersAllowedRepositoryCreationType denotes if organization members can create repositories + // and the type of repositories they can create. Possible values are: "all", "private", or "none". + // + // Deprecated: Use MembersCanCreatePublicRepos, MembersCanCreatePrivateRepos, MembersCanCreateInternalRepos + // instead. 
The new fields override the existing MembersAllowedRepositoryCreationType during an 'edit'
+	// operation; a 'get' operation does not consider 'internal' repositories.
+	MembersAllowedRepositoryCreationType *string `json:"members_allowed_repository_creation_type,omitempty"`
+
+	// MembersCanCreatePages toggles whether organization members can create GitHub Pages sites.
+	MembersCanCreatePages *bool `json:"members_can_create_pages,omitempty"`
+	// MembersCanCreatePublicPages toggles whether organization members can create public GitHub Pages sites.
+	MembersCanCreatePublicPages *bool `json:"members_can_create_public_pages,omitempty"`
+	// MembersCanCreatePrivatePages toggles whether organization members can create private GitHub Pages sites.
+	MembersCanCreatePrivatePages *bool `json:"members_can_create_private_pages,omitempty"`
+	// WebCommitSignoffRequired toggles whether organization members must sign off on web-based commits.
+	WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"`
+	// AdvancedSecurityEnabledForNewRepos toggles whether GitHub Advanced Security is enabled for new repositories.
+	AdvancedSecurityEnabledForNewRepos *bool `json:"advanced_security_enabled_for_new_repositories,omitempty"`
+	// DependabotAlertsEnabledForNewRepos toggles whether Dependabot alerts are enabled for new repositories.
+	DependabotAlertsEnabledForNewRepos *bool `json:"dependabot_alerts_enabled_for_new_repositories,omitempty"`
+	// DependabotSecurityUpdatesEnabledForNewRepos toggles whether Dependabot security updates are enabled for new repositories.
+	DependabotSecurityUpdatesEnabledForNewRepos *bool `json:"dependabot_security_updates_enabled_for_new_repositories,omitempty"`
+	// DependencyGraphEnabledForNewRepos toggles whether the dependency graph is enabled for new repositories.
+	DependencyGraphEnabledForNewRepos *bool `json:"dependency_graph_enabled_for_new_repositories,omitempty"`
+	// SecretScanningEnabledForNewRepos toggles whether secret scanning is enabled for new repositories.
+	SecretScanningEnabledForNewRepos *bool `json:"secret_scanning_enabled_for_new_repositories,omitempty"`
+	// SecretScanningPushProtectionEnabledForNewRepos toggles whether secret scanning push protection is enabled for new repositories.
+	SecretScanningPushProtectionEnabledForNewRepos *bool `json:"secret_scanning_push_protection_enabled_for_new_repositories,omitempty"`
+
+	// API URLs
+	URL              *string `json:"url,omitempty"`
+	EventsURL        *string `json:"events_url,omitempty"`
+	HooksURL         *string `json:"hooks_url,omitempty"`
+	IssuesURL        *string `json:"issues_url,omitempty"`
+	MembersURL       *string `json:"members_url,omitempty"`
+	PublicMembersURL *string `json:"public_members_url,omitempty"`
+	ReposURL         *string `json:"repos_url,omitempty"`
+}
+
+// OrganizationInstallations represents GitHub app installations for an organization.
+type OrganizationInstallations struct {
+	TotalCount    *int            `json:"total_count,omitempty"`
+	Installations []*Installation `json:"installations,omitempty"`
+}
+
+func (o Organization) String() string {
+	return Stringify(o)
+}
+
+// Plan represents the payment plan for an account. See plans at https://github.com/plans.
+type Plan struct {
+	Name          *string `json:"name,omitempty"`
+	Space         *int    `json:"space,omitempty"`
+	Collaborators *int    `json:"collaborators,omitempty"`
+	PrivateRepos  *int64  `json:"private_repos,omitempty"`
+	FilledSeats   *int    `json:"filled_seats,omitempty"`
+	Seats         *int    `json:"seats,omitempty"`
+}
+
+func (p Plan) String() string {
+	return Stringify(p)
+}
+
+// OrganizationsListOptions specifies the optional parameters to the
+// OrganizationsService.ListAll method.
+type OrganizationsListOptions struct { + // Since filters Organizations by ID. + Since int64 `url:"since,omitempty"` + + // Note: Pagination is powered exclusively by the Since parameter, + // ListOptions.Page has no effect. + // ListOptions.PerPage controls an undocumented GitHub API parameter. + ListOptions +} + +// ListAll lists all organizations, in the order that they were created on GitHub. +// +// Note: Pagination is powered exclusively by the since parameter. To continue +// listing the next set of organizations, use the ID of the last-returned organization +// as the opts.Since parameter for the next call. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#list-organizations +func (s *OrganizationsService) ListAll(ctx context.Context, opts *OrganizationsListOptions) ([]*Organization, *Response, error) { + u, err := addOptions("organizations", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + orgs := []*Organization{} + resp, err := s.client.Do(ctx, req, &orgs) + if err != nil { + return nil, resp, err + } + return orgs, resp, nil +} + +// List the organizations for a user. Passing the empty string will list +// organizations for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#list-organizations-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#list-organizations-for-a-user +func (s *OrganizationsService) List(ctx context.Context, user string, opts *ListOptions) ([]*Organization, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/orgs", user) + } else { + u = "user/orgs" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var orgs []*Organization + resp, err := s.client.Do(ctx, req, &orgs) + if err != nil { + return nil, resp, err + } + + return orgs, resp, nil +} + +// Get fetches an organization by name. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#get-an-organization +func (s *OrganizationsService) Get(ctx context.Context, org string) (*Organization, *Response, error) { + u := fmt.Sprintf("orgs/%v", org) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) + + organization := new(Organization) + resp, err := s.client.Do(ctx, req, organization) + if err != nil { + return nil, resp, err + } + + return organization, resp, nil +} + +// GetByID fetches an organization. +// +// Note: GetByID uses the undocumented GitHub API endpoint /organizations/:id. +func (s *OrganizationsService) GetByID(ctx context.Context, id int64) (*Organization, *Response, error) { + u := fmt.Sprintf("organizations/%d", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + organization := new(Organization) + resp, err := s.client.Do(ctx, req, organization) + if err != nil { + return nil, resp, err + } + + return organization, resp, nil +} + +// Edit an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#update-an-organization +func (s *OrganizationsService) Edit(ctx context.Context, name string, org *Organization) (*Organization, *Response, error) { + u := fmt.Sprintf("orgs/%v", name) + req, err := s.client.NewRequest("PATCH", u, org) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeMemberAllowedRepoCreationTypePreview) + + o := new(Organization) + resp, err := s.client.Do(ctx, req, o) + if err != nil { + return nil, resp, err + } + + return o, resp, nil +} + +// Delete an organization by name. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#delete-an-organization +func (s *OrganizationsService) Delete(ctx context.Context, org string) (*Response, error) { + u := fmt.Sprintf("orgs/%v", org) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListInstallations lists installations for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#list-app-installations-for-an-organization +func (s *OrganizationsService) ListInstallations(ctx context.Context, org string, opts *ListOptions) (*OrganizationInstallations, *Response, error) { + u := fmt.Sprintf("orgs/%v/installations", org) + + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + result := new(OrganizationInstallations) + resp, err := s.client.Do(ctx, req, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_actions_allowed.go b/vendor/github.com/google/go-github/v56/github/orgs_actions_allowed.go new file mode 100644 index 000000000..c467c1154 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_actions_allowed.go @@ -0,0 +1,28 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" +) + +// GetActionsAllowed gets the actions that are allowed in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-an-organization +// Deprecated: please use `client.Actions.GetActionsAllowed` instead. +func (s *OrganizationsService) GetActionsAllowed(ctx context.Context, org string) (*ActionsAllowed, *Response, error) { + s2 := (*ActionsService)(s) + return s2.GetActionsAllowed(ctx, org) +} + +// EditActionsAllowed sets the actions that are allowed in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-an-organization +// Deprecated: please use `client.Actions.EditActionsAllowed` instead. 
+func (s *OrganizationsService) EditActionsAllowed(ctx context.Context, org string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { + s2 := (*ActionsService)(s) + return s2.EditActionsAllowed(ctx, org, actionsAllowed) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_actions_permissions.go b/vendor/github.com/google/go-github/v56/github/orgs_actions_permissions.go new file mode 100644 index 000000000..607ffca79 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_actions_permissions.go @@ -0,0 +1,28 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" +) + +// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-github-actions-permissions-for-an-organization +// Deprecated: please use `client.Actions.GetActionsPermissions` instead. +func (s *OrganizationsService) GetActionsPermissions(ctx context.Context, org string) (*ActionsPermissions, *Response, error) { + s2 := (*ActionsService)(s) + return s2.GetActionsPermissions(ctx, org) +} + +// EditActionsPermissions sets the permissions policy for repositories and allowed actions in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-github-actions-permissions-for-an-organization +// Deprecated: please use `client.Actions.EditActionsPermissions` instead. +func (s *OrganizationsService) EditActionsPermissions(ctx context.Context, org string, actionsPermissions ActionsPermissions) (*ActionsPermissions, *Response, error) { + s2 := (*ActionsService)(s) + return s2.EditActionsPermissions(ctx, org, actionsPermissions) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_audit_log.go b/vendor/github.com/google/go-github/v56/github/orgs_audit_log.go new file mode 100644 index 000000000..e2f8fc24f --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_audit_log.go @@ -0,0 +1,158 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetAuditLogOptions sets up optional parameters to query audit-log endpoint. +type GetAuditLogOptions struct { + Phrase *string `url:"phrase,omitempty"` // A search phrase. (Optional.) + Include *string `url:"include,omitempty"` // Event type includes. Can be one of "web", "git", "all". Default: "web". (Optional.) + Order *string `url:"order,omitempty"` // The order of audit log events. Can be one of "asc" or "desc". Default: "desc". (Optional.) + + ListCursorOptions +} + +// HookConfig describes metadata about a webhook configuration. +type HookConfig struct { + ContentType *string `json:"content_type,omitempty"` + InsecureSSL *string `json:"insecure_ssl,omitempty"` + URL *string `json:"url,omitempty"` + + // Secret is returned obfuscated by GitHub, but it can be set for outgoing requests. + Secret *string `json:"secret,omitempty"` +} + +// ActorLocation contains information about reported location for an actor. +type ActorLocation struct { + CountryCode *string `json:"country_code,omitempty"` +} + +// PolicyOverrideReason contains user-supplied information about why a policy was overridden. 
+type PolicyOverrideReason struct { + Code *string `json:"code,omitempty"` + Message *string `json:"message,omitempty"` +} + +// AuditEntry describes the fields that may be represented by various audit-log "action" entries. +// For a list of actions see - https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/reviewing-the-audit-log-for-your-organization#audit-log-actions +type AuditEntry struct { + ActorIP *string `json:"actor_ip,omitempty"` + Action *string `json:"action,omitempty"` // The name of the action that was performed, for example `user.login` or `repo.create`. + Active *bool `json:"active,omitempty"` + ActiveWas *bool `json:"active_was,omitempty"` + Actor *string `json:"actor,omitempty"` // The actor who performed the action. + ActorLocation *ActorLocation `json:"actor_location,omitempty"` + BlockedUser *string `json:"blocked_user,omitempty"` + Business *string `json:"business,omitempty"` + CancelledAt *Timestamp `json:"cancelled_at,omitempty"` + CompletedAt *Timestamp `json:"completed_at,omitempty"` + Conclusion *string `json:"conclusion,omitempty"` + Config *HookConfig `json:"config,omitempty"` + ConfigWas *HookConfig `json:"config_was,omitempty"` + ContentType *string `json:"content_type,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + DeployKeyFingerprint *string `json:"deploy_key_fingerprint,omitempty"` + DocumentID *string `json:"_document_id,omitempty"` + Emoji *string `json:"emoji,omitempty"` + EnvironmentName *string `json:"environment_name,omitempty"` + Event *string `json:"event,omitempty"` + Events []string `json:"events,omitempty"` + EventsWere []string `json:"events_were,omitempty"` + Explanation *string `json:"explanation,omitempty"` + ExternalIdentityNameID *string `json:"external_identity_nameid,omitempty"` + ExternalIdentityUsername *string `json:"external_identity_username,omitempty"` + Fingerprint *string `json:"fingerprint,omitempty"` + HashedToken *string `json:"hashed_token,omitempty"` + HeadBranch *string `json:"head_branch,omitempty"` + HeadSHA *string `json:"head_sha,omitempty"` + HookID *int64 `json:"hook_id,omitempty"` + IsHostedRunner *bool `json:"is_hosted_runner,omitempty"` + JobName *string `json:"job_name,omitempty"` + JobWorkflowRef *string `json:"job_workflow_ref,omitempty"` + LimitedAvailability *bool `json:"limited_availability,omitempty"` + Message *string `json:"message,omitempty"` + Name *string `json:"name,omitempty"` + OAuthApplicationID *int64 `json:"oauth_application_id,omitempty"` + OldUser *string `json:"old_user,omitempty"` + OldPermission *string `json:"old_permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. + OpenSSHPublicKey *string `json:"openssh_public_key,omitempty"` + OperationType *string `json:"operation_type,omitempty"` + Org *string `json:"org,omitempty"` + OrgID *int64 `json:"org_id,omitempty"` + OverriddenCodes []string `json:"overridden_codes,omitempty"` + Permission *string `json:"permission,omitempty"` // The permission level for membership changes, for example `admin` or `read`. 
+ PreviousVisibility *string `json:"previous_visibility,omitempty"` + ProgrammaticAccessType *string `json:"programmatic_access_type,omitempty"` + PullRequestID *int64 `json:"pull_request_id,omitempty"` + PullRequestTitle *string `json:"pull_request_title,omitempty"` + PullRequestURL *string `json:"pull_request_url,omitempty"` + ReadOnly *string `json:"read_only,omitempty"` + Reasons []*PolicyOverrideReason `json:"reasons,omitempty"` + Repo *string `json:"repo,omitempty"` + Repository *string `json:"repository,omitempty"` + RepositoryPublic *bool `json:"repository_public,omitempty"` + RunAttempt *int64 `json:"run_attempt,omitempty"` + RunnerGroupID *int64 `json:"runner_group_id,omitempty"` + RunnerGroupName *string `json:"runner_group_name,omitempty"` + RunnerID *int64 `json:"runner_id,omitempty"` + RunnerLabels []string `json:"runner_labels,omitempty"` + RunnerName *string `json:"runner_name,omitempty"` + RunNumber *int64 `json:"run_number,omitempty"` + SecretsPassed []string `json:"secrets_passed,omitempty"` + SourceVersion *string `json:"source_version,omitempty"` + StartedAt *Timestamp `json:"started_at,omitempty"` + TargetLogin *string `json:"target_login,omitempty"` + TargetVersion *string `json:"target_version,omitempty"` + Team *string `json:"team,omitempty"` + Timestamp *Timestamp `json:"@timestamp,omitempty"` // The time the audit log event occurred, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + TokenID *int64 `json:"token_id,omitempty"` + TokenScopes *string `json:"token_scopes,omitempty"` + Topic *string `json:"topic,omitempty"` + TransportProtocolName *string `json:"transport_protocol_name,omitempty"` // A human readable name for the protocol (for example, HTTP or SSH) used to transfer Git data. + TransportProtocol *int `json:"transport_protocol,omitempty"` // The type of protocol (for example, HTTP=1 or SSH=2) used to transfer Git data. + TriggerID *int64 `json:"trigger_id,omitempty"` + User *string `json:"user,omitempty"` // The user that was affected by the action performed (if available). + UserAgent *string `json:"user_agent,omitempty"` + Visibility *string `json:"visibility,omitempty"` // The repository visibility, for example `public` or `private`. + WorkflowID *int64 `json:"workflow_id,omitempty"` + WorkflowRunID *int64 `json:"workflow_run_id,omitempty"` + + Data *AuditEntryData `json:"data,omitempty"` +} + +// AuditEntryData represents additional information stuffed into a `data` field. +type AuditEntryData struct { + OldName *string `json:"old_name,omitempty"` // The previous name of the repository, for a name change + OldLogin *string `json:"old_login,omitempty"` // The previous name of the organization, for a name change +} + +// GetAuditLog gets the audit-log entries for an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/orgs#get-the-audit-log-for-an-organization +func (s *OrganizationsService) GetAuditLog(ctx context.Context, org string, opts *GetAuditLogOptions) ([]*AuditEntry, *Response, error) { + u := fmt.Sprintf("orgs/%v/audit-log", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var auditEntries []*AuditEntry + resp, err := s.client.Do(ctx, req, &auditEntries) + if err != nil { + return nil, resp, err + } + + return auditEntries, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_credential_authorizations.go b/vendor/github.com/google/go-github/v56/github/orgs_credential_authorizations.go new file mode 100644 index 000000000..6d56fe8f7 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_credential_authorizations.go @@ -0,0 +1,95 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" +) + +// CredentialAuthorization represents a credential authorized through SAML SSO. +type CredentialAuthorization struct { + // User login that owns the underlying credential. + Login *string `json:"login,omitempty"` + + // Unique identifier for the credential. + CredentialID *int64 `json:"credential_id,omitempty"` + + // Human-readable description of the credential type. + CredentialType *string `json:"credential_type,omitempty"` + + // Last eight characters of the credential. + // Only included in responses with credential_type of personal access token. + TokenLastEight *string `json:"token_last_eight,omitempty"` + + // Date when the credential was authorized for use. + CredentialAuthorizedAt *Timestamp `json:"credential_authorized_at,omitempty"` + + // Date when the credential was last accessed. + // May be null if it was never accessed. + CredentialAccessedAt *Timestamp `json:"credential_accessed_at,omitempty"` + + // List of oauth scopes the token has been granted. + Scopes []string `json:"scopes,omitempty"` + + // Unique string to distinguish the credential. + // Only included in responses with credential_type of SSH Key. + Fingerprint *string `json:"fingerprint,omitempty"` + + AuthorizedCredentialID *int64 `json:"authorized_credential_id,omitempty"` + + // The title given to the ssh key. + // This will only be present when the credential is an ssh key. + AuthorizedCredentialTitle *string `json:"authorized_credential_title,omitempty"` + + // The note given to the token. + // This will only be present when the credential is a token. + AuthorizedCredentialNote *string `json:"authorized_credential_note,omitempty"` + + // The expiry for the token. + // This will only be present when the credential is a token. + AuthorizedCredentialExpiresAt *Timestamp `json:"authorized_credential_expires_at,omitempty"` +} + +// ListCredentialAuthorizations lists credentials authorized through SAML SSO +// for a given organization. Only available with GitHub Enterprise Cloud. 
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/orgs/orgs?apiVersion=2022-11-28#list-saml-sso-authorizations-for-an-organization +func (s *OrganizationsService) ListCredentialAuthorizations(ctx context.Context, org string, opts *ListOptions) ([]*CredentialAuthorization, *Response, error) { + u := fmt.Sprintf("orgs/%v/credential-authorizations", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest(http.MethodGet, u, nil) + if err != nil { + return nil, nil, err + } + + var creds []*CredentialAuthorization + resp, err := s.client.Do(ctx, req, &creds) + if err != nil { + return nil, resp, err + } + + return creds, resp, nil +} + +// RemoveCredentialAuthorization revokes the SAML SSO authorization for a given +// credential within an organization. Only available with GitHub Enterprise Cloud. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/orgs/orgs?apiVersion=2022-11-28#remove-a-saml-sso-authorization-for-an-organization +func (s *OrganizationsService) RemoveCredentialAuthorization(ctx context.Context, org string, credentialID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/credential-authorizations/%v", org, credentialID) + req, err := s.client.NewRequest(http.MethodDelete, u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_custom_roles.go b/vendor/github.com/google/go-github/v56/github/orgs_custom_roles.go new file mode 100644 index 000000000..7c1b2d629 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_custom_roles.go @@ -0,0 +1,120 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// OrganizationCustomRepoRoles represents custom repository roles available in specified organization. +type OrganizationCustomRepoRoles struct { + TotalCount *int `json:"total_count,omitempty"` + CustomRepoRoles []*CustomRepoRoles `json:"custom_roles,omitempty"` +} + +// CustomRepoRoles represents custom repository roles for an organization. +// See https://docs.github.com/en/enterprise-cloud@latest/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization +// for more information. +type CustomRepoRoles struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + BaseRole *string `json:"base_role,omitempty"` + Permissions []string `json:"permissions,omitempty"` +} + +// ListCustomRepoRoles lists the custom repository roles available in this organization. +// In order to see custom repository roles in an organization, the authenticated user must be an organization owner. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/custom-roles#list-custom-repository-roles-in-an-organization +func (s *OrganizationsService) ListCustomRepoRoles(ctx context.Context, org string) (*OrganizationCustomRepoRoles, *Response, error) { + u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + customRepoRoles := new(OrganizationCustomRepoRoles) + resp, err := s.client.Do(ctx, req, customRepoRoles) + if err != nil { + return nil, resp, err + } + + return customRepoRoles, resp, nil +} + +// CreateOrUpdateCustomRoleOptions represents options required to create or update a custom repository role. +type CreateOrUpdateCustomRoleOptions struct { + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + BaseRole *string `json:"base_role,omitempty"` + Permissions []string `json:"permissions,omitempty"` +} + +// CreateCustomRepoRole creates a custom repository role in this organization. +// In order to create custom repository roles in an organization, the authenticated user must be an organization owner. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/custom-roles#create-a-custom-repository-role +func (s *OrganizationsService) CreateCustomRepoRole(ctx context.Context, org string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { + u := fmt.Sprintf("orgs/%v/custom-repository-roles", org) + + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + resultingRole := new(CustomRepoRoles) + resp, err := s.client.Do(ctx, req, resultingRole) + if err != nil { + return nil, resp, err + } + + return resultingRole, resp, err +} + +// UpdateCustomRepoRole updates a custom repository role in this organization. +// In order to update custom repository roles in an organization, the authenticated user must be an organization owner. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/custom-roles#update-a-custom-repository-role +func (s *OrganizationsService) UpdateCustomRepoRole(ctx context.Context, org, roleID string, opts *CreateOrUpdateCustomRoleOptions) (*CustomRepoRoles, *Response, error) { + u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) + + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + resultingRole := new(CustomRepoRoles) + resp, err := s.client.Do(ctx, req, resultingRole) + if err != nil { + return nil, resp, err + } + + return resultingRole, resp, err +} + +// DeleteCustomRepoRole deletes an existing custom repository role in this organization. +// In order to delete custom repository roles in an organization, the authenticated user must be an organization owner. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/custom-roles#delete-a-custom-repository-role +func (s *OrganizationsService) DeleteCustomRepoRole(ctx context.Context, org, roleID string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/custom-repository-roles/%v", org, roleID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resultingRole := new(CustomRepoRoles) + resp, err := s.client.Do(ctx, req, resultingRole) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_hooks.go b/vendor/github.com/google/go-github/v56/github/orgs_hooks.go new file mode 100644 index 000000000..c0dd51e24 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_hooks.go @@ -0,0 +1,130 @@ +// Copyright 2015 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListHooks lists all Hooks for the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#list-organization-webhooks +func (s *OrganizationsService) ListHooks(ctx context.Context, org string, opts *ListOptions) ([]*Hook, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var hooks []*Hook + resp, err := s.client.Do(ctx, req, &hooks) + if err != nil { + return nil, resp, err + } + + return hooks, resp, nil +} + +// GetHook returns a single specified Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#get-an-organization-webhook +func (s *OrganizationsService) GetHook(ctx context.Context, org string, id int64) (*Hook, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + hook := new(Hook) + resp, err := s.client.Do(ctx, req, hook) + if err != nil { + return nil, resp, err + } + + return hook, resp, nil +} + +// CreateHook creates a Hook for the specified org. +// Config is a required field. +// +// Note that only a subset of the hook fields are used and hook must +// not be nil. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#create-an-organization-webhook +func (s *OrganizationsService) CreateHook(ctx context.Context, org string, hook *Hook) (*Hook, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks", org) + + hookReq := &createHookRequest{ + Name: "web", + Events: hook.Events, + Active: hook.Active, + Config: hook.Config, + } + + req, err := s.client.NewRequest("POST", u, hookReq) + if err != nil { + return nil, nil, err + } + + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// EditHook updates a specified Hook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#update-an-organization-webhook +func (s *OrganizationsService) EditHook(ctx context.Context, org string, id int64, hook *Hook) (*Hook, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) + req, err := s.client.NewRequest("PATCH", u, hook) + if err != nil { + return nil, nil, err + } + + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// PingHook triggers a 'ping' event to be sent to the Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#ping-an-organization-webhook +func (s *OrganizationsService) PingHook(ctx context.Context, org string, id int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%d/pings", org, id) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteHook deletes a specified Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#delete-an-organization-webhook +func (s *OrganizationsService) DeleteHook(ctx context.Context, org string, id int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%d", org, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_hooks_configuration.go b/vendor/github.com/google/go-github/v56/github/orgs_hooks_configuration.go new file mode 100644 index 000000000..953a2329c --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_hooks_configuration.go @@ -0,0 +1,49 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetHookConfiguration returns the configuration for the specified organization webhook. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks?apiVersion=2022-11-28#get-a-webhook-configuration-for-an-organization +func (s *OrganizationsService) GetHookConfiguration(ctx context.Context, org string, id int64) (*HookConfig, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + config := new(HookConfig) + resp, err := s.client.Do(ctx, req, config) + if err != nil { + return nil, resp, err + } + + return config, resp, nil +} + +// EditHookConfiguration updates the configuration for the specified organization webhook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks?apiVersion=2022-11-28#update-a-webhook-configuration-for-an-organization +func (s *OrganizationsService) EditHookConfiguration(ctx context.Context, org string, id int64, config *HookConfig) (*HookConfig, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/config", org, id) + req, err := s.client.NewRequest("PATCH", u, config) + if err != nil { + return nil, nil, err + } + + c := new(HookConfig) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_hooks_deliveries.go b/vendor/github.com/google/go-github/v56/github/orgs_hooks_deliveries.go new file mode 100644 index 000000000..1bfad409e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_hooks_deliveries.go @@ -0,0 +1,73 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListHookDeliveries lists webhook deliveries for a webhook configured in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#list-deliveries-for-an-organization-webhook +func (s *OrganizationsService) ListHookDeliveries(ctx context.Context, org string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries", org, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + deliveries := []*HookDelivery{} + resp, err := s.client.Do(ctx, req, &deliveries) + if err != nil { + return nil, resp, err + } + + return deliveries, resp, nil +} + +// GetHookDelivery returns a delivery for a webhook configured in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#get-a-webhook-delivery-for-an-organization-webhook +func (s *OrganizationsService) GetHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v", owner, hookID, deliveryID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// RedeliverHookDelivery redelivers a delivery for a webhook configured in an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/webhooks#redeliver-a-delivery-for-an-organization-webhook +func (s *OrganizationsService) RedeliverHookDelivery(ctx context.Context, owner string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("orgs/%v/hooks/%v/deliveries/%v/attempts", owner, hookID, deliveryID) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_members.go b/vendor/github.com/google/go-github/v56/github/orgs_members.go new file mode 100644 index 000000000..79f8a6533 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_members.go @@ -0,0 +1,391 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Membership represents the status of a user's membership in an organization or team. +type Membership struct { + URL *string `json:"url,omitempty"` + + // State is the user's status within the organization or team. + // Possible values are: "active", "pending" + State *string `json:"state,omitempty"` + + // Role identifies the user's role within the organization or team. + // Possible values for organization membership: + // member - non-owner organization member + // admin - organization owner + // + // Possible values for team membership are: + // member - a normal member of the team + // maintainer - a team maintainer. Able to add/remove other team + // members, promote other team members to team + // maintainer, and edit the team’s name and description + Role *string `json:"role,omitempty"` + + // For organization membership, the API URL of the organization. + OrganizationURL *string `json:"organization_url,omitempty"` + + // For organization membership, the organization the membership is for. + Organization *Organization `json:"organization,omitempty"` + + // For organization membership, the user the membership is for. + User *User `json:"user,omitempty"` +} + +func (m Membership) String() string { + return Stringify(m) +} + +// ListMembersOptions specifies optional parameters to the +// OrganizationsService.ListMembers method. +type ListMembersOptions struct { + // If true (or if the authenticated user is not an owner of the + // organization), list only publicly visible members. + PublicOnly bool `url:"-"` + + // Filter members returned in the list. Possible values are: + // 2fa_disabled, all. Default is "all". + Filter string `url:"filter,omitempty"` + + // Role filters members returned by their role in the organization. + // Possible values are: + // all - all members of the organization, regardless of role + // admin - organization owners + // member - non-owner organization members + // + // Default is "all". + Role string `url:"role,omitempty"` + + ListOptions +} + +// ListMembers lists the members for an organization. If the authenticated +// user is an owner of the organization, this will return both concealed and +// public members, otherwise it will only return public members. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-organization-members +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-public-organization-members +func (s *OrganizationsService) ListMembers(ctx context.Context, org string, opts *ListMembersOptions) ([]*User, *Response, error) { + var u string + if opts != nil && opts.PublicOnly { + u = fmt.Sprintf("orgs/%v/public_members", org) + } else { + u = fmt.Sprintf("orgs/%v/members", org) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var members []*User + resp, err := s.client.Do(ctx, req, &members) + if err != nil { + return nil, resp, err + } + + return members, resp, nil +} + +// IsMember checks if a user is a member of an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#check-organization-membership-for-a-user +func (s *OrganizationsService) IsMember(ctx context.Context, org, user string) (bool, *Response, error) { + u := fmt.Sprintf("orgs/%v/members/%v", org, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + member, err := parseBoolResponse(err) + return member, resp, err +} + +// IsPublicMember checks if a user is a public member of an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#check-public-organization-membership-for-a-user +func (s *OrganizationsService) IsPublicMember(ctx context.Context, org, user string) (bool, *Response, error) { + u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + member, err := parseBoolResponse(err) + return member, resp, err +} + +// RemoveMember removes a user from all teams of an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#remove-an-organization-member +func (s *OrganizationsService) RemoveMember(ctx context.Context, org, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/members/%v", org, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// PublicizeMembership publicizes a user's membership in an organization. (A +// user cannot publicize the membership for another user.) +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#set-public-organization-membership-for-the-authenticated-user +func (s *OrganizationsService) PublicizeMembership(ctx context.Context, org, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ConcealMembership conceals a user's membership in an organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#remove-public-organization-membership-for-the-authenticated-user +func (s *OrganizationsService) ConcealMembership(ctx context.Context, org, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/public_members/%v", org, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListOrgMembershipsOptions specifies optional parameters to the +// OrganizationsService.ListOrgMemberships method. +type ListOrgMembershipsOptions struct { + // Filter memberships to include only those with the specified state. + // Possible values are: "active", "pending". + State string `url:"state,omitempty"` + + ListOptions +} + +// ListOrgMemberships lists the organization memberships for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-organization-memberships-for-the-authenticated-user +func (s *OrganizationsService) ListOrgMemberships(ctx context.Context, opts *ListOrgMembershipsOptions) ([]*Membership, *Response, error) { + u := "user/memberships/orgs" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var memberships []*Membership + resp, err := s.client.Do(ctx, req, &memberships) + if err != nil { + return nil, resp, err + } + + return memberships, resp, nil +} + +// GetOrgMembership gets the membership for a user in a specified organization. +// Passing an empty string for user will get the membership for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#get-an-organization-membership-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#get-organization-membership-for-a-user +func (s *OrganizationsService) GetOrgMembership(ctx context.Context, user, org string) (*Membership, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) + } else { + u = fmt.Sprintf("user/memberships/orgs/%v", org) + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + membership := new(Membership) + resp, err := s.client.Do(ctx, req, membership) + if err != nil { + return nil, resp, err + } + + return membership, resp, nil +} + +// EditOrgMembership edits the membership for user in specified organization. +// Passing an empty string for user will edit the membership for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#update-an-organization-membership-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#set-organization-membership-for-a-user +func (s *OrganizationsService) EditOrgMembership(ctx context.Context, user, org string, membership *Membership) (*Membership, *Response, error) { + var u, method string + if user != "" { + u = fmt.Sprintf("orgs/%v/memberships/%v", org, user) + method = "PUT" + } else { + u = fmt.Sprintf("user/memberships/orgs/%v", org) + method = "PATCH" + } + + req, err := s.client.NewRequest(method, u, membership) + if err != nil { + return nil, nil, err + } + + m := new(Membership) + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// RemoveOrgMembership removes user from the specified organization. 
If the +// user has been invited to the organization, this will cancel their invitation. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#remove-organization-membership-for-a-user +func (s *OrganizationsService) RemoveOrgMembership(ctx context.Context, user, org string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/memberships/%v", org, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListPendingOrgInvitations returns a list of pending invitations. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-pending-organization-invitations +func (s *OrganizationsService) ListPendingOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) { + u := fmt.Sprintf("orgs/%v/invitations", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pendingInvitations []*Invitation + resp, err := s.client.Do(ctx, req, &pendingInvitations) + if err != nil { + return nil, resp, err + } + + return pendingInvitations, resp, nil +} + +// CreateOrgInvitationOptions specifies the parameters to the OrganizationService.Invite +// method. +type CreateOrgInvitationOptions struct { + // GitHub user ID for the person you are inviting. Not required if you provide Email. + InviteeID *int64 `json:"invitee_id,omitempty"` + // Email address of the person you are inviting, which can be an existing GitHub user. + // Not required if you provide InviteeID + Email *string `json:"email,omitempty"` + // Specify role for new member. Can be one of: + // * admin - Organization owners with full administrative rights to the + // organization and complete access to all repositories and teams. + // * direct_member - Non-owner organization members with ability to see + // other members and join teams by invitation. + // * billing_manager - Non-owner organization members with ability to + // manage the billing settings of your organization. + // Default is "direct_member". + Role *string `json:"role,omitempty"` + TeamID []int64 `json:"team_ids,omitempty"` +} + +// CreateOrgInvitation invites people to an organization by using their GitHub user ID or their email address. +// In order to create invitations in an organization, +// the authenticated user must be an organization owner. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/members#create-an-organization-invitation +func (s *OrganizationsService) CreateOrgInvitation(ctx context.Context, org string, opts *CreateOrgInvitationOptions) (*Invitation, *Response, error) { + u := fmt.Sprintf("orgs/%v/invitations", org) + + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + var invitation *Invitation + resp, err := s.client.Do(ctx, req, &invitation) + if err != nil { + return nil, resp, err + } + + return invitation, resp, nil +} + +// ListOrgInvitationTeams lists all teams associated with an invitation. In order to see invitations in an organization, +// the authenticated user must be an organization owner. 
+//
+// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-organization-invitation-teams
+func (s *OrganizationsService) ListOrgInvitationTeams(ctx context.Context, org, invitationID string, opts *ListOptions) ([]*Team, *Response, error) {
+	u := fmt.Sprintf("orgs/%v/invitations/%v/teams", org, invitationID)
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var orgInvitationTeams []*Team
+	resp, err := s.client.Do(ctx, req, &orgInvitationTeams)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return orgInvitationTeams, resp, nil
+}
+
+// ListFailedOrgInvitations returns a list of failed invitations.
+//
+// GitHub API docs: https://docs.github.com/en/rest/orgs/members#list-failed-organization-invitations
+func (s *OrganizationsService) ListFailedOrgInvitations(ctx context.Context, org string, opts *ListOptions) ([]*Invitation, *Response, error) {
+	u := fmt.Sprintf("orgs/%v/failed_invitations", org)
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var failedInvitations []*Invitation
+	resp, err := s.client.Do(ctx, req, &failedInvitations)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return failedInvitations, resp, nil
+}
diff --git a/vendor/github.com/google/go-github/v56/github/orgs_outside_collaborators.go b/vendor/github.com/google/go-github/v56/github/orgs_outside_collaborators.go
new file mode 100644
index 000000000..506a49460
--- /dev/null
+++ b/vendor/github.com/google/go-github/v56/github/orgs_outside_collaborators.go
@@ -0,0 +1,81 @@
+// Copyright 2017 The go-github AUTHORS. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package github
+
+import (
+	"context"
+	"fmt"
+)
+
+// ListOutsideCollaboratorsOptions specifies optional parameters to the
+// OrganizationsService.ListOutsideCollaborators method.
+type ListOutsideCollaboratorsOptions struct {
+	// Filter outside collaborators returned in the list. Possible values are:
+	// 2fa_disabled, all. Default is "all".
+	Filter string `url:"filter,omitempty"`
+
+	ListOptions
+}
+
+// ListOutsideCollaborators lists outside collaborators of organization's repositories.
+// This will only work if the authenticated
+// user is an owner of the organization.
+//
+// Warning: The API may change without advance notice during the preview period.
+// Preview features are not supported for production use.
+//
+// GitHub API docs: https://docs.github.com/en/rest/orgs/outside-collaborators#list-outside-collaborators-for-an-organization
+func (s *OrganizationsService) ListOutsideCollaborators(ctx context.Context, org string, opts *ListOutsideCollaboratorsOptions) ([]*User, *Response, error) {
+	u := fmt.Sprintf("orgs/%v/outside_collaborators", org)
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var members []*User
+	resp, err := s.client.Do(ctx, req, &members)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return members, resp, nil
+}
+
+// RemoveOutsideCollaborator removes a user from the list of outside collaborators,
+// consequently removing them from all the organization's repositories.
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/outside-collaborators#remove-outside-collaborator-from-an-organization +func (s *OrganizationsService) RemoveOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ConvertMemberToOutsideCollaborator reduces the permission level of a member of the +// organization to that of an outside collaborator. Therefore, they will only +// have access to the repositories that their current team membership allows. +// Responses for converting a non-member or the last owner to an outside collaborator +// are listed in GitHub API docs. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/outside-collaborators#convert-an-organization-member-to-outside-collaborator +func (s *OrganizationsService) ConvertMemberToOutsideCollaborator(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/outside_collaborators/%v", org, user) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_packages.go b/vendor/github.com/google/go-github/v56/github/orgs_packages.go new file mode 100644 index 000000000..0ae68aaa3 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_packages.go @@ -0,0 +1,149 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// List the packages for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#list-packages-for-an-organization +func (s *OrganizationsService) ListPackages(ctx context.Context, org string, opts *PackageListOptions) ([]*Package, *Response, error) { + u := fmt.Sprintf("orgs/%v/packages", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var packages []*Package + resp, err := s.client.Do(ctx, req, &packages) + if err != nil { + return nil, resp, err + } + + return packages, resp, nil +} + +// Get a package by name from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-for-an-organization +func (s *OrganizationsService) GetPackage(ctx context.Context, org, packageType, packageName string) (*Package, *Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pack *Package + resp, err := s.client.Do(ctx, req, &pack) + if err != nil { + return nil, resp, err + } + + return pack, resp, nil +} + +// Delete a package from an organization. 
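As a sketch of the outside-collaborator calls above, an owner might audit collaborators without two-factor auth and then demote a member. Organization and usernames are placeholders, and an authenticated client is assumed.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	// Outside collaborators whose accounts have 2FA disabled.
	users, _, err := client.Organizations.ListOutsideCollaborators(ctx, "my-org", &github.ListOutsideCollaboratorsOptions{
		Filter:      "2fa_disabled",
		ListOptions: github.ListOptions{PerPage: 50},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, u := range users {
		fmt.Println("no 2FA:", u.GetLogin())
	}

	// Demote a full member to an outside collaborator (hypothetical username).
	if _, err := client.Organizations.ConvertMemberToOutsideCollaborator(ctx, "my-org", "contractor-1"); err != nil {
		log.Fatal(err)
	}
}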
+// +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-a-package-for-an-organization +func (s *OrganizationsService) DeletePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v", org, packageType, packageName) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Restore a package to an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-a-package-for-an-organization +func (s *OrganizationsService) RestorePackage(ctx context.Context, org, packageType, packageName string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v/restore", org, packageType, packageName) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Get all versions of a package in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#list-package-versions-for-a-package-owned-by-an-organization +func (s *OrganizationsService) PackageGetAllVersions(ctx context.Context, org, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions", org, packageType, packageName) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var versions []*PackageVersion + resp, err := s.client.Do(ctx, req, &versions) + if err != nil { + return nil, resp, err + } + + return versions, resp, nil +} + +// Get a specific version of a package in an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-version-for-an-organization +func (s *OrganizationsService) PackageGetVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var version *PackageVersion + resp, err := s.client.Do(ctx, req, &version) + if err != nil { + return nil, resp, err + } + + return version, resp, nil +} + +// Delete a package version from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization +func (s *OrganizationsService) PackageDeleteVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v", org, packageType, packageName, packageVersionID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Restore a package version to an organization. 
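The package endpoints above all key on the (org, package type, package name) triple. A small sketch, with placeholder names and an assumed authenticated client, of listing an organization's private container packages and counting each package's versions:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	// Private container images owned by the organization.
	pkgs, _, err := client.Organizations.ListPackages(ctx, "my-org", &github.PackageListOptions{
		PackageType: github.String("container"),
		Visibility:  github.String("private"),
	})
	if err != nil {
		log.Fatal(err)
	}

	for _, p := range pkgs {
		versions, _, err := client.Organizations.PackageGetAllVersions(ctx, "my-org", "container", p.GetName(), nil)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s has %d versions\n", p.GetName(), len(versions))
	}
}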
+// +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-package-version-for-an-organization +func (s *OrganizationsService) PackageRestoreVersion(ctx context.Context, org, packageType, packageName string, packageVersionID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/packages/%v/%v/versions/%v/restore", org, packageType, packageName, packageVersionID) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_personal_access_tokens.go b/vendor/github.com/google/go-github/v56/github/orgs_personal_access_tokens.go new file mode 100644 index 000000000..c30ff2843 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_personal_access_tokens.go @@ -0,0 +1,34 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" +) + +// ReviewPersonalAccessTokenRequestOptions specifies the parameters to the ReviewPersonalAccessTokenRequest method. +type ReviewPersonalAccessTokenRequestOptions struct { + Action string `json:"action"` + Reason *string `json:"reason,omitempty"` +} + +// ReviewPersonalAccessTokenRequest approves or denies a pending request to access organization resources via a fine-grained personal access token. +// Only GitHub Apps can call this API, using the `organization_personal_access_token_requests: write` permission. +// `action` can be one of `approve` or `deny`. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/personal-access-tokens?apiVersion=2022-11-28#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token +func (s *OrganizationsService) ReviewPersonalAccessTokenRequest(ctx context.Context, org string, requestID int64, opts ReviewPersonalAccessTokenRequestOptions) (*Response, error) { + u := fmt.Sprintf("orgs/%v/personal-access-token-requests/%v", org, requestID) + + req, err := s.client.NewRequest(http.MethodPost, u, &opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_projects.go b/vendor/github.com/google/go-github/v56/github/orgs_projects.go new file mode 100644 index 000000000..d49eae54d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_projects.go @@ -0,0 +1,60 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListProjects lists the projects for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#list-organization-projects +func (s *OrganizationsService) ListProjects(ctx context.Context, org string, opts *ProjectListOptions) ([]*Project, *Response, error) { + u := fmt.Sprintf("orgs/%v/projects", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
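ReviewPersonalAccessTokenRequest is only callable by a GitHub App with the organization_personal_access_token_requests: write permission, and it takes its options by value rather than by pointer. A sketch of denying a hypothetical pending request; the request ID and reason are placeholders.

package main

import (
	"context"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	// Placeholder client; in practice this must be a GitHub App installation client.
	client := github.NewClient(nil)

	_, err := client.Organizations.ReviewPersonalAccessTokenRequest(ctx, "my-org", 42, github.ReviewPersonalAccessTokenRequestOptions{
		Action: "deny",
		Reason: github.String("requested scopes are broader than our policy allows"),
	})
	if err != nil {
		log.Fatal(err)
	}
}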
+ req.Header.Set("Accept", mediaTypeProjectsPreview) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// CreateProject creates a GitHub Project for the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#create-an-organization-project +func (s *OrganizationsService) CreateProject(ctx context.Context, org string, opts *ProjectOptions) (*Project, *Response, error) { + u := fmt.Sprintf("orgs/%v/projects", org) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + project := &Project{} + resp, err := s.client.Do(ctx, req, project) + if err != nil { + return nil, resp, err + } + + return project, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_rules.go b/vendor/github.com/google/go-github/v56/github/orgs_rules.go new file mode 100644 index 000000000..a3905af8f --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_rules.go @@ -0,0 +1,105 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetAllOrganizationRulesets gets all the rulesets for the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/rules#get-all-organization-repository-rulesets +func (s *OrganizationsService) GetAllOrganizationRulesets(ctx context.Context, org string) ([]*Ruleset, *Response, error) { + u := fmt.Sprintf("orgs/%v/rulesets", org) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rulesets []*Ruleset + resp, err := s.client.Do(ctx, req, &rulesets) + if err != nil { + return nil, resp, err + } + + return rulesets, resp, nil +} + +// CreateOrganizationRuleset creates a ruleset for the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/rules#create-an-organization-repository-ruleset +func (s *OrganizationsService) CreateOrganizationRuleset(ctx context.Context, org string, rs *Ruleset) (*Ruleset, *Response, error) { + u := fmt.Sprintf("orgs/%v/rulesets", org) + + req, err := s.client.NewRequest("POST", u, rs) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// GetOrganizationRuleset gets a ruleset from the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/rules#get-an-organization-repository-ruleset +func (s *OrganizationsService) GetOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Ruleset, *Response, error) { + u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// UpdateOrganizationRuleset updates a ruleset from the specified organization. 
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/rules#update-an-organization-repository-ruleset +func (s *OrganizationsService) UpdateOrganizationRuleset(ctx context.Context, org string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { + u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) + + req, err := s.client.NewRequest("PUT", u, rs) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// DeleteOrganizationRuleset deletes a ruleset from the specified organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/rules#delete-an-organization-repository-ruleset +func (s *OrganizationsService) DeleteOrganizationRuleset(ctx context.Context, org string, rulesetID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/rulesets/%v", org, rulesetID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_security_managers.go b/vendor/github.com/google/go-github/v56/github/orgs_security_managers.go new file mode 100644 index 000000000..a3f002e0e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_security_managers.go @@ -0,0 +1,57 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListSecurityManagerTeams lists all security manager teams for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/security-managers#list-security-manager-teams +func (s *OrganizationsService) ListSecurityManagerTeams(ctx context.Context, org string) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/security-managers", org) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// AddSecurityManagerTeam adds a team to the list of security managers for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/security-managers#add-a-security-manager-team +func (s *OrganizationsService) AddSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveSecurityManagerTeam removes a team from the list of security managers for an organization. 
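The ruleset methods form a plain CRUD set keyed by the organization name and a numeric ruleset ID. A minimal sketch (placeholder org and ID; the Ruleset fields themselves are defined elsewhere in this package) of listing rulesets and deleting one:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	rulesets, _, err := client.Organizations.GetAllOrganizationRulesets(ctx, "my-org")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("repository rulesets:", len(rulesets))

	// Remove a ruleset by its numeric ID (42 is a placeholder).
	if _, err := client.Organizations.DeleteOrganizationRuleset(ctx, "my-org", 42); err != nil {
		log.Fatal(err)
	}
}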
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/security-managers#remove-a-security-manager-team +func (s *OrganizationsService) RemoveSecurityManagerTeam(ctx context.Context, org, team string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/security-managers/teams/%v", org, team) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/orgs_users_blocking.go b/vendor/github.com/google/go-github/v56/github/orgs_users_blocking.go new file mode 100644 index 000000000..9c6cf6026 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/orgs_users_blocking.go @@ -0,0 +1,91 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListBlockedUsers lists all the users blocked by an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/blocking#list-users-blocked-by-an-organization +func (s *OrganizationsService) ListBlockedUsers(ctx context.Context, org string, opts *ListOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("orgs/%v/blocks", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + var blockedUsers []*User + resp, err := s.client.Do(ctx, req, &blockedUsers) + if err != nil { + return nil, resp, err + } + + return blockedUsers, resp, nil +} + +// IsBlocked reports whether specified user is blocked from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/blocking#check-if-a-user-is-blocked-by-an-organization +func (s *OrganizationsService) IsBlocked(ctx context.Context, org string, user string) (bool, *Response, error) { + u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + resp, err := s.client.Do(ctx, req, nil) + isBlocked, err := parseBoolResponse(err) + return isBlocked, resp, err +} + +// BlockUser blocks specified user from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/orgs/blocking#block-a-user-from-an-organization +func (s *OrganizationsService) BlockUser(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + return s.client.Do(ctx, req, nil) +} + +// UnblockUser unblocks specified user from an organization. 
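IsBlocked turns the blocked/not-blocked status responses into a boolean via parseBoolResponse, so callers can branch on it directly instead of inspecting the error. A sketch with placeholder org and username:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	blocked, _, err := client.Organizations.IsBlocked(ctx, "my-org", "spammy-user")
	if err != nil {
		log.Fatal(err)
	}
	if !blocked {
		if _, err := client.Organizations.BlockUser(ctx, "my-org", "spammy-user"); err != nil {
			log.Fatal(err)
		}
		fmt.Println("blocked spammy-user")
	}
}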
+// +// GitHub API docs: https://docs.github.com/en/rest/orgs/blocking#unblock-a-user-from-an-organization +func (s *OrganizationsService) UnblockUser(ctx context.Context, org string, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/blocks/%v", org, user) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/packages.go b/vendor/github.com/google/go-github/v56/github/packages.go new file mode 100644 index 000000000..ef7df0740 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/packages.go @@ -0,0 +1,143 @@ +// Copyright 2020 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// Package represents a GitHub package. +type Package struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + PackageType *string `json:"package_type,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Owner *User `json:"owner,omitempty"` + PackageVersion *PackageVersion `json:"package_version,omitempty"` + Registry *PackageRegistry `json:"registry,omitempty"` + URL *string `json:"url,omitempty"` + VersionCount *int64 `json:"version_count,omitempty"` + Visibility *string `json:"visibility,omitempty"` + Repository *Repository `json:"repository,omitempty"` +} + +func (p Package) String() string { + return Stringify(p) +} + +// PackageVersion represents a GitHub package version. +type PackageVersion struct { + ID *int64 `json:"id,omitempty"` + Version *string `json:"version,omitempty"` + Summary *string `json:"summary,omitempty"` + Body *string `json:"body,omitempty"` + BodyHTML *string `json:"body_html,omitempty"` + Release *PackageRelease `json:"release,omitempty"` + Manifest *string `json:"manifest,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + TargetOID *string `json:"target_oid,omitempty"` + Draft *bool `json:"draft,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + PackageFiles []*PackageFile `json:"package_files,omitempty"` + Author *User `json:"author,omitempty"` + InstallationCommand *string `json:"installation_command,omitempty"` + Metadata *PackageMetadata `json:"metadata,omitempty"` + PackageHTMLURL *string `json:"package_html_url,omitempty"` + Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` +} + +func (pv PackageVersion) String() string { + return Stringify(pv) +} + +// PackageRelease represents a GitHub package version release. 
+type PackageRelease struct { + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ID *int64 `json:"id,omitempty"` + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + Name *string `json:"name,omitempty"` + Draft *bool `json:"draft,omitempty"` + Author *User `json:"author,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PublishedAt *Timestamp `json:"published_at,omitempty"` +} + +func (r PackageRelease) String() string { + return Stringify(r) +} + +// PackageFile represents a GitHub package version release file. +type PackageFile struct { + DownloadURL *string `json:"download_url,omitempty"` + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + SHA256 *string `json:"sha256,omitempty"` + SHA1 *string `json:"sha1,omitempty"` + MD5 *string `json:"md5,omitempty"` + ContentType *string `json:"content_type,omitempty"` + State *string `json:"state,omitempty"` + Author *User `json:"author,omitempty"` + Size *int64 `json:"size,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +func (pf PackageFile) String() string { + return Stringify(pf) +} + +// PackageRegistry represents a GitHub package registry. +type PackageRegistry struct { + AboutURL *string `json:"about_url,omitempty"` + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` + URL *string `json:"url,omitempty"` + Vendor *string `json:"vendor,omitempty"` +} + +func (r PackageRegistry) String() string { + return Stringify(r) +} + +// PackageListOptions represents the optional list options for a package. +type PackageListOptions struct { + // Visibility of packages "public", "internal" or "private". + Visibility *string `url:"visibility,omitempty"` + + // PackageType represents the type of package. + // It can be one of "npm", "maven", "rubygems", "nuget", "docker", or "container". + PackageType *string `url:"package_type,omitempty"` + + // State of package either "active" or "deleted". + State *string `url:"state,omitempty"` + + ListOptions +} + +// PackageMetadata represents metadata from a package. +type PackageMetadata struct { + PackageType *string `json:"package_type,omitempty"` + Container *PackageContainerMetadata `json:"container,omitempty"` +} + +func (r PackageMetadata) String() string { + return Stringify(r) +} + +// PackageContainerMetadata represents container metadata for docker container packages. +type PackageContainerMetadata struct { + Tags []string `json:"tags,omitempty"` +} + +func (r PackageContainerMetadata) String() string { + return Stringify(r) +} diff --git a/vendor/github.com/google/go-github/v56/github/projects.go b/vendor/github.com/google/go-github/v56/github/projects.go new file mode 100644 index 000000000..df7ad6cd9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/projects.go @@ -0,0 +1,596 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ProjectsService provides access to the projects functions in the +// GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/projects +type ProjectsService service + +// Project represents a GitHub Project. 
+type Project struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ColumnsURL *string `json:"columns_url,omitempty"` + OwnerURL *string `json:"owner_url,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` + OrganizationPermission *string `json:"organization_permission,omitempty"` + Private *bool `json:"private,omitempty"` + + // The User object that generated the project. + Creator *User `json:"creator,omitempty"` +} + +func (p Project) String() string { + return Stringify(p) +} + +// GetProject gets a GitHub Project for a repo. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#get-a-project +func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, *Response, error) { + u := fmt.Sprintf("projects/%v", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + project := &Project{} + resp, err := s.client.Do(ctx, req, project) + if err != nil { + return nil, resp, err + } + + return project, resp, nil +} + +// ProjectOptions specifies the parameters to the +// RepositoriesService.CreateProject and +// ProjectsService.UpdateProject methods. +type ProjectOptions struct { + // The name of the project. (Required for creation; optional for update.) + Name *string `json:"name,omitempty"` + // The body of the project. (Optional.) + Body *string `json:"body,omitempty"` + + // The following field(s) are only applicable for update. + // They should be left with zero values for creation. + + // State of the project. Either "open" or "closed". (Optional.) + State *string `json:"state,omitempty"` + // The permission level that all members of the project's organization + // will have on this project. + // Setting the organization permission is only available + // for organization projects. (Optional.) + OrganizationPermission *string `json:"organization_permission,omitempty"` + // Sets visibility of the project within the organization. + // Setting visibility is only available + // for organization projects.(Optional.) + Private *bool `json:"private,omitempty"` +} + +// UpdateProject updates a repository project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#update-a-project +func (s *ProjectsService) UpdateProject(ctx context.Context, id int64, opts *ProjectOptions) (*Project, *Response, error) { + u := fmt.Sprintf("projects/%v", id) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + project := &Project{} + resp, err := s.client.Do(ctx, req, project) + if err != nil { + return nil, resp, err + } + + return project, resp, nil +} + +// DeleteProject deletes a GitHub Project from a repository. 
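These ProjectsService calls target the classic (preview) Projects API, hence the custom Accept header, and they address a project by its numeric ID rather than by owner/repo. A sketch, with a placeholder project ID and an assumed authenticated client, of fetching a project and renaming it:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	project, _, err := client.Projects.GetProject(ctx, 1234) // 1234 is a placeholder project ID
	if err != nil {
		log.Fatal(err)
	}

	updated, _, err := client.Projects.UpdateProject(ctx, project.GetID(), &github.ProjectOptions{
		Name: github.String("Roadmap"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("renamed project:", updated.GetName())
}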
+// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#delete-a-project +func (s *ProjectsService) DeleteProject(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("projects/%v", id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ProjectColumn represents a column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/projects/ +type ProjectColumn struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` + ProjectURL *string `json:"project_url,omitempty"` + CardsURL *string `json:"cards_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +// ListProjectColumns lists the columns of a GitHub Project for a repo. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#list-project-columns +func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int64, opts *ListOptions) ([]*ProjectColumn, *Response, error) { + u := fmt.Sprintf("projects/%v/columns", projectID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + columns := []*ProjectColumn{} + resp, err := s.client.Do(ctx, req, &columns) + if err != nil { + return nil, resp, err + } + + return columns, resp, nil +} + +// GetProjectColumn gets a column of a GitHub Project for a repo. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#get-a-project-column +func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int64) (*ProjectColumn, *Response, error) { + u := fmt.Sprintf("projects/columns/%v", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + column := &ProjectColumn{} + resp, err := s.client.Do(ctx, req, column) + if err != nil { + return nil, resp, err + } + + return column, resp, nil +} + +// ProjectColumnOptions specifies the parameters to the +// ProjectsService.CreateProjectColumn and +// ProjectsService.UpdateProjectColumn methods. +type ProjectColumnOptions struct { + // The name of the project column. (Required for creation and update.) + Name string `json:"name"` +} + +// CreateProjectColumn creates a column for the specified (by number) project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#create-a-project-column +func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) { + u := fmt.Sprintf("projects/%v/columns", projectID) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. 
+ req.Header.Set("Accept", mediaTypeProjectsPreview) + + column := &ProjectColumn{} + resp, err := s.client.Do(ctx, req, column) + if err != nil { + return nil, resp, err + } + + return column, resp, nil +} + +// UpdateProjectColumn updates a column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#update-an-existing-project-column +func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnOptions) (*ProjectColumn, *Response, error) { + u := fmt.Sprintf("projects/columns/%v", columnID) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + column := &ProjectColumn{} + resp, err := s.client.Do(ctx, req, column) + if err != nil { + return nil, resp, err + } + + return column, resp, nil +} + +// DeleteProjectColumn deletes a column from a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#delete-a-project-column +func (s *ProjectsService) DeleteProjectColumn(ctx context.Context, columnID int64) (*Response, error) { + u := fmt.Sprintf("projects/columns/%v", columnID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ProjectColumnMoveOptions specifies the parameters to the +// ProjectsService.MoveProjectColumn method. +type ProjectColumnMoveOptions struct { + // Position can be one of "first", "last", or "after:", where + // is the ID of a column in the same project. (Required.) + Position string `json:"position"` +} + +// MoveProjectColumn moves a column within a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/columns#move-a-project-column +func (s *ProjectsService) MoveProjectColumn(ctx context.Context, columnID int64, opts *ProjectColumnMoveOptions) (*Response, error) { + u := fmt.Sprintf("projects/columns/%v/moves", columnID) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ProjectCard represents a card in a column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards/#get-a-project-card +type ProjectCard struct { + URL *string `json:"url,omitempty"` + ColumnURL *string `json:"column_url,omitempty"` + ContentURL *string `json:"content_url,omitempty"` + ID *int64 `json:"id,omitempty"` + Note *string `json:"note,omitempty"` + Creator *User `json:"creator,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Archived *bool `json:"archived,omitempty"` + + // The following fields are only populated by Webhook events. + ColumnID *int64 `json:"column_id,omitempty"` + + // The following fields are only populated by Events API. + ProjectID *int64 `json:"project_id,omitempty"` + ProjectURL *string `json:"project_url,omitempty"` + ColumnName *string `json:"column_name,omitempty"` + PreviousColumnName *string `json:"previous_column_name,omitempty"` // Populated in "moved_columns_in_project" event deliveries. 
+} + +// ProjectCardListOptions specifies the optional parameters to the +// ProjectsService.ListProjectCards method. +type ProjectCardListOptions struct { + // ArchivedState is used to list all, archived, or not_archived project cards. + // Defaults to not_archived when you omit this parameter. + ArchivedState *string `url:"archived_state,omitempty"` + + ListOptions +} + +// ListProjectCards lists the cards in a column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#list-project-cards +func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int64, opts *ProjectCardListOptions) ([]*ProjectCard, *Response, error) { + u := fmt.Sprintf("projects/columns/%v/cards", columnID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + cards := []*ProjectCard{} + resp, err := s.client.Do(ctx, req, &cards) + if err != nil { + return nil, resp, err + } + + return cards, resp, nil +} + +// GetProjectCard gets a card in a column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#get-a-project-card +func (s *ProjectsService) GetProjectCard(ctx context.Context, cardID int64) (*ProjectCard, *Response, error) { + u := fmt.Sprintf("projects/columns/cards/%v", cardID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + card := &ProjectCard{} + resp, err := s.client.Do(ctx, req, card) + if err != nil { + return nil, resp, err + } + + return card, resp, nil +} + +// ProjectCardOptions specifies the parameters to the +// ProjectsService.CreateProjectCard and +// ProjectsService.UpdateProjectCard methods. +type ProjectCardOptions struct { + // The note of the card. Note and ContentID are mutually exclusive. + Note string `json:"note,omitempty"` + // The ID (not Number) of the Issue to associate with this card. + // Note and ContentID are mutually exclusive. + ContentID int64 `json:"content_id,omitempty"` + // The type of content to associate with this card. Possible values are: "Issue" and "PullRequest". + ContentType string `json:"content_type,omitempty"` + // Use true to archive a project card. + // Specify false if you need to restore a previously archived project card. + Archived *bool `json:"archived,omitempty"` +} + +// CreateProjectCard creates a card in the specified column of a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#create-a-project-card +func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) { + u := fmt.Sprintf("projects/columns/%v/cards", columnID) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + card := &ProjectCard{} + resp, err := s.client.Do(ctx, req, card) + if err != nil { + return nil, resp, err + } + + return card, resp, nil +} + +// UpdateProjectCard updates a card of a GitHub Project. 
+// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#update-an-existing-project-card +func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int64, opts *ProjectCardOptions) (*ProjectCard, *Response, error) { + u := fmt.Sprintf("projects/columns/cards/%v", cardID) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + card := &ProjectCard{} + resp, err := s.client.Do(ctx, req, card) + if err != nil { + return nil, resp, err + } + + return card, resp, nil +} + +// DeleteProjectCard deletes a card from a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#delete-a-project-card +func (s *ProjectsService) DeleteProjectCard(ctx context.Context, cardID int64) (*Response, error) { + u := fmt.Sprintf("projects/columns/cards/%v", cardID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ProjectCardMoveOptions specifies the parameters to the +// ProjectsService.MoveProjectCard method. +type ProjectCardMoveOptions struct { + // Position can be one of "top", "bottom", or "after:", where + // is the ID of a card in the same project. + Position string `json:"position"` + // ColumnID is the ID of a column in the same project. Note that ColumnID + // is required when using Position "after:" when that card is in + // another column; otherwise it is optional. + ColumnID int64 `json:"column_id,omitempty"` +} + +// MoveProjectCard moves a card within a GitHub Project. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/cards#move-a-project-card +func (s *ProjectsService) MoveProjectCard(ctx context.Context, cardID int64, opts *ProjectCardMoveOptions) (*Response, error) { + u := fmt.Sprintf("projects/columns/cards/%v/moves", cardID) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ProjectCollaboratorOptions specifies the optional parameters to the +// ProjectsService.AddProjectCollaborator method. +type ProjectCollaboratorOptions struct { + // Permission specifies the permission to grant to the collaborator. + // Possible values are: + // "read" - can read, but not write to or administer this project. + // "write" - can read and write, but not administer this project. + // "admin" - can read, write and administer this project. + // + // Default value is "write" + Permission *string `json:"permission,omitempty"` +} + +// AddProjectCollaborator adds a collaborator to an organization project and sets +// their permission level. You must be an organization owner or a project admin to add a collaborator. 
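ProjectCardOptions treats Note and ContentID as mutually exclusive: a card is either a free-form note or a pointer to an existing issue by its ID (not its number). A sketch, with placeholder column and issue IDs and an assumed authenticated client, of creating one of each and then moving the note card to the top of its column with MoveProjectCard:

package main

import (
	"context"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	columnID := int64(987) // placeholder column ID

	// A plain note card.
	note, _, err := client.Projects.CreateProjectCard(ctx, columnID, &github.ProjectCardOptions{
		Note: "Triage incoming issues",
	})
	if err != nil {
		log.Fatal(err)
	}

	// A card backed by an existing issue (use the issue's ID, not its number).
	if _, _, err := client.Projects.CreateProjectCard(ctx, columnID, &github.ProjectCardOptions{
		ContentID:   123456789, // placeholder issue ID
		ContentType: "Issue",
	}); err != nil {
		log.Fatal(err)
	}

	// Move the note card to the top of its column.
	if _, err := client.Projects.MoveProjectCard(ctx, note.GetID(), &github.ProjectCardMoveOptions{
		Position: "top",
	}); err != nil {
		log.Fatal(err)
	}
}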
+// +// GitHub API docs: https://docs.github.com/en/rest/projects/collaborators#add-project-collaborator +func (s *ProjectsService) AddProjectCollaborator(ctx context.Context, id int64, username string, opts *ProjectCollaboratorOptions) (*Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// RemoveProjectCollaborator removes a collaborator from an organization project. +// You must be an organization owner or a project admin to remove a collaborator. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/collaborators#remove-user-as-a-collaborator +func (s *ProjectsService) RemoveProjectCollaborator(ctx context.Context, id int64, username string) (*Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v", id, username) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ListCollaboratorOptions specifies the optional parameters to the +// ProjectsService.ListProjectCollaborators method. +type ListCollaboratorOptions struct { + // Affiliation specifies how collaborators should be filtered by their affiliation. + // Possible values are: + // "outside" - All outside collaborators of an organization-owned repository + // "direct" - All collaborators with permissions to an organization-owned repository, + // regardless of organization membership status + // "all" - All collaborators the authenticated user can see + // + // Default value is "all". + Affiliation *string `url:"affiliation,omitempty"` + + ListOptions +} + +// ListProjectCollaborators lists the collaborators for an organization project. For a project, +// the list of collaborators includes outside collaborators, organization members that are direct +// collaborators, organization members with access through team memberships, organization members +// with access through default organization permissions, and organization owners. You must be an +// organization owner or a project admin to list collaborators. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/collaborators#list-project-collaborators +func (s *ProjectsService) ListProjectCollaborators(ctx context.Context, id int64, opts *ListCollaboratorOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("projects/%v/collaborators", id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// ProjectPermissionLevel represents the permission level an organization +// member has for a given project. 
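Project collaborator management also goes through the numeric project ID. A sketch (placeholder project ID and username, authenticated client assumed) of granting write access and then listing direct collaborators:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	projectID := int64(1234) // placeholder project ID

	if _, err := client.Projects.AddProjectCollaborator(ctx, projectID, "octocat", &github.ProjectCollaboratorOptions{
		Permission: github.String("write"),
	}); err != nil {
		log.Fatal(err)
	}

	users, _, err := client.Projects.ListProjectCollaborators(ctx, projectID, &github.ListCollaboratorOptions{
		Affiliation: github.String("direct"),
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, u := range users {
		fmt.Println("direct collaborator:", u.GetLogin())
	}
}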
+type ProjectPermissionLevel struct { + // Possible values: "admin", "write", "read", "none" + Permission *string `json:"permission,omitempty"` + + User *User `json:"user,omitempty"` +} + +// ReviewProjectCollaboratorPermission returns the collaborator's permission level for an organization +// project. Possible values for the permission key: "admin", "write", "read", "none". +// You must be an organization owner or a project admin to review a user's permission level. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/collaborators#get-project-permission-for-a-user +func (s *ProjectsService) ReviewProjectCollaboratorPermission(ctx context.Context, id int64, username string) (*ProjectPermissionLevel, *Response, error) { + u := fmt.Sprintf("projects/%v/collaborators/%v/permission", id, username) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + ppl := new(ProjectPermissionLevel) + resp, err := s.client.Do(ctx, req, ppl) + if err != nil { + return nil, resp, err + } + return ppl, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/pulls.go b/vendor/github.com/google/go-github/v56/github/pulls.go new file mode 100644 index 000000000..29549e24b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/pulls.go @@ -0,0 +1,486 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "fmt" +) + +// PullRequestsService handles communication with the pull request related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/ +type PullRequestsService service + +// PullRequestAutoMerge represents the "auto_merge" response for a PullRequest. +type PullRequestAutoMerge struct { + EnabledBy *User `json:"enabled_by,omitempty"` + MergeMethod *string `json:"merge_method,omitempty"` + CommitTitle *string `json:"commit_title,omitempty"` + CommitMessage *string `json:"commit_message,omitempty"` +} + +// PullRequest represents a GitHub pull request on a repository. 
+type PullRequest struct { + ID *int64 `json:"id,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + Locked *bool `json:"locked,omitempty"` + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + ClosedAt *Timestamp `json:"closed_at,omitempty"` + MergedAt *Timestamp `json:"merged_at,omitempty"` + Labels []*Label `json:"labels,omitempty"` + User *User `json:"user,omitempty"` + Draft *bool `json:"draft,omitempty"` + Merged *bool `json:"merged,omitempty"` + Mergeable *bool `json:"mergeable,omitempty"` + MergeableState *string `json:"mergeable_state,omitempty"` + MergedBy *User `json:"merged_by,omitempty"` + MergeCommitSHA *string `json:"merge_commit_sha,omitempty"` + Rebaseable *bool `json:"rebaseable,omitempty"` + Comments *int `json:"comments,omitempty"` + Commits *int `json:"commits,omitempty"` + Additions *int `json:"additions,omitempty"` + Deletions *int `json:"deletions,omitempty"` + ChangedFiles *int `json:"changed_files,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + IssueURL *string `json:"issue_url,omitempty"` + StatusesURL *string `json:"statuses_url,omitempty"` + DiffURL *string `json:"diff_url,omitempty"` + PatchURL *string `json:"patch_url,omitempty"` + CommitsURL *string `json:"commits_url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + ReviewCommentsURL *string `json:"review_comments_url,omitempty"` + ReviewCommentURL *string `json:"review_comment_url,omitempty"` + ReviewComments *int `json:"review_comments,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Assignees []*User `json:"assignees,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` + AuthorAssociation *string `json:"author_association,omitempty"` + NodeID *string `json:"node_id,omitempty"` + RequestedReviewers []*User `json:"requested_reviewers,omitempty"` + AutoMerge *PullRequestAutoMerge `json:"auto_merge,omitempty"` + + // RequestedTeams is populated as part of the PullRequestEvent. + // See, https://docs.github.com/en/developers/webhooks-and-events/github-event-types#pullrequestevent for an example. + RequestedTeams []*Team `json:"requested_teams,omitempty"` + + Links *PRLinks `json:"_links,omitempty"` + Head *PullRequestBranch `json:"head,omitempty"` + Base *PullRequestBranch `json:"base,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the pull request. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". + ActiveLockReason *string `json:"active_lock_reason,omitempty"` +} + +func (p PullRequest) String() string { + return Stringify(p) +} + +// PRLink represents a single link object from GitHub pull request _links. +type PRLink struct { + HRef *string `json:"href,omitempty"` +} + +// PRLinks represents the "_links" object in a GitHub pull request. 
+type PRLinks struct { + Self *PRLink `json:"self,omitempty"` + HTML *PRLink `json:"html,omitempty"` + Issue *PRLink `json:"issue,omitempty"` + Comments *PRLink `json:"comments,omitempty"` + ReviewComments *PRLink `json:"review_comments,omitempty"` + ReviewComment *PRLink `json:"review_comment,omitempty"` + Commits *PRLink `json:"commits,omitempty"` + Statuses *PRLink `json:"statuses,omitempty"` +} + +// PullRequestBranch represents a base or head branch in a GitHub pull request. +type PullRequestBranch struct { + Label *string `json:"label,omitempty"` + Ref *string `json:"ref,omitempty"` + SHA *string `json:"sha,omitempty"` + Repo *Repository `json:"repo,omitempty"` + User *User `json:"user,omitempty"` +} + +// PullRequestListOptions specifies the optional parameters to the +// PullRequestsService.List method. +type PullRequestListOptions struct { + // State filters pull requests based on their state. Possible values are: + // open, closed, all. Default is "open". + State string `url:"state,omitempty"` + + // Head filters pull requests by head user and branch name in the format of: + // "user:ref-name". + Head string `url:"head,omitempty"` + + // Base filters pull requests by base branch name. + Base string `url:"base,omitempty"` + + // Sort specifies how to sort pull requests. Possible values are: created, + // updated, popularity, long-running. Default is "created". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort pull requests. Possible values are: asc, desc. + // If Sort is "created" or not specified, Default is "desc", otherwise Default + // is "asc" + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// List the pull requests for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#list-pull-requests +func (s *PullRequestsService) List(ctx context.Context, owner string, repo string, opts *PullRequestListOptions) ([]*PullRequest, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pulls []*PullRequest + resp, err := s.client.Do(ctx, req, &pulls) + if err != nil { + return nil, resp, err + } + + return pulls, resp, nil +} + +// ListPullRequestsWithCommit returns pull requests associated with a commit SHA. +// +// The results may include open and closed pull requests. +// By default, the PullRequestListOptions State filters for "open". +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#list-pull-requests-associated-with-a-commit +func (s *PullRequestsService) ListPullRequestsWithCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*PullRequest, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/pulls", owner, repo, sha) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) + var pulls []*PullRequest + resp, err := s.client.Do(ctx, req, &pulls) + if err != nil { + return nil, resp, err + } + + return pulls, resp, nil +} + +// Get a single pull request. 
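PullRequestListOptions embeds ListOptions, so the usual NextPage pagination loop applies. A sketch (owner, repo, and base branch are placeholders; the Response.NextPage field is the library's standard pagination cursor, assumed rather than shown in this hunk) of walking all open pull requests against main, most recently updated first:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	opts := &github.PullRequestListOptions{
		State:       "open",
		Base:        "main",
		Sort:        "updated",
		Direction:   "desc",
		ListOptions: github.ListOptions{PerPage: 50},
	}
	for {
		pulls, resp, err := client.PullRequests.List(ctx, "my-org", "my-repo", opts)
		if err != nil {
			log.Fatal(err)
		}
		for _, pr := range pulls {
			fmt.Printf("#%d %s\n", pr.GetNumber(), pr.GetTitle())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
}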
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request +func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string, number int) (*PullRequest, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + pull := new(PullRequest) + resp, err := s.client.Do(ctx, req, pull) + if err != nil { + return nil, resp, err + } + + return pull, resp, nil +} + +// GetRaw gets a single pull request in raw (diff or patch) format. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request +func (s *PullRequestsService) GetRaw(ctx context.Context, owner string, repo string, number int, opts RawOptions) (string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + + switch opts.Type { + case Diff: + req.Header.Set("Accept", mediaTypeV3Diff) + case Patch: + req.Header.Set("Accept", mediaTypeV3Patch) + default: + return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) + } + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// NewPullRequest represents a new pull request to be created. +type NewPullRequest struct { + Title *string `json:"title,omitempty"` + Head *string `json:"head,omitempty"` + HeadRepo *string `json:"head_repo,omitempty"` + Base *string `json:"base,omitempty"` + Body *string `json:"body,omitempty"` + Issue *int `json:"issue,omitempty"` + MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` + Draft *bool `json:"draft,omitempty"` +} + +// Create a new pull request on the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#create-a-pull-request +func (s *PullRequestsService) Create(ctx context.Context, owner string, repo string, pull *NewPullRequest) (*PullRequest, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls", owner, repo) + req, err := s.client.NewRequest("POST", u, pull) + if err != nil { + return nil, nil, err + } + + p := new(PullRequest) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// PullRequestBranchUpdateOptions specifies the optional parameters to the +// PullRequestsService.UpdateBranch method. +type PullRequestBranchUpdateOptions struct { + // ExpectedHeadSHA specifies the most recent commit on the pull request's branch. + // Default value is the SHA of the pull request's current HEAD ref. + ExpectedHeadSHA *string `json:"expected_head_sha,omitempty"` +} + +// PullRequestBranchUpdateResponse specifies the response of pull request branch update. +type PullRequestBranchUpdateResponse struct { + Message *string `json:"message,omitempty"` + URL *string `json:"url,omitempty"` +} + +// UpdateBranch updates the pull request branch with latest upstream changes. +// +// This method might return an AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it has now scheduled the update of the pull request branch in a background task. +// A follow up request, after a delay of a second or so, should result +// in a successful request. 
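Create takes a *NewPullRequest whose fields are all pointers, so the github.String and github.Bool helpers keep the literal short. A sketch with placeholder owner, repo, and branch names, and an assumed authenticated client:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // placeholder; authentication required in practice

	pr, _, err := client.PullRequests.Create(ctx, "my-org", "my-repo", &github.NewPullRequest{
		Title: github.String("Bump pipeline image"),
		Head:  github.String("update-pipeline-image"),
		Base:  github.String("main"),
		Body:  github.String("Automated dependency bump."),
		Draft: github.Bool(true),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("opened", pr.GetHTMLURL())
}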
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#update-a-pull-request-branch +func (s *PullRequestsService) UpdateBranch(ctx context.Context, owner, repo string, number int, opts *PullRequestBranchUpdateOptions) (*PullRequestBranchUpdateResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/update-branch", owner, repo, number) + + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeUpdatePullRequestBranchPreview) + + p := new(PullRequestBranchUpdateResponse) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +type pullRequestUpdate struct { + Title *string `json:"title,omitempty"` + Body *string `json:"body,omitempty"` + State *string `json:"state,omitempty"` + Base *string `json:"base,omitempty"` + MaintainerCanModify *bool `json:"maintainer_can_modify,omitempty"` +} + +// Edit a pull request. +// pull must not be nil. +// +// The following fields are editable: Title, Body, State, Base.Ref and MaintainerCanModify. +// Base.Ref updates the base branch of the pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#update-a-pull-request +func (s *PullRequestsService) Edit(ctx context.Context, owner string, repo string, number int, pull *PullRequest) (*PullRequest, *Response, error) { + if pull == nil { + return nil, nil, fmt.Errorf("pull must be provided") + } + + u := fmt.Sprintf("repos/%v/%v/pulls/%d", owner, repo, number) + + update := &pullRequestUpdate{ + Title: pull.Title, + Body: pull.Body, + State: pull.State, + MaintainerCanModify: pull.MaintainerCanModify, + } + // avoid updating the base branch when closing the Pull Request + // - otherwise the GitHub API server returns a "Validation Failed" error: + // "Cannot change base branch of closed pull request". + if pull.Base != nil && pull.GetState() != "closed" { + update.Base = pull.Base.Ref + } + + req, err := s.client.NewRequest("PATCH", u, update) + if err != nil { + return nil, nil, err + } + + p := new(PullRequest) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// ListCommits lists the commits in a pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#list-commits-on-a-pull-request +func (s *PullRequestsService) ListCommits(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*RepositoryCommit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/commits", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var commits []*RepositoryCommit + resp, err := s.client.Do(ctx, req, &commits) + if err != nil { + return nil, resp, err + } + + return commits, resp, nil +} + +// ListFiles lists the files in a pull request. 
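+// A minimal usage sketch (assumes an authenticated *github.Client named client;
+// PerPage/Page and resp.NextPage are go-github's standard pagination fields):
+//
+//	opts := &github.ListOptions{PerPage: 100}
+//	for {
+//		files, resp, err := client.PullRequests.ListFiles(ctx, "owner", "repo", 42, opts)
+//		if err != nil {
+//			// handle error
+//			break
+//		}
+//		for _, f := range files {
+//			fmt.Println(f.GetFilename(), f.GetStatus())
+//		}
+//		if resp.NextPage == 0 {
+//			break
+//		}
+//		opts.Page = resp.NextPage
+//	}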
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#list-pull-requests-files +func (s *PullRequestsService) ListFiles(ctx context.Context, owner string, repo string, number int, opts *ListOptions) ([]*CommitFile, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/files", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var commitFiles []*CommitFile + resp, err := s.client.Do(ctx, req, &commitFiles) + if err != nil { + return nil, resp, err + } + + return commitFiles, resp, nil +} + +// IsMerged checks if a pull request has been merged. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#check-if-a-pull-request-has-been-merged +func (s *PullRequestsService) IsMerged(ctx context.Context, owner string, repo string, number int) (bool, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + merged, err := parseBoolResponse(err) + return merged, resp, err +} + +// PullRequestMergeResult represents the result of merging a pull request. +type PullRequestMergeResult struct { + SHA *string `json:"sha,omitempty"` + Merged *bool `json:"merged,omitempty"` + Message *string `json:"message,omitempty"` +} + +// PullRequestOptions lets you define how a pull request will be merged. +type PullRequestOptions struct { + CommitTitle string // Title for the automatic commit message. (Optional.) + SHA string // SHA that pull request head must match to allow merge. (Optional.) + + // The merge method to use. Possible values include: "merge", "squash", and "rebase" with the default being merge. (Optional.) + MergeMethod string + + // If false, an empty string commit message will use the default commit message. If true, an empty string commit message will be used. + DontDefaultIfBlank bool +} + +type pullRequestMergeRequest struct { + CommitMessage *string `json:"commit_message,omitempty"` + CommitTitle string `json:"commit_title,omitempty"` + MergeMethod string `json:"merge_method,omitempty"` + SHA string `json:"sha,omitempty"` +} + +// Merge a pull request. +// commitMessage is an extra detail to append to automatic commit message. 
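+// A minimal usage sketch (assumes an authenticated *github.Client named client;
+// MergeMethod may be "merge", "squash" or "rebase", as noted above):
+//
+//	opts := &github.PullRequestOptions{MergeMethod: "squash"}
+//	result, _, err := client.PullRequests.Merge(ctx, "owner", "repo", 42, "extra detail for the commit message", opts)
+//	if err != nil {
+//		// handle error
+//	}
+//	fmt.Println(result.GetMerged(), result.GetSHA(), result.GetMessage())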
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/pulls#merge-a-pull-request +func (s *PullRequestsService) Merge(ctx context.Context, owner string, repo string, number int, commitMessage string, options *PullRequestOptions) (*PullRequestMergeResult, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/merge", owner, repo, number) + + pullRequestBody := &pullRequestMergeRequest{} + if commitMessage != "" { + pullRequestBody.CommitMessage = &commitMessage + } + if options != nil { + pullRequestBody.CommitTitle = options.CommitTitle + pullRequestBody.MergeMethod = options.MergeMethod + pullRequestBody.SHA = options.SHA + if options.DontDefaultIfBlank && commitMessage == "" { + pullRequestBody.CommitMessage = &commitMessage + } + } + req, err := s.client.NewRequest("PUT", u, pullRequestBody) + if err != nil { + return nil, nil, err + } + + mergeResult := new(PullRequestMergeResult) + resp, err := s.client.Do(ctx, req, mergeResult) + if err != nil { + return nil, resp, err + } + + return mergeResult, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/pulls_comments.go b/vendor/github.com/google/go-github/v56/github/pulls_comments.go new file mode 100644 index 000000000..ea1cf45d1 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/pulls_comments.go @@ -0,0 +1,204 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strings" + "time" +) + +// PullRequestComment represents a comment left on a pull request. +type PullRequestComment struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + InReplyTo *int64 `json:"in_reply_to_id,omitempty"` + Body *string `json:"body,omitempty"` + Path *string `json:"path,omitempty"` + DiffHunk *string `json:"diff_hunk,omitempty"` + PullRequestReviewID *int64 `json:"pull_request_review_id,omitempty"` + Position *int `json:"position,omitempty"` + OriginalPosition *int `json:"original_position,omitempty"` + StartLine *int `json:"start_line,omitempty"` + Line *int `json:"line,omitempty"` + OriginalLine *int `json:"original_line,omitempty"` + OriginalStartLine *int `json:"original_start_line,omitempty"` + Side *string `json:"side,omitempty"` + StartSide *string `json:"start_side,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + OriginalCommitID *string `json:"original_commit_id,omitempty"` + User *User `json:"user,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + // AuthorAssociation is the comment author's relationship to the pull request's repository. + // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". + AuthorAssociation *string `json:"author_association,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + PullRequestURL *string `json:"pull_request_url,omitempty"` + // Can be one of: LINE, FILE from https://docs.github.com/en/rest/pulls/comments?apiVersion=2022-11-28#create-a-review-comment-for-a-pull-request + SubjectType *string `json:"subject_type,omitempty"` +} + +func (p PullRequestComment) String() string { + return Stringify(p) +} + +// PullRequestListCommentsOptions specifies the optional parameters to the +// PullRequestsService.ListComments method. 
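+// A minimal usage sketch (assumes an authenticated *github.Client named client;
+// passing a pull request number of 0, as documented below, lists review comments
+// for the whole repository):
+//
+//	opts := &github.PullRequestListCommentsOptions{
+//		Sort:      "updated",
+//		Direction: "desc",
+//		Since:     time.Now().Add(-24 * time.Hour),
+//	}
+//	comments, _, err := client.PullRequests.ListComments(ctx, "owner", "repo", 0, opts)
+//	if err != nil {
+//		// handle error
+//	}
+//	for _, c := range comments {
+//		fmt.Println(c.GetPath(), c.GetLine(), c.GetBody())
+//	}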
+type PullRequestListCommentsOptions struct { + // Sort specifies how to sort comments. Possible values are: created, updated. + Sort string `url:"sort,omitempty"` + + // Direction in which to sort comments. Possible values are: asc, desc. + Direction string `url:"direction,omitempty"` + + // Since filters comments by time. + Since time.Time `url:"since,omitempty"` + + ListOptions +} + +// ListComments lists all comments on the specified pull request. Specifying a +// pull request number of 0 will return all comments on all pull requests for +// the repository. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#list-review-comments-on-a-pull-request +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#list-review-comments-in-a-repository +func (s *PullRequestsService) ListComments(ctx context.Context, owner, repo string, number int, opts *PullRequestListCommentsOptions) ([]*PullRequestComment, *Response, error) { + var u string + if number == 0 { + u = fmt.Sprintf("repos/%v/%v/pulls/comments", owner, repo) + } else { + u = fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var comments []*PullRequestComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// GetComment fetches the specified pull request comment. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#get-a-review-comment-for-a-pull-request +func (s *PullRequestsService) GetComment(ctx context.Context, owner, repo string, commentID int64) (*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + comment := new(PullRequestComment) + resp, err := s.client.Do(ctx, req, comment) + if err != nil { + return nil, resp, err + } + + return comment, resp, nil +} + +// CreateComment creates a new comment on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#create-a-review-comment-for-a-pull-request +func (s *PullRequestsService) CreateComment(ctx context.Context, owner, repo string, number int, comment *PullRequestComment) (*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + // TODO: remove custom Accept headers when their respective API fully launches. 
+ acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeMultiLineCommentsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + c := new(PullRequestComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// CreateCommentInReplyTo creates a new comment as a reply to an existing pull request comment. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#create-a-review-comment-for-a-pull-request +func (s *PullRequestsService) CreateCommentInReplyTo(ctx context.Context, owner, repo string, number int, body string, commentID int64) (*PullRequestComment, *Response, error) { + comment := &struct { + Body string `json:"body,omitempty"` + InReplyTo int64 `json:"in_reply_to,omitempty"` + }{ + Body: body, + InReplyTo: commentID, + } + u := fmt.Sprintf("repos/%v/%v/pulls/%d/comments", owner, repo, number) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(PullRequestComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// EditComment updates a pull request comment. +// A non-nil comment.Body must be provided. Other comment fields should be left nil. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#update-a-review-comment-for-a-pull-request +func (s *PullRequestsService) EditComment(ctx context.Context, owner, repo string, commentID int64, comment *PullRequestComment) (*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(PullRequestComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// DeleteComment deletes a pull request comment. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/comments#delete-a-review-comment-for-a-pull-request +func (s *PullRequestsService) DeleteComment(ctx context.Context, owner, repo string, commentID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%d", owner, repo, commentID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/pulls_reviewers.go b/vendor/github.com/google/go-github/v56/github/pulls_reviewers.go new file mode 100644 index 000000000..1c336540b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/pulls_reviewers.go @@ -0,0 +1,80 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ReviewersRequest specifies users and teams for a pull request review request. +type ReviewersRequest struct { + NodeID *string `json:"node_id,omitempty"` + Reviewers []string `json:"reviewers,omitempty"` + TeamReviewers []string `json:"team_reviewers,omitempty"` +} + +// Reviewers represents reviewers of a pull request. +type Reviewers struct { + Users []*User `json:"users,omitempty"` + Teams []*Team `json:"teams,omitempty"` +} + +// RequestReviewers creates a review request for the provided reviewers for the specified pull request. 
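+// A minimal usage sketch (assumes an authenticated *github.Client named client;
+// the login and team slug are placeholders):
+//
+//	req := github.ReviewersRequest{
+//		Reviewers:     []string{"octocat"},
+//		TeamReviewers: []string{"platform-team"},
+//	}
+//	if _, _, err := client.PullRequests.RequestReviewers(ctx, "owner", "repo", 42, req); err != nil {
+//		// handle error
+//	}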
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/review-requests#request-reviewers-for-a-pull-request +func (s *PullRequestsService) RequestReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*PullRequest, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) + req, err := s.client.NewRequest("POST", u, &reviewers) + if err != nil { + return nil, nil, err + } + + r := new(PullRequest) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// ListReviewers lists reviewers whose reviews have been requested on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/review-requests#list-requested-reviewers-for-a-pull-request +func (s *PullRequestsService) ListReviewers(ctx context.Context, owner, repo string, number int, opts *ListOptions) (*Reviewers, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/requested_reviewers", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + reviewers := new(Reviewers) + resp, err := s.client.Do(ctx, req, reviewers) + if err != nil { + return nil, resp, err + } + + return reviewers, resp, nil +} + +// RemoveReviewers removes the review request for the provided reviewers for the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/review-requests#remove-requested-reviewers-from-a-pull-request +func (s *PullRequestsService) RemoveReviewers(ctx context.Context, owner, repo string, number int, reviewers ReviewersRequest) (*Response, error) { + u := fmt.Sprintf("repos/%s/%s/pulls/%d/requested_reviewers", owner, repo, number) + req, err := s.client.NewRequest("DELETE", u, &reviewers) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/pulls_reviews.go b/vendor/github.com/google/go-github/v56/github/pulls_reviews.go new file mode 100644 index 000000000..addcce468 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/pulls_reviews.go @@ -0,0 +1,313 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "errors" + "fmt" +) + +var ErrMixedCommentStyles = errors.New("cannot use both position and side/line form comments") + +// PullRequestReview represents a review of a pull request. +type PullRequestReview struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + User *User `json:"user,omitempty"` + Body *string `json:"body,omitempty"` + SubmittedAt *Timestamp `json:"submitted_at,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + PullRequestURL *string `json:"pull_request_url,omitempty"` + State *string `json:"state,omitempty"` + // AuthorAssociation is the comment author's relationship to the issue's repository. + // Possible values are "COLLABORATOR", "CONTRIBUTOR", "FIRST_TIMER", "FIRST_TIME_CONTRIBUTOR", "MEMBER", "OWNER", or "NONE". + AuthorAssociation *string `json:"author_association,omitempty"` +} + +func (p PullRequestReview) String() string { + return Stringify(p) +} + +// DraftReviewComment represents a comment part of the review. 
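+// A minimal sketch of the side/line ("comfort fade") form used with CreateReview
+// below (assumes an authenticated *github.Client named client; when Side/Line are
+// set on a comment, its Position must stay nil or ErrMixedCommentStyles is returned):
+//
+//	review := &github.PullRequestReviewRequest{
+//		Body:  github.String("A couple of suggestions."),
+//		Event: github.String("COMMENT"),
+//		Comments: []*github.DraftReviewComment{{
+//			Path: github.String("main.go"),
+//			Side: github.String("RIGHT"),
+//			Line: github.Int(12),
+//			Body: github.String("Consider handling this error."),
+//		}},
+//	}
+//	_, _, err := client.PullRequests.CreateReview(ctx, "owner", "repo", 42, review)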
+type DraftReviewComment struct { + Path *string `json:"path,omitempty"` + Position *int `json:"position,omitempty"` + Body *string `json:"body,omitempty"` + + // The new comfort-fade-preview fields + StartSide *string `json:"start_side,omitempty"` + Side *string `json:"side,omitempty"` + StartLine *int `json:"start_line,omitempty"` + Line *int `json:"line,omitempty"` +} + +func (c DraftReviewComment) String() string { + return Stringify(c) +} + +// PullRequestReviewRequest represents a request to create a review. +type PullRequestReviewRequest struct { + NodeID *string `json:"node_id,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + Body *string `json:"body,omitempty"` + Event *string `json:"event,omitempty"` + Comments []*DraftReviewComment `json:"comments,omitempty"` +} + +func (r PullRequestReviewRequest) String() string { + return Stringify(r) +} + +func (r *PullRequestReviewRequest) isComfortFadePreview() (bool, error) { + var isCF *bool + for _, comment := range r.Comments { + if comment == nil { + continue + } + hasPos := comment.Position != nil + hasComfortFade := (comment.StartSide != nil) || (comment.Side != nil) || + (comment.StartLine != nil) || (comment.Line != nil) + + switch { + case hasPos && hasComfortFade: + return false, ErrMixedCommentStyles + case hasPos && isCF != nil && *isCF: + return false, ErrMixedCommentStyles + case hasComfortFade && isCF != nil && !*isCF: + return false, ErrMixedCommentStyles + } + isCF = &hasComfortFade + } + if isCF != nil { + return *isCF, nil + } + return false, nil +} + +// PullRequestReviewDismissalRequest represents a request to dismiss a review. +type PullRequestReviewDismissalRequest struct { + Message *string `json:"message,omitempty"` +} + +func (r PullRequestReviewDismissalRequest) String() string { + return Stringify(r) +} + +// ListReviews lists all reviews on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#list-reviews-for-a-pull-request +func (s *PullRequestsService) ListReviews(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var reviews []*PullRequestReview + resp, err := s.client.Do(ctx, req, &reviews) + if err != nil { + return nil, resp, err + } + + return reviews, resp, nil +} + +// GetReview fetches the specified pull request review. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#get-a-review-for-a-pull-request +func (s *PullRequestsService) GetReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + review := new(PullRequestReview) + resp, err := s.client.Do(ctx, req, review) + if err != nil { + return nil, resp, err + } + + return review, resp, nil +} + +// DeletePendingReview deletes the specified pull request pending review. 
+// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#delete-a-pending-review-for-a-pull-request +func (s *PullRequestsService) DeletePendingReview(ctx context.Context, owner, repo string, number int, reviewID int64) (*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, nil, err + } + + review := new(PullRequestReview) + resp, err := s.client.Do(ctx, req, review) + if err != nil { + return nil, resp, err + } + + return review, resp, nil +} + +// ListReviewComments lists all the comments for the specified review. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#list-comments-for-a-pull-request-review +func (s *PullRequestsService) ListReviewComments(ctx context.Context, owner, repo string, number int, reviewID int64, opts *ListOptions) ([]*PullRequestComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/comments", owner, repo, number, reviewID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var comments []*PullRequestComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// CreateReview creates a new review on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#create-a-review-for-a-pull-request +// +// In order to use multi-line comments, you must use the "comfort fade" preview. +// This replaces the use of the "Position" field in comments with 4 new fields: +// +// [Start]Side, and [Start]Line. +// +// These new fields must be used for ALL comments (including single-line), +// with the following restrictions (empirically observed, so subject to change). +// +// For single-line "comfort fade" comments, you must use: +// +// Path: &path, // as before +// Body: &body, // as before +// Side: &"RIGHT" (or "LEFT") +// Line: &123, // NOT THE SAME AS POSITION, this is an actual line number. +// +// If StartSide or StartLine is used with single-line comments, a 422 is returned. +// +// For multi-line "comfort fade" comments, you must use: +// +// Path: &path, // as before +// Body: &body, // as before +// StartSide: &"RIGHT" (or "LEFT") +// Side: &"RIGHT" (or "LEFT") +// StartLine: &120, +// Line: &125, +// +// Suggested edits are made by commenting on the lines to replace, and including the +// suggested edit in a block like this (it may be surrounded in non-suggestion markdown): +// +// ```suggestion +// Use this instead. +// It is waaaaaay better. +// ``` +func (s *PullRequestsService) CreateReview(ctx context.Context, owner, repo string, number int, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews", owner, repo, number) + + req, err := s.client.NewRequest("POST", u, review) + if err != nil { + return nil, nil, err + } + + // Detect which style of review comment is being used. + if isCF, err := review.isComfortFadePreview(); err != nil { + return nil, nil, err + } else if isCF { + // If the review comments are using the comfort fade preview fields, + // then pass the comfort fade header. 
+ req.Header.Set("Accept", mediaTypeMultiLineCommentsPreview) + } + + r := new(PullRequestReview) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// UpdateReview updates the review summary on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#update-a-review-for-a-pull-request +func (s *PullRequestsService) UpdateReview(ctx context.Context, owner, repo string, number int, reviewID int64, body string) (*PullRequestReview, *Response, error) { + opts := &struct { + Body string `json:"body"` + }{Body: body} + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d", owner, repo, number, reviewID) + + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + review := &PullRequestReview{} + resp, err := s.client.Do(ctx, req, review) + if err != nil { + return nil, resp, err + } + + return review, resp, nil +} + +// SubmitReview submits a specified review on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#submit-a-review-for-a-pull-request +func (s *PullRequestsService) SubmitReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewRequest) (*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/events", owner, repo, number, reviewID) + + req, err := s.client.NewRequest("POST", u, review) + if err != nil { + return nil, nil, err + } + + r := new(PullRequestReview) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// DismissReview dismisses a specified review on the specified pull request. +// +// GitHub API docs: https://docs.github.com/en/rest/pulls/reviews#dismiss-a-review-for-a-pull-request +func (s *PullRequestsService) DismissReview(ctx context.Context, owner, repo string, number int, reviewID int64, review *PullRequestReviewDismissalRequest) (*PullRequestReview, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/%d/reviews/%d/dismissals", owner, repo, number, reviewID) + + req, err := s.client.NewRequest("PUT", u, review) + if err != nil { + return nil, nil, err + } + + r := new(PullRequestReview) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/pulls_threads.go b/vendor/github.com/google/go-github/v56/github/pulls_threads.go new file mode 100644 index 000000000..23e924d88 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/pulls_threads.go @@ -0,0 +1,17 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +// PullRequestThread represents a thread of comments on a pull request. +type PullRequestThread struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Comments []*PullRequestComment `json:"comments,omitempty"` +} + +func (p PullRequestThread) String() string { + return Stringify(p) +} diff --git a/vendor/github.com/google/go-github/v56/github/reactions.go b/vendor/github.com/google/go-github/v56/github/reactions.go new file mode 100644 index 000000000..14d193ae8 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/reactions.go @@ -0,0 +1,520 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. 
+// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" +) + +// ReactionsService provides access to the reactions-related functions in the +// GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions +type ReactionsService service + +// Reaction represents a GitHub reaction. +type Reaction struct { + // ID is the Reaction ID. + ID *int64 `json:"id,omitempty"` + User *User `json:"user,omitempty"` + NodeID *string `json:"node_id,omitempty"` + // Content is the type of reaction. + // Possible values are: + // "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". + Content *string `json:"content,omitempty"` +} + +// Reactions represents a summary of GitHub reactions. +type Reactions struct { + TotalCount *int `json:"total_count,omitempty"` + PlusOne *int `json:"+1,omitempty"` + MinusOne *int `json:"-1,omitempty"` + Laugh *int `json:"laugh,omitempty"` + Confused *int `json:"confused,omitempty"` + Heart *int `json:"heart,omitempty"` + Hooray *int `json:"hooray,omitempty"` + Rocket *int `json:"rocket,omitempty"` + Eyes *int `json:"eyes,omitempty"` + URL *string `json:"url,omitempty"` +} + +func (r Reaction) String() string { + return Stringify(r) +} + +// ListCommentReactionOptions specifies the optional parameters to the +// ReactionsService.ListCommentReactions method. +type ListCommentReactionOptions struct { + // Content restricts the returned comment reactions to only those with the given type. + // Omit this parameter to list all reactions to a commit comment. + // Possible values are: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". + Content string `url:"content,omitempty"` + + ListOptions +} + +// ListCommentReactions lists the reactions for a commit comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-a-commit-comment +func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListCommentReactionOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreateCommentReaction creates a reaction for a commit comment. +// Note that if you have already created a reaction of type content, the +// previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-commit-comment +func (s *ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions", owner, repo, id) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. 
+ req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteCommentReaction deletes the reaction for a commit comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-a-commit-comment-reaction +func (s *ReactionsService) DeleteCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v/reactions/%v", owner, repo, commentID, reactionID) + + return s.deleteReaction(ctx, u) +} + +// DeleteCommentReactionByID deletes the reaction for a commit comment by repository ID. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-a-commit-comment-reaction +func (s *ReactionsService) DeleteCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { + u := fmt.Sprintf("repositories/%v/comments/%v/reactions/%v", repoID, commentID, reactionID) + + return s.deleteReaction(ctx, u) +} + +// ListIssueReactions lists the reactions for an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-an-issue +func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo string, number int, opts *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreateIssueReaction creates a reaction for an issue. +// Note that if you have already created a reaction of type content, the +// previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-an-issue +func (s *ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo string, number int, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/%v/reactions", owner, repo, number) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteIssueReaction deletes the reaction to an issue. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-an-issue-reaction +func (s *ReactionsService) DeleteIssueReaction(ctx context.Context, owner, repo string, issueNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/issues/%v/reactions/%v", owner, repo, issueNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +// DeleteIssueReactionByID deletes the reaction to an issue by repository ID. 
+// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-an-issue-reaction +func (s *ReactionsService) DeleteIssueReactionByID(ctx context.Context, repoID, issueNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repositories/%v/issues/%v/reactions/%v", repoID, issueNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +// ListIssueCommentReactions lists the reactions for an issue comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-an-issue-comment +func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreateIssueCommentReaction creates a reaction for an issue comment. +// Note that if you have already created a reaction of type content, the +// previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-an-issue-comment +func (s *ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions", owner, repo, id) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteIssueCommentReaction deletes the reaction to an issue comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-an-issue-comment-reaction +func (s *ReactionsService) DeleteIssueCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/issues/comments/%v/reactions/%v", owner, repo, commentID, reactionID) + + return s.deleteReaction(ctx, url) +} + +// DeleteIssueCommentReactionByID deletes the reaction to an issue comment by repository ID. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-an-issue-comment-reaction +func (s *ReactionsService) DeleteIssueCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repositories/%v/issues/comments/%v/reactions/%v", repoID, commentID, reactionID) + + return s.deleteReaction(ctx, url) +} + +// ListPullRequestCommentReactions lists the reactions for a pull request review comment. 
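+// A minimal usage sketch (assumes an authenticated *github.Client named client and
+// a review comment ID, commentID, obtained from the pulls API):
+//
+//	reactions, _, err := client.Reactions.ListPullRequestCommentReactions(ctx, "owner", "repo", commentID, nil)
+//	if err != nil {
+//		// handle error
+//	}
+//	for _, r := range reactions {
+//		fmt.Println(r.GetContent(), r.GetUser().GetLogin())
+//	}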
+// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-a-pull-request-review-comment +func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreatePullRequestCommentReaction creates a reaction for a pull request review comment. +// Note that if you have already created a reaction of type content, the +// previously created reaction will be returned with Status: 200 OK. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-pull-request-review-comment +func (s *ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, owner, repo string, id int64, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions", owner, repo, id) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeletePullRequestCommentReaction deletes the reaction to a pull request review comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-a-pull-request-comment-reaction +func (s *ReactionsService) DeletePullRequestCommentReaction(ctx context.Context, owner, repo string, commentID, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/pulls/comments/%v/reactions/%v", owner, repo, commentID, reactionID) + + return s.deleteReaction(ctx, url) +} + +// DeletePullRequestCommentReactionByID deletes the reaction to a pull request review comment by repository ID. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-a-pull-request-comment-reaction +func (s *ReactionsService) DeletePullRequestCommentReactionByID(ctx context.Context, repoID, commentID, reactionID int64) (*Response, error) { + url := fmt.Sprintf("repositories/%v/pulls/comments/%v/reactions/%v", repoID, commentID, reactionID) + + return s.deleteReaction(ctx, url) +} + +// ListTeamDiscussionReactions lists the reactions for a team discussion. 
+// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-a-team-discussion-legacy +func (s *ReactionsService) ListTeamDiscussionReactions(ctx context.Context, teamID int64, discussionNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// CreateTeamDiscussionReaction creates a reaction for a team discussion. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-team-discussion-legacy +func (s *ReactionsService) CreateTeamDiscussionReaction(ctx context.Context, teamID int64, discussionNumber int, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/reactions", teamID, discussionNumber) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteTeamDiscussionReaction deletes the reaction to a team discussion. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-team-discussion-reaction +func (s *ReactionsService) DeleteTeamDiscussionReaction(ctx context.Context, org, teamSlug string, discussionNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/reactions/%v", org, teamSlug, discussionNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +// DeleteTeamDiscussionReactionByOrgIDAndTeamID deletes the reaction to a team discussion by organization ID and team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-team-discussion +func (s *ReactionsService) DeleteTeamDiscussionReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/reactions/%v", orgID, teamID, discussionNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +// ListTeamDiscussionCommentReactions lists the reactions for a team discussion comment. 
+// +// GitHub API docs: https://docs.github.com/en/rest/reactions#list-reactions-for-a-team-discussion-comment-legacy +func (s *ReactionsService) ListTeamDiscussionCommentReactions(ctx context.Context, teamID int64, discussionNumber, commentNumber int, opts *ListOptions) ([]*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var m []*Reaction + resp, err := s.client.Do(ctx, req, &m) + if err != nil { + return nil, resp, err + } + return m, resp, nil +} + +// CreateTeamDiscussionCommentReaction creates a reaction for a team discussion comment. +// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-team-discussion-comment-legacy +func (s *ReactionsService) CreateTeamDiscussionCommentReaction(ctx context.Context, teamID int64, discussionNumber, commentNumber int, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("teams/%v/discussions/%v/comments/%v/reactions", teamID, discussionNumber, commentNumber) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + +// DeleteTeamDiscussionCommentReaction deletes the reaction to a team discussion comment. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#delete-team-discussion-comment-reaction +func (s *ReactionsService) DeleteTeamDiscussionCommentReaction(ctx context.Context, org, teamSlug string, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v/reactions/%v", org, teamSlug, discussionNumber, commentNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +// DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID deletes the reaction to a team discussion comment by organization ID and team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-team-discussion-comment +func (s *ReactionsService) DeleteTeamDiscussionCommentReactionByOrgIDAndTeamID(ctx context.Context, orgID, teamID, discussionNumber, commentNumber int, reactionID int64) (*Response, error) { + url := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v/reactions/%v", orgID, teamID, discussionNumber, commentNumber, reactionID) + + return s.deleteReaction(ctx, url) +} + +func (s *ReactionsService) deleteReaction(ctx context.Context, url string) (*Response, error) { + req, err := s.client.NewRequest(http.MethodDelete, url, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + return s.client.Do(ctx, req, nil) +} + +// Create a reaction to a release. +// Note that a response with a Status: 200 OK means that you already +// added the reaction type to this release. 
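+// A minimal usage sketch (assumes an authenticated *github.Client named client and
+// a releaseID obtained from the repositories API; the content value, "rocket" here,
+// must be one of the reaction types listed below):
+//
+//	reaction, _, err := client.Reactions.CreateReleaseReaction(ctx, "owner", "repo", releaseID, "rocket")
+//	if err != nil {
+//		// handle error
+//	}
+//	fmt.Println(reaction.GetContent())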
+// The content should have one of the following values: "+1", "-1", "laugh", "confused", "heart", "hooray", "rocket", or "eyes". +// +// GitHub API docs: https://docs.github.com/en/rest/reactions#create-reaction-for-a-release +func (s *ReactionsService) CreateReleaseReaction(ctx context.Context, owner, repo string, releaseID int64, content string) (*Reaction, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/releases/%v/reactions", owner, repo, releaseID) + + body := &Reaction{Content: String(content)} + req, err := s.client.NewRequest("POST", u, body) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeReactionsPreview) + + m := &Reaction{} + resp, err := s.client.Do(ctx, req, m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos.go b/vendor/github.com/google/go-github/v56/github/repos.go new file mode 100644 index 000000000..f2059926e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos.go @@ -0,0 +1,2174 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "net/url" + "strings" +) + +const githubBranchNotProtected string = "Branch not protected" + +var ErrBranchNotProtected = errors.New("branch is not protected") + +// RepositoriesService handles communication with the repository related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/ +type RepositoriesService service + +// Repository represents a GitHub repository. +type Repository struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Owner *User `json:"owner,omitempty"` + Name *string `json:"name,omitempty"` + FullName *string `json:"full_name,omitempty"` + Description *string `json:"description,omitempty"` + Homepage *string `json:"homepage,omitempty"` + CodeOfConduct *CodeOfConduct `json:"code_of_conduct,omitempty"` + DefaultBranch *string `json:"default_branch,omitempty"` + MasterBranch *string `json:"master_branch,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PushedAt *Timestamp `json:"pushed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CloneURL *string `json:"clone_url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + MirrorURL *string `json:"mirror_url,omitempty"` + SSHURL *string `json:"ssh_url,omitempty"` + SVNURL *string `json:"svn_url,omitempty"` + Language *string `json:"language,omitempty"` + Fork *bool `json:"fork,omitempty"` + ForksCount *int `json:"forks_count,omitempty"` + NetworkCount *int `json:"network_count,omitempty"` + OpenIssuesCount *int `json:"open_issues_count,omitempty"` + OpenIssues *int `json:"open_issues,omitempty"` // Deprecated: Replaced by OpenIssuesCount. For backward compatibility OpenIssues is still populated. + StargazersCount *int `json:"stargazers_count,omitempty"` + SubscribersCount *int `json:"subscribers_count,omitempty"` + WatchersCount *int `json:"watchers_count,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility WatchersCount is still populated. + Watchers *int `json:"watchers,omitempty"` // Deprecated: Replaced by StargazersCount. For backward compatibility Watchers is still populated. 
+ Size *int `json:"size,omitempty"` + AutoInit *bool `json:"auto_init,omitempty"` + Parent *Repository `json:"parent,omitempty"` + Source *Repository `json:"source,omitempty"` + TemplateRepository *Repository `json:"template_repository,omitempty"` + Organization *Organization `json:"organization,omitempty"` + Permissions map[string]bool `json:"permissions,omitempty"` + AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` + AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` + AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` + AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` + AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` + AllowForking *bool `json:"allow_forking,omitempty"` + WebCommitSignoffRequired *bool `json:"web_commit_signoff_required,omitempty"` + DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` + UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` + SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "COMMIT_OR_PR_TITLE" + SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "COMMIT_MESSAGES", "BLANK" + MergeCommitTitle *string `json:"merge_commit_title,omitempty"` // Can be one of: "PR_TITLE", "MERGE_MESSAGE" + MergeCommitMessage *string `json:"merge_commit_message,omitempty"` // Can be one of: "PR_BODY", "PR_TITLE", "BLANK" + Topics []string `json:"topics,omitempty"` + Archived *bool `json:"archived,omitempty"` + Disabled *bool `json:"disabled,omitempty"` + + // Only provided when using RepositoriesService.Get while in preview + License *License `json:"license,omitempty"` + + // Additional mutable fields when creating and editing a repository + Private *bool `json:"private,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + HasPages *bool `json:"has_pages,omitempty"` + HasProjects *bool `json:"has_projects,omitempty"` + HasDownloads *bool `json:"has_downloads,omitempty"` + HasDiscussions *bool `json:"has_discussions,omitempty"` + IsTemplate *bool `json:"is_template,omitempty"` + LicenseTemplate *string `json:"license_template,omitempty"` + GitignoreTemplate *string `json:"gitignore_template,omitempty"` + + // Options for configuring Advanced Security and Secret Scanning + SecurityAndAnalysis *SecurityAndAnalysis `json:"security_and_analysis,omitempty"` + + // Creating an organization repository. Required for non-owners. 
+ TeamID *int64 `json:"team_id,omitempty"` + + // API URLs + URL *string `json:"url,omitempty"` + ArchiveURL *string `json:"archive_url,omitempty"` + AssigneesURL *string `json:"assignees_url,omitempty"` + BlobsURL *string `json:"blobs_url,omitempty"` + BranchesURL *string `json:"branches_url,omitempty"` + CollaboratorsURL *string `json:"collaborators_url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + CommitsURL *string `json:"commits_url,omitempty"` + CompareURL *string `json:"compare_url,omitempty"` + ContentsURL *string `json:"contents_url,omitempty"` + ContributorsURL *string `json:"contributors_url,omitempty"` + DeploymentsURL *string `json:"deployments_url,omitempty"` + DownloadsURL *string `json:"downloads_url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + ForksURL *string `json:"forks_url,omitempty"` + GitCommitsURL *string `json:"git_commits_url,omitempty"` + GitRefsURL *string `json:"git_refs_url,omitempty"` + GitTagsURL *string `json:"git_tags_url,omitempty"` + HooksURL *string `json:"hooks_url,omitempty"` + IssueCommentURL *string `json:"issue_comment_url,omitempty"` + IssueEventsURL *string `json:"issue_events_url,omitempty"` + IssuesURL *string `json:"issues_url,omitempty"` + KeysURL *string `json:"keys_url,omitempty"` + LabelsURL *string `json:"labels_url,omitempty"` + LanguagesURL *string `json:"languages_url,omitempty"` + MergesURL *string `json:"merges_url,omitempty"` + MilestonesURL *string `json:"milestones_url,omitempty"` + NotificationsURL *string `json:"notifications_url,omitempty"` + PullsURL *string `json:"pulls_url,omitempty"` + ReleasesURL *string `json:"releases_url,omitempty"` + StargazersURL *string `json:"stargazers_url,omitempty"` + StatusesURL *string `json:"statuses_url,omitempty"` + SubscribersURL *string `json:"subscribers_url,omitempty"` + SubscriptionURL *string `json:"subscription_url,omitempty"` + TagsURL *string `json:"tags_url,omitempty"` + TreesURL *string `json:"trees_url,omitempty"` + TeamsURL *string `json:"teams_url,omitempty"` + + // TextMatches is only populated from search results that request text matches + // See: search.go and https://docs.github.com/en/rest/search/#text-match-metadata + TextMatches []*TextMatch `json:"text_matches,omitempty"` + + // Visibility is only used for Create and Edit endpoints. The visibility field + // overrides the field parameter when both are used. + // Can be one of public, private or internal. + Visibility *string `json:"visibility,omitempty"` + + // RoleName is only returned by the API 'check team permissions for a repository'. + // See: teams.go (IsTeamRepoByID) https://docs.github.com/en/rest/teams/teams#check-team-permissions-for-a-repository + RoleName *string `json:"role_name,omitempty"` +} + +func (r Repository) String() string { + return Stringify(r) +} + +// BranchListOptions specifies the optional parameters to the +// RepositoriesService.ListBranches method. +type BranchListOptions struct { + // Setting to true returns only protected branches. + // When set to false, only unprotected branches are returned. + // Omitting this parameter returns all branches. + // Default: nil + Protected *bool `url:"protected,omitempty"` + + ListOptions +} + +// RepositoryListOptions specifies the optional parameters to the +// RepositoriesService.List method. +type RepositoryListOptions struct { + // Visibility of repositories to list. Can be one of all, public, or private. 
+ // Default: all + Visibility string `url:"visibility,omitempty"` + + // List repos of given affiliation[s]. + // Comma-separated list of values. Can include: + // * owner: Repositories that are owned by the authenticated user. + // * collaborator: Repositories that the user has been added to as a + // collaborator. + // * organization_member: Repositories that the user has access to through + // being a member of an organization. This includes every repository on + // every team that the user is on. + // Default: owner,collaborator,organization_member + Affiliation string `url:"affiliation,omitempty"` + + // Type of repositories to list. + // Can be one of all, owner, public, private, member. Default: all + // Will cause a 422 error if used in the same request as visibility or + // affiliation. + Type string `url:"type,omitempty"` + + // How to sort the repository list. Can be one of created, updated, pushed, + // full_name. Default: full_name + Sort string `url:"sort,omitempty"` + + // Direction in which to sort repositories. Can be one of asc or desc. + // Default: when using full_name: asc; otherwise desc + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// SecurityAndAnalysis specifies the optional advanced security features +// that are enabled on a given repository. +type SecurityAndAnalysis struct { + AdvancedSecurity *AdvancedSecurity `json:"advanced_security,omitempty"` + SecretScanning *SecretScanning `json:"secret_scanning,omitempty"` + SecretScanningPushProtection *SecretScanningPushProtection `json:"secret_scanning_push_protection,omitempty"` + DependabotSecurityUpdates *DependabotSecurityUpdates `json:"dependabot_security_updates,omitempty"` +} + +func (s SecurityAndAnalysis) String() string { + return Stringify(s) +} + +// AdvancedSecurity specifies the state of advanced security on a repository. +// +// GitHub API docs: https://docs.github.com/en/github/getting-started-with-github/learning-about-github/about-github-advanced-security +type AdvancedSecurity struct { + Status *string `json:"status,omitempty"` +} + +func (a AdvancedSecurity) String() string { + return Stringify(a) +} + +// SecretScanning specifies the state of secret scanning on a repository. +// +// GitHub API docs: https://docs.github.com/en/code-security/secret-security/about-secret-scanning +type SecretScanning struct { + Status *string `json:"status,omitempty"` +} + +func (s SecretScanning) String() string { + return Stringify(s) +} + +// SecretScanningPushProtection specifies the state of secret scanning push protection on a repository. +// +// GitHub API docs: https://docs.github.com/en/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns +type SecretScanningPushProtection struct { + Status *string `json:"status,omitempty"` +} + +func (s SecretScanningPushProtection) String() string { + return Stringify(s) +} + +// DependabotSecurityUpdates specifies the state of Dependabot security updates on a repository. +// +// GitHub API docs: https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates +type DependabotSecurityUpdates struct { + Status *string `json:"status,omitempty"` +} + +func (d DependabotSecurityUpdates) String() string { + return Stringify(d) +} + +// List the repositories for a user. Passing the empty string will list +// repositories for the authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repositories-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repositories-for-a-user +func (s *RepositoriesService) List(ctx context.Context, user string, opts *RepositoryListOptions) ([]*Repository, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/repos", user) + } else { + u = "user/repos" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeTopicsPreview, mediaTypeRepositoryVisibilityPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// RepositoryListByOrgOptions specifies the optional parameters to the +// RepositoriesService.ListByOrg method. +type RepositoryListByOrgOptions struct { + // Type of repositories to list. Possible values are: all, public, private, + // forks, sources, member. Default is "all". + Type string `url:"type,omitempty"` + + // How to sort the repository list. Can be one of created, updated, pushed, + // full_name. Default is "created". + Sort string `url:"sort,omitempty"` + + // Direction in which to sort repositories. Can be one of asc or desc. + // Default when using full_name: asc; otherwise desc. + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// ListByOrg lists the repositories for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-organization-repositories +func (s *RepositoriesService) ListByOrg(ctx context.Context, org string, opts *RepositoryListByOrgOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("orgs/%v/repos", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeTopicsPreview, mediaTypeRepositoryVisibilityPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// RepositoryListAllOptions specifies the optional parameters to the +// RepositoriesService.ListAll method. +type RepositoryListAllOptions struct { + // ID of the last repository seen + Since int64 `url:"since,omitempty"` +} + +// ListAll lists all GitHub repositories in the order that they were created. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-public-repositories +func (s *RepositoriesService) ListAll(ctx context.Context, opts *RepositoryListAllOptions) ([]*Repository, *Response, error) { + u, err := addOptions("repositories", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// createRepoRequest is a subset of Repository and is used internally +// by Create to pass only the known fields for the endpoint. 
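// Illustrative usage sketch, not part of the vendored repos.go or of this diff:
// a minimal example of paging through RepositoriesService.List with the
// RepositoryListOptions defined above. The module path (v56 assumed; this
// change vendors both v55 and v56), the "octocat" user and the option values
// are placeholder assumptions.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil) // unauthenticated is enough for public repositories

	opts := &github.RepositoryListOptions{
		Type:        "owner",   // cannot be combined with Visibility/Affiliation (422)
		Sort:        "updated",
		ListOptions: github.ListOptions{PerPage: 50},
	}
	for {
		repos, resp, err := client.Repositories.List(ctx, "octocat", opts)
		if err != nil {
			log.Fatal(err)
		}
		for _, r := range repos {
			fmt.Println(r.GetFullName())
		}
		if resp.NextPage == 0 {
			break // no more pages
		}
		opts.Page = resp.NextPage
	}
}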
+// +// See https://github.com/google/go-github/issues/1014 for more +// information. +type createRepoRequest struct { + // Name is required when creating a repo. + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + Homepage *string `json:"homepage,omitempty"` + + Private *bool `json:"private,omitempty"` + Visibility *string `json:"visibility,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasProjects *bool `json:"has_projects,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + HasDiscussions *bool `json:"has_discussions,omitempty"` + IsTemplate *bool `json:"is_template,omitempty"` + + // Creating an organization repository. Required for non-owners. + TeamID *int64 `json:"team_id,omitempty"` + + AutoInit *bool `json:"auto_init,omitempty"` + GitignoreTemplate *string `json:"gitignore_template,omitempty"` + LicenseTemplate *string `json:"license_template,omitempty"` + AllowSquashMerge *bool `json:"allow_squash_merge,omitempty"` + AllowMergeCommit *bool `json:"allow_merge_commit,omitempty"` + AllowRebaseMerge *bool `json:"allow_rebase_merge,omitempty"` + AllowUpdateBranch *bool `json:"allow_update_branch,omitempty"` + AllowAutoMerge *bool `json:"allow_auto_merge,omitempty"` + AllowForking *bool `json:"allow_forking,omitempty"` + DeleteBranchOnMerge *bool `json:"delete_branch_on_merge,omitempty"` + UseSquashPRTitleAsDefault *bool `json:"use_squash_pr_title_as_default,omitempty"` + SquashMergeCommitTitle *string `json:"squash_merge_commit_title,omitempty"` + SquashMergeCommitMessage *string `json:"squash_merge_commit_message,omitempty"` + MergeCommitTitle *string `json:"merge_commit_title,omitempty"` + MergeCommitMessage *string `json:"merge_commit_message,omitempty"` +} + +// Create a new repository. If an organization is specified, the new +// repository will be created under that org. If the empty string is +// specified, it will be created for the authenticated user. +// +// Note that only a subset of the repo fields are used and repo must +// not be nil. +// +// Also note that this method will return the response without actually +// waiting for GitHub to finish creating the repository and letting the +// changes propagate throughout its servers. You may set up a loop with +// exponential back-off to verify repository's creation. 
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#create-a-repository-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#create-an-organization-repository +func (s *RepositoriesService) Create(ctx context.Context, org string, repo *Repository) (*Repository, *Response, error) { + var u string + if org != "" { + u = fmt.Sprintf("orgs/%v/repos", org) + } else { + u = "user/repos" + } + + repoReq := &createRepoRequest{ + Name: repo.Name, + Description: repo.Description, + Homepage: repo.Homepage, + Private: repo.Private, + Visibility: repo.Visibility, + HasIssues: repo.HasIssues, + HasProjects: repo.HasProjects, + HasWiki: repo.HasWiki, + HasDiscussions: repo.HasDiscussions, + IsTemplate: repo.IsTemplate, + TeamID: repo.TeamID, + AutoInit: repo.AutoInit, + GitignoreTemplate: repo.GitignoreTemplate, + LicenseTemplate: repo.LicenseTemplate, + AllowSquashMerge: repo.AllowSquashMerge, + AllowMergeCommit: repo.AllowMergeCommit, + AllowRebaseMerge: repo.AllowRebaseMerge, + AllowUpdateBranch: repo.AllowUpdateBranch, + AllowAutoMerge: repo.AllowAutoMerge, + AllowForking: repo.AllowForking, + DeleteBranchOnMerge: repo.DeleteBranchOnMerge, + UseSquashPRTitleAsDefault: repo.UseSquashPRTitleAsDefault, + SquashMergeCommitTitle: repo.SquashMergeCommitTitle, + SquashMergeCommitMessage: repo.SquashMergeCommitMessage, + MergeCommitTitle: repo.MergeCommitTitle, + MergeCommitMessage: repo.MergeCommitMessage, + } + + req, err := s.client.NewRequest("POST", u, repoReq) + if err != nil { + return nil, nil, err + } + + acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// TemplateRepoRequest represents a request to create a repository from a template. +type TemplateRepoRequest struct { + // Name is required when creating a repo. + Name *string `json:"name,omitempty"` + Owner *string `json:"owner,omitempty"` + Description *string `json:"description,omitempty"` + + IncludeAllBranches *bool `json:"include_all_branches,omitempty"` + Private *bool `json:"private,omitempty"` +} + +// CreateFromTemplate generates a repository from a template. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#create-a-repository-using-a-template +func (s *RepositoriesService) CreateFromTemplate(ctx context.Context, templateOwner, templateRepo string, templateRepoReq *TemplateRepoRequest) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/generate", templateOwner, templateRepo) + + req, err := s.client.NewRequest("POST", u, templateRepoReq) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeRepositoryTemplatePreview) + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// Get fetches a repository. 
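// Illustrative usage sketch, not part of the vendored repos.go: the
// "create, then poll with exponential back-off" pattern suggested in the
// Create doc comment above. The module path (v56), the GITHUB_TOKEN env var
// and the repository name are assumptions for illustration only.

package main

import (
	"context"
	"log"
	"os"
	"time"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	// An empty org string creates the repository for the authenticated user.
	created, _, err := client.Repositories.Create(ctx, "", &github.Repository{
		Name:     github.String("example-repo"),
		Private:  github.Bool(true),
		AutoInit: github.Bool(true),
	})
	if err != nil {
		log.Fatal(err)
	}

	// The repository may not be visible immediately; retry Get with back-off.
	owner, name := created.GetOwner().GetLogin(), created.GetName()
	delay := time.Second
	for i := 0; i < 5; i++ {
		if _, _, err = client.Repositories.Get(ctx, owner, name); err == nil {
			log.Printf("repository %s/%s is ready", owner, name)
			return
		}
		time.Sleep(delay)
		delay *= 2
	}
	log.Fatalf("repository not visible yet: %v", err)
}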
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#get-a-repository +func (s *RepositoriesService) Get(ctx context.Context, owner, repo string) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when the license support fully launches + // https://docs.github.com/en/rest/licenses/#get-a-repositorys-license + acceptHeaders := []string{ + mediaTypeCodesOfConductPreview, + mediaTypeTopicsPreview, + mediaTypeRepositoryTemplatePreview, + mediaTypeRepositoryVisibilityPreview, + } + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// GetCodeOfConduct gets the contents of a repository's code of conduct. +// Note that https://docs.github.com/en/rest/codes-of-conduct#about-the-codes-of-conduct-api +// says to use the GET /repos/{owner}/{repo} endpoint. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#update-a-repository +func (s *RepositoriesService) GetCodeOfConduct(ctx context.Context, owner, repo string) (*CodeOfConduct, *Response, error) { + u := fmt.Sprintf("repos/%v/%v", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeCodesOfConductPreview) + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r.GetCodeOfConduct(), resp, nil +} + +// GetByID fetches a repository. +// +// Note: GetByID uses the undocumented GitHub API endpoint /repositories/:id. +func (s *RepositoriesService) GetByID(ctx context.Context, id int64) (*Repository, *Response, error) { + u := fmt.Sprintf("repositories/%d", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// Edit updates a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#update-a-repository +func (s *RepositoriesService) Edit(ctx context.Context, owner, repo string, repository *Repository) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v", owner, repo) + req, err := s.client.NewRequest("PATCH", u, repository) + if err != nil { + return nil, nil, err + } + + acceptHeaders := []string{mediaTypeRepositoryTemplatePreview, mediaTypeRepositoryVisibilityPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// Delete a repository. 
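// Illustrative usage sketch, not part of the vendored repos.go: updating
// merge settings with RepositoriesService.Edit. Only the fields set on the
// Repository value end up in the PATCH body. Owner, repo, token handling and
// the module path (v56) are placeholder assumptions.

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	updated, _, err := client.Repositories.Edit(ctx, "my-org", "my-repo", &github.Repository{
		AllowSquashMerge:       github.Bool(true),
		AllowMergeCommit:       github.Bool(false),
		DeleteBranchOnMerge:    github.Bool(true),
		SquashMergeCommitTitle: github.String("PR_TITLE"), // see the field comment above
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("default branch is now %s", updated.GetDefaultBranch())
}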
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#delete-a-repository +func (s *RepositoriesService) Delete(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Contributor represents a repository contributor +type Contributor struct { + Login *string `json:"login,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + GravatarID *string `json:"gravatar_id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + FollowersURL *string `json:"followers_url,omitempty"` + FollowingURL *string `json:"following_url,omitempty"` + GistsURL *string `json:"gists_url,omitempty"` + StarredURL *string `json:"starred_url,omitempty"` + SubscriptionsURL *string `json:"subscriptions_url,omitempty"` + OrganizationsURL *string `json:"organizations_url,omitempty"` + ReposURL *string `json:"repos_url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + ReceivedEventsURL *string `json:"received_events_url,omitempty"` + Type *string `json:"type,omitempty"` + SiteAdmin *bool `json:"site_admin,omitempty"` + Contributions *int `json:"contributions,omitempty"` + Name *string `json:"name,omitempty"` + Email *string `json:"email,omitempty"` +} + +// ListContributorsOptions specifies the optional parameters to the +// RepositoriesService.ListContributors method. +type ListContributorsOptions struct { + // Include anonymous contributors in results or not + Anon string `url:"anon,omitempty"` + + ListOptions +} + +// GetVulnerabilityAlerts checks if vulnerability alerts are enabled for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository +func (s *RepositoriesService) GetVulnerabilityAlerts(ctx context.Context, owner, repository string) (bool, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) + + resp, err := s.client.Do(ctx, req, nil) + vulnerabilityAlertsEnabled, err := parseBoolResponse(err) + return vulnerabilityAlertsEnabled, resp, err +} + +// EnableVulnerabilityAlerts enables vulnerability alerts and the dependency graph for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#enable-vulnerability-alerts +func (s *RepositoriesService) EnableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview) + + return s.client.Do(ctx, req, nil) +} + +// DisableVulnerabilityAlerts disables vulnerability alerts and the dependency graph for a repository. 
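// Illustrative usage sketch, not part of the vendored repos.go: the
// check-then-enable flow for vulnerability alerts. GetVulnerabilityAlerts
// derives its bool from the response status via parseBoolResponse (see above).
// Owner, repo, token and module path (v56) are placeholder assumptions.

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	enabled, _, err := client.Repositories.GetVulnerabilityAlerts(ctx, "my-org", "my-repo")
	if err != nil {
		log.Fatal(err)
	}
	if !enabled {
		if _, err := client.Repositories.EnableVulnerabilityAlerts(ctx, "my-org", "my-repo"); err != nil {
			log.Fatal(err)
		}
		log.Println("vulnerability alerts enabled")
	}
}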
+//
+// GitHub API docs: https://docs.github.com/en/rest/repos/repos#disable-vulnerability-alerts
+func (s *RepositoriesService) DisableVulnerabilityAlerts(ctx context.Context, owner, repository string) (*Response, error) {
+	u := fmt.Sprintf("repos/%v/%v/vulnerability-alerts", owner, repository)
+
+	req, err := s.client.NewRequest("DELETE", u, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	// TODO: remove custom Accept header when this API fully launches
+	req.Header.Set("Accept", mediaTypeRequiredVulnerabilityAlertsPreview)
+
+	return s.client.Do(ctx, req, nil)
+}
+
+// GetAutomatedSecurityFixes checks if the automated security fixes for a repository are enabled.
+//
+// GitHub API docs: https://docs.github.com/en/rest/repos/repos?apiVersion=2022-11-28#check-if-automated-security-fixes-are-enabled-for-a-repository
+func (s *RepositoriesService) GetAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*AutomatedSecurityFixes, *Response, error) {
+	u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository)
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	p := new(AutomatedSecurityFixes)
+	resp, err := s.client.Do(ctx, req, p)
+	if err != nil {
+		return nil, resp, err
+	}
+	return p, resp, nil
+}
+
+// EnableAutomatedSecurityFixes enables the automated security fixes for a repository.
+//
+// GitHub API docs: https://docs.github.com/en/rest/repos/repos#enable-automated-security-fixes
+func (s *RepositoriesService) EnableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) {
+	u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository)
+
+	req, err := s.client.NewRequest("PUT", u, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	return s.client.Do(ctx, req, nil)
+}
+
+// DisableAutomatedSecurityFixes disables the automated security fixes for a repository.
+//
+// GitHub API docs: https://docs.github.com/en/rest/repos/repos#disable-automated-security-fixes
+func (s *RepositoriesService) DisableAutomatedSecurityFixes(ctx context.Context, owner, repository string) (*Response, error) {
+	u := fmt.Sprintf("repos/%v/%v/automated-security-fixes", owner, repository)
+
+	req, err := s.client.NewRequest("DELETE", u, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	return s.client.Do(ctx, req, nil)
+}
+
+// ListContributors lists contributors for a repository.
+//
+// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repository-contributors
+func (s *RepositoriesService) ListContributors(ctx context.Context, owner string, repository string, opts *ListContributorsOptions) ([]*Contributor, *Response, error) {
+	u := fmt.Sprintf("repos/%v/%v/contributors", owner, repository)
+	u, err := addOptions(u, opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	req, err := s.client.NewRequest("GET", u, nil)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var contributor []*Contributor
+	resp, err := s.client.Do(ctx, req, &contributor)
+	if err != nil {
+		return nil, resp, err
+	}
+
+	return contributor, resp, nil
+}
+
+// ListLanguages lists languages for the specified repository. The returned map
+// specifies the languages and the number of bytes of code written in that
+// language.
For example: +// +// { +// "C": 78769, +// "Python": 7769 +// } +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repository-languages +func (s *RepositoriesService) ListLanguages(ctx context.Context, owner string, repo string) (map[string]int, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/languages", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + languages := make(map[string]int) + resp, err := s.client.Do(ctx, req, &languages) + if err != nil { + return nil, resp, err + } + + return languages, resp, nil +} + +// ListTeams lists the teams for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repository-teams +func (s *RepositoriesService) ListTeams(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/teams", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// RepositoryTag represents a repository tag. +type RepositoryTag struct { + Name *string `json:"name,omitempty"` + Commit *Commit `json:"commit,omitempty"` + ZipballURL *string `json:"zipball_url,omitempty"` + TarballURL *string `json:"tarball_url,omitempty"` +} + +// ListTags lists tags for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-repository-tags +func (s *RepositoriesService) ListTags(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*RepositoryTag, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/tags", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var tags []*RepositoryTag + resp, err := s.client.Do(ctx, req, &tags) + if err != nil { + return nil, resp, err + } + + return tags, resp, nil +} + +// Branch represents a repository branch +type Branch struct { + Name *string `json:"name,omitempty"` + Commit *RepositoryCommit `json:"commit,omitempty"` + Protected *bool `json:"protected,omitempty"` +} + +// Protection represents a repository branch's protection. +type Protection struct { + RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` + RequiredPullRequestReviews *PullRequestReviewsEnforcement `json:"required_pull_request_reviews"` + EnforceAdmins *AdminEnforcement `json:"enforce_admins"` + Restrictions *BranchRestrictions `json:"restrictions"` + RequireLinearHistory *RequireLinearHistory `json:"required_linear_history"` + AllowForcePushes *AllowForcePushes `json:"allow_force_pushes"` + AllowDeletions *AllowDeletions `json:"allow_deletions"` + RequiredConversationResolution *RequiredConversationResolution `json:"required_conversation_resolution"` + BlockCreations *BlockCreations `json:"block_creations,omitempty"` + LockBranch *LockBranch `json:"lock_branch,omitempty"` + AllowForkSyncing *AllowForkSyncing `json:"allow_fork_syncing,omitempty"` + RequiredSignatures *SignaturesProtectedBranch `json:"required_signatures,omitempty"` + URL *string `json:"url,omitempty"` +} + +// BlockCreations represents whether users can push changes that create branches. 
If this is true, this +// setting blocks pushes that create new branches, unless the push is initiated by a user, team, or app +// which has the ability to push. +type BlockCreations struct { + Enabled *bool `json:"enabled,omitempty"` +} + +// LockBranch represents if the branch is marked as read-only. If this is true, users will not be able to push to the branch. +type LockBranch struct { + Enabled *bool `json:"enabled,omitempty"` +} + +// AllowForkSyncing represents whether users can pull changes from upstream when the branch is locked. +type AllowForkSyncing struct { + Enabled *bool `json:"enabled,omitempty"` +} + +// BranchProtectionRule represents the rule applied to a repositories branch. +type BranchProtectionRule struct { + ID *int64 `json:"id,omitempty"` + RepositoryID *int64 `json:"repository_id,omitempty"` + Name *string `json:"name,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + PullRequestReviewsEnforcementLevel *string `json:"pull_request_reviews_enforcement_level,omitempty"` + RequiredApprovingReviewCount *int `json:"required_approving_review_count,omitempty"` + DismissStaleReviewsOnPush *bool `json:"dismiss_stale_reviews_on_push,omitempty"` + AuthorizedDismissalActorsOnly *bool `json:"authorized_dismissal_actors_only,omitempty"` + IgnoreApprovalsFromContributors *bool `json:"ignore_approvals_from_contributors,omitempty"` + RequireCodeOwnerReview *bool `json:"require_code_owner_review,omitempty"` + RequiredStatusChecks []string `json:"required_status_checks,omitempty"` + RequiredStatusChecksEnforcementLevel *string `json:"required_status_checks_enforcement_level,omitempty"` + StrictRequiredStatusChecksPolicy *bool `json:"strict_required_status_checks_policy,omitempty"` + SignatureRequirementEnforcementLevel *string `json:"signature_requirement_enforcement_level,omitempty"` + LinearHistoryRequirementEnforcementLevel *string `json:"linear_history_requirement_enforcement_level,omitempty"` + AdminEnforced *bool `json:"admin_enforced,omitempty"` + AllowForcePushesEnforcementLevel *string `json:"allow_force_pushes_enforcement_level,omitempty"` + AllowDeletionsEnforcementLevel *string `json:"allow_deletions_enforcement_level,omitempty"` + MergeQueueEnforcementLevel *string `json:"merge_queue_enforcement_level,omitempty"` + RequiredDeploymentsEnforcementLevel *string `json:"required_deployments_enforcement_level,omitempty"` + RequiredConversationResolutionLevel *string `json:"required_conversation_resolution_level,omitempty"` + AuthorizedActorsOnly *bool `json:"authorized_actors_only,omitempty"` + AuthorizedActorNames []string `json:"authorized_actor_names,omitempty"` +} + +// ProtectionChanges represents the changes to the rule if the BranchProtection was edited. 
+type ProtectionChanges struct { + AdminEnforced *AdminEnforcedChanges `json:"admin_enforced,omitempty"` + AllowDeletionsEnforcementLevel *AllowDeletionsEnforcementLevelChanges `json:"allow_deletions_enforcement_level,omitempty"` + AuthorizedActorNames *AuthorizedActorNames `json:"authorized_actor_names,omitempty"` + AuthorizedActorsOnly *AuthorizedActorsOnly `json:"authorized_actors_only,omitempty"` + AuthorizedDismissalActorsOnly *AuthorizedDismissalActorsOnlyChanges `json:"authorized_dismissal_actors_only,omitempty"` + CreateProtected *CreateProtectedChanges `json:"create_protected,omitempty"` + DismissStaleReviewsOnPush *DismissStaleReviewsOnPushChanges `json:"dismiss_stale_reviews_on_push,omitempty"` + LinearHistoryRequirementEnforcementLevel *LinearHistoryRequirementEnforcementLevelChanges `json:"linear_history_requirement_enforcement_level,omitempty"` + PullRequestReviewsEnforcementLevel *PullRequestReviewsEnforcementLevelChanges `json:"pull_request_reviews_enforcement_level,omitempty"` + RequireCodeOwnerReview *RequireCodeOwnerReviewChanges `json:"require_code_owner_review,omitempty"` + RequiredConversationResolutionLevel *RequiredConversationResolutionLevelChanges `json:"required_conversation_resolution_level,omitempty"` + RequiredDeploymentsEnforcementLevel *RequiredDeploymentsEnforcementLevelChanges `json:"required_deployments_enforcement_level,omitempty"` + RequiredStatusChecks *RequiredStatusChecksChanges `json:"required_status_checks,omitempty"` + RequiredStatusChecksEnforcementLevel *RequiredStatusChecksEnforcementLevelChanges `json:"required_status_checks_enforcement_level,omitempty"` + SignatureRequirementEnforcementLevel *SignatureRequirementEnforcementLevelChanges `json:"signature_requirement_enforcement_level,omitempty"` +} + +// AdminEnforcedChanges represents the changes made to the AdminEnforced policy. +type AdminEnforcedChanges struct { + From *bool `json:"from,omitempty"` +} + +// AllowDeletionsEnforcementLevelChanges represents the changes made to the AllowDeletionsEnforcementLevel policy. +type AllowDeletionsEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// AuthorizedActorNames represents who are authorized to edit the branch protection rules. +type AuthorizedActorNames struct { + From []string `json:"from,omitempty"` +} + +// AuthorizedActorsOnly represents if the branch rule can be edited by authorized actors only. +type AuthorizedActorsOnly struct { + From *bool `json:"from,omitempty"` +} + +// AuthorizedDismissalActorsOnlyChanges represents the changes made to the AuthorizedDismissalActorsOnly policy. +type AuthorizedDismissalActorsOnlyChanges struct { + From *bool `json:"from,omitempty"` +} + +// CreateProtectedChanges represents the changes made to the CreateProtected policy. +type CreateProtectedChanges struct { + From *bool `json:"from,omitempty"` +} + +// DismissStaleReviewsOnPushChanges represents the changes made to the DismissStaleReviewsOnPushChanges policy. +type DismissStaleReviewsOnPushChanges struct { + From *bool `json:"from,omitempty"` +} + +// LinearHistoryRequirementEnforcementLevelChanges represents the changes made to the LinearHistoryRequirementEnforcementLevel policy. +type LinearHistoryRequirementEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// PullRequestReviewsEnforcementLevelChanges represents the changes made to the PullRequestReviewsEnforcementLevel policy. 
+type PullRequestReviewsEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// RequireCodeOwnerReviewChanges represents the changes made to the RequireCodeOwnerReview policy. +type RequireCodeOwnerReviewChanges struct { + From *bool `json:"from,omitempty"` +} + +// RequiredConversationResolutionLevelChanges represents the changes made to the RequiredConversationResolutionLevel policy. +type RequiredConversationResolutionLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// RequiredDeploymentsEnforcementLevelChanges represents the changes made to the RequiredDeploymentsEnforcementLevel policy. +type RequiredDeploymentsEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// RequiredStatusChecksChanges represents the changes made to the RequiredStatusChecks policy. +type RequiredStatusChecksChanges struct { + From []string `json:"from,omitempty"` +} + +// RequiredStatusChecksEnforcementLevelChanges represents the changes made to the RequiredStatusChecksEnforcementLevel policy. +type RequiredStatusChecksEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// SignatureRequirementEnforcementLevelChanges represents the changes made to the SignatureRequirementEnforcementLevel policy. +type SignatureRequirementEnforcementLevelChanges struct { + From *string `json:"from,omitempty"` +} + +// ProtectionRequest represents a request to create/edit a branch's protection. +type ProtectionRequest struct { + RequiredStatusChecks *RequiredStatusChecks `json:"required_status_checks"` + RequiredPullRequestReviews *PullRequestReviewsEnforcementRequest `json:"required_pull_request_reviews"` + EnforceAdmins bool `json:"enforce_admins"` + Restrictions *BranchRestrictionsRequest `json:"restrictions"` + // Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. + RequireLinearHistory *bool `json:"required_linear_history,omitempty"` + // Permits force pushes to the protected branch by anyone with write access to the repository. + AllowForcePushes *bool `json:"allow_force_pushes,omitempty"` + // Allows deletion of the protected branch by anyone with write access to the repository. + AllowDeletions *bool `json:"allow_deletions,omitempty"` + // RequiredConversationResolution, if set to true, requires all comments + // on the pull request to be resolved before it can be merged to a protected branch. + RequiredConversationResolution *bool `json:"required_conversation_resolution,omitempty"` + // BlockCreations, if set to true, will cause the restrictions setting to also block pushes + // which create new branches, unless initiated by a user, team, app with the ability to push. + BlockCreations *bool `json:"block_creations,omitempty"` + // LockBranch, if set to true, will prevent users from pushing to the branch. + LockBranch *bool `json:"lock_branch,omitempty"` + // AllowForkSyncing, if set to true, will allow users to pull changes from upstream + // when the branch is locked. + AllowForkSyncing *bool `json:"allow_fork_syncing,omitempty"` +} + +// RequiredStatusChecks represents the protection status of a individual branch. +type RequiredStatusChecks struct { + // Require branches to be up to date before merging. (Required.) + Strict bool `json:"strict"` + // The list of status checks to require in order to merge into this + // branch. (Deprecated. Note: only one of Contexts/Checks can be populated, + // but at least one must be populated). 
+ Contexts []string `json:"contexts,omitempty"` + // The list of status checks to require in order to merge into this + // branch. + Checks []*RequiredStatusCheck `json:"checks"` + ContextsURL *string `json:"contexts_url,omitempty"` + URL *string `json:"url,omitempty"` +} + +// RequiredStatusChecksRequest represents a request to edit a protected branch's status checks. +type RequiredStatusChecksRequest struct { + Strict *bool `json:"strict,omitempty"` + // Note: if both Contexts and Checks are populated, + // the GitHub API will only use Checks. + Contexts []string `json:"contexts,omitempty"` + Checks []*RequiredStatusCheck `json:"checks,omitempty"` +} + +// RequiredStatusCheck represents a status check of a protected branch. +type RequiredStatusCheck struct { + // The name of the required check. + Context string `json:"context"` + // The ID of the GitHub App that must provide this check. + // Omit this field to automatically select the GitHub App + // that has recently provided this check, + // or any app if it was not set by a GitHub App. + // Pass -1 to explicitly allow any app to set the status. + AppID *int64 `json:"app_id,omitempty"` +} + +// PullRequestReviewsEnforcement represents the pull request reviews enforcement of a protected branch. +type PullRequestReviewsEnforcement struct { + // Allow specific users, teams, or apps to bypass pull request requirements. + BypassPullRequestAllowances *BypassPullRequestAllowances `json:"bypass_pull_request_allowances,omitempty"` + // Specifies which users, teams and apps can dismiss pull request reviews. + DismissalRestrictions *DismissalRestrictions `json:"dismissal_restrictions,omitempty"` + // Specifies if approved reviews are dismissed automatically, when a new commit is pushed. + DismissStaleReviews bool `json:"dismiss_stale_reviews"` + // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. + RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. + RequireLastPushApproval bool `json:"require_last_push_approval"` +} + +// PullRequestReviewsEnforcementRequest represents request to set the pull request review +// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcement above +// because the request structure is different from the response structure. +type PullRequestReviewsEnforcementRequest struct { + // Allow specific users, teams, or apps to bypass pull request requirements. + BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` + // Specifies which users, teams and apps should be allowed to dismiss pull request reviews. + // User, team and app dismissal restrictions are only available for + // organization-owned repositories. Must be nil for personal repositories. + DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` + // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. 
(Required) + DismissStaleReviews bool `json:"dismiss_stale_reviews"` + // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. + RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. + RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` +} + +// PullRequestReviewsEnforcementUpdate represents request to patch the pull request review +// enforcement of a protected branch. It is separate from PullRequestReviewsEnforcementRequest above +// because the patch request does not require all fields to be initialized. +type PullRequestReviewsEnforcementUpdate struct { + // Allow specific users, teams, or apps to bypass pull request requirements. + BypassPullRequestAllowancesRequest *BypassPullRequestAllowancesRequest `json:"bypass_pull_request_allowances,omitempty"` + // Specifies which users, teams and apps can dismiss pull request reviews. Can be omitted. + DismissalRestrictionsRequest *DismissalRestrictionsRequest `json:"dismissal_restrictions,omitempty"` + // Specifies if approved reviews can be dismissed automatically, when a new commit is pushed. Can be omitted. + DismissStaleReviews *bool `json:"dismiss_stale_reviews,omitempty"` + // RequireCodeOwnerReviews specifies if merging pull requests is blocked until code owners have reviewed. + RequireCodeOwnerReviews *bool `json:"require_code_owner_reviews,omitempty"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1 - 6 or 0 to not require reviewers. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + // RequireLastPushApproval specifies whether the last pusher to a pull request branch can approve it. + RequireLastPushApproval *bool `json:"require_last_push_approval,omitempty"` +} + +// RequireLinearHistory represents the configuration to enforce branches with no merge commit. +type RequireLinearHistory struct { + Enabled bool `json:"enabled"` +} + +// AllowDeletions represents the configuration to accept deletion of protected branches. +type AllowDeletions struct { + Enabled bool `json:"enabled"` +} + +// AllowForcePushes represents the configuration to accept forced pushes on protected branches. +type AllowForcePushes struct { + Enabled bool `json:"enabled"` +} + +// RequiredConversationResolution, if enabled, requires all comments on the pull request to be resolved before it can be merged to a protected branch. +type RequiredConversationResolution struct { + Enabled bool `json:"enabled"` +} + +// AdminEnforcement represents the configuration to enforce required status checks for repository administrators. +type AdminEnforcement struct { + URL *string `json:"url,omitempty"` + Enabled bool `json:"enabled"` +} + +// BranchRestrictions represents the restriction that only certain users or +// teams may push to a branch. +type BranchRestrictions struct { + // The list of user logins with push access. + Users []*User `json:"users"` + // The list of team slugs with push access. + Teams []*Team `json:"teams"` + // The list of app slugs with push access. 
+	Apps []*App `json:"apps"`
+}
+
+// BranchRestrictionsRequest represents the request to create/edit the
+// restriction that only certain users or teams may push to a branch. It is
+// separate from BranchRestrictions above because the request structure is
+// different from the response structure.
+type BranchRestrictionsRequest struct {
+	// The list of user logins with push access. (Required; use []string{} instead of nil for empty list.)
+	Users []string `json:"users"`
+	// The list of team slugs with push access. (Required; use []string{} instead of nil for empty list.)
+	Teams []string `json:"teams"`
+	// The list of app slugs with push access.
+	Apps []string `json:"apps"`
+}
+
+// BypassPullRequestAllowances represents the people, teams, or apps who are allowed to bypass required pull requests.
+type BypassPullRequestAllowances struct {
+	// The list of users allowed to bypass pull request requirements.
+	Users []*User `json:"users"`
+	// The list of teams allowed to bypass pull request requirements.
+	Teams []*Team `json:"teams"`
+	// The list of apps allowed to bypass pull request requirements.
+	Apps []*App `json:"apps"`
+}
+
+// BypassPullRequestAllowancesRequest represents the people, teams, or apps who are
+// allowed to bypass required pull requests.
+// It is separate from BypassPullRequestAllowances above because the request structure is
+// different from the response structure.
+type BypassPullRequestAllowancesRequest struct {
+	// The list of user logins allowed to bypass pull request requirements.
+	Users []string `json:"users"`
+	// The list of team slugs allowed to bypass pull request requirements.
+	Teams []string `json:"teams"`
+	// The list of app slugs allowed to bypass pull request requirements.
+	Apps []string `json:"apps"`
+}
+
+// DismissalRestrictions specifies which users and teams can dismiss pull request reviews.
+type DismissalRestrictions struct {
+	// The list of users who can dismiss pull request reviews.
+	Users []*User `json:"users"`
+	// The list of teams which can dismiss pull request reviews.
+	Teams []*Team `json:"teams"`
+	// The list of apps which can dismiss pull request reviews.
+	Apps []*App `json:"apps"`
+}
+
+// DismissalRestrictionsRequest represents the request to create/edit the
+// restriction to allow only specific users, teams or apps to dismiss pull request reviews. It is
+// separate from DismissalRestrictions above because the request structure is
+// different from the response structure.
+// Note: Both Users and Teams must be nil, or both must be non-nil.
+type DismissalRestrictionsRequest struct {
+	// The list of user logins who can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.)
+	Users *[]string `json:"users,omitempty"`
+	// The list of team slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.)
+	Teams *[]string `json:"teams,omitempty"`
+	// The list of app slugs which can dismiss pull request reviews. (Required; use nil to disable dismissal_restrictions or &[]string{} otherwise.)
+	Apps *[]string `json:"apps,omitempty"`
+}
+
+// SignaturesProtectedBranch represents the protection status of an individual branch.
+type SignaturesProtectedBranch struct {
+	URL *string `json:"url,omitempty"`
+	// Commits pushed to matching branches must have verified signatures.
+	Enabled *bool `json:"enabled,omitempty"`
+}
+
+// AutomatedSecurityFixes represents the status of automated security fixes for a repository.
+type AutomatedSecurityFixes struct { + Enabled *bool `json:"enabled"` + Paused *bool `json:"paused"` +} + +// ListBranches lists branches for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branches#list-branches +func (s *RepositoriesService) ListBranches(ctx context.Context, owner string, repo string, opts *BranchListOptions) ([]*Branch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var branches []*Branch + resp, err := s.client.Do(ctx, req, &branches) + if err != nil { + return nil, resp, err + } + + return branches, resp, nil +} + +// GetBranch gets the specified branch for a repository. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branches#get-a-branch +func (s *RepositoriesService) GetBranch(ctx context.Context, owner, repo, branch string, maxRedirects int) (*Branch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v", owner, repo, url.PathEscape(branch)) + + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + b := new(Branch) + err = json.NewDecoder(resp.Body).Decode(b) + return b, newResponse(resp), err +} + +// renameBranchRequest represents a request to rename a branch. +type renameBranchRequest struct { + NewName string `json:"new_name"` +} + +// RenameBranch renames a branch in a repository. +// +// To rename a non-default branch: Users must have push access. GitHub Apps must have the `contents:write` repository permission. +// To rename the default branch: Users must have admin or owner permissions. GitHub Apps must have the `administration:write` repository permission. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branches#rename-a-branch +func (s *RepositoriesService) RenameBranch(ctx context.Context, owner, repo, branch, newName string) (*Branch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/rename", owner, repo, url.PathEscape(branch)) + r := &renameBranchRequest{NewName: newName} + req, err := s.client.NewRequest("POST", u, r) + if err != nil { + return nil, nil, err + } + + b := new(Branch) + resp, err := s.client.Do(ctx, req, b) + if err != nil { + return nil, resp, err + } + + return b, resp, nil +} + +// GetBranchProtection gets the protection of a given branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
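// Illustrative usage sketch, not part of the vendored repos.go: listing only
// protected branches with BranchListOptions and then fetching one of them,
// following a bounded number of redirects as GetBranch above allows. Owner,
// repo, branch name and module path (v56) are placeholder assumptions.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil)

	branches, _, err := client.Repositories.ListBranches(ctx, "my-org", "my-repo", &github.BranchListOptions{
		Protected:   github.Bool(true), // nil would return all branches
		ListOptions: github.ListOptions{PerPage: 100},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range branches {
		fmt.Println(b.GetName())
	}

	// GetBranch escapes the branch name and follows up to maxRedirects redirects.
	mainBranch, _, err := client.Repositories.GetBranch(ctx, "my-org", "my-repo", "main", 3)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("HEAD of main:", mainBranch.GetCommit().GetSHA())
}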
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-branch-protection +func (s *RepositoriesService) GetBranchProtection(ctx context.Context, owner, repo, branch string) (*Protection, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + p := new(Protection) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + if isBranchNotProtected(err) { + err = ErrBranchNotProtected + } + return nil, resp, err + } + + return p, resp, nil +} + +// GetRequiredStatusChecks gets the required status checks for a given protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-status-checks-protection +func (s *RepositoriesService) GetRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*RequiredStatusChecks, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + p := new(RequiredStatusChecks) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + if isBranchNotProtected(err) { + err = ErrBranchNotProtected + } + return nil, resp, err + } + + return p, resp, nil +} + +// ListRequiredStatusChecksContexts lists the required status checks contexts for a given protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-all-status-check-contexts +func (s *RepositoriesService) ListRequiredStatusChecksContexts(ctx context.Context, owner, repo, branch string) (contexts []string, resp *Response, err error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks/contexts", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + resp, err = s.client.Do(ctx, req, &contexts) + if err != nil { + if isBranchNotProtected(err) { + err = ErrBranchNotProtected + } + return nil, resp, err + } + + return contexts, resp, nil +} + +// UpdateBranchProtection updates the protection of a given branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#update-branch-protection +func (s *RepositoriesService) UpdateBranchProtection(ctx context.Context, owner, repo, branch string, preq *ProtectionRequest) (*Protection, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PUT", u, preq) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + p := new(Protection) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// RemoveBranchProtection removes the protection of a given branch. 
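// Illustrative usage sketch, not part of the vendored repos.go: applying a
// ProtectionRequest with UpdateBranchProtection and checking for the
// ErrBranchNotProtected sentinel surfaced by GetBranchProtection above.
// Owner, repo, check name, token and module path (v56) are assumptions.

package main

import (
	"context"
	"errors"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo, branch := "my-org", "my-repo", "main"

	preq := &github.ProtectionRequest{
		RequiredStatusChecks: &github.RequiredStatusChecks{
			Strict: true,
			Checks: []*github.RequiredStatusCheck{{Context: "ci/test"}}, // use Checks or Contexts, not both
		},
		RequiredPullRequestReviews: &github.PullRequestReviewsEnforcementRequest{
			DismissStaleReviews:          true,
			RequiredApprovingReviewCount: 1,
		},
		EnforceAdmins:        true,
		Restrictions:         nil, // no push restrictions
		RequireLinearHistory: github.Bool(true),
	}
	if _, _, err := client.Repositories.UpdateBranchProtection(ctx, owner, repo, branch, preq); err != nil {
		log.Fatal(err)
	}

	p, _, err := client.Repositories.GetBranchProtection(ctx, owner, repo, branch)
	if errors.Is(err, github.ErrBranchNotProtected) {
		log.Fatal("branch is not protected")
	} else if err != nil {
		log.Fatal(err)
	}
	log.Printf("strict status checks: %v", p.RequiredStatusChecks.Strict)
}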
+// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#delete-branch-protection +func (s *RepositoriesService) RemoveBranchProtection(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetSignaturesProtectedBranch gets required signatures of protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-commit-signature-protection +func (s *RepositoriesService) GetSignaturesProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) + + p := new(SignaturesProtectedBranch) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + +// RequireSignaturesOnProtectedBranch makes signed commits required on a protected branch. +// It requires admin access and branch protection to be enabled. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#create-commit-signature-protection +func (s *RepositoriesService) RequireSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*SignaturesProtectedBranch, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) + + r := new(SignaturesProtectedBranch) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// OptionalSignaturesOnProtectedBranch removes required signed commits on a given branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#delete-commit-signature-protection +func (s *RepositoriesService) OptionalSignaturesOnProtectedBranch(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_signatures", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeSignaturePreview) + + return s.client.Do(ctx, req, nil) +} + +// UpdateRequiredStatusChecks updates the required status checks for a given protected branch. +// +// Note: the branch name is URL path escaped for you. 
See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#update-status-check-protection +func (s *RepositoriesService) UpdateRequiredStatusChecks(ctx context.Context, owner, repo, branch string, sreq *RequiredStatusChecksRequest) (*RequiredStatusChecks, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PATCH", u, sreq) + if err != nil { + return nil, nil, err + } + + sc := new(RequiredStatusChecks) + resp, err := s.client.Do(ctx, req, sc) + if err != nil { + return nil, resp, err + } + + return sc, resp, nil +} + +// RemoveRequiredStatusChecks removes the required status checks for a given protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#remove-status-check-protection +func (s *RepositoriesService) RemoveRequiredStatusChecks(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// License gets the contents of a repository's license if one is detected. +// +// GitHub API docs: https://docs.github.com/en/rest/licenses#get-the-license-for-a-repository +func (s *RepositoriesService) License(ctx context.Context, owner, repo string) (*RepositoryLicense, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/license", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + r := &RepositoryLicense{} + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// GetPullRequestReviewEnforcement gets pull request review enforcement of a protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-pull-request-review-protection +func (s *RepositoriesService) GetPullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + r := new(PullRequestReviewsEnforcement) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// UpdatePullRequestReviewEnforcement patches pull request review enforcement of a protected branch. +// It requires admin access and branch protection to be enabled. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
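// Illustrative usage sketch, not part of the vendored repos.go: patching only
// the status-check settings with UpdateRequiredStatusChecks, using Checks with
// an explicit AppID of -1 to allow any app, as the RequiredStatusCheck comment
// above describes. Names, values and module path (v56) are assumptions.

package main

import (
	"context"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))

	sreq := &github.RequiredStatusChecksRequest{
		Strict: github.Bool(true),
		// If both Contexts and Checks are set, the API only honours Checks.
		Checks: []*github.RequiredStatusCheck{
			{Context: "ci/build", AppID: github.Int64(-1)},
		},
	}
	sc, _, err := client.Repositories.UpdateRequiredStatusChecks(ctx, "my-org", "my-repo", "main", sreq)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("now requiring %d check(s)", len(sc.Checks))
}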
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#update-pull-request-review-protection +func (s *RepositoriesService) UpdatePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string, patch *PullRequestReviewsEnforcementUpdate) (*PullRequestReviewsEnforcement, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PATCH", u, patch) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + r := new(PullRequestReviewsEnforcement) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// DisableDismissalRestrictions disables dismissal restrictions of a protected branch. +// It requires admin access and branch protection to be enabled. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#update-pull-request-review-protection +func (s *RepositoriesService) DisableDismissalRestrictions(ctx context.Context, owner, repo, branch string) (*PullRequestReviewsEnforcement, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) + + data := new(struct { + DismissalRestrictionsRequest `json:"dismissal_restrictions"` + }) + + req, err := s.client.NewRequest("PATCH", u, data) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) + + r := new(PullRequestReviewsEnforcement) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// RemovePullRequestReviewEnforcement removes pull request enforcement of a protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#delete-pull-request-review-protection +func (s *RepositoriesService) RemovePullRequestReviewEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_pull_request_reviews", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetAdminEnforcement gets admin enforcement information of a protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-admin-branch-protection +func (s *RepositoriesService) GetAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + r := new(AdminEnforcement) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// AddAdminEnforcement adds admin enforcement to a protected branch. +// It requires admin access and branch protection to be enabled. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#set-admin-branch-protection +func (s *RepositoriesService) AddAdminEnforcement(ctx context.Context, owner, repo, branch string) (*AdminEnforcement, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + r := new(AdminEnforcement) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// RemoveAdminEnforcement removes admin enforcement from a protected branch. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#delete-admin-branch-protection +func (s *RepositoriesService) RemoveAdminEnforcement(ctx context.Context, owner, repo, branch string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/enforce_admins", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// repositoryTopics represents a collection of repository topics. +type repositoryTopics struct { + Names []string `json:"names"` +} + +// ListAllTopics lists topics for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#get-all-repository-topics +func (s *RepositoriesService) ListAllTopics(ctx context.Context, owner, repo string) ([]string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeTopicsPreview) + + topics := new(repositoryTopics) + resp, err := s.client.Do(ctx, req, topics) + if err != nil { + return nil, resp, err + } + + return topics.Names, resp, nil +} + +// ReplaceAllTopics replaces all repository topics. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#replace-all-repository-topics +func (s *RepositoriesService) ReplaceAllTopics(ctx context.Context, owner, repo string, topics []string) ([]string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/topics", owner, repo) + t := &repositoryTopics{ + Names: topics, + } + if t.Names == nil { + t.Names = []string{} + } + req, err := s.client.NewRequest("PUT", u, t) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeTopicsPreview) + + t = new(repositoryTopics) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t.Names, resp, nil +} + +// ListApps lists the GitHub apps that have push access to a given protected branch. +// It requires the GitHub apps to have `write` access to the `content` permission. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch +// +// Deprecated: Please use ListAppRestrictions instead. +func (s *RepositoriesService) ListApps(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var apps []*App + resp, err := s.client.Do(ctx, req, &apps) + if err != nil { + return nil, resp, err + } + + return apps, resp, nil +} + +// ListAppRestrictions lists the GitHub apps that have push access to a given protected branch. +// It requires the GitHub apps to have `write` access to the `content` permission. +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-apps-with-access-to-the-protected-branch +// +// Note: This is a wrapper around ListApps so a naming convention with ListUserRestrictions and ListTeamRestrictions is preserved. +func (s *RepositoriesService) ListAppRestrictions(ctx context.Context, owner, repo, branch string) ([]*App, *Response, error) { + return s.ListApps(ctx, owner, repo, branch) +} + +// ReplaceAppRestrictions replaces the apps that have push access to a given protected branch. +// It removes all apps that previously had push access and grants push access to the new list of apps. +// It requires the GitHub apps to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#set-app-access-restrictions +func (s *RepositoriesService) ReplaceAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PUT", u, apps) + if err != nil { + return nil, nil, err + } + + var newApps []*App + resp, err := s.client.Do(ctx, req, &newApps) + if err != nil { + return nil, resp, err + } + + return newApps, resp, nil +} + +// AddAppRestrictions grants the specified apps push access to a given protected branch. +// It requires the GitHub apps to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#add-app-access-restrictions +func (s *RepositoriesService) AddAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("POST", u, apps) + if err != nil { + return nil, nil, err + } + + var newApps []*App + resp, err := s.client.Do(ctx, req, &newApps) + if err != nil { + return nil, resp, err + } + + return newApps, resp, nil +} + +// RemoveAppRestrictions removes the restrictions of an app from pushing to this branch. +// It requires the GitHub apps to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#remove-app-access-restrictions +func (s *RepositoriesService) RemoveAppRestrictions(ctx context.Context, owner, repo, branch string, apps []string) ([]*App, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/apps", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, apps) + if err != nil { + return nil, nil, err + } + + var newApps []*App + resp, err := s.client.Do(ctx, req, &newApps) + if err != nil { + return nil, resp, err + } + + return newApps, resp, nil +} + +// ListTeamRestrictions lists the GitHub teams that have push access to a given protected branch. +// It requires the GitHub teams to have `write` access to the `content` permission. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-teams-with-access-to-the-protected-branch +func (s *RepositoriesService) ListTeamRestrictions(ctx context.Context, owner, repo, branch string) ([]*Team, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ReplaceTeamRestrictions replaces the team that have push access to a given protected branch. +// This removes all teams that previously had push access and grants push access to the new list of teams. +// It requires the GitHub teams to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#set-team-access-restrictions +func (s *RepositoriesService) ReplaceTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PUT", u, teams) + if err != nil { + return nil, nil, err + } + + var newTeams []*Team + resp, err := s.client.Do(ctx, req, &newTeams) + if err != nil { + return nil, resp, err + } + + return newTeams, resp, nil +} + +// AddTeamRestrictions grants the specified teams push access to a given protected branch. +// It requires the GitHub teams to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#add-team-access-restrictions +func (s *RepositoriesService) AddTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("POST", u, teams) + if err != nil { + return nil, nil, err + } + + var newTeams []*Team + resp, err := s.client.Do(ctx, req, &newTeams) + if err != nil { + return nil, resp, err + } + + return newTeams, resp, nil +} + +// RemoveTeamRestrictions removes the restrictions of a team from pushing to this branch. +// It requires the GitHub teams to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#remove-team-access-restrictions +func (s *RepositoriesService) RemoveTeamRestrictions(ctx context.Context, owner, repo, branch string, teams []string) ([]*Team, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/teams", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, teams) + if err != nil { + return nil, nil, err + } + + var newTeams []*Team + resp, err := s.client.Do(ctx, req, &newTeams) + if err != nil { + return nil, resp, err + } + + return newTeams, resp, nil +} + +// ListUserRestrictions lists the GitHub users that have push access to a given protected branch. +// It requires the GitHub users to have `write` access to the `content` permission. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#get-users-with-access-to-the-protected-branch +func (s *RepositoriesService) ListUserRestrictions(ctx context.Context, owner, repo, branch string) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// ReplaceUserRestrictions replaces the user that have push access to a given protected branch. +// It removes all users that previously had push access and grants push access to the new list of users. +// It requires the GitHub users to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#set-team-access-restrictions +func (s *RepositoriesService) ReplaceUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("PUT", u, users) + if err != nil { + return nil, nil, err + } + + var newUsers []*User + resp, err := s.client.Do(ctx, req, &newUsers) + if err != nil { + return nil, resp, err + } + + return newUsers, resp, nil +} + +// AddUserRestrictions grants the specified users push access to a given protected branch. +// It requires the GitHub users to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#add-team-access-restrictions +func (s *RepositoriesService) AddUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("POST", u, users) + if err != nil { + return nil, nil, err + } + + var newUsers []*User + resp, err := s.client.Do(ctx, req, &newUsers) + if err != nil { + return nil, resp, err + } + + return newUsers, resp, nil +} + +// RemoveUserRestrictions removes the restrictions of a user from pushing to this branch. +// It requires the GitHub users to have `write` access to the `content` permission. +// +// Note: The list of users, apps, and teams in total is limited to 100 items. +// +// Note: the branch name is URL path escaped for you. See: https://pkg.go.dev/net/url#PathEscape . 
+// +// GitHub API docs: https://docs.github.com/en/rest/branches/branch-protection#remove-team-access-restrictions +func (s *RepositoriesService) RemoveUserRestrictions(ctx context.Context, owner, repo, branch string, users []string) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/restrictions/users", owner, repo, url.PathEscape(branch)) + req, err := s.client.NewRequest("DELETE", u, users) + if err != nil { + return nil, nil, err + } + + var newUsers []*User + resp, err := s.client.Do(ctx, req, &newUsers) + if err != nil { + return nil, resp, err + } + + return newUsers, resp, nil +} + +// TransferRequest represents a request to transfer a repository. +type TransferRequest struct { + NewOwner string `json:"new_owner"` + NewName *string `json:"new_name,omitempty"` + TeamID []int64 `json:"team_ids,omitempty"` +} + +// Transfer transfers a repository from one account or organization to another. +// +// This method might return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it has now scheduled the transfer of the repository in a background task. +// A follow up request, after a delay of a second or so, should result +// in a successful request. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#transfer-a-repository +func (s *RepositoriesService) Transfer(ctx context.Context, owner, repo string, transfer TransferRequest) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/transfer", owner, repo) + + req, err := s.client.NewRequest("POST", u, &transfer) + if err != nil { + return nil, nil, err + } + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// DispatchRequestOptions represents a request to trigger a repository_dispatch event. +type DispatchRequestOptions struct { + // EventType is a custom webhook event name. (Required.) + EventType string `json:"event_type"` + // ClientPayload is a custom JSON payload with extra information about the webhook event. + // Defaults to an empty JSON object. + ClientPayload *json.RawMessage `json:"client_payload,omitempty"` +} + +// Dispatch triggers a repository_dispatch event in a GitHub Actions workflow. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#create-a-repository-dispatch-event +func (s *RepositoriesService) Dispatch(ctx context.Context, owner, repo string, opts DispatchRequestOptions) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/dispatches", owner, repo) + + req, err := s.client.NewRequest("POST", u, &opts) + if err != nil { + return nil, nil, err + } + + r := new(Repository) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +// isBranchNotProtected determines whether a branch is not protected +// based on the error message returned by GitHub API. +func isBranchNotProtected(err error) bool { + errorResponse, ok := err.(*ErrorResponse) + return ok && errorResponse.Message == githubBranchNotProtected +} + +// EnablePrivateReporting enables private reporting of vulnerabilities for a +// repository. 
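// Usage sketch (not part of the vendored file above): a repository transfer and
// a repository_dispatch event, using Transfer and Dispatch from this hunk. The
// owner, repo, target organization, team ID and event payload are placeholders.
package main

import (
	"context"
	"encoding/json"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	// Transfers are scheduled asynchronously: GitHub answers 202 and go-github
	// reports that as *AcceptedError, which is expected here rather than a failure.
	_, _, err := client.Repositories.Transfer(ctx, owner, repo, github.TransferRequest{
		NewOwner: "new-owner-org",  // hypothetical
		TeamID:   []int64{1234567}, // hypothetical team IDs in the new org
	})
	if _, accepted := err.(*github.AcceptedError); err != nil && !accepted {
		log.Fatal(err)
	}

	// Fire a repository_dispatch event with a small custom JSON payload.
	payload := json.RawMessage(`{"environment":"staging"}`)
	if _, _, err := client.Repositories.Dispatch(ctx, owner, repo, github.DispatchRequestOptions{
		EventType:     "deploy",
		ClientPayload: &payload,
	}); err != nil {
		log.Fatal(err)
	}
}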
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#enable-private-vulnerability-reporting-for-a-repository +func (s *RepositoriesService) EnablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// DisablePrivateReporting disables private reporting of vulnerabilities for a +// repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#disable-private-vulnerability-reporting-for-a-repository +func (s *RepositoriesService) DisablePrivateReporting(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/private-vulnerability-reporting", owner, repo) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_actions_access.go b/vendor/github.com/google/go-github/v56/github/repos_actions_access.go new file mode 100644 index 000000000..55761eeb7 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_actions_access.go @@ -0,0 +1,55 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RepositoryActionsAccessLevel represents the repository actions access level. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository +type RepositoryActionsAccessLevel struct { + // AccessLevel specifies the level of access that workflows outside of the repository have + // to actions and reusable workflows within the repository. + // Possible values are: "none", "organization" "enterprise". + AccessLevel *string `json:"access_level,omitempty"` +} + +// GetActionsAccessLevel gets the level of access that workflows outside of the repository have +// to actions and reusable workflows in the repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-the-level-of-access-for-workflows-outside-of-the-repository +func (s *RepositoriesService) GetActionsAccessLevel(ctx context.Context, owner, repo string) (*RepositoryActionsAccessLevel, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + raal := new(RepositoryActionsAccessLevel) + resp, err := s.client.Do(ctx, req, raal) + if err != nil { + return nil, resp, err + } + + return raal, resp, nil +} + +// EditActionsAccessLevel sets the level of access that workflows outside of the repository have +// to actions and reusable workflows in the repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-the-level-of-access-for-workflows-outside-of-the-repository +func (s *RepositoriesService) EditActionsAccessLevel(ctx context.Context, owner, repo string, repositoryActionsAccessLevel RepositoryActionsAccessLevel) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions/access", owner, repo) + req, err := s.client.NewRequest("PUT", u, repositoryActionsAccessLevel) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_actions_allowed.go b/vendor/github.com/google/go-github/v56/github/repos_actions_allowed.go new file mode 100644 index 000000000..25a469058 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_actions_allowed.go @@ -0,0 +1,49 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetActionsAllowed gets the allowed actions and reusable workflows for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-allowed-actions-and-reusable-workflows-for-a-repository +func (s *RepositoriesService) GetActionsAllowed(ctx context.Context, org, repo string) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + actionsAllowed := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, actionsAllowed) + if err != nil { + return nil, resp, err + } + + return actionsAllowed, resp, nil +} + +// EditActionsAllowed sets the allowed actions and reusable workflows for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-allowed-actions-and-reusable-workflows-for-a-repository +func (s *RepositoriesService) EditActionsAllowed(ctx context.Context, org, repo string, actionsAllowed ActionsAllowed) (*ActionsAllowed, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions/selected-actions", org, repo) + req, err := s.client.NewRequest("PUT", u, actionsAllowed) + if err != nil { + return nil, nil, err + } + + p := new(ActionsAllowed) + resp, err := s.client.Do(ctx, req, p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_actions_permissions.go b/vendor/github.com/google/go-github/v56/github/repos_actions_permissions.go new file mode 100644 index 000000000..45f844cec --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_actions_permissions.go @@ -0,0 +1,62 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ActionsPermissionsRepository represents a policy for repositories and allowed actions in a repository. 
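// Usage sketch (not part of the vendored file above): reading and widening the
// access level that workflows outside the repository have to its actions and
// reusable workflows, via GetActionsAccessLevel and EditActionsAccessLevel from
// this hunk. Owner and repo are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	lvl, _, err := client.Repositories.GetActionsAccessLevel(ctx, owner, repo)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("current access level:", lvl.GetAccessLevel()) // "none", "organization" or "enterprise"

	// Let other repositories in the same organization use this repository's
	// actions and reusable workflows.
	if _, err := client.Repositories.EditActionsAccessLevel(ctx, owner, repo, github.RepositoryActionsAccessLevel{
		AccessLevel: github.String("organization"),
	}); err != nil {
		log.Fatal(err)
	}
}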
+// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions +type ActionsPermissionsRepository struct { + Enabled *bool `json:"enabled,omitempty"` + AllowedActions *string `json:"allowed_actions,omitempty"` + SelectedActionsURL *string `json:"selected_actions_url,omitempty"` +} + +func (a ActionsPermissionsRepository) String() string { + return Stringify(a) +} + +// GetActionsPermissions gets the GitHub Actions permissions policy for repositories and allowed actions in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#get-github-actions-permissions-for-a-repository +func (s *RepositoriesService) GetActionsPermissions(ctx context.Context, owner, repo string) (*ActionsPermissionsRepository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + permissions := new(ActionsPermissionsRepository) + resp, err := s.client.Do(ctx, req, permissions) + if err != nil { + return nil, resp, err + } + + return permissions, resp, nil +} + +// EditActionsPermissions sets the permissions policy for repositories and allowed actions in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/actions/permissions#set-github-actions-permissions-for-a-repository +func (s *RepositoriesService) EditActionsPermissions(ctx context.Context, owner, repo string, actionsPermissionsRepository ActionsPermissionsRepository) (*ActionsPermissionsRepository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/actions/permissions", owner, repo) + req, err := s.client.NewRequest("PUT", u, actionsPermissionsRepository) + if err != nil { + return nil, nil, err + } + + permissions := new(ActionsPermissionsRepository) + resp, err := s.client.Do(ctx, req, permissions) + if err != nil { + return nil, resp, err + } + + return permissions, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_autolinks.go b/vendor/github.com/google/go-github/v56/github/repos_autolinks.go new file mode 100644 index 000000000..0d2cec618 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_autolinks.go @@ -0,0 +1,104 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// AutolinkOptions specifies parameters for RepositoriesService.AddAutolink method. +type AutolinkOptions struct { + KeyPrefix *string `json:"key_prefix,omitempty"` + URLTemplate *string `json:"url_template,omitempty"` + IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` +} + +// Autolink represents autolinks to external resources like JIRA issues and Zendesk tickets. +type Autolink struct { + ID *int64 `json:"id,omitempty"` + KeyPrefix *string `json:"key_prefix,omitempty"` + URLTemplate *string `json:"url_template,omitempty"` + IsAlphanumeric *bool `json:"is_alphanumeric,omitempty"` +} + +// ListAutolinks returns a list of autolinks configured for the given repository. +// Information about autolinks are only available to repository administrators. 
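// Usage sketch (not part of the vendored file above): inspecting and tightening
// the repository-level GitHub Actions policy with GetActionsPermissions and
// EditActionsPermissions from this hunk. Setting AllowedActions to "selected"
// means the concrete allow-list is then managed through the selected-actions
// endpoints shown earlier in this diff. Owner and repo are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	perms, _, err := client.Repositories.GetActionsPermissions(ctx, owner, repo)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("current policy:", github.Stringify(perms))

	// Keep Actions enabled but only allow an explicitly selected set of actions.
	updated, _, err := client.Repositories.EditActionsPermissions(ctx, owner, repo, github.ActionsPermissionsRepository{
		Enabled:        github.Bool(true),
		AllowedActions: github.String("selected"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("new policy:", github.Stringify(updated))
}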
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/autolinks#list-all-autolinks-of-a-repository +func (s *RepositoriesService) ListAutolinks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Autolink, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var autolinks []*Autolink + resp, err := s.client.Do(ctx, req, &autolinks) + if err != nil { + return nil, resp, err + } + + return autolinks, resp, nil +} + +// AddAutolink creates an autolink reference for a repository. +// Users with admin access to the repository can create an autolink. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/autolinks#create-an-autolink-reference-for-a-repository +func (s *RepositoriesService) AddAutolink(ctx context.Context, owner, repo string, opts *AutolinkOptions) (*Autolink, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/autolinks", owner, repo) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + al := new(Autolink) + resp, err := s.client.Do(ctx, req, al) + if err != nil { + return nil, resp, err + } + return al, resp, nil +} + +// GetAutolink returns a single autolink reference by ID that was configured for the given repository. +// Information about autolinks are only available to repository administrators. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/autolinks#get-an-autolink-reference-of-a-repository +func (s *RepositoriesService) GetAutolink(ctx context.Context, owner, repo string, id int64) (*Autolink, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var autolink *Autolink + resp, err := s.client.Do(ctx, req, &autolink) + if err != nil { + return nil, resp, err + } + + return autolink, resp, nil +} + +// DeleteAutolink deletes a single autolink reference by ID that was configured for the given repository. +// Information about autolinks are only available to repository administrators. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/autolinks#delete-an-autolink-reference-from-a-repository +func (s *RepositoriesService) DeleteAutolink(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/autolinks/%v", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_codeowners.go b/vendor/github.com/google/go-github/v56/github/repos_codeowners.go new file mode 100644 index 000000000..1c3f78abd --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_codeowners.go @@ -0,0 +1,59 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetCodeownersErrorsOptions specifies the optional parameters to the +// RepositoriesService.GetCodeownersErrors method. +type GetCodeownersErrorsOptions struct { + // A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. + // Default: the repository's default branch (e.g. main). 
+ Ref string `url:"ref,omitempty"` +} + +// CodeownersErrors represents a list of syntax errors detected in the CODEOWNERS file. +type CodeownersErrors struct { + Errors []*CodeownersError `json:"errors"` +} + +// CodeownersError represents a syntax error detected in the CODEOWNERS file. +type CodeownersError struct { + Line int `json:"line"` + Column int `json:"column"` + Kind string `json:"kind"` + Source string `json:"source"` + Suggestion *string `json:"suggestion,omitempty"` + Message string `json:"message"` + Path string `json:"path"` +} + +// GetCodeownersErrors lists any syntax errors that are detected in the CODEOWNERS file. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/repos#list-codeowners-errors +func (s *RepositoriesService) GetCodeownersErrors(ctx context.Context, owner, repo string, opts *GetCodeownersErrorsOptions) (*CodeownersErrors, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/codeowners/errors", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + codeownersErrors := &CodeownersErrors{} + resp, err := s.client.Do(ctx, req, codeownersErrors) + if err != nil { + return nil, resp, err + } + + return codeownersErrors, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_collaborators.go b/vendor/github.com/google/go-github/v56/github/repos_collaborators.go new file mode 100644 index 000000000..c2396872f --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_collaborators.go @@ -0,0 +1,166 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListCollaboratorsOptions specifies the optional parameters to the +// RepositoriesService.ListCollaborators method. +type ListCollaboratorsOptions struct { + // Affiliation specifies how collaborators should be filtered by their affiliation. + // Possible values are: + // outside - All outside collaborators of an organization-owned repository + // direct - All collaborators with permissions to an organization-owned repository, + // regardless of organization membership status + // all - All collaborators the authenticated user can see + // + // Default value is "all". + Affiliation string `url:"affiliation,omitempty"` + + // Permission specifies how collaborators should be filtered by the permissions they have on the repository. + // Possible values are: + // "pull", "triage", "push", "maintain", "admin" + // + // If not specified, all collaborators will be returned. + Permission string `url:"permission,omitempty"` + + ListOptions +} + +// CollaboratorInvitation represents an invitation created when adding a collaborator. +// GitHub API docs: https://docs.github.com/en/rest/repos/collaborators/#response-when-a-new-invitation-is-created +type CollaboratorInvitation struct { + ID *int64 `json:"id,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Invitee *User `json:"invitee,omitempty"` + Inviter *User `json:"inviter,omitempty"` + Permissions *string `json:"permissions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` +} + +// ListCollaborators lists the GitHub users that have access to the repository. 
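// Usage sketch (not part of the vendored file above): linting the CODEOWNERS
// file of a ref with GetCodeownersErrors from this hunk and printing each
// reported problem. Owner, repo and the ref name are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	errsResp, _, err := client.Repositories.GetCodeownersErrors(ctx, owner, repo,
		&github.GetCodeownersErrorsOptions{Ref: "main"}) // omit Ref to use the default branch
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range errsResp.Errors {
		// Each entry pinpoints the offending line and column in the CODEOWNERS file.
		fmt.Printf("%s:%d:%d: %s\n", e.Path, e.Line, e.Column, e.Message)
	}
}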
+// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/collaborators#list-repository-collaborators +func (s *RepositoriesService) ListCollaborators(ctx context.Context, owner, repo string, opts *ListCollaboratorsOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/collaborators", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// IsCollaborator checks whether the specified GitHub user has collaborator +// access to the given repo. +// Note: This will return false if the user is not a collaborator OR the user +// is not a GitHub user. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator +func (s *RepositoriesService) IsCollaborator(ctx context.Context, owner, repo, user string) (bool, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + isCollab, err := parseBoolResponse(err) + return isCollab, resp, err +} + +// RepositoryPermissionLevel represents the permission level an organization +// member has for a given repository. +type RepositoryPermissionLevel struct { + // Possible values: "admin", "write", "read", "none" + Permission *string `json:"permission,omitempty"` + + User *User `json:"user,omitempty"` +} + +// GetPermissionLevel retrieves the specific permission level a collaborator has for a given repository. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/collaborators#get-repository-permissions-for-a-user +func (s *RepositoriesService) GetPermissionLevel(ctx context.Context, owner, repo, user string) (*RepositoryPermissionLevel, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/collaborators/%v/permission", owner, repo, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + rpl := new(RepositoryPermissionLevel) + resp, err := s.client.Do(ctx, req, rpl) + if err != nil { + return nil, resp, err + } + + return rpl, resp, nil +} + +// RepositoryAddCollaboratorOptions specifies the optional parameters to the +// RepositoriesService.AddCollaborator method. +type RepositoryAddCollaboratorOptions struct { + // Permission specifies the permission to grant the user on this repository. + // Possible values are: + // pull - team members can pull, but not push to or administer this repository + // push - team members can pull and push, but not administer this repository + // admin - team members can pull, push and administer this repository + // maintain - team members can manage the repository without access to sensitive or destructive actions. + // triage - team members can proactively manage issues and pull requests without write access. + // + // Default value is "push". This option is only valid for organization-owned repositories. + Permission string `json:"permission,omitempty"` +} + +// AddCollaborator sends an invitation to the specified GitHub user +// to become a collaborator to the given repo. 
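// Usage sketch (not part of the vendored file above): listing direct
// collaborators and checking one account's access with ListCollaborators,
// IsCollaborator and GetPermissionLevel from this hunk. Owner, repo and the
// username are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	users, _, err := client.Repositories.ListCollaborators(ctx, owner, repo, &github.ListCollaboratorsOptions{
		Affiliation: "direct",
		ListOptions: github.ListOptions{PerPage: 50},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, u := range users {
		fmt.Println("collaborator:", u.GetLogin())
	}

	// Check a single account and, if it is a collaborator, its permission level.
	isCollab, _, err := client.Repositories.IsCollaborator(ctx, owner, repo, "some-user")
	if err != nil {
		log.Fatal(err)
	}
	if isCollab {
		lvl, _, err := client.Repositories.GetPermissionLevel(ctx, owner, repo, "some-user")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println("permission:", lvl.GetPermission()) // "admin", "write", "read" or "none"
	}
}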
+// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/collaborators#add-a-repository-collaborator +func (s *RepositoriesService) AddCollaborator(ctx context.Context, owner, repo, user string, opts *RepositoryAddCollaboratorOptions) (*CollaboratorInvitation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + acr := new(CollaboratorInvitation) + resp, err := s.client.Do(ctx, req, acr) + if err != nil { + return nil, resp, err + } + + return acr, resp, nil +} + +// RemoveCollaborator removes the specified GitHub user as collaborator from the given repo. +// Note: Does not return error if a valid user that is not a collaborator is removed. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/collaborators#remove-a-repository-collaborator +func (s *RepositoriesService) RemoveCollaborator(ctx context.Context, owner, repo, user string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/collaborators/%v", owner, repo, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_comments.go b/vendor/github.com/google/go-github/v56/github/repos_comments.go new file mode 100644 index 000000000..e282374e9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_comments.go @@ -0,0 +1,161 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RepositoryComment represents a comment for a commit, file, or line in a repository. +type RepositoryComment struct { + HTMLURL *string `json:"html_url,omitempty"` + URL *string `json:"url,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + User *User `json:"user,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + + // User-mutable fields + Body *string `json:"body"` + // User-initialized fields + Path *string `json:"path,omitempty"` + Position *int `json:"position,omitempty"` +} + +func (r RepositoryComment) String() string { + return Stringify(r) +} + +// ListComments lists all the comments for the repository. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#list-commit-comments-for-a-repository +func (s *RepositoriesService) ListComments(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var comments []*RepositoryComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// ListCommitComments lists all the comments for a given commit SHA. 
+// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#list-commit-comments +func (s *RepositoriesService) ListCommitComments(ctx context.Context, owner, repo, sha string, opts *ListOptions) ([]*RepositoryComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + var comments []*RepositoryComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// CreateComment creates a comment for the given commit. +// Note: GitHub allows for comments to be created for non-existing files and positions. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#create-a-commit-comment +func (s *RepositoriesService) CreateComment(ctx context.Context, owner, repo, sha string, comment *RepositoryComment) (*RepositoryComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/comments", owner, repo, sha) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(RepositoryComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// GetComment gets a single comment from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#get-a-commit-comment +func (s *RepositoriesService) GetComment(ctx context.Context, owner, repo string, id int64) (*RepositoryComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeReactionsPreview) + + c := new(RepositoryComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// UpdateComment updates the body of a single comment. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#update-a-commit-comment +func (s *RepositoriesService) UpdateComment(ctx context.Context, owner, repo string, id int64, comment *RepositoryComment) (*RepositoryComment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + c := new(RepositoryComment) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} + +// DeleteComment deletes a single comment from a repository. 
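// Usage sketch (not part of the vendored file above): attaching a comment to a
// commit and then revising it with CreateComment and UpdateComment from this
// hunk. Owner, repo and the commit SHA are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo"                // hypothetical
	sha := "0123456789abcdef0123456789abcdef01234567" // hypothetical commit SHA

	c, _, err := client.Repositories.CreateComment(ctx, owner, repo, sha, &github.RepositoryComment{
		Body: github.String("Heads-up: this commit changes the release pipeline."),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created comment", c.GetID())

	// Only the body is mutable; send a fresh comment value with the new text.
	if _, _, err := client.Repositories.UpdateComment(ctx, owner, repo, c.GetID(), &github.RepositoryComment{
		Body: github.String("Heads-up: this commit changes the release pipeline (see the runbook)."),
	}); err != nil {
		log.Fatal(err)
	}
}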
+// +// GitHub API docs: https://docs.github.com/en/rest/commits/comments#delete-a-commit-comment +func (s *RepositoriesService) DeleteComment(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/comments/%v", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_commits.go b/vendor/github.com/google/go-github/v56/github/repos_commits.go new file mode 100644 index 000000000..d1fb577c6 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_commits.go @@ -0,0 +1,312 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "context" + "fmt" + "net/url" + "time" +) + +// RepositoryCommit represents a commit in a repo. +// Note that it's wrapping a Commit, so author/committer information is in two places, +// but contain different details about them: in RepositoryCommit "github details", in Commit - "git details". +type RepositoryCommit struct { + NodeID *string `json:"node_id,omitempty"` + SHA *string `json:"sha,omitempty"` + Commit *Commit `json:"commit,omitempty"` + Author *User `json:"author,omitempty"` + Committer *User `json:"committer,omitempty"` + Parents []*Commit `json:"parents,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + URL *string `json:"url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + + // Details about how many changes were made in this commit. Only filled in during GetCommit! + Stats *CommitStats `json:"stats,omitempty"` + // Details about which files, and how this commit touched. Only filled in during GetCommit! + Files []*CommitFile `json:"files,omitempty"` +} + +func (r RepositoryCommit) String() string { + return Stringify(r) +} + +// CommitStats represents the number of additions / deletions from a file in a given RepositoryCommit or GistCommit. +type CommitStats struct { + Additions *int `json:"additions,omitempty"` + Deletions *int `json:"deletions,omitempty"` + Total *int `json:"total,omitempty"` +} + +func (c CommitStats) String() string { + return Stringify(c) +} + +// CommitFile represents a file modified in a commit. +type CommitFile struct { + SHA *string `json:"sha,omitempty"` + Filename *string `json:"filename,omitempty"` + Additions *int `json:"additions,omitempty"` + Deletions *int `json:"deletions,omitempty"` + Changes *int `json:"changes,omitempty"` + Status *string `json:"status,omitempty"` + Patch *string `json:"patch,omitempty"` + BlobURL *string `json:"blob_url,omitempty"` + RawURL *string `json:"raw_url,omitempty"` + ContentsURL *string `json:"contents_url,omitempty"` + PreviousFilename *string `json:"previous_filename,omitempty"` +} + +func (c CommitFile) String() string { + return Stringify(c) +} + +// CommitsComparison is the result of comparing two commits. +// See CompareCommits() for details. 
+type CommitsComparison struct { + BaseCommit *RepositoryCommit `json:"base_commit,omitempty"` + MergeBaseCommit *RepositoryCommit `json:"merge_base_commit,omitempty"` + + // Head can be 'behind' or 'ahead' + Status *string `json:"status,omitempty"` + AheadBy *int `json:"ahead_by,omitempty"` + BehindBy *int `json:"behind_by,omitempty"` + TotalCommits *int `json:"total_commits,omitempty"` + + Commits []*RepositoryCommit `json:"commits,omitempty"` + + Files []*CommitFile `json:"files,omitempty"` + + HTMLURL *string `json:"html_url,omitempty"` + PermalinkURL *string `json:"permalink_url,omitempty"` + DiffURL *string `json:"diff_url,omitempty"` + PatchURL *string `json:"patch_url,omitempty"` + URL *string `json:"url,omitempty"` // API URL. +} + +func (c CommitsComparison) String() string { + return Stringify(c) +} + +// CommitsListOptions specifies the optional parameters to the +// RepositoriesService.ListCommits method. +type CommitsListOptions struct { + // SHA or branch to start listing Commits from. + SHA string `url:"sha,omitempty"` + + // Path that should be touched by the returned Commits. + Path string `url:"path,omitempty"` + + // Author of by which to filter Commits. + Author string `url:"author,omitempty"` + + // Since when should Commits be included in the response. + Since time.Time `url:"since,omitempty"` + + // Until when should Commits be included in the response. + Until time.Time `url:"until,omitempty"` + + ListOptions +} + +// BranchCommit is the result of listing branches with commit SHA. +type BranchCommit struct { + Name *string `json:"name,omitempty"` + Commit *Commit `json:"commit,omitempty"` + Protected *bool `json:"protected,omitempty"` +} + +// ListCommits lists the commits of a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#list-commits +func (s *RepositoriesService) ListCommits(ctx context.Context, owner, repo string, opts *CommitsListOptions) ([]*RepositoryCommit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var commits []*RepositoryCommit + resp, err := s.client.Do(ctx, req, &commits) + if err != nil { + return nil, resp, err + } + + return commits, resp, nil +} + +// GetCommit fetches the specified commit, including all details about it. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#get-a-single-commit +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#get-a-commit +func (s *RepositoriesService) GetCommit(ctx context.Context, owner, repo, sha string, opts *ListOptions) (*RepositoryCommit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + commit := new(RepositoryCommit) + resp, err := s.client.Do(ctx, req, commit) + if err != nil { + return nil, resp, err + } + + return commit, resp, nil +} + +// GetCommitRaw fetches the specified commit in raw (diff or patch) format. 
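// Usage sketch (not part of the vendored file above): walking recent commits on
// a branch with ListCommits and then fetching one in full with GetCommit, which
// is what populates the Stats and Files fields noted above. Owner, repo, branch
// and the path filter are placeholders.
package main

import (
	"context"
	"fmt"
	"log"
	"os"
	"time"

	"github.com/google/go-github/v56/github"
)

func main() {
	ctx := context.Background()
	client := github.NewClient(nil).WithAuthToken(os.Getenv("GITHUB_TOKEN"))
	owner, repo := "my-org", "my-repo" // hypothetical

	commits, _, err := client.Repositories.ListCommits(ctx, owner, repo, &github.CommitsListOptions{
		SHA:         "main",                       // branch (or SHA) to start from
		Path:        "docs",                       // only commits touching this path
		Since:       time.Now().AddDate(0, 0, -7), // last seven days
		ListOptions: github.ListOptions{PerPage: 30},
	})
	if err != nil {
		log.Fatal(err)
	}

	for _, c := range commits {
		// The listing omits per-file details; fetch the newest commit in full.
		full, _, err := client.Repositories.GetCommit(ctx, owner, repo, c.GetSHA(), nil)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s: %d files changed, +%d/-%d\n",
			full.GetSHA(), len(full.Files), full.GetStats().GetAdditions(), full.GetStats().GetDeletions())
		break // one commit is enough for the sketch
	}
}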
+// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#get-a-commit +func (s *RepositoriesService) GetCommitRaw(ctx context.Context, owner string, repo string, sha string, opts RawOptions) (string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, sha) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + + switch opts.Type { + case Diff: + req.Header.Set("Accept", mediaTypeV3Diff) + case Patch: + req.Header.Set("Accept", mediaTypeV3Patch) + default: + return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) + } + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// GetCommitSHA1 gets the SHA-1 of a commit reference. If a last-known SHA1 is +// supplied and no new commits have occurred, a 304 Unmodified response is returned. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#get-a-commit +func (s *RepositoriesService) GetCommitSHA1(ctx context.Context, owner, repo, ref, lastSHA string) (string, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v", owner, repo, refURLEscape(ref)) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + if lastSHA != "" { + req.Header.Set("If-None-Match", `"`+lastSHA+`"`) + } + + req.Header.Set("Accept", mediaTypeV3SHA) + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// CompareCommits compares a range of commits with each other. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#compare-two-commits +func (s *RepositoriesService) CompareCommits(ctx context.Context, owner, repo string, base, head string, opts *ListOptions) (*CommitsComparison, *Response, error) { + escapedBase := url.QueryEscape(base) + escapedHead := url.QueryEscape(head) + + u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + comp := new(CommitsComparison) + resp, err := s.client.Do(ctx, req, comp) + if err != nil { + return nil, resp, err + } + + return comp, resp, nil +} + +// CompareCommitsRaw compares a range of commits with each other in raw (diff or patch) format. +// +// Both "base" and "head" must be branch names in "repo". +// To compare branches across other repositories in the same network as "repo", +// use the format ":branch". 
+// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#compare-two-commits +func (s *RepositoriesService) CompareCommitsRaw(ctx context.Context, owner, repo, base, head string, opts RawOptions) (string, *Response, error) { + escapedBase := url.QueryEscape(base) + escapedHead := url.QueryEscape(head) + + u := fmt.Sprintf("repos/%v/%v/compare/%v...%v", owner, repo, escapedBase, escapedHead) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return "", nil, err + } + + switch opts.Type { + case Diff: + req.Header.Set("Accept", mediaTypeV3Diff) + case Patch: + req.Header.Set("Accept", mediaTypeV3Patch) + default: + return "", nil, fmt.Errorf("unsupported raw type %d", opts.Type) + } + + var buf bytes.Buffer + resp, err := s.client.Do(ctx, req, &buf) + if err != nil { + return "", resp, err + } + + return buf.String(), resp, nil +} + +// ListBranchesHeadCommit gets all branches where the given commit SHA is the HEAD, +// or latest commit for the branch. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/commits#list-branches-for-head-commit +func (s *RepositoriesService) ListBranchesHeadCommit(ctx context.Context, owner, repo, sha string) ([]*BranchCommit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/branches-where-head", owner, repo, sha) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeListPullsOrBranchesForCommitPreview) + var branchCommits []*BranchCommit + resp, err := s.client.Do(ctx, req, &branchCommits) + if err != nil { + return nil, resp, err + } + + return branchCommits, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_community_health.go b/vendor/github.com/google/go-github/v56/github/repos_community_health.go new file mode 100644 index 000000000..750ee1582 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_community_health.go @@ -0,0 +1,61 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Metric represents the different fields for one file in community health files. +type Metric struct { + Name *string `json:"name"` + Key *string `json:"key"` + SPDXID *string `json:"spdx_id"` + URL *string `json:"url"` + HTMLURL *string `json:"html_url"` + NodeID *string `json:"node_id"` +} + +// CommunityHealthFiles represents the different files in the community health metrics response. +type CommunityHealthFiles struct { + CodeOfConduct *Metric `json:"code_of_conduct"` + CodeOfConductFile *Metric `json:"code_of_conduct_file"` + Contributing *Metric `json:"contributing"` + IssueTemplate *Metric `json:"issue_template"` + PullRequestTemplate *Metric `json:"pull_request_template"` + License *Metric `json:"license"` + Readme *Metric `json:"readme"` +} + +// CommunityHealthMetrics represents a response containing the community metrics of a repository. 
+type CommunityHealthMetrics struct { + HealthPercentage *int `json:"health_percentage"` + Description *string `json:"description"` + Documentation *string `json:"documentation"` + Files *CommunityHealthFiles `json:"files"` + UpdatedAt *Timestamp `json:"updated_at"` + ContentReportsEnabled *bool `json:"content_reports_enabled"` +} + +// GetCommunityHealthMetrics retrieves all the community health metrics for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/community#get-community-profile-metrics +func (s *RepositoriesService) GetCommunityHealthMetrics(ctx context.Context, owner, repo string) (*CommunityHealthMetrics, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/community/profile", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + metrics := &CommunityHealthMetrics{} + resp, err := s.client.Do(ctx, req, metrics) + if err != nil { + return nil, resp, err + } + + return metrics, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_contents.go b/vendor/github.com/google/go-github/v56/github/repos_contents.go new file mode 100644 index 000000000..cfb4a6da2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_contents.go @@ -0,0 +1,337 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Repository contents API methods. +// GitHub API docs: https://docs.github.com/en/rest/repos/contents/ + +package github + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "path" + "strings" +) + +var ErrPathForbidden = errors.New("path must not contain '..' due to auth vulnerability issue") + +// RepositoryContent represents a file or directory in a github repository. +type RepositoryContent struct { + Type *string `json:"type,omitempty"` + // Target is only set if the type is "symlink" and the target is not a normal file. + // If Target is set, Path will be the symlink path. + Target *string `json:"target,omitempty"` + Encoding *string `json:"encoding,omitempty"` + Size *int `json:"size,omitempty"` + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + // Content contains the actual file content, which may be encoded. + // Callers should call GetContent which will decode the content if + // necessary. + Content *string `json:"content,omitempty"` + SHA *string `json:"sha,omitempty"` + URL *string `json:"url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + DownloadURL *string `json:"download_url,omitempty"` + SubmoduleGitURL *string `json:"submodule_git_url,omitempty"` +} + +// RepositoryContentResponse holds the parsed response from CreateFile, UpdateFile, and DeleteFile. +type RepositoryContentResponse struct { + Content *RepositoryContent `json:"content,omitempty"` + Commit `json:"commit,omitempty"` +} + +// RepositoryContentFileOptions specifies optional parameters for CreateFile, UpdateFile, and DeleteFile. 
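A sketch of GetCommunityHealthMetrics, reusing the earlier client setup; the generated Get* accessors are assumed from go-github's usual accessor pattern.

// printCommunityProfile shows the community health metrics for a repository.
func printCommunityProfile(ctx context.Context, client *github.Client) error {
	metrics, _, err := client.Repositories.GetCommunityHealthMetrics(ctx, "google", "go-github")
	if err != nil {
		return err
	}
	fmt.Printf("health: %d%%\n", metrics.GetHealthPercentage())
	if readme := metrics.GetFiles().GetReadme(); readme != nil {
		fmt.Println("readme:", readme.GetHTMLURL())
	}
	return nil
}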
+type RepositoryContentFileOptions struct { + Message *string `json:"message,omitempty"` + Content []byte `json:"content"` // unencoded + SHA *string `json:"sha,omitempty"` + Branch *string `json:"branch,omitempty"` + Author *CommitAuthor `json:"author,omitempty"` + Committer *CommitAuthor `json:"committer,omitempty"` +} + +// RepositoryContentGetOptions represents an optional ref parameter, which can be a SHA, +// branch, or tag +type RepositoryContentGetOptions struct { + Ref string `url:"ref,omitempty"` +} + +// String converts RepositoryContent to a string. It's primarily for testing. +func (r RepositoryContent) String() string { + return Stringify(r) +} + +// GetContent returns the content of r, decoding it if necessary. +func (r *RepositoryContent) GetContent() (string, error) { + var encoding string + if r.Encoding != nil { + encoding = *r.Encoding + } + + switch encoding { + case "base64": + if r.Content == nil { + return "", errors.New("malformed response: base64 encoding of null content") + } + c, err := base64.StdEncoding.DecodeString(*r.Content) + return string(c), err + case "": + if r.Content == nil { + return "", nil + } + return *r.Content, nil + case "none": + return "", errors.New("unsupported content encoding: none, this may occur when file size > 1 MB, if that is the case consider using DownloadContents") + default: + return "", fmt.Errorf("unsupported content encoding: %v", encoding) + } +} + +// GetReadme gets the Readme file for the repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents#get-a-repository-readme +func (s *RepositoriesService) GetReadme(ctx context.Context, owner, repo string, opts *RepositoryContentGetOptions) (*RepositoryContent, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/readme", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + readme := new(RepositoryContent) + resp, err := s.client.Do(ctx, req, readme) + if err != nil { + return nil, resp, err + } + + return readme, resp, nil +} + +// DownloadContents returns an io.ReadCloser that reads the contents of the +// specified file. This function will work with files of any size, as opposed +// to GetContents which is limited to 1 Mb files. It is the caller's +// responsibility to close the ReadCloser. +// +// It is possible for the download to result in a failed response when the +// returned error is nil. Callers should check the returned Response status +// code to verify the content is from a successful response. 
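A sketch of GetReadme together with RepositoryContent.GetContent, which decodes the base64 payload; same client assumptions as above.

// printReadmeSize fetches the README and decodes its content via GetContent.
func printReadmeSize(ctx context.Context, client *github.Client) error {
	readme, _, err := client.Repositories.GetReadme(ctx, "google", "go-github", nil)
	if err != nil {
		return err
	}
	body, err := readme.GetContent() // transparently decodes the base64 payload
	if err != nil {
		return err
	}
	fmt.Printf("README.md is %d bytes\n", len(body))
	return nil
}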
+func (s *RepositoriesService) DownloadContents(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *Response, error) { + dir := path.Dir(filepath) + filename := path.Base(filepath) + _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) + if err != nil { + return nil, resp, err + } + + for _, contents := range dirContents { + if *contents.Name == filename { + if contents.DownloadURL == nil || *contents.DownloadURL == "" { + return nil, resp, fmt.Errorf("no download link found for %s", filepath) + } + + dlResp, err := s.client.client.Get(*contents.DownloadURL) + if err != nil { + return nil, &Response{Response: dlResp}, err + } + + return dlResp.Body, &Response{Response: dlResp}, nil + } + } + + return nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) +} + +// DownloadContentsWithMeta is identical to DownloadContents but additionally +// returns the RepositoryContent of the requested file. This additional data +// is useful for future operations involving the requested file. For merely +// reading the content of a file, DownloadContents is perfectly adequate. +// +// It is possible for the download to result in a failed response when the +// returned error is nil. Callers should check the returned Response status +// code to verify the content is from a successful response. +func (s *RepositoriesService) DownloadContentsWithMeta(ctx context.Context, owner, repo, filepath string, opts *RepositoryContentGetOptions) (io.ReadCloser, *RepositoryContent, *Response, error) { + dir := path.Dir(filepath) + filename := path.Base(filepath) + _, dirContents, resp, err := s.GetContents(ctx, owner, repo, dir, opts) + if err != nil { + return nil, nil, resp, err + } + + for _, contents := range dirContents { + if *contents.Name == filename { + if contents.DownloadURL == nil || *contents.DownloadURL == "" { + return nil, contents, resp, fmt.Errorf("no download link found for %s", filepath) + } + + dlResp, err := s.client.client.Get(*contents.DownloadURL) + if err != nil { + return nil, contents, &Response{Response: dlResp}, err + } + + return dlResp.Body, contents, &Response{Response: dlResp}, nil + } + } + + return nil, nil, resp, fmt.Errorf("no file named %s found in %s", filename, dir) +} + +// GetContents can return either the metadata and content of a single file +// (when path references a file) or the metadata of all the files and/or +// subdirectories of a directory (when path references a directory). To make it +// easy to distinguish between both result types and to mimic the API as much +// as possible, both result types will be returned but only one will contain a +// value and the other will be nil. +// +// Due to an auth vulnerability issue in the GitHub v3 API, ".." is not allowed +// to appear anywhere in the "path" or this method will return an error. 
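A sketch of DownloadContents, which streams files larger than the 1 MB GetContents limit. It assumes the earlier client plus the standard "io" and "net/http" imports; per the doc comment above, a nil error does not guarantee a successful download, so the response status is checked.

// downloadFile streams a single file of any size and reports how many bytes were read.
func downloadFile(ctx context.Context, client *github.Client) error {
	rc, resp, err := client.Repositories.DownloadContents(ctx, "google", "go-github", "README.md", nil)
	if err != nil {
		return err
	}
	defer rc.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected download status: %s", resp.Status)
	}
	data, err := io.ReadAll(rc)
	if err != nil {
		return err
	}
	fmt.Printf("downloaded %d bytes\n", len(data))
	return nil
}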
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents#get-repository-content +func (s *RepositoriesService) GetContents(ctx context.Context, owner, repo, path string, opts *RepositoryContentGetOptions) (fileContent *RepositoryContent, directoryContent []*RepositoryContent, resp *Response, err error) { + if strings.Contains(path, "..") { + return nil, nil, nil, ErrPathForbidden + } + + escapedPath := (&url.URL{Path: strings.TrimSuffix(path, "/")}).String() + u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, escapedPath) + u, err = addOptions(u, opts) + if err != nil { + return nil, nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, nil, err + } + + var rawJSON json.RawMessage + resp, err = s.client.Do(ctx, req, &rawJSON) + if err != nil { + return nil, nil, resp, err + } + + fileUnmarshalError := json.Unmarshal(rawJSON, &fileContent) + if fileUnmarshalError == nil { + return fileContent, nil, resp, nil + } + + directoryUnmarshalError := json.Unmarshal(rawJSON, &directoryContent) + if directoryUnmarshalError == nil { + return nil, directoryContent, resp, nil + } + + return nil, nil, resp, fmt.Errorf("unmarshalling failed for both file and directory content: %s and %s", fileUnmarshalError, directoryUnmarshalError) +} + +// CreateFile creates a new file in a repository at the given path and returns +// the commit and file metadata. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents#create-or-update-file-contents +func (s *RepositoriesService) CreateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + createResponse := new(RepositoryContentResponse) + resp, err := s.client.Do(ctx, req, createResponse) + if err != nil { + return nil, resp, err + } + + return createResponse, resp, nil +} + +// UpdateFile updates a file in a repository at the given path and returns the +// commit and file metadata. Requires the blob SHA of the file being updated. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents#create-or-update-file-contents +func (s *RepositoriesService) UpdateFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + updateResponse := new(RepositoryContentResponse) + resp, err := s.client.Do(ctx, req, updateResponse) + if err != nil { + return nil, resp, err + } + + return updateResponse, resp, nil +} + +// DeleteFile deletes a file from a repository and returns the commit. +// Requires the blob SHA of the file to be deleted. 
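A sketch of GetContents showing how the file and directory return values are distinguished, as described in the doc comment above; repository and path values are placeholders.

// listDirectory shows how GetContents returns either a single file or a directory listing.
func listDirectory(ctx context.Context, client *github.Client) error {
	file, dir, _, err := client.Repositories.GetContents(ctx, "google", "go-github", "github",
		&github.RepositoryContentGetOptions{Ref: "master"})
	if err != nil {
		return err
	}
	if file != nil { // the path pointed at a single file
		content, err := file.GetContent()
		if err != nil {
			return err
		}
		fmt.Println(content)
		return nil
	}
	for _, entry := range dir { // the path pointed at a directory
		fmt.Println(entry.GetType(), entry.GetPath())
	}
	return nil
}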
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents#delete-a-file +func (s *RepositoriesService) DeleteFile(ctx context.Context, owner, repo, path string, opts *RepositoryContentFileOptions) (*RepositoryContentResponse, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/contents/%s", owner, repo, path) + req, err := s.client.NewRequest("DELETE", u, opts) + if err != nil { + return nil, nil, err + } + + deleteResponse := new(RepositoryContentResponse) + resp, err := s.client.Do(ctx, req, deleteResponse) + if err != nil { + return nil, resp, err + } + + return deleteResponse, resp, nil +} + +// ArchiveFormat is used to define the archive type when calling GetArchiveLink. +type ArchiveFormat string + +const ( + // Tarball specifies an archive in gzipped tar format. + Tarball ArchiveFormat = "tarball" + + // Zipball specifies an archive in zip format. + Zipball ArchiveFormat = "zipball" +) + +// GetArchiveLink returns an URL to download a tarball or zipball archive for a +// repository. The archiveFormat can be specified by either the github.Tarball +// or github.Zipball constant. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/contents/#get-archive-link +func (s *RepositoriesService) GetArchiveLink(ctx context.Context, owner, repo string, archiveformat ArchiveFormat, opts *RepositoryContentGetOptions, maxRedirects int) (*url.URL, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/%s", owner, repo, archiveformat) + if opts != nil && opts.Ref != "" { + u += fmt.Sprintf("/%s", opts.Ref) + } + resp, err := s.client.roundTripWithOptionalFollowRedirect(ctx, u, maxRedirects) + if err != nil { + return nil, nil, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusFound { + return nil, newResponse(resp), fmt.Errorf("unexpected status code: %s", resp.Status) + } + + parsedURL, err := url.Parse(resp.Header.Get("Location")) + if err != nil { + return nil, newResponse(resp), err + } + + return parsedURL, newResponse(resp), nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_deployment_branch_policies.go b/vendor/github.com/google/go-github/v56/github/repos_deployment_branch_policies.go new file mode 100644 index 000000000..32bf72cca --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_deployment_branch_policies.go @@ -0,0 +1,125 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// DeploymentBranchPolicy represents a single deployment branch policy for an environment. +type DeploymentBranchPolicy struct { + Name *string `json:"name,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Type *string `json:"type,omitempty"` +} + +// DeploymentBranchPolicyResponse represents the slightly different format of response that comes back when you list deployment branch policies. +type DeploymentBranchPolicyResponse struct { + TotalCount *int `json:"total_count,omitempty"` + BranchPolicies []*DeploymentBranchPolicy `json:"branch_policies,omitempty"` +} + +// DeploymentBranchPolicyRequest represents a deployment branch policy request. +type DeploymentBranchPolicyRequest struct { + Name *string `json:"name,omitempty"` + Type *string `json:"type,omitempty"` +} + +// ListDeploymentBranchPolicies lists the deployment branch policies for an environment. 
+// +// GitHub API docs: https://docs.github.com/en/rest/deployments/branch-policies#list-deployment-branch-policies +func (s *RepositoriesService) ListDeploymentBranchPolicies(ctx context.Context, owner, repo, environment string) (*DeploymentBranchPolicyResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var list *DeploymentBranchPolicyResponse + resp, err := s.client.Do(ctx, req, &list) + if err != nil { + return nil, resp, err + } + + return list, resp, nil +} + +// GetDeploymentBranchPolicy gets a deployment branch policy for an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/branch-policies#get-a-deployment-branch-policy +func (s *RepositoriesService) GetDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*DeploymentBranchPolicy, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var policy *DeploymentBranchPolicy + resp, err := s.client.Do(ctx, req, &policy) + if err != nil { + return nil, resp, err + } + + return policy, resp, nil +} + +// CreateDeploymentBranchPolicy creates a deployment branch policy for an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/branch-policies#create-a-deployment-branch-policy +func (s *RepositoriesService) CreateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies", owner, repo, environment) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + var policy *DeploymentBranchPolicy + resp, err := s.client.Do(ctx, req, &policy) + if err != nil { + return nil, resp, err + } + + return policy, resp, nil +} + +// UpdateDeploymentBranchPolicy updates a deployment branch policy for an environment. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/branch-policies#update-a-deployment-branch-policy +func (s *RepositoriesService) UpdateDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64, request *DeploymentBranchPolicyRequest) (*DeploymentBranchPolicy, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) + + req, err := s.client.NewRequest("PUT", u, request) + if err != nil { + return nil, nil, err + } + + var policy *DeploymentBranchPolicy + resp, err := s.client.Do(ctx, req, &policy) + if err != nil { + return nil, resp, err + } + + return policy, resp, nil +} + +// DeleteDeploymentBranchPolicy deletes a deployment branch policy for an environment. 
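A sketch of the deployment branch policy calls above. These are write operations, so the client is assumed to be authenticated (for example github.NewClient(nil).WithAuthToken(token)); the org, repo, and environment names are placeholders.

// restrictProductionBranches allows only release/* branches to deploy to "production".
func restrictProductionBranches(ctx context.Context, client *github.Client) error {
	policy, _, err := client.Repositories.CreateDeploymentBranchPolicy(ctx, "my-org", "my-repo", "production",
		&github.DeploymentBranchPolicyRequest{
			Name: github.String("release/*"),
			Type: github.String("branch"),
		})
	if err != nil {
		return err
	}
	fmt.Println("created policy", policy.GetID())

	list, _, err := client.Repositories.ListDeploymentBranchPolicies(ctx, "my-org", "my-repo", "production")
	if err != nil {
		return err
	}
	fmt.Println("policies on production:", list.GetTotalCount())
	return nil
}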
+// +// GitHub API docs: https://docs.github.com/en/rest/deployments/branch-policies#delete-a-deployment-branch-policy +func (s *RepositoriesService) DeleteDeploymentBranchPolicy(ctx context.Context, owner, repo, environment string, branchPolicyID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/environments/%v/deployment-branch-policies/%v", owner, repo, environment, branchPolicyID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_deployments.go b/vendor/github.com/google/go-github/v56/github/repos_deployments.go new file mode 100644 index 000000000..36445f895 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_deployments.go @@ -0,0 +1,250 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" + "strings" +) + +// Deployment represents a deployment in a repo +type Deployment struct { + URL *string `json:"url,omitempty"` + ID *int64 `json:"id,omitempty"` + SHA *string `json:"sha,omitempty"` + Ref *string `json:"ref,omitempty"` + Task *string `json:"task,omitempty"` + Payload json.RawMessage `json:"payload,omitempty"` + Environment *string `json:"environment,omitempty"` + Description *string `json:"description,omitempty"` + Creator *User `json:"creator,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + StatusesURL *string `json:"statuses_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +// DeploymentRequest represents a deployment request +type DeploymentRequest struct { + Ref *string `json:"ref,omitempty"` + Task *string `json:"task,omitempty"` + AutoMerge *bool `json:"auto_merge,omitempty"` + RequiredContexts *[]string `json:"required_contexts,omitempty"` + Payload interface{} `json:"payload,omitempty"` + Environment *string `json:"environment,omitempty"` + Description *string `json:"description,omitempty"` + TransientEnvironment *bool `json:"transient_environment,omitempty"` + ProductionEnvironment *bool `json:"production_environment,omitempty"` +} + +// DeploymentsListOptions specifies the optional parameters to the +// RepositoriesService.ListDeployments method. +type DeploymentsListOptions struct { + // SHA of the Deployment. + SHA string `url:"sha,omitempty"` + + // List deployments for a given ref. + Ref string `url:"ref,omitempty"` + + // List deployments for a given task. + Task string `url:"task,omitempty"` + + // List deployments for a given environment. + Environment string `url:"environment,omitempty"` + + ListOptions +} + +// ListDeployments lists the deployments of a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/deployments/deployments#list-deployments +func (s *RepositoriesService) ListDeployments(ctx context.Context, owner, repo string, opts *DeploymentsListOptions) ([]*Deployment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var deployments []*Deployment + resp, err := s.client.Do(ctx, req, &deployments) + if err != nil { + return nil, resp, err + } + + return deployments, resp, nil +} + +// GetDeployment returns a single deployment of a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/deployments#get-a-deployment +func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Deployment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + deployment := new(Deployment) + resp, err := s.client.Do(ctx, req, deployment) + if err != nil { + return nil, resp, err + } + + return deployment, resp, nil +} + +// CreateDeployment creates a new deployment for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/deployments#create-a-deployment +func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo string, request *DeploymentRequest) (*Deployment, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments", owner, repo) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + d := new(Deployment) + resp, err := s.client.Do(ctx, req, d) + if err != nil { + return nil, resp, err + } + + return d, resp, nil +} + +// DeleteDeployment deletes an existing deployment for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/deployments#delete-a-deployment +func (s *RepositoriesService) DeleteDeployment(ctx context.Context, owner, repo string, deploymentID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments/%v", owner, repo, deploymentID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// DeploymentStatus represents the status of a +// particular deployment. +type DeploymentStatus struct { + ID *int64 `json:"id,omitempty"` + // State is the deployment state. + // Possible values are: "pending", "success", "failure", "error", + // "inactive", "in_progress", "queued". 
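A sketch of CreateDeployment using the DeploymentRequest fields defined above; an authenticated client and placeholder org/repo names are assumed.

// createDeployment records a deployment of the main branch to production
// and returns its ID for later status updates.
func createDeployment(ctx context.Context, client *github.Client) (int64, error) {
	dep, _, err := client.Repositories.CreateDeployment(ctx, "my-org", "my-repo", &github.DeploymentRequest{
		Ref:              github.String("main"),
		Environment:      github.String("production"),
		Description:      github.String("deployed by go-github"),
		AutoMerge:        github.Bool(false),
		RequiredContexts: &[]string{}, // do not wait for commit statuses
	})
	if err != nil {
		return 0, err
	}
	return dep.GetID(), nil
}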
+ State *string `json:"state,omitempty"` + Creator *User `json:"creator,omitempty"` + Description *string `json:"description,omitempty"` + Environment *string `json:"environment,omitempty"` + NodeID *string `json:"node_id,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + TargetURL *string `json:"target_url,omitempty"` + DeploymentURL *string `json:"deployment_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` + EnvironmentURL *string `json:"environment_url,omitempty"` + LogURL *string `json:"log_url,omitempty"` + URL *string `json:"url,omitempty"` +} + +// DeploymentStatusRequest represents a deployment request +type DeploymentStatusRequest struct { + State *string `json:"state,omitempty"` + LogURL *string `json:"log_url,omitempty"` + Description *string `json:"description,omitempty"` + Environment *string `json:"environment,omitempty"` + EnvironmentURL *string `json:"environment_url,omitempty"` + AutoInactive *bool `json:"auto_inactive,omitempty"` +} + +// ListDeploymentStatuses lists the statuses of a given deployment of a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/statuses#list-deployment-statuses +func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, repo string, deployment int64, opts *ListOptions) ([]*DeploymentStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var statuses []*DeploymentStatus + resp, err := s.client.Do(ctx, req, &statuses) + if err != nil { + return nil, resp, err + } + + return statuses, resp, nil +} + +// GetDeploymentStatus returns a single deployment status of a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/statuses#get-a-deployment-status +func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, repo string, deploymentID, deploymentStatusID int64) (*DeploymentStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses/%v", owner, repo, deploymentID, deploymentStatusID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + d := new(DeploymentStatus) + resp, err := s.client.Do(ctx, req, d) + if err != nil { + return nil, resp, err + } + + return d, resp, nil +} + +// CreateDeploymentStatus creates a new status for a deployment. 
+// +// GitHub API docs: https://docs.github.com/en/rest/deployments/statuses#create-a-deployment-status +func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, repo string, deployment int64, request *DeploymentStatusRequest) (*DeploymentStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/deployments/%v/statuses", owner, repo, deployment) + + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeExpandDeploymentStatusPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + d := new(DeploymentStatus) + resp, err := s.client.Do(ctx, req, d) + if err != nil { + return nil, resp, err + } + + return d, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_environments.go b/vendor/github.com/google/go-github/v56/github/repos_environments.go new file mode 100644 index 000000000..e22ca5cd1 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_environments.go @@ -0,0 +1,244 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" + "net/http" +) + +// Environment represents a single environment in a repository. +type Environment struct { + Owner *string `json:"owner,omitempty"` + Repo *string `json:"repo,omitempty"` + EnvironmentName *string `json:"environment_name,omitempty"` + WaitTimer *int `json:"wait_timer,omitempty"` + Reviewers []*EnvReviewers `json:"reviewers,omitempty"` + DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy,omitempty"` + // Return/response only values + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + CanAdminsBypass *bool `json:"can_admins_bypass,omitempty"` + ProtectionRules []*ProtectionRule `json:"protection_rules,omitempty"` +} + +// EnvReviewers represents a single environment reviewer entry. +type EnvReviewers struct { + Type *string `json:"type,omitempty"` + ID *int64 `json:"id,omitempty"` +} + +// BranchPolicy represents the options for whether a branch deployment policy is applied to this environment. +type BranchPolicy struct { + ProtectedBranches *bool `json:"protected_branches,omitempty"` + CustomBranchPolicies *bool `json:"custom_branch_policies,omitempty"` +} + +// EnvResponse represents the slightly different format of response that comes back when you list an environment. +type EnvResponse struct { + TotalCount *int `json:"total_count,omitempty"` + Environments []*Environment `json:"environments,omitempty"` +} + +// ProtectionRule represents a single protection rule applied to the environment. +type ProtectionRule struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + PreventSelfReview *bool `json:"prevent_self_review,omitempty"` + Type *string `json:"type,omitempty"` + WaitTimer *int `json:"wait_timer,omitempty"` + Reviewers []*RequiredReviewer `json:"reviewers,omitempty"` +} + +// RequiredReviewer represents a required reviewer. 
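A sketch of CreateDeploymentStatus using the DeploymentStatusRequest fields above; the deployment ID would come from a prior CreateDeployment call, and the URLs are placeholders.

// markDeploymentSuccessful flips a deployment to the "success" state.
func markDeploymentSuccessful(ctx context.Context, client *github.Client, deploymentID int64) error {
	_, _, err := client.Repositories.CreateDeploymentStatus(ctx, "my-org", "my-repo", deploymentID,
		&github.DeploymentStatusRequest{
			State:          github.String("success"),
			Environment:    github.String("production"),
			EnvironmentURL: github.String("https://my-app.example.com"),
			LogURL:         github.String("https://ci.example.com/builds/123"),
		})
	return err
}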
+type RequiredReviewer struct { + Type *string `json:"type,omitempty"` + Reviewer interface{} `json:"reviewer,omitempty"` +} + +// EnvironmentListOptions specifies the optional parameters to the +// RepositoriesService.ListEnvironments method. +type EnvironmentListOptions struct { + ListOptions +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// This helps us handle the fact that RequiredReviewer can have either a User or Team type reviewer field. +func (r *RequiredReviewer) UnmarshalJSON(data []byte) error { + type aliasReviewer RequiredReviewer + var reviewer aliasReviewer + if err := json.Unmarshal(data, &reviewer); err != nil { + return err + } + + r.Type = reviewer.Type + + switch *reviewer.Type { + case "User": + reviewer.Reviewer = &User{} + if err := json.Unmarshal(data, &reviewer); err != nil { + return err + } + r.Reviewer = reviewer.Reviewer + case "Team": + reviewer.Reviewer = &Team{} + if err := json.Unmarshal(data, &reviewer); err != nil { + return err + } + r.Reviewer = reviewer.Reviewer + default: + r.Type = nil + r.Reviewer = nil + return fmt.Errorf("reviewer.Type is %T, not a string of 'User' or 'Team', unable to unmarshal", reviewer.Type) + } + + return nil +} + +// ListEnvironments lists all environments for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/environments#get-all-environments +func (s *RepositoriesService) ListEnvironments(ctx context.Context, owner, repo string, opts *EnvironmentListOptions) (*EnvResponse, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/environments", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var list *EnvResponse + resp, err := s.client.Do(ctx, req, &list) + if err != nil { + return nil, resp, err + } + return list, resp, nil +} + +// GetEnvironment get a single environment for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/environments#get-an-environment +func (s *RepositoriesService) GetEnvironment(ctx context.Context, owner, repo, name string) (*Environment, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var env *Environment + resp, err := s.client.Do(ctx, req, &env) + if err != nil { + return nil, resp, err + } + return env, resp, nil +} + +// MarshalJSON implements the json.Marshaler interface. +// As the only way to clear a WaitTimer is to set it to 0, a missing WaitTimer object should default to 0, not null. +// As the default value for CanAdminsBypass is true, a nil value here marshals to true. +func (c *CreateUpdateEnvironment) MarshalJSON() ([]byte, error) { + type Alias CreateUpdateEnvironment + if c.WaitTimer == nil { + c.WaitTimer = Int(0) + } + if c.CanAdminsBypass == nil { + c.CanAdminsBypass = Bool(true) + } + return json.Marshal(&struct { + *Alias + }{ + Alias: (*Alias)(c), + }) +} + +// CreateUpdateEnvironment represents the fields required for the create/update operation +// following the Create/Update release example. +// See https://github.com/google/go-github/issues/992 for more information. +// Removed omitempty here as the API expects null values for reviewers and deployment_branch_policy to clear them. 
+type CreateUpdateEnvironment struct { + WaitTimer *int `json:"wait_timer"` + Reviewers []*EnvReviewers `json:"reviewers"` + CanAdminsBypass *bool `json:"can_admins_bypass"` + DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` + PreventSelfReview *bool `json:"prevent_self_review,omitempty"` +} + +// createUpdateEnvironmentNoEnterprise represents the fields accepted for Pro/Teams private repos. +// Ref: https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment +// See https://github.com/google/go-github/issues/2602 for more information. +type createUpdateEnvironmentNoEnterprise struct { + DeploymentBranchPolicy *BranchPolicy `json:"deployment_branch_policy"` +} + +// CreateUpdateEnvironment create or update a new environment for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deployments/environments#create-or-update-an-environment +func (s *RepositoriesService) CreateUpdateEnvironment(ctx context.Context, owner, repo, name string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) + + req, err := s.client.NewRequest("PUT", u, environment) + if err != nil { + return nil, nil, err + } + + e := new(Environment) + resp, err := s.client.Do(ctx, req, e) + if err != nil { + // The API returns 422 when the pricing plan doesn't support all the fields sent. + // This path will be executed for Pro/Teams private repos. + // For public repos, regardless of the pricing plan, all fields supported. + // For Free plan private repos the returned error code is 404. + // We are checking that the user didn't try to send a value for unsupported fields, + // and return an error if they did. + if resp != nil && resp.StatusCode == http.StatusUnprocessableEntity && environment != nil && len(environment.Reviewers) == 0 && environment.GetWaitTimer() == 0 { + return s.createNewEnvNoEnterprise(ctx, u, environment) + } + return nil, resp, err + } + return e, resp, nil +} + +// createNewEnvNoEnterprise is an internal function for cases where the original call returned 422. +// Currently only the `deployment_branch_policy` parameter is supported for Pro/Team private repos. +func (s *RepositoriesService) createNewEnvNoEnterprise(ctx context.Context, u string, environment *CreateUpdateEnvironment) (*Environment, *Response, error) { + req, err := s.client.NewRequest("PUT", u, &createUpdateEnvironmentNoEnterprise{ + DeploymentBranchPolicy: environment.DeploymentBranchPolicy, + }) + if err != nil { + return nil, nil, err + } + + e := new(Environment) + resp, err := s.client.Do(ctx, req, e) + if err != nil { + return nil, resp, err + } + return e, resp, nil +} + +// DeleteEnvironment delete an environment from a repository. 
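A sketch of CreateUpdateEnvironment with the fields defined above; an authenticated client is assumed, and the reviewer ID is a placeholder.

// configureProduction requires a reviewer and a 30 minute wait timer on "production".
func configureProduction(ctx context.Context, client *github.Client) error {
	env, _, err := client.Repositories.CreateUpdateEnvironment(ctx, "my-org", "my-repo", "production",
		&github.CreateUpdateEnvironment{
			WaitTimer: github.Int(30),
			Reviewers: []*github.EnvReviewers{
				{Type: github.String("User"), ID: github.Int64(123456)},
			},
			DeploymentBranchPolicy: &github.BranchPolicy{
				ProtectedBranches:    github.Bool(true),
				CustomBranchPolicies: github.Bool(false),
			},
		})
	if err != nil {
		return err
	}
	fmt.Println("configured environment:", env.GetName())
	return nil
}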
+// +// GitHub API docs: https://docs.github.com/en/rest/deployments/environments#delete-an-environment +func (s *RepositoriesService) DeleteEnvironment(ctx context.Context, owner, repo, name string) (*Response, error) { + u := fmt.Sprintf("repos/%s/%s/environments/%s", owner, repo, name) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_forks.go b/vendor/github.com/google/go-github/v56/github/repos_forks.go new file mode 100644 index 000000000..f175dfe3b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_forks.go @@ -0,0 +1,93 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" +) + +// RepositoryListForksOptions specifies the optional parameters to the +// RepositoriesService.ListForks method. +type RepositoryListForksOptions struct { + // How to sort the forks list. Possible values are: newest, oldest, + // watchers. Default is "newest". + Sort string `url:"sort,omitempty"` + + ListOptions +} + +// ListForks lists the forks of the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/forks#list-forks +func (s *RepositoriesService) ListForks(ctx context.Context, owner, repo string, opts *RepositoryListForksOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when topics API fully launches. + req.Header.Set("Accept", mediaTypeTopicsPreview) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// RepositoryCreateForkOptions specifies the optional parameters to the +// RepositoriesService.CreateFork method. +type RepositoryCreateForkOptions struct { + // The organization to fork the repository into. + Organization string `json:"organization,omitempty"` + Name string `json:"name,omitempty"` + DefaultBranchOnly bool `json:"default_branch_only,omitempty"` +} + +// CreateFork creates a fork of the specified repository. +// +// This method might return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing creating the fork in a background task. In this event, +// the Repository value will be returned, which includes the details about the pending fork. +// A follow up request, after a delay of a second or so, should result +// in a successful request. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/forks#create-a-fork +func (s *RepositoriesService) CreateFork(ctx context.Context, owner, repo string, opts *RepositoryCreateForkOptions) (*Repository, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/forks", owner, repo) + + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + fork := new(Repository) + resp, err := s.client.Do(ctx, req, fork) + if err != nil { + // Persist AcceptedError's metadata to the Repository object. 
+ if aerr, ok := err.(*AcceptedError); ok { + if err := json.Unmarshal(aerr.Raw, fork); err != nil { + return fork, resp, err + } + + return fork, resp, err + } + return nil, resp, err + } + + return fork, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_hooks.go b/vendor/github.com/google/go-github/v56/github/repos_hooks.go new file mode 100644 index 000000000..ba229e7bc --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_hooks.go @@ -0,0 +1,253 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" +) + +// WebHookPayload represents the data that is received from GitHub when a push +// event hook is triggered. The format of these payloads pre-date most of the +// GitHub v3 API, so there are lots of minor incompatibilities with the types +// defined in the rest of the API. Therefore, several types are duplicated +// here to account for these differences. +// +// GitHub API docs: https://help.github.com/articles/post-receive-hooks +// +// Deprecated: Please use PushEvent instead. +type WebHookPayload = PushEvent + +// WebHookCommit represents the commit variant we receive from GitHub in a +// WebHookPayload. +// +// Deprecated: Please use HeadCommit instead. +type WebHookCommit = HeadCommit + +// WebHookAuthor represents the author or committer of a commit, as specified +// in a WebHookCommit. The commit author may not correspond to a GitHub User. +// +// Deprecated: Please use CommitAuthor instead. +// NOTE Breaking API change: the `Username` field is now called `Login`. +type WebHookAuthor = CommitAuthor + +// Hook represents a GitHub (web and service) hook for a repository. +type Hook struct { + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + ID *int64 `json:"id,omitempty"` + Type *string `json:"type,omitempty"` + Name *string `json:"name,omitempty"` + TestURL *string `json:"test_url,omitempty"` + PingURL *string `json:"ping_url,omitempty"` + LastResponse map[string]interface{} `json:"last_response,omitempty"` + + // Only the following fields are used when creating a hook. + // Config is required. + Config map[string]interface{} `json:"config,omitempty"` + Events []string `json:"events,omitempty"` + Active *bool `json:"active,omitempty"` +} + +func (h Hook) String() string { + return Stringify(h) +} + +// createHookRequest is a subset of Hook and is used internally +// by CreateHook to pass only the known fields for the endpoint. +// +// See https://github.com/google/go-github/issues/1015 for more +// information. +type createHookRequest struct { + // Config is required. + Name string `json:"name"` + Config map[string]interface{} `json:"config,omitempty"` + Events []string `json:"events,omitempty"` + Active *bool `json:"active,omitempty"` +} + +// CreateHook creates a Hook for the specified repository. +// Config is a required field. +// +// Note that only a subset of the hook fields are used and hook must +// not be nil. 
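A sketch of CreateFork that handles the asynchronous 202 response described in the doc comment above, where the error is an *AcceptedError but the Repository metadata is already populated; same client assumptions as earlier.

// forkRepository starts a fork and tolerates GitHub's asynchronous 202 response.
func forkRepository(ctx context.Context, client *github.Client) error {
	fork, _, err := client.Repositories.CreateFork(ctx, "google", "go-github",
		&github.RepositoryCreateForkOptions{DefaultBranchOnly: true})
	if _, accepted := err.(*github.AcceptedError); accepted {
		// The fork is being created in the background; metadata is already populated.
		fmt.Println("fork scheduled:", fork.GetFullName())
		return nil
	}
	if err != nil {
		return err
	}
	fmt.Println("fork ready:", fork.GetHTMLURL())
	return nil
}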
+// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#create-a-repository-webhook +func (s *RepositoriesService) CreateHook(ctx context.Context, owner, repo string, hook *Hook) (*Hook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) + + hookReq := &createHookRequest{ + Name: "web", + Events: hook.Events, + Active: hook.Active, + Config: hook.Config, + } + + req, err := s.client.NewRequest("POST", u, hookReq) + if err != nil { + return nil, nil, err + } + + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// ListHooks lists all Hooks for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#list-repository-webhooks +func (s *RepositoriesService) ListHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*Hook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var hooks []*Hook + resp, err := s.client.Do(ctx, req, &hooks) + if err != nil { + return nil, resp, err + } + + return hooks, resp, nil +} + +// GetHook returns a single specified Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#get-a-repository-webhook +func (s *RepositoriesService) GetHook(ctx context.Context, owner, repo string, id int64) (*Hook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// EditHook updates a specified Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#update-a-repository-webhook +func (s *RepositoriesService) EditHook(ctx context.Context, owner, repo string, id int64, hook *Hook) (*Hook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, hook) + if err != nil { + return nil, nil, err + } + h := new(Hook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// DeleteHook deletes a specified Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#delete-a-repository-webhook +func (s *RepositoriesService) DeleteHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// PingHook triggers a 'ping' event to be sent to the Hook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#ping-a-repository-webhook +func (s *RepositoriesService) PingHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%d/pings", owner, repo, id) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// TestHook triggers a test Hook by github. 
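A sketch of CreateHook using the Config map shape of the Hook struct above; an authenticated client is assumed, and the webhook URL and secret are placeholders.

// addPushHook registers a JSON webhook for push and pull_request events.
func addPushHook(ctx context.Context, client *github.Client) error {
	hook, _, err := client.Repositories.CreateHook(ctx, "my-org", "my-repo", &github.Hook{
		Active: github.Bool(true),
		Events: []string{"push", "pull_request"},
		Config: map[string]interface{}{
			"url":          "https://ci.example.com/webhook",
			"content_type": "json",
			"secret":       "s3cr3t",
		},
	})
	if err != nil {
		return err
	}
	fmt.Println("hook id:", hook.GetID())
	return nil
}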
+// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repos#test-the-push-repository-webhook +func (s *RepositoriesService) TestHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%d/tests", owner, repo, id) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// Subscribe lets servers register to receive updates when a topic is updated. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks#pubsubhubbub +func (s *RepositoriesService) Subscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { + req, err := s.createWebSubRequest("subscribe", owner, repo, event, callback, secret) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unsubscribe lets servers unregister to no longer receive updates when a topic is updated. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks#pubsubhubbub +func (s *RepositoriesService) Unsubscribe(ctx context.Context, owner, repo, event, callback string, secret []byte) (*Response, error) { + req, err := s.createWebSubRequest("unsubscribe", owner, repo, event, callback, secret) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// createWebSubRequest returns a subscribe/unsubscribe request that implements +// the WebSub (formerly PubSubHubbub) protocol. +// +// See: https://www.w3.org/TR/websub/#subscriber-sends-subscription-request +func (s *RepositoriesService) createWebSubRequest(hubMode, owner, repo, event, callback string, secret []byte) (*http.Request, error) { + topic := fmt.Sprintf( + "https://github.com/%s/%s/events/%s", + owner, + repo, + event, + ) + form := url.Values{} + form.Add("hub.mode", hubMode) + form.Add("hub.topic", topic) + form.Add("hub.callback", callback) + if secret != nil { + form.Add("hub.secret", string(secret)) + } + body := strings.NewReader(form.Encode()) + + req, err := s.client.NewFormRequest("hub", body) + if err != nil { + return nil, err + } + + return req, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_hooks_configuration.go b/vendor/github.com/google/go-github/v56/github/repos_hooks_configuration.go new file mode 100644 index 000000000..5aadfb645 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_hooks_configuration.go @@ -0,0 +1,49 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GetHookConfiguration returns the configuration for the specified repository webhook. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-config?apiVersion=2022-11-28#get-a-webhook-configuration-for-a-repository +func (s *RepositoriesService) GetHookConfiguration(ctx context.Context, owner, repo string, id int64) (*HookConfig, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + config := new(HookConfig) + resp, err := s.client.Do(ctx, req, config) + if err != nil { + return nil, resp, err + } + + return config, resp, nil +} + +// EditHookConfiguration updates the configuration for the specified repository webhook. 
+// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-config?apiVersion=2022-11-28#update-a-webhook-configuration-for-a-repository +func (s *RepositoriesService) EditHookConfiguration(ctx context.Context, owner, repo string, id int64, config *HookConfig) (*HookConfig, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/config", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, config) + if err != nil { + return nil, nil, err + } + + c := new(HookConfig) + resp, err := s.client.Do(ctx, req, c) + if err != nil { + return nil, resp, err + } + + return c, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_hooks_deliveries.go b/vendor/github.com/google/go-github/v56/github/repos_hooks_deliveries.go new file mode 100644 index 000000000..5f2f8807e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_hooks_deliveries.go @@ -0,0 +1,136 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" +) + +// HookDelivery represents the data that is received from GitHub's Webhook Delivery API +// +// GitHub API docs: +// - https://docs.github.com/en/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook +// - https://docs.github.com/en/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook +type HookDelivery struct { + ID *int64 `json:"id,omitempty"` + GUID *string `json:"guid,omitempty"` + DeliveredAt *Timestamp `json:"delivered_at,omitempty"` + Redelivery *bool `json:"redelivery,omitempty"` + Duration *float64 `json:"duration,omitempty"` + Status *string `json:"status,omitempty"` + StatusCode *int `json:"status_code,omitempty"` + Event *string `json:"event,omitempty"` + Action *string `json:"action,omitempty"` + InstallationID *int64 `json:"installation_id,omitempty"` + RepositoryID *int64 `json:"repository_id,omitempty"` + + // Request is populated by GetHookDelivery. + Request *HookRequest `json:"request,omitempty"` + // Response is populated by GetHookDelivery. + Response *HookResponse `json:"response,omitempty"` +} + +func (d HookDelivery) String() string { + return Stringify(d) +} + +// HookRequest is a part of HookDelivery that contains +// the HTTP headers and the JSON payload of the webhook request. +type HookRequest struct { + Headers map[string]string `json:"headers,omitempty"` + RawPayload *json.RawMessage `json:"payload,omitempty"` +} + +func (r HookRequest) String() string { + return Stringify(r) +} + +// HookResponse is a part of HookDelivery that contains +// the HTTP headers and the response body served by the webhook endpoint. +type HookResponse struct { + Headers map[string]string `json:"headers,omitempty"` + RawPayload *json.RawMessage `json:"payload,omitempty"` +} + +func (r HookResponse) String() string { + return Stringify(r) +} + +// ListHookDeliveries lists webhook deliveries for a webhook configured in a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook +func (s *RepositoriesService) ListHookDeliveries(ctx context.Context, owner, repo string, id int64, opts *ListCursorOptions) ([]*HookDelivery, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + deliveries := []*HookDelivery{} + resp, err := s.client.Do(ctx, req, &deliveries) + if err != nil { + return nil, resp, err + } + + return deliveries, resp, nil +} + +// GetHookDelivery returns a delivery for a webhook configured in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook +func (s *RepositoriesService) GetHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v", owner, repo, hookID, deliveryID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// RedeliverHookDelivery redelivers a delivery for a webhook configured in a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook +func (s *RepositoriesService) RedeliverHookDelivery(ctx context.Context, owner, repo string, hookID, deliveryID int64) (*HookDelivery, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/hooks/%v/deliveries/%v/attempts", owner, repo, hookID, deliveryID) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + h := new(HookDelivery) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// ParseRequestPayload parses the request payload. For recognized event types, +// a value of the corresponding struct type will be returned. +func (d *HookDelivery) ParseRequestPayload() (interface{}, error) { + eType, ok := messageToTypeName[d.GetEvent()] + if !ok { + return nil, fmt.Errorf("unsupported event type %q", d.GetEvent()) + } + + e := &Event{Type: &eType, RawPayload: d.Request.RawPayload} + return e.ParsePayload() +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_invitations.go b/vendor/github.com/google/go-github/v56/github/repos_invitations.go new file mode 100644 index 000000000..81956cd49 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_invitations.go @@ -0,0 +1,89 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RepositoryInvitation represents an invitation to collaborate on a repo. +type RepositoryInvitation struct { + ID *int64 `json:"id,omitempty"` + Repo *Repository `json:"repository,omitempty"` + Invitee *User `json:"invitee,omitempty"` + Inviter *User `json:"inviter,omitempty"` + + // Permissions represents the permissions that the associated user will have + // on the repository. Possible values are: "read", "write", "admin". 
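A sketch combining ListHookDeliveries and RedeliverHookDelivery; it assumes an authenticated client, a known hook ID, and that ListCursorOptions carries a PerPage field as in other go-github cursor-paged endpoints.

// replayFailedDeliveries lists recent webhook deliveries and redelivers the unsuccessful ones.
func replayFailedDeliveries(ctx context.Context, client *github.Client, hookID int64) error {
	deliveries, _, err := client.Repositories.ListHookDeliveries(ctx, "my-org", "my-repo", hookID,
		&github.ListCursorOptions{PerPage: 50})
	if err != nil {
		return err
	}
	for _, d := range deliveries {
		if d.GetStatusCode() >= 200 && d.GetStatusCode() < 300 {
			continue // delivery already succeeded
		}
		if _, _, err := client.Repositories.RedeliverHookDelivery(ctx, "my-org", "my-repo", hookID, d.GetID()); err != nil {
			return err
		}
		fmt.Println("redelivered", d.GetGUID(), "event:", d.GetEvent())
	}
	return nil
}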
+ Permissions *string `json:"permissions,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` +} + +// ListInvitations lists all currently-open repository invitations. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#list-repository-invitations +func (s *RepositoriesService) ListInvitations(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/invitations", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + invites := []*RepositoryInvitation{} + resp, err := s.client.Do(ctx, req, &invites) + if err != nil { + return nil, resp, err + } + + return invites, resp, nil +} + +// DeleteInvitation deletes a repository invitation. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#delete-a-repository-invitation +func (s *RepositoriesService) DeleteInvitation(ctx context.Context, owner, repo string, invitationID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// UpdateInvitation updates the permissions associated with a repository +// invitation. +// +// permissions represents the permissions that the associated user will have +// on the repository. Possible values are: "read", "write", "admin". +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#update-a-repository-invitation +func (s *RepositoriesService) UpdateInvitation(ctx context.Context, owner, repo string, invitationID int64, permissions string) (*RepositoryInvitation, *Response, error) { + opts := &struct { + Permissions string `json:"permissions"` + }{Permissions: permissions} + u := fmt.Sprintf("repos/%v/%v/invitations/%v", owner, repo, invitationID) + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + invite := &RepositoryInvitation{} + resp, err := s.client.Do(ctx, req, invite) + if err != nil { + return nil, resp, err + } + + return invite, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_keys.go b/vendor/github.com/google/go-github/v56/github/repos_keys.go new file mode 100644 index 000000000..42c5de497 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_keys.go @@ -0,0 +1,91 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// The Key type is defined in users_keys.go + +// ListKeys lists the deploy keys for a repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/deploy-keys#list-deploy-keys +func (s *RepositoriesService) ListKeys(ctx context.Context, owner string, repo string, opts *ListOptions) ([]*Key, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var keys []*Key + resp, err := s.client.Do(ctx, req, &keys) + if err != nil { + return nil, resp, err + } + + return keys, resp, nil +} + +// GetKey fetches a single deploy key. +// +// GitHub API docs: https://docs.github.com/en/rest/deploy-keys#get-a-deploy-key +func (s *RepositoriesService) GetKey(ctx context.Context, owner string, repo string, id int64) (*Key, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + key := new(Key) + resp, err := s.client.Do(ctx, req, key) + if err != nil { + return nil, resp, err + } + + return key, resp, nil +} + +// CreateKey adds a deploy key for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/deploy-keys#create-a-deploy-key +func (s *RepositoriesService) CreateKey(ctx context.Context, owner string, repo string, key *Key) (*Key, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/keys", owner, repo) + + req, err := s.client.NewRequest("POST", u, key) + if err != nil { + return nil, nil, err + } + + k := new(Key) + resp, err := s.client.Do(ctx, req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, nil +} + +// DeleteKey deletes a deploy key. +// +// GitHub API docs: https://docs.github.com/en/rest/deploy-keys#delete-a-deploy-key +func (s *RepositoriesService) DeleteKey(ctx context.Context, owner string, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/keys/%v", owner, repo, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_lfs.go b/vendor/github.com/google/go-github/v56/github/repos_lfs.go new file mode 100644 index 000000000..6ac2e5a87 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_lfs.go @@ -0,0 +1,49 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// EnableLFS turns the LFS (Large File Storage) feature ON for the selected repo. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/lfs#enable-git-lfs-for-a-repository +func (s *RepositoriesService) EnableLFS(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// DisableLFS turns the LFS (Large File Storage) feature OFF for the selected repo. 
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/lfs#disable-git-lfs-for-a-repository +func (s *RepositoriesService) DisableLFS(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/lfs", owner, repo) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_merging.go b/vendor/github.com/google/go-github/v56/github/repos_merging.go new file mode 100644 index 000000000..66e88452e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_merging.go @@ -0,0 +1,72 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RepositoryMergeRequest represents a request to merge a branch in a +// repository. +type RepositoryMergeRequest struct { + Base *string `json:"base,omitempty"` + Head *string `json:"head,omitempty"` + CommitMessage *string `json:"commit_message,omitempty"` +} + +// RepoMergeUpstreamRequest represents a request to sync a branch of +// a forked repository to keep it up-to-date with the upstream repository. +type RepoMergeUpstreamRequest struct { + Branch *string `json:"branch,omitempty"` +} + +// RepoMergeUpstreamResult represents the result of syncing a branch of +// a forked repository with the upstream repository. +type RepoMergeUpstreamResult struct { + Message *string `json:"message,omitempty"` + MergeType *string `json:"merge_type,omitempty"` + BaseBranch *string `json:"base_branch,omitempty"` +} + +// Merge a branch in the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branches#merge-a-branch +func (s *RepositoriesService) Merge(ctx context.Context, owner, repo string, request *RepositoryMergeRequest) (*RepositoryCommit, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/merges", owner, repo) + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + commit := new(RepositoryCommit) + resp, err := s.client.Do(ctx, req, commit) + if err != nil { + return nil, resp, err + } + + return commit, resp, nil +} + +// MergeUpstream syncs a branch of a forked repository to keep it up-to-date +// with the upstream repository. +// +// GitHub API docs: https://docs.github.com/en/rest/branches/branches#sync-a-fork-branch-with-the-upstream-repository +func (s *RepositoriesService) MergeUpstream(ctx context.Context, owner, repo string, request *RepoMergeUpstreamRequest) (*RepoMergeUpstreamResult, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/merge-upstream", owner, repo) + req, err := s.client.NewRequest("POST", u, request) + if err != nil { + return nil, nil, err + } + + result := new(RepoMergeUpstreamResult) + resp, err := s.client.Do(ctx, req, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_pages.go b/vendor/github.com/google/go-github/v56/github/repos_pages.go new file mode 100644 index 000000000..83075dbdd --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_pages.go @@ -0,0 +1,306 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. 
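(Editorial aside, not part of the vendored file: a sketch of syncing a fork branch with MergeUpstream, assuming the same client/ctx setup as the hook-deliveries sketch earlier; the fork owner, repository and branch name are placeholders.)

func syncFork(ctx context.Context, client *github.Client) error {
	res, _, err := client.Repositories.MergeUpstream(ctx, "my-user", "my-fork", &github.RepoMergeUpstreamRequest{
		Branch: github.String("main"),
	})
	if err != nil {
		return err
	}
	fmt.Printf("%s (merge_type=%s)\n", res.GetMessage(), res.GetMergeType())
	return nil
}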
+// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Pages represents a GitHub Pages site configuration. +type Pages struct { + URL *string `json:"url,omitempty"` + Status *string `json:"status,omitempty"` + CNAME *string `json:"cname,omitempty"` + Custom404 *bool `json:"custom_404,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + BuildType *string `json:"build_type,omitempty"` + Source *PagesSource `json:"source,omitempty"` + Public *bool `json:"public,omitempty"` + HTTPSCertificate *PagesHTTPSCertificate `json:"https_certificate,omitempty"` + HTTPSEnforced *bool `json:"https_enforced,omitempty"` +} + +// PagesSource represents a GitHub page's source. +type PagesSource struct { + Branch *string `json:"branch,omitempty"` + Path *string `json:"path,omitempty"` +} + +// PagesError represents a build error for a GitHub Pages site. +type PagesError struct { + Message *string `json:"message,omitempty"` +} + +// PagesBuild represents the build information for a GitHub Pages site. +type PagesBuild struct { + URL *string `json:"url,omitempty"` + Status *string `json:"status,omitempty"` + Error *PagesError `json:"error,omitempty"` + Pusher *User `json:"pusher,omitempty"` + Commit *string `json:"commit,omitempty"` + Duration *int `json:"duration,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +// PagesDomain represents a domain associated with a GitHub Pages site. +type PagesDomain struct { + Host *string `json:"host,omitempty"` + URI *string `json:"uri,omitempty"` + Nameservers *string `json:"nameservers,omitempty"` + DNSResolves *bool `json:"dns_resolves,omitempty"` + IsProxied *bool `json:"is_proxied,omitempty"` + IsCloudflareIP *bool `json:"is_cloudflare_ip,omitempty"` + IsFastlyIP *bool `json:"is_fastly_ip,omitempty"` + IsOldIPAddress *bool `json:"is_old_ip_address,omitempty"` + IsARecord *bool `json:"is_a_record,omitempty"` + HasCNAMERecord *bool `json:"has_cname_record,omitempty"` + HasMXRecordsPresent *bool `json:"has_mx_records_present,omitempty"` + IsValidDomain *bool `json:"is_valid_domain,omitempty"` + IsApexDomain *bool `json:"is_apex_domain,omitempty"` + ShouldBeARecord *bool `json:"should_be_a_record,omitempty"` + IsCNAMEToGithubUserDomain *bool `json:"is_cname_to_github_user_domain,omitempty"` + IsCNAMEToPagesDotGithubDotCom *bool `json:"is_cname_to_pages_dot_github_dot_com,omitempty"` + IsCNAMEToFastly *bool `json:"is_cname_to_fastly,omitempty"` + IsPointedToGithubPagesIP *bool `json:"is_pointed_to_github_pages_ip,omitempty"` + IsNonGithubPagesIPPresent *bool `json:"is_non_github_pages_ip_present,omitempty"` + IsPagesDomain *bool `json:"is_pages_domain,omitempty"` + IsServedByPages *bool `json:"is_served_by_pages,omitempty"` + IsValid *bool `json:"is_valid,omitempty"` + Reason *string `json:"reason,omitempty"` + RespondsToHTTPS *bool `json:"responds_to_https,omitempty"` + EnforcesHTTPS *bool `json:"enforces_https,omitempty"` + HTTPSError *string `json:"https_error,omitempty"` + IsHTTPSEligible *bool `json:"is_https_eligible,omitempty"` + CAAError *string `json:"caa_error,omitempty"` +} + +// PagesHealthCheckResponse represents the response given for the health check of a GitHub Pages site. 
+type PagesHealthCheckResponse struct { + Domain *PagesDomain `json:"domain,omitempty"` + AltDomain *PagesDomain `json:"alt_domain,omitempty"` +} + +// PagesHTTPSCertificate represents the HTTPS Certificate information for a GitHub Pages site. +type PagesHTTPSCertificate struct { + State *string `json:"state,omitempty"` + Description *string `json:"description,omitempty"` + Domains []string `json:"domains,omitempty"` + // GitHub's API doesn't return a standard Timestamp, rather it returns a YYYY-MM-DD string. + ExpiresAt *string `json:"expires_at,omitempty"` +} + +// createPagesRequest is a subset of Pages and is used internally +// by EnablePages to pass only the known fields for the endpoint. +type createPagesRequest struct { + BuildType *string `json:"build_type,omitempty"` + Source *PagesSource `json:"source,omitempty"` +} + +// EnablePages enables GitHub Pages for the named repo. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#create-a-github-pages-site +func (s *RepositoriesService) EnablePages(ctx context.Context, owner, repo string, pages *Pages) (*Pages, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) + + pagesReq := &createPagesRequest{ + BuildType: pages.BuildType, + Source: pages.Source, + } + + req, err := s.client.NewRequest("POST", u, pagesReq) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) + + enable := new(Pages) + resp, err := s.client.Do(ctx, req, enable) + if err != nil { + return nil, resp, err + } + + return enable, resp, nil +} + +// PagesUpdate sets up parameters needed to update a GitHub Pages site. +type PagesUpdate struct { + // CNAME represents a custom domain for the repository. + // Leaving CNAME empty will remove the custom domain. + CNAME *string `json:"cname"` + // BuildType is optional and can either be "legacy" or "workflow". + // "workflow" - You are using a github workflow to build your pages. + // "legacy" - You are deploying from a branch. + BuildType *string `json:"build_type,omitempty"` + // Source must include the branch name, and may optionally specify the subdirectory "/docs". + // Possible values for Source.Branch are usually "gh-pages", "main", and "master", + // or any other existing branch name. + // Possible values for Source.Path are: "/", and "/docs". + Source *PagesSource `json:"source,omitempty"` + // Public configures access controls for the site. + // If "true", the site will be accessible to anyone on the internet. If "false", + // the site will be accessible to anyone with read access to the repository that + // published the site. + Public *bool `json:"public,omitempty"` + // HTTPSEnforced specifies whether HTTPS should be enforced for the repository. + HTTPSEnforced *bool `json:"https_enforced,omitempty"` +} + +// UpdatePages updates GitHub Pages for the named repo. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#update-information-about-a-github-pages-site +func (s *RepositoriesService) UpdatePages(ctx context.Context, owner, repo string, opts *PagesUpdate) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) + + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + return resp, err + } + + return resp, nil +} + +// DisablePages disables GitHub Pages for the named repo. 
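(Editorial aside, not part of the vendored file: a sketch of UpdatePages using the PagesUpdate options added above, assuming the earlier client/ctx setup; the repository, custom domain and source branch/path are placeholder values.)

func configurePages(ctx context.Context, client *github.Client) error {
	// Point the site at a custom domain and build it from /docs on main.
	_, err := client.Repositories.UpdatePages(ctx, "my-org", "my-repo", &github.PagesUpdate{
		CNAME: github.String("docs.example.com"),
		Source: &github.PagesSource{
			Branch: github.String("main"),
			Path:   github.String("/docs"),
		},
	})
	return err
}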
+// +// GitHub API docs: https://docs.github.com/en/rest/pages#delete-a-github-pages-site +func (s *RepositoriesService) DisablePages(ctx context.Context, owner, repo string) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeEnablePagesAPIPreview) + + return s.client.Do(ctx, req, nil) +} + +// GetPagesInfo fetches information about a GitHub Pages site. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#get-a-github-pages-site +func (s *RepositoriesService) GetPagesInfo(ctx context.Context, owner, repo string) (*Pages, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + site := new(Pages) + resp, err := s.client.Do(ctx, req, site) + if err != nil { + return nil, resp, err + } + + return site, resp, nil +} + +// ListPagesBuilds lists the builds for a GitHub Pages site. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#list-github-pages-builds +func (s *RepositoriesService) ListPagesBuilds(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PagesBuild, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pages []*PagesBuild + resp, err := s.client.Do(ctx, req, &pages) + if err != nil { + return nil, resp, err + } + + return pages, resp, nil +} + +// GetLatestPagesBuild fetches the latest build information for a GitHub pages site. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#get-latest-pages-build +func (s *RepositoriesService) GetLatestPagesBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages/builds/latest", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + build := new(PagesBuild) + resp, err := s.client.Do(ctx, req, build) + if err != nil { + return nil, resp, err + } + + return build, resp, nil +} + +// GetPageBuild fetches the specific build information for a GitHub pages site. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#get-github-pages-build +func (s *RepositoriesService) GetPageBuild(ctx context.Context, owner, repo string, id int64) (*PagesBuild, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages/builds/%v", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + build := new(PagesBuild) + resp, err := s.client.Do(ctx, req, build) + if err != nil { + return nil, resp, err + } + + return build, resp, nil +} + +// RequestPageBuild requests a build of a GitHub Pages site without needing to push new commit. 
+// +// GitHub API docs: https://docs.github.com/en/rest/pages#request-a-github-pages-build +func (s *RepositoriesService) RequestPageBuild(ctx context.Context, owner, repo string) (*PagesBuild, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages/builds", owner, repo) + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, nil, err + } + + build := new(PagesBuild) + resp, err := s.client.Do(ctx, req, build) + if err != nil { + return nil, resp, err + } + + return build, resp, nil +} + +// GetPagesHealthCheck gets a DNS health check for the CNAME record configured for a repository's GitHub Pages. +// +// GitHub API docs: https://docs.github.com/en/rest/pages#get-a-dns-health-check-for-github-pages +func (s *RepositoriesService) GetPageHealthCheck(ctx context.Context, owner, repo string) (*PagesHealthCheckResponse, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pages/health", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + healthCheckResponse := new(PagesHealthCheckResponse) + resp, err := s.client.Do(ctx, req, healthCheckResponse) + if err != nil { + return nil, resp, err + } + + return healthCheckResponse, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/v56/github/repos_prereceive_hooks.go new file mode 100644 index 000000000..1ce6478d3 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_prereceive_hooks.go @@ -0,0 +1,110 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// PreReceiveHook represents a GitHub pre-receive hook for a repository. +type PreReceiveHook struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Enforcement *string `json:"enforcement,omitempty"` + ConfigURL *string `json:"configuration_url,omitempty"` +} + +func (p PreReceiveHook) String() string { + return Stringify(p) +} + +// ListPreReceiveHooks lists all pre-receive hooks for the specified repository. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#list-pre-receive-hooks +func (s *RepositoriesService) ListPreReceiveHooks(ctx context.Context, owner, repo string, opts *ListOptions) ([]*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + var hooks []*PreReceiveHook + resp, err := s.client.Do(ctx, req, &hooks) + if err != nil { + return nil, resp, err + } + + return hooks, resp, nil +} + +// GetPreReceiveHook returns a single specified pre-receive hook. 
+// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#get-a-single-pre-receive-hook +func (s *RepositoriesService) GetPreReceiveHook(ctx context.Context, owner, repo string, id int64) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// UpdatePreReceiveHook updates a specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#update-pre-receive-hook-enforcement +func (s *RepositoriesService) UpdatePreReceiveHook(ctx context.Context, owner, repo string, id int64, hook *PreReceiveHook) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, hook) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// DeletePreReceiveHook deletes a specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#remove-enforcement-overrides-for-a-pre-receive-hook +func (s *RepositoriesService) DeletePreReceiveHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_projects.go b/vendor/github.com/google/go-github/v56/github/repos_projects.go new file mode 100644 index 000000000..a3001dee9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_projects.go @@ -0,0 +1,69 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ProjectListOptions specifies the optional parameters to the +// OrganizationsService.ListProjects and RepositoriesService.ListProjects methods. +type ProjectListOptions struct { + // Indicates the state of the projects to return. Can be either open, closed, or all. Default: open + State string `url:"state,omitempty"` + + ListOptions +} + +// ListProjects lists the projects for a repo. 
+// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#list-repository-projects +func (s *RepositoriesService) ListProjects(ctx context.Context, owner, repo string, opts *ProjectListOptions) ([]*Project, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// CreateProject creates a GitHub Project for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#create-a-repository-project +func (s *RepositoriesService) CreateProject(ctx context.Context, owner, repo string, opts *ProjectOptions) (*Project, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/projects", owner, repo) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept headers when APIs fully launch. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + project := &Project{} + resp, err := s.client.Do(ctx, req, project) + if err != nil { + return nil, resp, err + } + + return project, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_releases.go b/vendor/github.com/google/go-github/v56/github/repos_releases.go new file mode 100644 index 000000000..e2aa845af --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_releases.go @@ -0,0 +1,447 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "errors" + "fmt" + "io" + "mime" + "net/http" + "os" + "path/filepath" + "strings" +) + +// RepositoryRelease represents a GitHub release in a repository. +type RepositoryRelease struct { + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Draft *bool `json:"draft,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` + // MakeLatest can be one of: "true", "false", or "legacy". 
+ MakeLatest *string `json:"make_latest,omitempty"` + DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` + + // The following fields are not used in EditRelease: + GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` + + // The following fields are not used in CreateRelease or EditRelease: + ID *int64 `json:"id,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PublishedAt *Timestamp `json:"published_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + AssetsURL *string `json:"assets_url,omitempty"` + Assets []*ReleaseAsset `json:"assets,omitempty"` + UploadURL *string `json:"upload_url,omitempty"` + ZipballURL *string `json:"zipball_url,omitempty"` + TarballURL *string `json:"tarball_url,omitempty"` + Author *User `json:"author,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +func (r RepositoryRelease) String() string { + return Stringify(r) +} + +// RepositoryReleaseNotes represents a GitHub-generated release notes. +type RepositoryReleaseNotes struct { + Name string `json:"name"` + Body string `json:"body"` +} + +// GenerateNotesOptions represents the options to generate release notes. +type GenerateNotesOptions struct { + TagName string `json:"tag_name"` + PreviousTagName *string `json:"previous_tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` +} + +// ReleaseAsset represents a GitHub release asset in a repository. +type ReleaseAsset struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Label *string `json:"label,omitempty"` + State *string `json:"state,omitempty"` + ContentType *string `json:"content_type,omitempty"` + Size *int `json:"size,omitempty"` + DownloadCount *int `json:"download_count,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + BrowserDownloadURL *string `json:"browser_download_url,omitempty"` + Uploader *User `json:"uploader,omitempty"` + NodeID *string `json:"node_id,omitempty"` +} + +func (r ReleaseAsset) String() string { + return Stringify(r) +} + +// ListReleases lists the releases for a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#list-releases +func (s *RepositoriesService) ListReleases(ctx context.Context, owner, repo string, opts *ListOptions) ([]*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var releases []*RepositoryRelease + resp, err := s.client.Do(ctx, req, &releases) + if err != nil { + return nil, resp, err + } + return releases, resp, nil +} + +// GetRelease fetches a single release. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#get-a-release +func (s *RepositoriesService) GetRelease(ctx context.Context, owner, repo string, id int64) (*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) + return s.getSingleRelease(ctx, u) +} + +// GetLatestRelease fetches the latest published release for the repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#get-the-latest-release +func (s *RepositoriesService) GetLatestRelease(ctx context.Context, owner, repo string) (*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/latest", owner, repo) + return s.getSingleRelease(ctx, u) +} + +// GetReleaseByTag fetches a release with the specified tag. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#get-a-release-by-tag-name +func (s *RepositoriesService) GetReleaseByTag(ctx context.Context, owner, repo, tag string) (*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/tags/%s", owner, repo, tag) + return s.getSingleRelease(ctx, u) +} + +// GenerateReleaseNotes generates the release notes for the given tag. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#generate-release-notes-content-for-a-release +func (s *RepositoriesService) GenerateReleaseNotes(ctx context.Context, owner, repo string, opts *GenerateNotesOptions) (*RepositoryReleaseNotes, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/generate-notes", owner, repo) + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + r := new(RepositoryReleaseNotes) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, nil +} + +func (s *RepositoriesService) getSingleRelease(ctx context.Context, url string) (*RepositoryRelease, *Response, error) { + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + release := new(RepositoryRelease) + resp, err := s.client.Do(ctx, req, release) + if err != nil { + return nil, resp, err + } + return release, resp, nil +} + +// repositoryReleaseRequest is a subset of RepositoryRelease and +// is used internally by CreateRelease and EditRelease to pass +// only the known fields for these endpoints. +// +// See https://github.com/google/go-github/issues/992 for more +// information. +type repositoryReleaseRequest struct { + TagName *string `json:"tag_name,omitempty"` + TargetCommitish *string `json:"target_commitish,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Draft *bool `json:"draft,omitempty"` + Prerelease *bool `json:"prerelease,omitempty"` + MakeLatest *string `json:"make_latest,omitempty"` + GenerateReleaseNotes *bool `json:"generate_release_notes,omitempty"` + DiscussionCategoryName *string `json:"discussion_category_name,omitempty"` +} + +// CreateRelease adds a new release for a repository. +// +// Note that only a subset of the release fields are used. +// See RepositoryRelease for more information. 
+// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#create-a-release +func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo string, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases", owner, repo) + + releaseReq := &repositoryReleaseRequest{ + TagName: release.TagName, + TargetCommitish: release.TargetCommitish, + Name: release.Name, + Body: release.Body, + Draft: release.Draft, + Prerelease: release.Prerelease, + MakeLatest: release.MakeLatest, + DiscussionCategoryName: release.DiscussionCategoryName, + GenerateReleaseNotes: release.GenerateReleaseNotes, + } + + req, err := s.client.NewRequest("POST", u, releaseReq) + if err != nil { + return nil, nil, err + } + + r := new(RepositoryRelease) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + return r, resp, nil +} + +// EditRelease edits a repository release. +// +// Note that only a subset of the release fields are used. +// See RepositoryRelease for more information. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#update-a-release +func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo string, id int64, release *RepositoryRelease) (*RepositoryRelease, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) + + releaseReq := &repositoryReleaseRequest{ + TagName: release.TagName, + TargetCommitish: release.TargetCommitish, + Name: release.Name, + Body: release.Body, + Draft: release.Draft, + Prerelease: release.Prerelease, + MakeLatest: release.MakeLatest, + DiscussionCategoryName: release.DiscussionCategoryName, + } + + req, err := s.client.NewRequest("PATCH", u, releaseReq) + if err != nil { + return nil, nil, err + } + + r := new(RepositoryRelease) + resp, err := s.client.Do(ctx, req, r) + if err != nil { + return nil, resp, err + } + return r, resp, nil +} + +// DeleteRelease delete a single release from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/releases#delete-a-release +func (s *RepositoriesService) DeleteRelease(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/%d", owner, repo, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// ListReleaseAssets lists the release's assets. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#list-release-assets +func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo string, id int64, opts *ListOptions) ([]*ReleaseAsset, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var assets []*ReleaseAsset + resp, err := s.client.Do(ctx, req, &assets) + if err != nil { + return nil, resp, err + } + return assets, resp, nil +} + +// GetReleaseAsset fetches a single release asset. 
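(Editorial aside, not part of the vendored file: a sketch of CreateRelease with GitHub-generated release notes, assuming the earlier client/ctx setup; the tag, name and owner/repo are placeholders.)

func cutRelease(ctx context.Context, client *github.Client) (*github.RepositoryRelease, error) {
	rel, _, err := client.Repositories.CreateRelease(ctx, "my-org", "my-repo", &github.RepositoryRelease{
		TagName:              github.String("v1.2.3"),
		Name:                 github.String("v1.2.3"),
		GenerateReleaseNotes: github.Bool(true), // let GitHub write the body
		MakeLatest:           github.String("true"),
	})
	return rel, err
}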
+// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#get-a-release-asset +func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo string, id int64) (*ReleaseAsset, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + asset := new(ReleaseAsset) + resp, err := s.client.Do(ctx, req, asset) + if err != nil { + return nil, resp, err + } + return asset, resp, nil +} + +// DownloadReleaseAsset downloads a release asset or returns a redirect URL. +// +// DownloadReleaseAsset returns an io.ReadCloser that reads the contents of the +// specified release asset. It is the caller's responsibility to close the ReadCloser. +// If a redirect is returned, the redirect URL will be returned as a string instead +// of the io.ReadCloser. Exactly one of rc and redirectURL will be zero. +// +// followRedirectsClient can be passed to download the asset from a redirected +// location. Passing http.DefaultClient is recommended unless special circumstances +// exist, but it's possible to pass any http.Client. If nil is passed the +// redirectURL will be returned instead. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#get-a-release-asset +func (s *RepositoriesService) DownloadReleaseAsset(ctx context.Context, owner, repo string, id int64, followRedirectsClient *http.Client) (rc io.ReadCloser, redirectURL string, err error) { + u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, "", err + } + req.Header.Set("Accept", defaultMediaType) + + s.client.clientMu.Lock() + defer s.client.clientMu.Unlock() + + var loc string + saveRedirect := s.client.client.CheckRedirect + s.client.client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + loc = req.URL.String() + return errors.New("disable redirect") + } + defer func() { s.client.client.CheckRedirect = saveRedirect }() + + req = withContext(ctx, req) + resp, err := s.client.client.Do(req) + if err != nil { + if !strings.Contains(err.Error(), "disable redirect") { + return nil, "", err + } + if followRedirectsClient != nil { + rc, err := s.downloadReleaseAssetFromURL(ctx, followRedirectsClient, loc) + return rc, "", err + } + return nil, loc, nil // Intentionally return no error with valid redirect URL. + } + + if err := CheckResponse(resp); err != nil { + _ = resp.Body.Close() + return nil, "", err + } + + return resp.Body, "", nil +} + +func (s *RepositoriesService) downloadReleaseAssetFromURL(ctx context.Context, followRedirectsClient *http.Client, url string) (rc io.ReadCloser, err error) { + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + req = withContext(ctx, req) + req.Header.Set("Accept", "*/*") + resp, err := followRedirectsClient.Do(req) + if err != nil { + return nil, err + } + if err := CheckResponse(resp); err != nil { + _ = resp.Body.Close() + return nil, err + } + return resp.Body, nil +} + +// EditReleaseAsset edits a repository release asset. 
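(Editorial aside, not part of the vendored file: a sketch of DownloadReleaseAsset, assuming the earlier client/ctx setup plus the standard io, net/http and os imports; owner, repo and the asset ID are placeholders.)

func saveAsset(ctx context.Context, client *github.Client, assetID int64, dst string) error {
	// Passing http.DefaultClient lets the method follow the storage redirect itself,
	// so redirectURL is empty and rc holds the asset body.
	rc, _, err := client.Repositories.DownloadReleaseAsset(ctx, "my-org", "my-repo", assetID, http.DefaultClient)
	if err != nil {
		return err
	}
	defer rc.Close()

	f, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer f.Close()

	_, err = io.Copy(f, rc)
	return err
}

Per the doc comment above, passing nil instead of http.DefaultClient returns the redirect URL rather than a reader.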
+// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#update-a-release-asset +func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo string, id int64, release *ReleaseAsset) (*ReleaseAsset, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) + + req, err := s.client.NewRequest("PATCH", u, release) + if err != nil { + return nil, nil, err + } + + asset := new(ReleaseAsset) + resp, err := s.client.Do(ctx, req, asset) + if err != nil { + return nil, resp, err + } + return asset, resp, nil +} + +// DeleteReleaseAsset delete a single release asset from a repository. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#delete-a-release-asset +func (s *RepositoriesService) DeleteReleaseAsset(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/assets/%d", owner, repo, id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + return s.client.Do(ctx, req, nil) +} + +// UploadReleaseAsset creates an asset by uploading a file into a release repository. +// To upload assets that cannot be represented by an os.File, call NewUploadRequest directly. +// +// GitHub API docs: https://docs.github.com/en/rest/releases/assets#upload-a-release-asset +func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, repo string, id int64, opts *UploadOptions, file *os.File) (*ReleaseAsset, *Response, error) { + u := fmt.Sprintf("repos/%s/%s/releases/%d/assets", owner, repo, id) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + stat, err := file.Stat() + if err != nil { + return nil, nil, err + } + if stat.IsDir() { + return nil, nil, errors.New("the asset to upload can't be a directory") + } + + mediaType := mime.TypeByExtension(filepath.Ext(file.Name())) + if opts.MediaType != "" { + mediaType = opts.MediaType + } + + req, err := s.client.NewUploadRequest(u, file, stat.Size(), mediaType) + if err != nil { + return nil, nil, err + } + + asset := new(ReleaseAsset) + resp, err := s.client.Do(ctx, req, asset) + if err != nil { + return nil, resp, err + } + return asset, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_rules.go b/vendor/github.com/google/go-github/v56/github/repos_rules.go new file mode 100644 index 000000000..7f964fe66 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_rules.go @@ -0,0 +1,464 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" +) + +// BypassActor represents the bypass actors from a ruleset. +type BypassActor struct { + ActorID *int64 `json:"actor_id,omitempty"` + // Possible values for ActorType are: RepositoryRole, Team, Integration, OrganizationAdmin + ActorType *string `json:"actor_type,omitempty"` + // Possible values for BypassMode are: always, pull_request + BypassMode *string `json:"bypass_mode,omitempty"` +} + +// RulesetLink represents a single link object from GitHub ruleset request _links. +type RulesetLink struct { + HRef *string `json:"href,omitempty"` +} + +// RulesetLinks represents the "_links" object in a Ruleset. +type RulesetLinks struct { + Self *RulesetLink `json:"self,omitempty"` +} + +// RulesetRefConditionParameters represents the conditions object for ref_names. 
+type RulesetRefConditionParameters struct { + Include []string `json:"include"` + Exclude []string `json:"exclude"` +} + +// RulesetRepositoryNamesConditionParameters represents the conditions object for repository_names. +type RulesetRepositoryNamesConditionParameters struct { + Include []string `json:"include"` + Exclude []string `json:"exclude"` + Protected *bool `json:"protected,omitempty"` +} + +// RulesetRepositoryIDsConditionParameters represents the conditions object for repository_ids. +type RulesetRepositoryIDsConditionParameters struct { + RepositoryIDs []int64 `json:"repository_ids,omitempty"` +} + +// RulesetCondition represents the conditions object in a ruleset. +// Set either RepositoryName or RepositoryID, not both. +type RulesetConditions struct { + RefName *RulesetRefConditionParameters `json:"ref_name,omitempty"` + RepositoryName *RulesetRepositoryNamesConditionParameters `json:"repository_name,omitempty"` + RepositoryID *RulesetRepositoryIDsConditionParameters `json:"repository_id,omitempty"` +} + +// RulePatternParameters represents the rule pattern parameters. +type RulePatternParameters struct { + Name *string `json:"name,omitempty"` + // If Negate is true, the rule will fail if the pattern matches. + Negate *bool `json:"negate,omitempty"` + // Possible values for Operator are: starts_with, ends_with, contains, regex + Operator string `json:"operator"` + Pattern string `json:"pattern"` +} + +// UpdateAllowsFetchAndMergeRuleParameters represents the update rule parameters. +type UpdateAllowsFetchAndMergeRuleParameters struct { + UpdateAllowsFetchAndMerge bool `json:"update_allows_fetch_and_merge"` +} + +// RequiredDeploymentEnvironmentsRuleParameters represents the required_deployments rule parameters. +type RequiredDeploymentEnvironmentsRuleParameters struct { + RequiredDeploymentEnvironments []string `json:"required_deployment_environments"` +} + +// PullRequestRuleParameters represents the pull_request rule parameters. +type PullRequestRuleParameters struct { + DismissStaleReviewsOnPush bool `json:"dismiss_stale_reviews_on_push"` + RequireCodeOwnerReview bool `json:"require_code_owner_review"` + RequireLastPushApproval bool `json:"require_last_push_approval"` + RequiredApprovingReviewCount int `json:"required_approving_review_count"` + RequiredReviewThreadResolution bool `json:"required_review_thread_resolution"` +} + +// RuleRequiredStatusChecks represents the RequiredStatusChecks for the RequiredStatusChecksRuleParameters object. +type RuleRequiredStatusChecks struct { + Context string `json:"context"` + IntegrationID *int64 `json:"integration_id,omitempty"` +} + +// RequiredStatusChecksRuleParameters represents the required_status_checks rule parameters. +type RequiredStatusChecksRuleParameters struct { + RequiredStatusChecks []RuleRequiredStatusChecks `json:"required_status_checks"` + StrictRequiredStatusChecksPolicy bool `json:"strict_required_status_checks_policy"` +} + +// RepositoryRule represents a GitHub Rule. +type RepositoryRule struct { + Type string `json:"type"` + Parameters *json.RawMessage `json:"parameters,omitempty"` +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// This helps us handle the fact that RepositoryRule parameter field can be of numerous types. 
+func (r *RepositoryRule) UnmarshalJSON(data []byte) error { + type rule RepositoryRule + var RepositoryRule rule + if err := json.Unmarshal(data, &RepositoryRule); err != nil { + return err + } + + r.Type = RepositoryRule.Type + + switch RepositoryRule.Type { + case "creation", "deletion", "required_linear_history", "required_signatures", "non_fast_forward": + r.Parameters = nil + case "update": + if RepositoryRule.Parameters == nil { + r.Parameters = nil + return nil + } + params := UpdateAllowsFetchAndMergeRuleParameters{} + if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { + return err + } + + bytes, _ := json.Marshal(params) + rawParams := json.RawMessage(bytes) + + r.Parameters = &rawParams + + case "required_deployments": + params := RequiredDeploymentEnvironmentsRuleParameters{} + if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { + return err + } + + bytes, _ := json.Marshal(params) + rawParams := json.RawMessage(bytes) + + r.Parameters = &rawParams + case "commit_message_pattern", "commit_author_email_pattern", "committer_email_pattern", "branch_name_pattern", "tag_name_pattern": + params := RulePatternParameters{} + if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { + return err + } + + bytes, _ := json.Marshal(params) + rawParams := json.RawMessage(bytes) + + r.Parameters = &rawParams + case "pull_request": + params := PullRequestRuleParameters{} + if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { + return err + } + + bytes, _ := json.Marshal(params) + rawParams := json.RawMessage(bytes) + + r.Parameters = &rawParams + case "required_status_checks": + params := RequiredStatusChecksRuleParameters{} + if err := json.Unmarshal(*RepositoryRule.Parameters, ¶ms); err != nil { + return err + } + + bytes, _ := json.Marshal(params) + rawParams := json.RawMessage(bytes) + + r.Parameters = &rawParams + default: + r.Type = "" + r.Parameters = nil + return fmt.Errorf("RepositoryRule.Type %T is not yet implemented, unable to unmarshal", RepositoryRule.Type) + } + + return nil +} + +// NewCreationRule creates a rule to only allow users with bypass permission to create matching refs. +func NewCreationRule() (rule *RepositoryRule) { + return &RepositoryRule{ + Type: "creation", + } +} + +// NewUpdateRule creates a rule to only allow users with bypass permission to update matching refs. +func NewUpdateRule(params *UpdateAllowsFetchAndMergeRuleParameters) (rule *RepositoryRule) { + if params != nil { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "update", + Parameters: &rawParams, + } + } + return &RepositoryRule{ + Type: "update", + } +} + +// NewDeletionRule creates a rule to only allow users with bypass permissions to delete matching refs. +func NewDeletionRule() (rule *RepositoryRule) { + return &RepositoryRule{ + Type: "deletion", + } +} + +// NewRequiredLinearHistoryRule creates a rule to prevent merge commits from being pushed to matching branches. +func NewRequiredLinearHistoryRule() (rule *RepositoryRule) { + return &RepositoryRule{ + Type: "required_linear_history", + } +} + +// NewRequiredDeploymentsRule creates a rule to require environments to be successfully deployed before they can be merged into the matching branches. 
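(Editorial aside, not part of the vendored file: a sketch showing how the re-marshalled Parameters field produced by the UnmarshalJSON method above can be decoded back into its typed form; requires the standard encoding/json import, and the helper name is hypothetical.)

func approvalsRequired(rule *github.RepositoryRule) (int, bool) {
	if rule.Type != "pull_request" || rule.Parameters == nil {
		return 0, false
	}
	var params github.PullRequestRuleParameters
	if err := json.Unmarshal(*rule.Parameters, &params); err != nil {
		return 0, false
	}
	return params.RequiredApprovingReviewCount, true
}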
+func NewRequiredDeploymentsRule(params *RequiredDeploymentEnvironmentsRuleParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "required_deployments", + Parameters: &rawParams, + } +} + +// NewRequiredSignaturesRule creates a rule a to require commits pushed to matching branches to have verified signatures. +func NewRequiredSignaturesRule() (rule *RepositoryRule) { + return &RepositoryRule{ + Type: "required_signatures", + } +} + +// NewPullRequestRule creates a rule to require all commits be made to a non-target branch and submitted via a pull request before they can be merged. +func NewPullRequestRule(params *PullRequestRuleParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "pull_request", + Parameters: &rawParams, + } +} + +// NewRequiredStatusChecksRule creates a rule to require which status checks must pass before branches can be merged into a branch rule. +func NewRequiredStatusChecksRule(params *RequiredStatusChecksRuleParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "required_status_checks", + Parameters: &rawParams, + } +} + +// NewNonFastForwardRule creates a rule as part to prevent users with push access from force pushing to matching branches. +func NewNonFastForwardRule() (rule *RepositoryRule) { + return &RepositoryRule{ + Type: "non_fast_forward", + } +} + +// NewCommitMessagePatternRule creates a rule to restrict commit message patterns being pushed to matching branches. +func NewCommitMessagePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "commit_message_pattern", + Parameters: &rawParams, + } +} + +// NewCommitAuthorEmailPatternRule creates a rule to restrict commits with author email patterns being merged into matching branches. +func NewCommitAuthorEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "commit_author_email_pattern", + Parameters: &rawParams, + } +} + +// NewCommitterEmailPatternRule creates a rule to restrict commits with committer email patterns being merged into matching branches. +func NewCommitterEmailPatternRule(params *RulePatternParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "committer_email_pattern", + Parameters: &rawParams, + } +} + +// NewBranchNamePatternRule creates a rule to restrict branch patterns from being merged into matching branches. +func NewBranchNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "branch_name_pattern", + Parameters: &rawParams, + } +} + +// NewTagNamePatternRule creates a rule to restrict tag patterns contained in non-target branches from being merged into matching branches. 
+func NewTagNamePatternRule(params *RulePatternParameters) (rule *RepositoryRule) { + bytes, _ := json.Marshal(params) + + rawParams := json.RawMessage(bytes) + + return &RepositoryRule{ + Type: "tag_name_pattern", + Parameters: &rawParams, + } +} + +// Ruleset represents a GitHub ruleset object. +type Ruleset struct { + ID *int64 `json:"id,omitempty"` + Name string `json:"name"` + // Possible values for Target are branch, tag + Target *string `json:"target,omitempty"` + // Possible values for SourceType are: Repository, Organization + SourceType *string `json:"source_type,omitempty"` + Source string `json:"source"` + // Possible values for Enforcement are: disabled, active, evaluate + Enforcement string `json:"enforcement"` + BypassActors []*BypassActor `json:"bypass_actors,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Links *RulesetLinks `json:"_links,omitempty"` + Conditions *RulesetConditions `json:"conditions,omitempty"` + Rules []*RepositoryRule `json:"rules,omitempty"` +} + +// GetRulesForBranch gets all the rules that apply to the specified branch. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#get-rules-for-a-branch +func (s *RepositoriesService) GetRulesForBranch(ctx context.Context, owner, repo, branch string) ([]*RepositoryRule, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rules/branches/%v", owner, repo, branch) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var rules []*RepositoryRule + resp, err := s.client.Do(ctx, req, &rules) + if err != nil { + return nil, resp, err + } + + return rules, resp, nil +} + +// GetAllRulesets gets all the rules that apply to the specified repository. +// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#get-all-repository-rulesets +func (s *RepositoriesService) GetAllRulesets(ctx context.Context, owner, repo string, includesParents bool) ([]*Ruleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets?includes_parents=%v", owner, repo, includesParents) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var ruleset []*Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// CreateRuleset creates a ruleset for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#create-a-repository-ruleset +func (s *RepositoriesService) CreateRuleset(ctx context.Context, owner, repo string, rs *Ruleset) (*Ruleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets", owner, repo) + + req, err := s.client.NewRequest("POST", u, rs) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// GetRuleset gets a ruleset for the specified repository. +// If includesParents is true, rulesets configured at the organization level that apply to the repository will be returned. 
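(Editorial aside, not part of the vendored file: a sketch of CreateRuleset built from the rule constructors above, assuming the earlier client/ctx setup; the ruleset name, ref pattern and owner/repo are placeholder values.)

func createPRRuleset(ctx context.Context, client *github.Client) (*github.Ruleset, error) {
	rs := &github.Ruleset{
		Name:        "require-pull-requests",
		Target:      github.String("branch"),
		Enforcement: "active",
		Conditions: &github.RulesetConditions{
			RefName: &github.RulesetRefConditionParameters{
				Include: []string{"refs/heads/main"},
				Exclude: []string{},
			},
		},
		Rules: []*github.RepositoryRule{
			github.NewPullRequestRule(&github.PullRequestRuleParameters{
				RequiredApprovingReviewCount: 1,
				DismissStaleReviewsOnPush:    true,
			}),
			github.NewNonFastForwardRule(),
		},
	}
	ruleset, _, err := client.Repositories.CreateRuleset(ctx, "my-org", "my-repo", rs)
	return ruleset, err
}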
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#get-a-repository-ruleset +func (s *RepositoriesService) GetRuleset(ctx context.Context, owner, repo string, rulesetID int64, includesParents bool) (*Ruleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets/%v?includes_parents=%v", owner, repo, rulesetID, includesParents) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// UpdateRuleset updates a ruleset for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#update-a-repository-ruleset +func (s *RepositoriesService) UpdateRuleset(ctx context.Context, owner, repo string, rulesetID int64, rs *Ruleset) (*Ruleset, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) + + req, err := s.client.NewRequest("PUT", u, rs) + if err != nil { + return nil, nil, err + } + + var ruleset *Ruleset + resp, err := s.client.Do(ctx, req, &ruleset) + if err != nil { + return nil, resp, err + } + + return ruleset, resp, nil +} + +// DeleteRuleset deletes a ruleset for the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/rules#delete-a-repository-ruleset +func (s *RepositoriesService) DeleteRuleset(ctx context.Context, owner, repo string, rulesetID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/rulesets/%v", owner, repo, rulesetID) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_stats.go b/vendor/github.com/google/go-github/v56/github/repos_stats.go new file mode 100644 index 000000000..3df0a8f6d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_stats.go @@ -0,0 +1,232 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "time" +) + +// ContributorStats represents a contributor to a repository and their +// weekly contributions to a given repo. +type ContributorStats struct { + Author *Contributor `json:"author,omitempty"` + Total *int `json:"total,omitempty"` + Weeks []*WeeklyStats `json:"weeks,omitempty"` +} + +func (c ContributorStats) String() string { + return Stringify(c) +} + +// WeeklyStats represents the number of additions, deletions and commits +// a Contributor made in a given week. +type WeeklyStats struct { + Week *Timestamp `json:"w,omitempty"` + Additions *int `json:"a,omitempty"` + Deletions *int `json:"d,omitempty"` + Commits *int `json:"c,omitempty"` +} + +func (w WeeklyStats) String() string { + return Stringify(w) +} + +// ListContributorsStats gets a repo's contributor list with additions, +// deletions and commit counts. +// +// If this is the first time these statistics are requested for the given +// repository, this method will return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing the requested statistics. A follow up request, after a +// delay of a second or so, should result in a successful request. 
+// +// GitHub API docs: https://docs.github.com/en/rest/metrics/statistics#get-all-contributor-commit-activity +func (s *RepositoriesService) ListContributorsStats(ctx context.Context, owner, repo string) ([]*ContributorStats, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/stats/contributors", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var contributorStats []*ContributorStats + resp, err := s.client.Do(ctx, req, &contributorStats) + if err != nil { + return nil, resp, err + } + + return contributorStats, resp, nil +} + +// WeeklyCommitActivity represents the weekly commit activity for a repository. +// The days array is a group of commits per day, starting on Sunday. +type WeeklyCommitActivity struct { + Days []int `json:"days,omitempty"` + Total *int `json:"total,omitempty"` + Week *Timestamp `json:"week,omitempty"` +} + +func (w WeeklyCommitActivity) String() string { + return Stringify(w) +} + +// ListCommitActivity returns the last year of commit activity +// grouped by week. The days array is a group of commits per day, +// starting on Sunday. +// +// If this is the first time these statistics are requested for the given +// repository, this method will return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing the requested statistics. A follow up request, after a +// delay of a second or so, should result in a successful request. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/statistics#get-the-last-year-of-commit-activity +func (s *RepositoriesService) ListCommitActivity(ctx context.Context, owner, repo string) ([]*WeeklyCommitActivity, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/stats/commit_activity", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var weeklyCommitActivity []*WeeklyCommitActivity + resp, err := s.client.Do(ctx, req, &weeklyCommitActivity) + if err != nil { + return nil, resp, err + } + + return weeklyCommitActivity, resp, nil +} + +// ListCodeFrequency returns a weekly aggregate of the number of additions and +// deletions pushed to a repository. Returned WeeklyStats will contain +// additions and deletions, but not total commits. +// +// If this is the first time these statistics are requested for the given +// repository, this method will return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing the requested statistics. A follow up request, after a +// delay of a second or so, should result in a successful request. 
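Editorial note: a sketch, not part of the vendored file, of the 202/*AcceptedError retry pattern described in the comments above, using ListContributorsStats. It assumes imports of context, errors, time, and github.com/google/go-github/v56/github; the retry budget and "owner"/"repo" values are placeholders.

func contributorStats(ctx context.Context, client *github.Client) ([]*github.ContributorStats, error) {
	for attempt := 0; attempt < 5; attempt++ {
		stats, _, err := client.Repositories.ListContributorsStats(ctx, "owner", "repo")
		if _, ok := err.(*github.AcceptedError); ok {
			// GitHub answered 202: the statistics are still being computed, retry shortly.
			time.Sleep(2 * time.Second)
			continue
		}
		return stats, err
	}
	return nil, errors.New("contributor statistics not ready after retries")
}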
+// +// GitHub API docs: https://docs.github.com/en/rest/metrics/statistics#get-the-weekly-commit-activity +func (s *RepositoriesService) ListCodeFrequency(ctx context.Context, owner, repo string) ([]*WeeklyStats, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/stats/code_frequency", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var weeks [][]int + resp, err := s.client.Do(ctx, req, &weeks) + if err != nil { + return nil, resp, err + } + + // convert int slices into WeeklyStats + var stats []*WeeklyStats + for _, week := range weeks { + if len(week) != 3 { + continue + } + stat := &WeeklyStats{ + Week: &Timestamp{time.Unix(int64(week[0]), 0)}, + Additions: Int(week[1]), + Deletions: Int(week[2]), + } + stats = append(stats, stat) + } + + return stats, resp, nil +} + +// RepositoryParticipation is the number of commits by everyone +// who has contributed to the repository (including the owner) +// as well as the number of commits by the owner themself. +type RepositoryParticipation struct { + All []int `json:"all,omitempty"` + Owner []int `json:"owner,omitempty"` +} + +func (r RepositoryParticipation) String() string { + return Stringify(r) +} + +// ListParticipation returns the total commit counts for the 'owner' +// and total commit counts in 'all'. 'all' is everyone combined, +// including the 'owner' in the last 52 weeks. If you’d like to get +// the commit counts for non-owners, you can subtract 'all' from 'owner'. +// +// The array order is oldest week (index 0) to most recent week. +// +// If this is the first time these statistics are requested for the given +// repository, this method will return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing the requested statistics. A follow up request, after a +// delay of a second or so, should result in a successful request. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/statistics#get-the-weekly-commit-count +func (s *RepositoriesService) ListParticipation(ctx context.Context, owner, repo string) (*RepositoryParticipation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/stats/participation", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + participation := new(RepositoryParticipation) + resp, err := s.client.Do(ctx, req, participation) + if err != nil { + return nil, resp, err + } + + return participation, resp, nil +} + +// PunchCard represents the number of commits made during a given hour of a +// day of the week. +type PunchCard struct { + Day *int // Day of the week (0-6: =Sunday - Saturday). + Hour *int // Hour of day (0-23). + Commits *int // Number of commits. +} + +// ListPunchCard returns the number of commits per hour in each day. +// +// If this is the first time these statistics are requested for the given +// repository, this method will return an *AcceptedError and a status code of +// 202. This is because this is the status that GitHub returns to signify that +// it is now computing the requested statistics. A follow up request, after a +// delay of a second or so, should result in a successful request. 
+// +// GitHub API docs: https://docs.github.com/en/rest/metrics/statistics#get-the-hourly-commit-count-for-each-day +func (s *RepositoriesService) ListPunchCard(ctx context.Context, owner, repo string) ([]*PunchCard, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/stats/punch_card", owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var results [][]int + resp, err := s.client.Do(ctx, req, &results) + if err != nil { + return nil, resp, err + } + + // convert int slices into Punchcards + var cards []*PunchCard + for _, result := range results { + if len(result) != 3 { + continue + } + card := &PunchCard{ + Day: Int(result[0]), + Hour: Int(result[1]), + Commits: Int(result[2]), + } + cards = append(cards, card) + } + + return cards, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_statuses.go b/vendor/github.com/google/go-github/v56/github/repos_statuses.go new file mode 100644 index 000000000..ea3d166c7 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_statuses.go @@ -0,0 +1,132 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// RepoStatus represents the status of a repository at a particular reference. +type RepoStatus struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + URL *string `json:"url,omitempty"` + + // State is the current state of the repository. Possible values are: + // pending, success, error, or failure. + State *string `json:"state,omitempty"` + + // TargetURL is the URL of the page representing this status. It will be + // linked from the GitHub UI to allow users to see the source of the status. + TargetURL *string `json:"target_url,omitempty"` + + // Description is a short high level summary of the status. + Description *string `json:"description,omitempty"` + + // A string label to differentiate this status from the statuses of other systems. + Context *string `json:"context,omitempty"` + + // AvatarURL is the URL of the avatar of this status. + AvatarURL *string `json:"avatar_url,omitempty"` + + Creator *User `json:"creator,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` +} + +func (r RepoStatus) String() string { + return Stringify(r) +} + +// ListStatuses lists the statuses of a repository at the specified +// reference. ref can be a SHA, a branch name, or a tag name. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/statuses#list-commit-statuses-for-a-reference +func (s *RepositoriesService) ListStatuses(ctx context.Context, owner, repo, ref string, opts *ListOptions) ([]*RepoStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/statuses", owner, repo, refURLEscape(ref)) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var statuses []*RepoStatus + resp, err := s.client.Do(ctx, req, &statuses) + if err != nil { + return nil, resp, err + } + + return statuses, resp, nil +} + +// CreateStatus creates a new status for a repository at the specified +// reference. Ref can be a SHA, a branch name, or a tag name. 
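Editorial note: a sketch, not part of the vendored file, of posting a commit status with CreateStatus. The repository, status context, and target URL are placeholders; only the RepoStatus fields shown above are used.

func reportBuildStatus(ctx context.Context, client *github.Client, sha string) error {
	status := &github.RepoStatus{
		State:       github.String("success"),
		Context:     github.String("ci/build"),
		Description: github.String("build passed"),
		TargetURL:   github.String("https://ci.example.com/builds/42"),
	}
	_, _, err := client.Repositories.CreateStatus(ctx, "owner", "repo", sha, status)
	return err
}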
+// +// GitHub API docs: https://docs.github.com/en/rest/commits/statuses#create-a-commit-status +func (s *RepositoriesService) CreateStatus(ctx context.Context, owner, repo, ref string, status *RepoStatus) (*RepoStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/statuses/%v", owner, repo, refURLEscape(ref)) + req, err := s.client.NewRequest("POST", u, status) + if err != nil { + return nil, nil, err + } + + repoStatus := new(RepoStatus) + resp, err := s.client.Do(ctx, req, repoStatus) + if err != nil { + return nil, resp, err + } + + return repoStatus, resp, nil +} + +// CombinedStatus represents the combined status of a repository at a particular reference. +type CombinedStatus struct { + // State is the combined state of the repository. Possible values are: + // failure, pending, or success. + State *string `json:"state,omitempty"` + + Name *string `json:"name,omitempty"` + SHA *string `json:"sha,omitempty"` + TotalCount *int `json:"total_count,omitempty"` + Statuses []*RepoStatus `json:"statuses,omitempty"` + + CommitURL *string `json:"commit_url,omitempty"` + RepositoryURL *string `json:"repository_url,omitempty"` +} + +func (s CombinedStatus) String() string { + return Stringify(s) +} + +// GetCombinedStatus returns the combined status of a repository at the specified +// reference. ref can be a SHA, a branch name, or a tag name. +// +// GitHub API docs: https://docs.github.com/en/rest/commits/statuses#get-the-combined-status-for-a-specific-reference +func (s *RepositoriesService) GetCombinedStatus(ctx context.Context, owner, repo, ref string, opts *ListOptions) (*CombinedStatus, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/commits/%v/status", owner, repo, refURLEscape(ref)) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + status := new(CombinedStatus) + resp, err := s.client.Do(ctx, req, status) + if err != nil { + return nil, resp, err + } + + return status, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_tags.go b/vendor/github.com/google/go-github/v56/github/repos_tags.go new file mode 100644 index 000000000..ff46d90c7 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_tags.go @@ -0,0 +1,76 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TagProtection represents a repository tag protection. +type TagProtection struct { + ID *int64 `json:"id"` + Pattern *string `json:"pattern"` +} + +// tagProtectionRequest represents a request to create tag protection. +type tagProtectionRequest struct { + // An optional glob pattern to match against when enforcing tag protection. + Pattern string `json:"pattern"` +} + +// ListTagProtection lists tag protection of the specified repository. 
+// +// GitHub API docs: https://docs.github.com/en/rest/repos/tags#list-tag-protection-states-for-a-repository +func (s *RepositoriesService) ListTagProtection(ctx context.Context, owner, repo string) ([]*TagProtection, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var tagProtections []*TagProtection + resp, err := s.client.Do(ctx, req, &tagProtections) + if err != nil { + return nil, resp, err + } + + return tagProtections, resp, nil +} + +// CreateTagProtection creates the tag protection of the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/tags#create-a-tag-protection-state-for-a-repository +func (s *RepositoriesService) CreateTagProtection(ctx context.Context, owner, repo, pattern string) (*TagProtection, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/tags/protection", owner, repo) + r := &tagProtectionRequest{Pattern: pattern} + req, err := s.client.NewRequest("POST", u, r) + if err != nil { + return nil, nil, err + } + + tagProtection := new(TagProtection) + resp, err := s.client.Do(ctx, req, tagProtection) + if err != nil { + return nil, resp, err + } + + return tagProtection, resp, nil +} + +// DeleteTagProtection deletes a tag protection from the specified repository. +// +// GitHub API docs: https://docs.github.com/en/rest/repos/tags#delete-a-tag-protection-state-for-a-repository +func (s *RepositoriesService) DeleteTagProtection(ctx context.Context, owner, repo string, tagProtectionID int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/tags/protection/%v", owner, repo, tagProtectionID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/repos_traffic.go b/vendor/github.com/google/go-github/v56/github/repos_traffic.go new file mode 100644 index 000000000..bf093c03e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/repos_traffic.go @@ -0,0 +1,141 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TrafficReferrer represent information about traffic from a referrer . +type TrafficReferrer struct { + Referrer *string `json:"referrer,omitempty"` + Count *int `json:"count,omitempty"` + Uniques *int `json:"uniques,omitempty"` +} + +// TrafficPath represent information about the traffic on a path of the repo. +type TrafficPath struct { + Path *string `json:"path,omitempty"` + Title *string `json:"title,omitempty"` + Count *int `json:"count,omitempty"` + Uniques *int `json:"uniques,omitempty"` +} + +// TrafficData represent information about a specific timestamp in views or clones list. +type TrafficData struct { + Timestamp *Timestamp `json:"timestamp,omitempty"` + Count *int `json:"count,omitempty"` + Uniques *int `json:"uniques,omitempty"` +} + +// TrafficViews represent information about the number of views in the last 14 days. +type TrafficViews struct { + Views []*TrafficData `json:"views,omitempty"` + Count *int `json:"count,omitempty"` + Uniques *int `json:"uniques,omitempty"` +} + +// TrafficClones represent information about the number of clones in the last 14 days. 
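Editorial note: a sketch, not part of the vendored file, of the tag protection endpoints above. The "v*" glob and "owner"/"repo" values are placeholders, and fmt is assumed to be imported.

func protectReleaseTags(ctx context.Context, client *github.Client) error {
	// Create a protection pattern, then list what is configured for the repository.
	if _, _, err := client.Repositories.CreateTagProtection(ctx, "owner", "repo", "v*"); err != nil {
		return err
	}
	protections, _, err := client.Repositories.ListTagProtection(ctx, "owner", "repo")
	if err != nil {
		return err
	}
	for _, p := range protections {
		fmt.Printf("%d: %s\n", p.GetID(), p.GetPattern())
	}
	return nil
}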
+type TrafficClones struct { + Clones []*TrafficData `json:"clones,omitempty"` + Count *int `json:"count,omitempty"` + Uniques *int `json:"uniques,omitempty"` +} + +// TrafficBreakdownOptions specifies the parameters to methods that support breakdown per day or week. +// Can be one of: day, week. Default: day. +type TrafficBreakdownOptions struct { + Per string `url:"per,omitempty"` +} + +// ListTrafficReferrers list the top 10 referrers over the last 14 days. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/traffic#get-top-referral-sources +func (s *RepositoriesService) ListTrafficReferrers(ctx context.Context, owner, repo string) ([]*TrafficReferrer, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/traffic/popular/referrers", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var trafficReferrers []*TrafficReferrer + resp, err := s.client.Do(ctx, req, &trafficReferrers) + if err != nil { + return nil, resp, err + } + + return trafficReferrers, resp, nil +} + +// ListTrafficPaths list the top 10 popular content over the last 14 days. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/traffic#get-top-referral-paths +func (s *RepositoriesService) ListTrafficPaths(ctx context.Context, owner, repo string) ([]*TrafficPath, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/traffic/popular/paths", owner, repo) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var paths []*TrafficPath + resp, err := s.client.Do(ctx, req, &paths) + if err != nil { + return nil, resp, err + } + + return paths, resp, nil +} + +// ListTrafficViews get total number of views for the last 14 days and breaks it down either per day or week. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/traffic#get-page-views +func (s *RepositoriesService) ListTrafficViews(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficViews, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/traffic/views", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + trafficViews := new(TrafficViews) + resp, err := s.client.Do(ctx, req, &trafficViews) + if err != nil { + return nil, resp, err + } + + return trafficViews, resp, nil +} + +// ListTrafficClones get total number of clones for the last 14 days and breaks it down either per day or week for the last 14 days. +// +// GitHub API docs: https://docs.github.com/en/rest/metrics/traffic#get-repository-clones +func (s *RepositoriesService) ListTrafficClones(ctx context.Context, owner, repo string, opts *TrafficBreakdownOptions) (*TrafficClones, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/traffic/clones", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + trafficClones := new(TrafficClones) + resp, err := s.client.Do(ctx, req, &trafficClones) + if err != nil { + return nil, resp, err + } + + return trafficClones, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/scim.go b/vendor/github.com/google/go-github/v56/github/scim.go new file mode 100644 index 000000000..7deee6be4 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/scim.go @@ -0,0 +1,205 @@ +// Copyright 2021 The go-github AUTHORS. 
All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "encoding/json" + "fmt" +) + +// SCIMService provides access to SCIM related functions in the +// GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/scim +type SCIMService service + +// SCIMUserAttributes represents supported SCIM User attributes. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#supported-scim-user-attributes +type SCIMUserAttributes struct { + UserName string `json:"userName"` // Configured by the admin. Could be an email, login, or username. (Required.) + Name SCIMUserName `json:"name"` // (Required.) + DisplayName *string `json:"displayName,omitempty"` // The name of the user, suitable for display to end-users. (Optional.) + Emails []*SCIMUserEmail `json:"emails"` // User emails. (Required.) + Schemas []string `json:"schemas,omitempty"` // (Optional.) + ExternalID *string `json:"externalId,omitempty"` // (Optional.) + Groups []string `json:"groups,omitempty"` // (Optional.) + Active *bool `json:"active,omitempty"` // (Optional.) + // Only populated as a result of calling ListSCIMProvisionedIdentitiesOptions or GetSCIMProvisioningInfoForUser: + ID *string `json:"id,omitempty"` + Meta *SCIMMeta `json:"meta,omitempty"` +} + +// SCIMUserName represents SCIM user information. +type SCIMUserName struct { + GivenName string `json:"givenName"` // The first name of the user. (Required.) + FamilyName string `json:"familyName"` // The family name of the user. (Required.) + Formatted *string `json:"formatted,omitempty"` // (Optional.) +} + +// SCIMUserEmail represents SCIM user email. +type SCIMUserEmail struct { + Value string `json:"value"` // (Required.) + Primary *bool `json:"primary,omitempty"` // (Optional.) + Type *string `json:"type,omitempty"` // (Optional.) +} + +// SCIMMeta represents metadata about the SCIM resource. +type SCIMMeta struct { + ResourceType *string `json:"resourceType,omitempty"` + Created *Timestamp `json:"created,omitempty"` + LastModified *Timestamp `json:"lastModified,omitempty"` + Location *string `json:"location,omitempty"` +} + +// SCIMProvisionedIdentities represents the result of calling ListSCIMProvisionedIdentities. +type SCIMProvisionedIdentities struct { + Schemas []string `json:"schemas,omitempty"` + TotalResults *int `json:"totalResults,omitempty"` + ItemsPerPage *int `json:"itemsPerPage,omitempty"` + StartIndex *int `json:"startIndex,omitempty"` + Resources []*SCIMUserAttributes `json:"Resources,omitempty"` +} + +// ListSCIMProvisionedIdentitiesOptions represents options for ListSCIMProvisionedIdentities. +// +// Github API docs: https://docs.github.com/en/rest/scim#list-scim-provisioned-identities--parameters +type ListSCIMProvisionedIdentitiesOptions struct { + StartIndex *int `url:"startIndex,omitempty"` // Used for pagination: the index of the first result to return. (Optional.) + Count *int `url:"count,omitempty"` // Used for pagination: the number of results to return. (Optional.) + // Filter results using the equals query parameter operator (eq). + // You can filter results that are equal to id, userName, emails, and external_id. + // For example, to search for an identity with the userName Octocat, you would use this query: ?filter=userName%20eq%20\"Octocat\". + // To filter results for the identity with the email octocat@github.com, you would use this query: ?filter=emails%20eq%20\"octocat@github.com\". + // (Optional.) 
+ Filter *string `url:"filter,omitempty"` +} + +// ListSCIMProvisionedIdentities lists SCIM provisioned identities. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#list-scim-provisioned-identities +func (s *SCIMService) ListSCIMProvisionedIdentities(ctx context.Context, org string, opts *ListSCIMProvisionedIdentitiesOptions) (*SCIMProvisionedIdentities, *Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + identities := new(SCIMProvisionedIdentities) + resp, err := s.client.Do(ctx, req, identities) + if err != nil { + return nil, resp, err + } + + return identities, resp, nil +} + +// ProvisionAndInviteSCIMUser provisions organization membership for a user, and sends an activation email to the email address. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#provision-and-invite-a-scim-user +func (s *SCIMService) ProvisionAndInviteSCIMUser(ctx context.Context, org string, opts *SCIMUserAttributes) (*Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, err + } + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// GetSCIMProvisioningInfoForUser returns SCIM provisioning information for a user. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#supported-scim-user-attributes +func (s *SCIMService) GetSCIMProvisioningInfoForUser(ctx context.Context, org, scimUserID string) (*SCIMUserAttributes, *Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + user := new(SCIMUserAttributes) + resp, err := s.client.Do(ctx, req, &user) + if err != nil { + return nil, resp, err + } + + return user, resp, nil +} + +// UpdateProvisionedOrgMembership updates a provisioned organization membership. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#update-a-provisioned-organization-membership +func (s *SCIMService) UpdateProvisionedOrgMembership(ctx context.Context, org, scimUserID string, opts *SCIMUserAttributes) (*Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) + u, err := addOptions(u, opts) + if err != nil { + return nil, err + } + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// UpdateAttributeForSCIMUserOptions represents options for UpdateAttributeForSCIMUser. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#update-an-attribute-for-a-scim-user--parameters +type UpdateAttributeForSCIMUserOptions struct { + Schemas []string `json:"schemas,omitempty"` // (Optional.) + Operations UpdateAttributeForSCIMUserOperations `json:"operations"` // Set of operations to be performed. (Required.) +} + +// UpdateAttributeForSCIMUserOperations represents operations for UpdateAttributeForSCIMUser. +type UpdateAttributeForSCIMUserOperations struct { + Op string `json:"op"` // (Required.) + Path *string `json:"path,omitempty"` // (Optional.) + Value json.RawMessage `json:"value,omitempty"` // (Optional.) +} + +// UpdateAttributeForSCIMUser updates an attribute for an SCIM user. 
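Editorial note: a sketch, not part of the vendored file, of filtering SCIM provisioned identities with the Filter option documented above. The organization name is supplied by the caller and fmt is assumed to be imported.

func findSCIMIdentity(ctx context.Context, client *github.Client, org, userName string) (*github.SCIMUserAttributes, error) {
	opts := &github.ListSCIMProvisionedIdentitiesOptions{
		// Equals filter on userName, e.g. userName eq "Octocat".
		Filter: github.String(fmt.Sprintf("userName eq %q", userName)),
	}
	identities, _, err := client.SCIM.ListSCIMProvisionedIdentities(ctx, org, opts)
	if err != nil {
		return nil, err
	}
	if len(identities.Resources) == 0 {
		return nil, fmt.Errorf("no SCIM identity found for %s", userName)
	}
	return identities.Resources[0], nil
}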
+// +// GitHub API docs: https://docs.github.com/en/rest/scim#update-an-attribute-for-a-scim-user +func (s *SCIMService) UpdateAttributeForSCIMUser(ctx context.Context, org, scimUserID string, opts *UpdateAttributeForSCIMUserOptions) (*Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) + u, err := addOptions(u, opts) + if err != nil { + return nil, err + } + + req, err := s.client.NewRequest("PATCH", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteSCIMUserFromOrg deletes SCIM user from an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/scim#delete-a-scim-user-from-an-organization +func (s *SCIMService) DeleteSCIMUserFromOrg(ctx context.Context, org, scimUserID string) (*Response, error) { + u := fmt.Sprintf("scim/v2/organizations/%v/Users/%v", org, scimUserID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/search.go b/vendor/github.com/google/go-github/v56/github/search.go new file mode 100644 index 000000000..adb832d0d --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/search.go @@ -0,0 +1,333 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "strconv" + "strings" + + qs "github.com/google/go-querystring/query" +) + +// SearchService provides access to the search related functions +// in the GitHub API. +// +// Each method takes a query string defining the search keywords and any search qualifiers. +// For example, when searching issues, the query "gopher is:issue language:go" will search +// for issues containing the word "gopher" in Go repositories. The method call +// +// opts := &github.SearchOptions{Sort: "created", Order: "asc"} +// cl.Search.Issues(ctx, "gopher is:issue language:go", opts) +// +// will search for such issues, sorting by creation date in ascending order +// (i.e., oldest first). +// +// If query includes multiple conditions, it MUST NOT include "+" as the condition separator. +// You have to use " " as the separator instead. +// For example, querying with "language:c++" and "leveldb", then query should be +// "language:c++ leveldb" but not "language:c+++leveldb". +// +// GitHub API docs: https://docs.github.com/en/rest/search/ +type SearchService service + +// SearchOptions specifies optional parameters to the SearchService methods. +type SearchOptions struct { + // How to sort the search results. Possible values are: + // - for repositories: stars, fork, updated + // - for commits: author-date, committer-date + // - for code: indexed + // - for issues: comments, created, updated + // - for users: followers, repositories, joined + // + // Default is to sort by best match. + Sort string `url:"sort,omitempty"` + + // Sort order if sort parameter is provided. Possible values are: asc, + // desc. Default is desc. + Order string `url:"order,omitempty"` + + // Whether to retrieve text match metadata with a query + TextMatch bool `url:"-"` + + ListOptions +} + +// Common search parameters. +type searchParameters struct { + Query string + RepositoryID *int64 // Sent if non-nil. +} + +// RepositoriesSearchResult represents the result of a repositories search. 
+type RepositoriesSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Repositories []*Repository `json:"items,omitempty"` +} + +// Repositories searches repositories via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-repositories +func (s *SearchService) Repositories(ctx context.Context, query string, opts *SearchOptions) (*RepositoriesSearchResult, *Response, error) { + result := new(RepositoriesSearchResult) + resp, err := s.search(ctx, "repositories", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// TopicsSearchResult represents the result of a topics search. +type TopicsSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Topics []*TopicResult `json:"items,omitempty"` +} + +type TopicResult struct { + Name *string `json:"name,omitempty"` + DisplayName *string `json:"display_name,omitempty"` + ShortDescription *string `json:"short_description,omitempty"` + Description *string `json:"description,omitempty"` + CreatedBy *string `json:"created_by,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *string `json:"updated_at,omitempty"` + Featured *bool `json:"featured,omitempty"` + Curated *bool `json:"curated,omitempty"` + Score *float64 `json:"score,omitempty"` +} + +// Topics finds topics via various criteria. Results are sorted by best match. +// Please see https://help.github.com/en/articles/searching-topics for more +// information about search qualifiers. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-topics +func (s *SearchService) Topics(ctx context.Context, query string, opts *SearchOptions) (*TopicsSearchResult, *Response, error) { + result := new(TopicsSearchResult) + resp, err := s.search(ctx, "topics", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// CommitsSearchResult represents the result of a commits search. +type CommitsSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Commits []*CommitResult `json:"items,omitempty"` +} + +// CommitResult represents a commit object as returned in commit search endpoint response. +type CommitResult struct { + SHA *string `json:"sha,omitempty"` + Commit *Commit `json:"commit,omitempty"` + Author *User `json:"author,omitempty"` + Committer *User `json:"committer,omitempty"` + Parents []*Commit `json:"parents,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + URL *string `json:"url,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + + Repository *Repository `json:"repository,omitempty"` + Score *float64 `json:"score,omitempty"` +} + +// Commits searches commits via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-commits +func (s *SearchService) Commits(ctx context.Context, query string, opts *SearchOptions) (*CommitsSearchResult, *Response, error) { + result := new(CommitsSearchResult) + resp, err := s.search(ctx, "commits", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// IssuesSearchResult represents the result of an issues search. 
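Editorial note: a sketch, not part of the vendored file, of a repository search using the SearchOptions above. The query string is a placeholder; note the space-separated qualifiers, as the package comment requires.

func searchGoRepos(ctx context.Context, client *github.Client) ([]*github.Repository, error) {
	opts := &github.SearchOptions{
		Sort:        "stars",
		Order:       "desc",
		ListOptions: github.ListOptions{PerPage: 50},
	}
	result, _, err := client.Search.Repositories(ctx, "tekton in:name language:go", opts)
	if err != nil {
		return nil, err
	}
	return result.Repositories, nil
}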
+type IssuesSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Issues []*Issue `json:"items,omitempty"` +} + +// Issues searches issues via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-issues-and-pull-requests +func (s *SearchService) Issues(ctx context.Context, query string, opts *SearchOptions) (*IssuesSearchResult, *Response, error) { + result := new(IssuesSearchResult) + resp, err := s.search(ctx, "issues", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// UsersSearchResult represents the result of a users search. +type UsersSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Users []*User `json:"items,omitempty"` +} + +// Users searches users via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-users +func (s *SearchService) Users(ctx context.Context, query string, opts *SearchOptions) (*UsersSearchResult, *Response, error) { + result := new(UsersSearchResult) + resp, err := s.search(ctx, "users", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// Match represents a single text match. +type Match struct { + Text *string `json:"text,omitempty"` + Indices []int `json:"indices,omitempty"` +} + +// TextMatch represents a text match for a SearchResult +type TextMatch struct { + ObjectURL *string `json:"object_url,omitempty"` + ObjectType *string `json:"object_type,omitempty"` + Property *string `json:"property,omitempty"` + Fragment *string `json:"fragment,omitempty"` + Matches []*Match `json:"matches,omitempty"` +} + +func (tm TextMatch) String() string { + return Stringify(tm) +} + +// CodeSearchResult represents the result of a code search. +type CodeSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + CodeResults []*CodeResult `json:"items,omitempty"` +} + +// CodeResult represents a single search result. +type CodeResult struct { + Name *string `json:"name,omitempty"` + Path *string `json:"path,omitempty"` + SHA *string `json:"sha,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + Repository *Repository `json:"repository,omitempty"` + TextMatches []*TextMatch `json:"text_matches,omitempty"` +} + +func (c CodeResult) String() string { + return Stringify(c) +} + +// Code searches code via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-code +func (s *SearchService) Code(ctx context.Context, query string, opts *SearchOptions) (*CodeSearchResult, *Response, error) { + result := new(CodeSearchResult) + resp, err := s.search(ctx, "code", &searchParameters{Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// LabelsSearchResult represents the result of a code search. +type LabelsSearchResult struct { + Total *int `json:"total_count,omitempty"` + IncompleteResults *bool `json:"incomplete_results,omitempty"` + Labels []*LabelResult `json:"items,omitempty"` +} + +// LabelResult represents a single search result. 
+type LabelResult struct { + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + Name *string `json:"name,omitempty"` + Color *string `json:"color,omitempty"` + Default *bool `json:"default,omitempty"` + Description *string `json:"description,omitempty"` + Score *float64 `json:"score,omitempty"` +} + +func (l LabelResult) String() string { + return Stringify(l) +} + +// Labels searches labels in the repository with ID repoID via various criteria. +// +// GitHub API docs: https://docs.github.com/en/rest/search#search-labels +func (s *SearchService) Labels(ctx context.Context, repoID int64, query string, opts *SearchOptions) (*LabelsSearchResult, *Response, error) { + result := new(LabelsSearchResult) + resp, err := s.search(ctx, "labels", &searchParameters{RepositoryID: &repoID, Query: query}, opts, result) + if err != nil { + return nil, resp, err + } + + return result, resp, nil +} + +// Helper function that executes search queries against different +// GitHub search types (repositories, commits, code, issues, users, labels) +// +// If searchParameters.Query includes multiple condition, it MUST NOT include "+" as condition separator. +// For example, querying with "language:c++" and "leveldb", then searchParameters.Query should be "language:c++ leveldb" but not "language:c+++leveldb". +func (s *SearchService) search(ctx context.Context, searchType string, parameters *searchParameters, opts *SearchOptions, result interface{}) (*Response, error) { + params, err := qs.Values(opts) + if err != nil { + return nil, err + } + + if parameters.RepositoryID != nil { + params.Set("repository_id", strconv.FormatInt(*parameters.RepositoryID, 10)) + } + params.Set("q", parameters.Query) + u := fmt.Sprintf("search/%s?%s", searchType, params.Encode()) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, err + } + var acceptHeaders []string + switch { + case searchType == "commits": + // Accept header for search commits preview endpoint + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders = append(acceptHeaders, mediaTypeCommitSearchPreview) + case searchType == "topics": + // Accept header for search repositories based on topics preview endpoint + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) + case searchType == "repositories": + // Accept header for search repositories based on topics preview endpoint + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders = append(acceptHeaders, mediaTypeTopicsPreview) + case searchType == "issues": + // Accept header for search issues based on reactions preview endpoint + // TODO: remove custom Accept header when this API fully launches. 
+ acceptHeaders = append(acceptHeaders, mediaTypeReactionsPreview) + } + // https://docs.github.com/en/rest/search#search-repositories + // Accept header defaults to "application/vnd.github.v3+json" + // We change it here to fetch back text-match metadata + if opts != nil && opts.TextMatch { + acceptHeaders = append(acceptHeaders, "application/vnd.github.v3.text-match+json") + } + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, result) +} diff --git a/vendor/github.com/google/go-github/v56/github/secret_scanning.go b/vendor/github.com/google/go-github/v56/github/secret_scanning.go new file mode 100644 index 000000000..30d3da8eb --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/secret_scanning.go @@ -0,0 +1,245 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// SecretScanningService handles communication with the secret scanning related +// methods of the GitHub API. +type SecretScanningService service + +// SecretScanningAlert represents a GitHub secret scanning alert. +type SecretScanningAlert struct { + Number *int `json:"number,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + LocationsURL *string `json:"locations_url,omitempty"` + State *string `json:"state,omitempty"` + Resolution *string `json:"resolution,omitempty"` + ResolvedAt *Timestamp `json:"resolved_at,omitempty"` + ResolvedBy *User `json:"resolved_by,omitempty"` + SecretType *string `json:"secret_type,omitempty"` + SecretTypeDisplayName *string `json:"secret_type_display_name,omitempty"` + Secret *string `json:"secret,omitempty"` + Repository *Repository `json:"repository,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + PushProtectionBypassed *bool `json:"push_protection_bypassed,omitempty"` + PushProtectionBypassedBy *User `json:"push_protection_bypassed_by,omitempty"` + PushProtectionBypassedAt *Timestamp `json:"push_protection_bypassed_at,omitempty"` + ResolutionComment *string `json:"resolution_comment,omitempty"` +} + +// SecretScanningAlertLocation represents the location for a secret scanning alert. +type SecretScanningAlertLocation struct { + Type *string `json:"type,omitempty"` + Details *SecretScanningAlertLocationDetails `json:"details,omitempty"` +} + +// SecretScanningAlertLocationDetails represents the location details for a secret scanning alert. +type SecretScanningAlertLocationDetails struct { + Path *string `json:"path,omitempty"` + Startline *int `json:"start_line,omitempty"` + EndLine *int `json:"end_line,omitempty"` + StartColumn *int `json:"start_column,omitempty"` + EndColumn *int `json:"end_column,omitempty"` + BlobSHA *string `json:"blob_sha,omitempty"` + BlobURL *string `json:"blob_url,omitempty"` + CommitSHA *string `json:"commit_sha,omitempty"` + CommitURL *string `json:"commit_url,omitempty"` +} + +// SecretScanningAlertListOptions specifies optional parameters to the SecretScanningService.ListAlertsForEnterprise method. +type SecretScanningAlertListOptions struct { + // State of the secret scanning alerts to list. Set to open or resolved to only list secret scanning alerts in a specific state. + State string `url:"state,omitempty"` + + // A comma-separated list of secret types to return. By default all secret types are returned. 
+ SecretType string `url:"secret_type,omitempty"` + + // A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. + // Valid resolutions are false_positive, wont_fix, revoked, pattern_edited, pattern_deleted or used_in_tests. + Resolution string `url:"resolution,omitempty"` + + ListCursorOptions + + // List options can vary on the Enterprise type. + // On Enterprise Cloud, Secret Scan alerts support requesting by page number + // along with providing a cursor for an "after" param. + // See: https://docs.github.com/en/enterprise-cloud@latest/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization + // Whereas on Enterprise Server, pagination is by index. + // See: https://docs.github.com/en/enterprise-server@3.6/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization + ListOptions +} + +// SecretScanningAlertUpdateOptions specifies optional parameters to the SecretScanningService.UpdateAlert method. +type SecretScanningAlertUpdateOptions struct { + // State is required and sets the state of the secret scanning alert. + // Can be either "open" or "resolved". + // You must provide resolution when you set the state to "resolved". + State string `json:"state"` + + // Required when the state is "resolved" and represents the reason for resolving the alert. + // Can be one of: "false_positive", "wont_fix", "revoked", or "used_in_tests". + Resolution *string `json:"resolution,omitempty"` +} + +// Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. +// +// To use this endpoint, you must be a member of the enterprise, and you must use an access token with the repo scope or +// security_events scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#list-secret-scanning-alerts-for-an-enterprise +func (s *SecretScanningService) ListAlertsForEnterprise(ctx context.Context, enterprise string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { + u := fmt.Sprintf("enterprises/%v/secret-scanning/alerts", enterprise) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*SecretScanningAlert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. +// +// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with +// the repo scope or security_events scope. 
+// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#list-secret-scanning-alerts-for-an-organization +func (s *SecretScanningService) ListAlertsForOrg(ctx context.Context, org string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { + u := fmt.Sprintf("orgs/%v/secret-scanning/alerts", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*SecretScanningAlert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// Lists secret scanning alerts for a private repository, from newest to oldest. +// +// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with +// the repo scope or security_events scope. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#list-secret-scanning-alerts-for-a-repository +func (s *SecretScanningService) ListAlertsForRepo(ctx context.Context, owner, repo string, opts *SecretScanningAlertListOptions) ([]*SecretScanningAlert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts", owner, repo) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alerts []*SecretScanningAlert + resp, err := s.client.Do(ctx, req, &alerts) + if err != nil { + return nil, resp, err + } + + return alerts, resp, nil +} + +// Gets a single secret scanning alert detected in a private repository. +// +// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with +// the repo scope or security_events scope. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#get-a-secret-scanning-alert +func (s *SecretScanningService) GetAlert(ctx context.Context, owner, repo string, number int64) (*SecretScanningAlert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var alert *SecretScanningAlert + resp, err := s.client.Do(ctx, req, &alert) + if err != nil { + return nil, resp, err + } + + return alert, resp, nil +} + +// Updates the status of a secret scanning alert in a private repository. +// +// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with +// the repo scope or security_events scope. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#update-a-secret-scanning-alert +func (s *SecretScanningService) UpdateAlert(ctx context.Context, owner, repo string, number int64, opts *SecretScanningAlertUpdateOptions) (*SecretScanningAlert, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v", owner, repo, number) + + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + var alert *SecretScanningAlert + resp, err := s.client.Do(ctx, req, &alert) + if err != nil { + return nil, resp, err + } + + return alert, resp, nil +} + +// Lists all locations for a given secret scanning alert for a private repository. 
+// +// To use this endpoint, you must be an administrator for the repository or organization, and you must use an access token with +// the repo scope or security_events scope. +// +// GitHub API docs: https://docs.github.com/en/enterprise-server@3.5/rest/secret-scanning#list-locations-for-a-secret-scanning-alert +func (s *SecretScanningService) ListLocationsForAlert(ctx context.Context, owner, repo string, number int64, opts *ListOptions) ([]*SecretScanningAlertLocation, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/secret-scanning/alerts/%v/locations", owner, repo, number) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var locations []*SecretScanningAlertLocation + resp, err := s.client.Do(ctx, req, &locations) + if err != nil { + return nil, resp, err + } + + return locations, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/security_advisories.go b/vendor/github.com/google/go-github/v56/github/security_advisories.go new file mode 100644 index 000000000..681d0cd4b --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/security_advisories.go @@ -0,0 +1,120 @@ +// Copyright 2023 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +type SecurityAdvisoriesService service + +// SecurityAdvisorySubmission represents the Security Advisory Submission. +type SecurityAdvisorySubmission struct { + // Accepted represents whether a private vulnerability report was accepted by the repository's administrators. + Accepted *bool `json:"accepted,omitempty"` +} + +// RepoAdvisoryCredit represents the credit object for a repository Security Advisory. +type RepoAdvisoryCredit struct { + Login *string `json:"login,omitempty"` + Type *string `json:"type,omitempty"` +} + +// RepoAdvisoryCreditDetailed represents a credit given to a user for a repository Security Advisory. +type RepoAdvisoryCreditDetailed struct { + User *User `json:"user,omitempty"` + Type *string `json:"type,omitempty"` + State *string `json:"state,omitempty"` +} + +// ListRepositorySecurityAdvisoriesOptions specifies the optional parameters to list the repository security advisories. +type ListRepositorySecurityAdvisoriesOptions struct { + ListCursorOptions + + // Direction in which to sort advisories. Possible values are: asc, desc. + // Default is "asc". + Direction string `url:"direction,omitempty"` + + // Sort specifies how to sort advisories. Possible values are: created, updated, + // and published. Default value is "created". + Sort string `url:"sort,omitempty"` + + // State filters advisories based on their state. Possible values are: triage, draft, published, closed. + State string `url:"state,omitempty"` +} + +// RequestCVE requests a Common Vulnerabilities and Exposures (CVE) for a repository security advisory. +// The ghsaID is the GitHub Security Advisory identifier of the advisory. 
+// +// GitHub API docs: https://docs.github.com/en/rest/security-advisories/repository-advisories#request-a-cve-for-a-repository-security-advisory +func (s *SecurityAdvisoriesService) RequestCVE(ctx context.Context, owner, repo, ghsaID string) (*Response, error) { + url := fmt.Sprintf("repos/%v/%v/security-advisories/%v/cve", owner, repo, ghsaID) + + req, err := s.client.NewRequest("POST", url, nil) + if err != nil { + return nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + if err != nil { + if _, ok := err.(*AcceptedError); ok { + return resp, nil + } + + return resp, err + } + + return resp, nil +} + +// ListRepositorySecurityAdvisoriesForOrg lists the repository security advisories for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/security-advisories/repository-advisories?apiVersion=2022-11-28#list-repository-security-advisories-for-an-organization +func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisoriesForOrg(ctx context.Context, org string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { + url := fmt.Sprintf("orgs/%v/security-advisories", org) + url, err := addOptions(url, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + var advisories []*SecurityAdvisory + resp, err := s.client.Do(ctx, req, &advisories) + if err != nil { + return nil, resp, err + } + + return advisories, resp, nil +} + +// ListRepositorySecurityAdvisories lists the security advisories in a repository. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/security-advisories/repository-advisories?apiVersion=2022-11-28#list-repository-security-advisories +func (s *SecurityAdvisoriesService) ListRepositorySecurityAdvisories(ctx context.Context, owner, repo string, opt *ListRepositorySecurityAdvisoriesOptions) ([]*SecurityAdvisory, *Response, error) { + url := fmt.Sprintf("repos/%v/%v/security-advisories", owner, repo) + url, err := addOptions(url, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", url, nil) + if err != nil { + return nil, nil, err + } + + var advisories []*SecurityAdvisory + resp, err := s.client.Do(ctx, req, &advisories) + if err != nil { + return nil, resp, err + } + + return advisories, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/strings.go b/vendor/github.com/google/go-github/v56/github/strings.go new file mode 100644 index 000000000..147c515e2 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/strings.go @@ -0,0 +1,94 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "bytes" + "fmt" + "reflect" +) + +var timestampType = reflect.TypeOf(Timestamp{}) + +// Stringify attempts to create a reasonable string representation of types in +// the GitHub library. It does things like resolve pointers to their values +// and omits struct fields with nil values. +func Stringify(message interface{}) string { + var buf bytes.Buffer + v := reflect.ValueOf(message) + stringifyValue(&buf, v) + return buf.String() +} + +// stringifyValue was heavily inspired by the goprotobuf library. 
+ +func stringifyValue(w *bytes.Buffer, val reflect.Value) { + if val.Kind() == reflect.Ptr && val.IsNil() { + w.Write([]byte("")) + return + } + + v := reflect.Indirect(val) + + switch v.Kind() { + case reflect.String: + fmt.Fprintf(w, `"%s"`, v) + case reflect.Slice: + w.Write([]byte{'['}) + for i := 0; i < v.Len(); i++ { + if i > 0 { + w.Write([]byte{' '}) + } + + stringifyValue(w, v.Index(i)) + } + + w.Write([]byte{']'}) + return + case reflect.Struct: + if v.Type().Name() != "" { + w.Write([]byte(v.Type().String())) + } + + // special handling of Timestamp values + if v.Type() == timestampType { + fmt.Fprintf(w, "{%s}", v.Interface()) + return + } + + w.Write([]byte{'{'}) + + var sep bool + for i := 0; i < v.NumField(); i++ { + fv := v.Field(i) + if fv.Kind() == reflect.Ptr && fv.IsNil() { + continue + } + if fv.Kind() == reflect.Slice && fv.IsNil() { + continue + } + if fv.Kind() == reflect.Map && fv.IsNil() { + continue + } + + if sep { + w.Write([]byte(", ")) + } else { + sep = true + } + + w.Write([]byte(v.Type().Field(i).Name)) + w.Write([]byte{':'}) + stringifyValue(w, fv) + } + + w.Write([]byte{'}'}) + default: + if v.CanInterface() { + fmt.Fprint(w, v.Interface()) + } + } +} diff --git a/vendor/github.com/google/go-github/v56/github/teams.go b/vendor/github.com/google/go-github/v56/github/teams.go new file mode 100644 index 000000000..0ee7c2007 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/teams.go @@ -0,0 +1,992 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" + "net/http" + "strings" +) + +// TeamsService provides access to the team-related functions +// in the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/ +type TeamsService service + +// Team represents a team within a GitHub organization. Teams are used to +// manage access to an organization's repositories. +type Team struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + URL *string `json:"url,omitempty"` + Slug *string `json:"slug,omitempty"` + + // Permission specifies the default permission for repositories owned by the team. + Permission *string `json:"permission,omitempty"` + + // Permissions identifies the permissions that a team has on a given + // repository. This is only populated when calling Repositories.ListTeams. + Permissions map[string]bool `json:"permissions,omitempty"` + + // Privacy identifies the level of privacy this team should have. + // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + MembersCount *int `json:"members_count,omitempty"` + ReposCount *int `json:"repos_count,omitempty"` + Organization *Organization `json:"organization,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + MembersURL *string `json:"members_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` + Parent *Team `json:"parent,omitempty"` + + // LDAPDN is only available in GitHub Enterprise and when the team + // membership is synchronized with LDAP. 
+ LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (t Team) String() string { + return Stringify(t) +} + +// Invitation represents a team member's invitation status. +type Invitation struct { + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Login *string `json:"login,omitempty"` + Email *string `json:"email,omitempty"` + // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. + Role *string `json:"role,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + Inviter *User `json:"inviter,omitempty"` + TeamCount *int `json:"team_count,omitempty"` + InvitationTeamURL *string `json:"invitation_team_url,omitempty"` + FailedAt *Timestamp `json:"failed_at,omitempty"` + FailedReason *string `json:"failed_reason,omitempty"` +} + +func (i Invitation) String() string { + return Stringify(i) +} + +// ListTeams lists all of the teams for an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-teams +func (s *TeamsService) ListTeams(ctx context.Context, org string, opts *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// GetTeamByID fetches a team, given a specified organization ID, by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#get-a-team-by-name +func (s *TeamsService) GetTeamByID(ctx context.Context, orgID, teamID int64) (*Team, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// GetTeamBySlug fetches a team, given a specified organization name, by slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#get-a-team-by-name +func (s *TeamsService) GetTeamBySlug(ctx context.Context, org, slug string) (*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// NewTeam represents a team to be created or modified. +type NewTeam struct { + Name string `json:"name"` // Name of the team. (Required.) + Description *string `json:"description,omitempty"` + Maintainers []string `json:"maintainers,omitempty"` + RepoNames []string `json:"repo_names,omitempty"` + ParentTeamID *int64 `json:"parent_team_id,omitempty"` + + // Deprecated: Permission is deprecated when creating or editing a team in an org + // using the new GitHub permission model. It no longer identifies the + // permission a team has on its repos, but only specifies the default + // permission a repo is initially added with. Avoid confusion by + // specifying a permission value when calling AddTeamRepo. + Permission *string `json:"permission,omitempty"` + + // Privacy identifies the level of privacy this team should have. 
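A usage sketch for the team listing and lookup calls above (illustrative only; the organization and slug arguments are placeholders, and the client is assumed to be an authenticated *github.Client built as in the first sketch):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// printTeams walks every team in the organization, 100 per page, then
// fetches a single team directly by its slug.
func printTeams(ctx context.Context, client *github.Client, org, slug string) error {
	opts := &github.ListOptions{PerPage: 100}
	for {
		teams, resp, err := client.Teams.ListTeams(ctx, org, opts)
		if err != nil {
			return err
		}
		for _, t := range teams {
			fmt.Printf("%v (%v)\n", t.GetName(), t.GetSlug())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}

	team, _, err := client.Teams.GetTeamBySlug(ctx, org, slug)
	if err != nil {
		return err
	}
	fmt.Println(github.Stringify(team))
	return nil
}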
+ // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + // LDAPDN may be used in GitHub Enterprise when the team membership + // is synchronized with LDAP. + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (s NewTeam) String() string { + return Stringify(s) +} + +// CreateTeam creates a new team within an organization. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#create-a-team +func (s *TeamsService) CreateTeam(ctx context.Context, org string, team NewTeam) (*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + req, err := s.client.NewRequest("POST", u, team) + if err != nil { + return nil, nil, err + } + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// newTeamNoParent is the same as NewTeam but ensures that the +// "parent_team_id" field will be null. It is for internal use +// only and should not be exported. +type newTeamNoParent struct { + Name string `json:"name"` + Description *string `json:"description,omitempty"` + Maintainers []string `json:"maintainers,omitempty"` + RepoNames []string `json:"repo_names,omitempty"` + ParentTeamID *int64 `json:"parent_team_id"` // This will be "null" + Privacy *string `json:"privacy,omitempty"` + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +// copyNewTeamWithoutParent is used to set the "parent_team_id" +// field to "null" after copying the other fields from a NewTeam. +// It is for internal use only and should not be exported. +func copyNewTeamWithoutParent(team *NewTeam) *newTeamNoParent { + return &newTeamNoParent{ + Name: team.Name, + Description: team.Description, + Maintainers: team.Maintainers, + RepoNames: team.RepoNames, + Privacy: team.Privacy, + LDAPDN: team.LDAPDN, + } +} + +// EditTeamByID edits a team, given an organization ID, selected by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#update-a-team +func (s *TeamsService) EditTeamByID(ctx context.Context, orgID, teamID int64, team NewTeam, removeParent bool) (*Team, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) + + var req *http.Request + var err error + if removeParent { + teamRemoveParent := copyNewTeamWithoutParent(&team) + req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) + } else { + req, err = s.client.NewRequest("PATCH", u, team) + } + if err != nil { + return nil, nil, err + } + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// EditTeamBySlug edits a team, given an organization name, by slug. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#update-a-team +func (s *TeamsService) EditTeamBySlug(ctx context.Context, org, slug string, team NewTeam, removeParent bool) (*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) + + var req *http.Request + var err error + if removeParent { + teamRemoveParent := copyNewTeamWithoutParent(&team) + req, err = s.client.NewRequest("PATCH", u, teamRemoveParent) + } else { + req, err = s.client.NewRequest("PATCH", u, team) + } + if err != nil { + return nil, nil, err + } + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// DeleteTeamByID deletes a team referenced by ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#delete-a-team +func (s *TeamsService) DeleteTeamByID(ctx context.Context, orgID, teamID int64) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v", orgID, teamID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteTeamBySlug deletes a team reference by slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#delete-a-team +func (s *TeamsService) DeleteTeamBySlug(ctx context.Context, org, slug string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v", org, slug) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListChildTeamsByParentID lists child teams for a parent team given parent ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-child-teams +func (s *TeamsService) ListChildTeamsByParentID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/teams", orgID, teamID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListChildTeamsByParentSlug lists child teams for a parent team given parent slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-child-teams +func (s *TeamsService) ListChildTeamsByParentSlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/teams", org, slug) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListTeamReposByID lists the repositories given a team ID that the specified team has access to. 
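To see CreateTeam and EditTeamBySlug together, a sketch under the same assumptions (authenticated client; the team name, description, and maintainer login are placeholders):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// createAndRenameTeam creates a closed (organization-visible) team with one
// initial maintainer, then renames it by slug. Passing false as the final
// argument to EditTeamBySlug leaves any parent team untouched.
func createAndRenameTeam(ctx context.Context, client *github.Client, org string) error {
	team, _, err := client.Teams.CreateTeam(ctx, org, github.NewTeam{
		Name:        "release-engineering", // placeholder name
		Description: github.String("Owns the release pipelines"),
		Maintainers: []string{"octocat"},
		Privacy:     github.String("closed"),
	})
	if err != nil {
		return err
	}

	renamed, _, err := client.Teams.EditTeamBySlug(ctx, org, team.GetSlug(),
		github.NewTeam{Name: "release-eng"}, false)
	if err != nil {
		return err
	}
	fmt.Println(github.Stringify(renamed))
	return nil
}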
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-team-repositories +func (s *TeamsService) ListTeamReposByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/repos", orgID, teamID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when topics API fully launches. + headers := []string{mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// ListTeamReposBySlug lists the repositories given a team slug that the specified team has access to. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-team-repositories +func (s *TeamsService) ListTeamReposBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/repos", org, slug) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when topics API fully launches. + headers := []string{mediaTypeTopicsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// IsTeamRepoByID checks if a team, given its ID, manages the specified repository. If the +// repository is managed by team, a Repository is returned which includes the +// permissions team has for that repo. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#check-team-permissions-for-a-repository +func (s *TeamsService) IsTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Repository, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + headers := []string{mediaTypeOrgPermissionRepo} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// IsTeamRepoBySlug checks if a team, given its slug, manages the specified repository. If the +// repository is managed by team, a Repository is returned which includes the +// permissions team has for that repo. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#check-team-permissions-for-a-repository +func (s *TeamsService) IsTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Repository, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + headers := []string{mediaTypeOrgPermissionRepo} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// TeamAddTeamRepoOptions specifies the optional parameters to the +// TeamsService.AddTeamRepoByID and TeamsService.AddTeamRepoBySlug methods. +type TeamAddTeamRepoOptions struct { + // Permission specifies the permission to grant the team on this repository. + // Possible values are: + // pull - team members can pull, but not push to or administer this repository + // push - team members can pull and push, but not administer this repository + // admin - team members can pull, push and administer this repository + // maintain - team members can manage the repository without access to sensitive or destructive actions. + // triage - team members can proactively manage issues and pull requests without write access. + // + // If not specified, the team's permission attribute will be used. + Permission string `json:"permission,omitempty"` +} + +// AddTeamRepoByID adds a repository to be managed by the specified team given the team ID. +// The specified repository must be owned by the organization to which the team +// belongs, or a direct fork of a repository owned by the organization. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#add-or-update-team-repository-permissions +func (s *TeamsService) AddTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// AddTeamRepoBySlug adds a repository to be managed by the specified team given the team slug. +// The specified repository must be owned by the organization to which the team +// belongs, or a direct fork of a repository owned by the organization. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#add-or-update-team-repository-permissions +func (s *TeamsService) AddTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string, opts *TeamAddTeamRepoOptions) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamRepoByID removes a repository from being managed by the specified +// team given the team ID. Note that this does not delete the repository, it +// just removes it from the team. 
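The repo-management pair above composes as follows; a sketch assuming an authenticated client, placeholder org/team/repo names, and the usual Permissions map on Repository (not shown in this hunk):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// grantRepoAccess gives a team push access to a repository owned by the same
// organization, then confirms that the team now manages it.
func grantRepoAccess(ctx context.Context, client *github.Client, org, slug, owner, repo string) error {
	_, err := client.Teams.AddTeamRepoBySlug(ctx, org, slug, owner, repo,
		&github.TeamAddTeamRepoOptions{Permission: "push"})
	if err != nil {
		return err
	}

	managed, _, err := client.Teams.IsTeamRepoBySlug(ctx, org, slug, owner, repo)
	if err != nil {
		return err
	}
	fmt.Println(github.Stringify(managed.Permissions))
	return nil
}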
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#remove-a-repository-from-a-team +func (s *TeamsService) RemoveTeamRepoByID(ctx context.Context, orgID, teamID int64, owner, repo string) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/repos/%v/%v", orgID, teamID, owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamRepoBySlug removes a repository from being managed by the specified +// team given the team slug. Note that this does not delete the repository, it +// just removes it from the team. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#remove-a-repository-from-a-team +func (s *TeamsService) RemoveTeamRepoBySlug(ctx context.Context, org, slug, owner, repo string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/repos/%v/%v", org, slug, owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListUserTeams lists a user's teams +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-teams-for-the-authenticated-user +func (s *TeamsService) ListUserTeams(ctx context.Context, opts *ListOptions) ([]*Team, *Response, error) { + u := "user/teams" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListTeamProjectsByID lists the organization projects for a team given the team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-team-projects +func (s *TeamsService) ListTeamProjectsByID(ctx context.Context, orgID, teamID int64) ([]*Project, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/projects", orgID, teamID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// ListTeamProjectsBySlug lists the organization projects for a team given the team slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#list-team-projects +func (s *TeamsService) ListTeamProjectsBySlug(ctx context.Context, org, slug string) ([]*Project, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/projects", org, slug) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// ReviewTeamProjectsByID checks whether a team, given its ID, has read, write, or admin +// permissions for an organization project. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#check-team-permissions-for-a-project +func (s *TeamsService) ReviewTeamProjectsByID(ctx context.Context, orgID, teamID, projectID int64) (*Project, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + projects := &Project{} + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// ReviewTeamProjectsBySlug checks whether a team, given its slug, has read, write, or admin +// permissions for an organization project. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#check-team-permissions-for-a-project +func (s *TeamsService) ReviewTeamProjectsBySlug(ctx context.Context, org, slug string, projectID int64) (*Project, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + projects := &Project{} + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// TeamProjectOptions specifies the optional parameters to the +// TeamsService.AddTeamProject method. +type TeamProjectOptions struct { + // Permission specifies the permission to grant to the team for this project. + // Possible values are: + // "read" - team members can read, but not write to or administer this project. + // "write" - team members can read and write, but not administer this project. + // "admin" - team members can read, write and administer this project. + // + Permission *string `json:"permission,omitempty"` +} + +// AddTeamProjectByID adds an organization project to a team given the team ID. +// To add a project to a team or update the team's permission on a project, the +// authenticated user must have admin permissions for the project. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#add-or-update-team-project-permissions +func (s *TeamsService) AddTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64, opts *TeamProjectOptions) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} + +// AddTeamProjectBySlug adds an organization project to a team given the team slug. +// To add a project to a team or update the team's permission on a project, the +// authenticated user must have admin permissions for the project. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#add-or-update-team-project-permissions +func (s *TeamsService) AddTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64, opts *TeamProjectOptions) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamProjectByID removes an organization project from a team given team ID. +// An organization owner or a team maintainer can remove any project from the team. +// To remove a project from a team as an organization member, the authenticated user +// must have "read" access to both the team and project, or "admin" access to the team +// or project. +// Note: This endpoint removes the project from the team, but does not delete it. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#remove-a-project-from-a-team +func (s *TeamsService) RemoveTeamProjectByID(ctx context.Context, orgID, teamID, projectID int64) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/projects/%v", orgID, teamID, projectID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamProjectBySlug removes an organization project from a team given team slug. +// An organization owner or a team maintainer can remove any project from the team. +// To remove a project from a team as an organization member, the authenticated user +// must have "read" access to both the team and project, or "admin" access to the team +// or project. +// Note: This endpoint removes the project from the team, but does not delete it. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/teams#remove-a-project-from-a-team +func (s *TeamsService) RemoveTeamProjectBySlug(ctx context.Context, org, slug string, projectID int64) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/projects/%v", org, slug, projectID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + acceptHeaders := []string{mediaTypeProjectsPreview} + req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + + return s.client.Do(ctx, req, nil) +} + +// IDPGroupList represents a list of external identity provider (IDP) groups. +type IDPGroupList struct { + Groups []*IDPGroup `json:"groups"` +} + +// IDPGroup represents an external identity provider (IDP) group. +type IDPGroup struct { + GroupID *string `json:"group_id,omitempty"` + GroupName *string `json:"group_name,omitempty"` + GroupDescription *string `json:"group_description,omitempty"` +} + +// ListIDPGroupsInOrganization lists IDP groups available in an organization. 
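A sketch of the project-permission flow (authenticated client assumed; the project ID is a placeholder supplied by the caller):

package example

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// shareProjectWithTeam grants a team write access to an organization project
// and later removes the connection again; the project itself is not deleted.
func shareProjectWithTeam(ctx context.Context, client *github.Client, org, slug string, projectID int64) error {
	_, err := client.Teams.AddTeamProjectBySlug(ctx, org, slug, projectID,
		&github.TeamProjectOptions{Permission: github.String("write")})
	if err != nil {
		return err
	}

	_, err = client.Teams.RemoveTeamProjectBySlug(ctx, org, slug, projectID)
	return err
}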
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/team-sync#list-idp-groups-for-an-organization +func (s *TeamsService) ListIDPGroupsInOrganization(ctx context.Context, org string, opts *ListCursorOptions) (*IDPGroupList, *Response, error) { + u := fmt.Sprintf("orgs/%v/team-sync/groups", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + groups := new(IDPGroupList) + resp, err := s.client.Do(ctx, req, groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// ListIDPGroupsForTeamByID lists IDP groups connected to a team on GitHub +// given organization and team IDs. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/team-sync#list-idp-groups-for-a-team +func (s *TeamsService) ListIDPGroupsForTeamByID(ctx context.Context, orgID, teamID int64) (*IDPGroupList, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + groups := new(IDPGroupList) + resp, err := s.client.Do(ctx, req, groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// ListIDPGroupsForTeamBySlug lists IDP groups connected to a team on GitHub +// given organization name and team slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/team-sync#list-idp-groups-for-a-team +func (s *TeamsService) ListIDPGroupsForTeamBySlug(ctx context.Context, org, slug string) (*IDPGroupList, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + groups := new(IDPGroupList) + resp, err := s.client.Do(ctx, req, groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// CreateOrUpdateIDPGroupConnectionsByID creates, updates, or removes a connection +// between a team and an IDP group given organization and team IDs. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/team-sync#create-or-update-idp-group-connections +func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsByID(ctx context.Context, orgID, teamID int64, opts IDPGroupList) (*IDPGroupList, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/team-sync/group-mappings", orgID, teamID) + + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + groups := new(IDPGroupList) + resp, err := s.client.Do(ctx, req, groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// CreateOrUpdateIDPGroupConnectionsBySlug creates, updates, or removes a connection +// between a team and an IDP group given organization name and team slug. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/team-sync#create-or-update-idp-group-connections +func (s *TeamsService) CreateOrUpdateIDPGroupConnectionsBySlug(ctx context.Context, org, slug string, opts IDPGroupList) (*IDPGroupList, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/team-sync/group-mappings", org, slug) + + req, err := s.client.NewRequest("PATCH", u, opts) + if err != nil { + return nil, nil, err + } + + groups := new(IDPGroupList) + resp, err := s.client.Do(ctx, req, groups) + if err != nil { + return nil, resp, err + } + + return groups, resp, nil +} + +// ExternalGroupMember represents a member of an external group. +type ExternalGroupMember struct { + MemberID *int64 `json:"member_id,omitempty"` + MemberLogin *string `json:"member_login,omitempty"` + MemberName *string `json:"member_name,omitempty"` + MemberEmail *string `json:"member_email,omitempty"` +} + +// ExternalGroupTeam represents a team connected to an external group. +type ExternalGroupTeam struct { + TeamID *int64 `json:"team_id,omitempty"` + TeamName *string `json:"team_name,omitempty"` +} + +// ExternalGroup represents an external group. +type ExternalGroup struct { + GroupID *int64 `json:"group_id,omitempty"` + GroupName *string `json:"group_name,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Teams []*ExternalGroupTeam `json:"teams,omitempty"` + Members []*ExternalGroupMember `json:"members,omitempty"` +} + +// ExternalGroupList represents a list of external groups. +type ExternalGroupList struct { + Groups []*ExternalGroup `json:"groups"` +} + +// GetExternalGroup fetches an external group. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/teams/external-groups#get-an-external-group +func (s *TeamsService) GetExternalGroup(ctx context.Context, org string, groupID int64) (*ExternalGroup, *Response, error) { + u := fmt.Sprintf("orgs/%v/external-group/%v", org, groupID) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + externalGroup := new(ExternalGroup) + resp, err := s.client.Do(ctx, req, externalGroup) + if err != nil { + return nil, resp, err + } + + return externalGroup, resp, nil +} + +// ListExternalGroupsOptions specifies the optional parameters to the +// TeamsService.ListExternalGroups method. +type ListExternalGroupsOptions struct { + DisplayName *string `url:"display_name,omitempty"` + + ListOptions +} + +// ListExternalGroups lists external groups in an organization on GitHub. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/teams/external-groups#list-external-groups-in-an-organization +func (s *TeamsService) ListExternalGroups(ctx context.Context, org string, opts *ListExternalGroupsOptions) (*ExternalGroupList, *Response, error) { + u := fmt.Sprintf("orgs/%v/external-groups", org) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + externalGroups := new(ExternalGroupList) + resp, err := s.client.Do(ctx, req, externalGroups) + if err != nil { + return nil, resp, err + } + + return externalGroups, resp, nil +} + +// ListExternalGroupsForTeamBySlug lists external groups connected to a team on GitHub. 
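The external-group calls are Enterprise Cloud only; a sketch of filtering them by display name, with the organization and filter string left to the caller (authenticated client assumed):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// findExternalGroups lists the IdP-backed external groups of an organization
// whose display name matches the given filter.
func findExternalGroups(ctx context.Context, client *github.Client, org, filter string) error {
	groups, _, err := client.Teams.ListExternalGroups(ctx, org,
		&github.ListExternalGroupsOptions{DisplayName: github.String(filter)})
	if err != nil {
		return err
	}
	for _, g := range groups.Groups {
		fmt.Printf("%v: %v\n", g.GetGroupID(), g.GetGroupName())
	}
	return nil
}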
+// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/teams/external-groups#list-a-connection-between-an-external-group-and-a-team +func (s *TeamsService) ListExternalGroupsForTeamBySlug(ctx context.Context, org, slug string) (*ExternalGroupList, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + externalGroups := new(ExternalGroupList) + resp, err := s.client.Do(ctx, req, externalGroups) + if err != nil { + return nil, resp, err + } + + return externalGroups, resp, nil +} + +// UpdateConnectedExternalGroup updates the connection between an external group and a team. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/teams/external-groups#update-the-connection-between-an-external-group-and-a-team +func (s *TeamsService) UpdateConnectedExternalGroup(ctx context.Context, org, slug string, eg *ExternalGroup) (*ExternalGroup, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) + + req, err := s.client.NewRequest("PATCH", u, eg) + if err != nil { + return nil, nil, err + } + + externalGroup := new(ExternalGroup) + resp, err := s.client.Do(ctx, req, externalGroup) + if err != nil { + return nil, resp, err + } + + return externalGroup, resp, nil +} + +// RemoveConnectedExternalGroup removes the connection between an external group and a team. +// +// GitHub API docs: https://docs.github.com/en/enterprise-cloud@latest/rest/teams/external-groups#remove-the-connection-between-an-external-group-and-a-team +func (s *TeamsService) RemoveConnectedExternalGroup(ctx context.Context, org, slug string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/external-groups", org, slug) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/v56/github/teams_discussion_comments.go new file mode 100644 index 000000000..f3a1cc4dc --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/teams_discussion_comments.go @@ -0,0 +1,242 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// DiscussionComment represents a GitHub dicussion in a team. +type DiscussionComment struct { + Author *User `json:"author,omitempty"` + Body *string `json:"body,omitempty"` + BodyHTML *string `json:"body_html,omitempty"` + BodyVersion *string `json:"body_version,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` + DiscussionURL *string `json:"discussion_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Number *int `json:"number,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` +} + +func (c DiscussionComment) String() string { + return Stringify(c) +} + +// DiscussionCommentListOptions specifies optional parameters to the +// TeamServices.ListComments method. +type DiscussionCommentListOptions struct { + // Sorts the discussion comments by the date they were created. 
+ // Accepted values are asc and desc. Default is desc. + Direction string `url:"direction,omitempty"` + ListOptions +} + +// ListCommentsByID lists all comments on a team discussion by team ID. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#list-discussion-comments +func (s *TeamsService) ListCommentsByID(ctx context.Context, orgID, teamID int64, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discussionNumber) + u, err := addOptions(u, options) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var comments []*DiscussionComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// ListCommentsBySlug lists all comments on a team discussion by team slug. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#list-discussion-comments +func (s *TeamsService) ListCommentsBySlug(ctx context.Context, org, slug string, discussionNumber int, options *DiscussionCommentListOptions) ([]*DiscussionComment, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discussionNumber) + u, err := addOptions(u, options) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var comments []*DiscussionComment + resp, err := s.client.Do(ctx, req, &comments) + if err != nil { + return nil, resp, err + } + + return comments, resp, nil +} + +// GetCommentByID gets a specific comment on a team discussion by team ID. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#get-a-discussion-comment +func (s *TeamsService) GetCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// GetCommentBySlug gets a specific comment on a team discussion by team slug. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#get-a-discussion-comment +func (s *TeamsService) GetCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// CreateCommentByID creates a new comment on a team discussion by team ID. 
+// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#create-a-discussion-comment +func (s *TeamsService) CreateCommentByID(ctx context.Context, orgID, teamID int64, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments", orgID, teamID, discsusionNumber) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// CreateCommentBySlug creates a new comment on a team discussion by team slug. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#create-a-discussion-comment +func (s *TeamsService) CreateCommentBySlug(ctx context.Context, org, slug string, discsusionNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments", org, slug, discsusionNumber) + req, err := s.client.NewRequest("POST", u, comment) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// EditCommentByID edits the body text of a discussion comment by team ID. +// Authenticated user must grant write:discussion scope. +// User is allowed to edit body of a comment only. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#update-a-discussion-comment +func (s *TeamsService) EditCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// EditCommentBySlug edits the body text of a discussion comment by team slug. +// Authenticated user must grant write:discussion scope. +// User is allowed to edit body of a comment only. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#update-a-discussion-comment +func (s *TeamsService) EditCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int, comment DiscussionComment) (*DiscussionComment, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) + req, err := s.client.NewRequest("PATCH", u, comment) + if err != nil { + return nil, nil, err + } + + discussionComment := &DiscussionComment{} + resp, err := s.client.Do(ctx, req, discussionComment) + if err != nil { + return nil, resp, err + } + + return discussionComment, resp, nil +} + +// DeleteCommentByID deletes a comment on a team discussion by team ID. +// Authenticated user must grant write:discussion scope. 
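A sketch of creating and then editing a discussion comment by team slug (authenticated client assumed; the comment bodies and discussion number are placeholders):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// commentAndFix posts a comment on a team discussion and then edits its body;
// only the body of an existing comment can be changed.
func commentAndFix(ctx context.Context, client *github.Client, org, slug string, discussion int) error {
	comment, _, err := client.Teams.CreateCommentBySlug(ctx, org, slug, discussion,
		github.DiscussionComment{Body: github.String("Ship it :rocket:")})
	if err != nil {
		return err
	}

	edited, _, err := client.Teams.EditCommentBySlug(ctx, org, slug, discussion, comment.GetNumber(),
		github.DiscussionComment{Body: github.String("Ship it!")})
	if err != nil {
		return err
	}
	fmt.Println(github.Stringify(edited))
	return nil
}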
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#delete-a-discussion-comment +func (s *TeamsService) DeleteCommentByID(ctx context.Context, orgID, teamID int64, discussionNumber, commentNumber int) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v/comments/%v", orgID, teamID, discussionNumber, commentNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteCommentBySlug deletes a comment on a team discussion by team slug. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussion-comments#delete-a-discussion-comment +func (s *TeamsService) DeleteCommentBySlug(ctx context.Context, org, slug string, discussionNumber, commentNumber int) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v/comments/%v", org, slug, discussionNumber, commentNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/teams_discussions.go b/vendor/github.com/google/go-github/v56/github/teams_discussions.go new file mode 100644 index 000000000..69a3ebd51 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/teams_discussions.go @@ -0,0 +1,247 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TeamDiscussion represents a GitHub dicussion in a team. +type TeamDiscussion struct { + Author *User `json:"author,omitempty"` + Body *string `json:"body,omitempty"` + BodyHTML *string `json:"body_html,omitempty"` + BodyVersion *string `json:"body_version,omitempty"` + CommentsCount *int `json:"comments_count,omitempty"` + CommentsURL *string `json:"comments_url,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Number *int `json:"number,omitempty"` + Pinned *bool `json:"pinned,omitempty"` + Private *bool `json:"private,omitempty"` + TeamURL *string `json:"team_url,omitempty"` + Title *string `json:"title,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + URL *string `json:"url,omitempty"` + Reactions *Reactions `json:"reactions,omitempty"` +} + +func (d TeamDiscussion) String() string { + return Stringify(d) +} + +// DiscussionListOptions specifies optional parameters to the +// TeamServices.ListDiscussions method. +type DiscussionListOptions struct { + // Sorts the discussion by the date they were created. + // Accepted values are asc and desc. Default is desc. + Direction string `url:"direction,omitempty"` + + ListOptions +} + +// ListDiscussionsByID lists all discussions on team's page given Organization and Team ID. +// Authenticated user must grant read:discussion scope. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#list-discussions +func (s *TeamsService) ListDiscussionsByID(ctx context.Context, orgID, teamID int64, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teamDiscussions []*TeamDiscussion + resp, err := s.client.Do(ctx, req, &teamDiscussions) + if err != nil { + return nil, resp, err + } + + return teamDiscussions, resp, nil +} + +// ListDiscussionsBySlug lists all discussions on team's page given Organization name and Team's slug. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#list-discussions +func (s *TeamsService) ListDiscussionsBySlug(ctx context.Context, org, slug string, opts *DiscussionListOptions) ([]*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var teamDiscussions []*TeamDiscussion + resp, err := s.client.Do(ctx, req, &teamDiscussions) + if err != nil { + return nil, resp, err + } + + return teamDiscussions, resp, nil +} + +// GetDiscussionByID gets a specific discussion on a team's page given Organization and Team ID. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#get-a-discussion +func (s *TeamsService) GetDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// GetDiscussionBySlug gets a specific discussion on a team's page given Organization name and Team's slug. +// Authenticated user must grant read:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#get-a-discussion +func (s *TeamsService) GetDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// CreateDiscussionByID creates a new discussion post on a team's page given Organization and Team ID. +// Authenticated user must grant write:discussion scope. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#create-a-discussion +func (s *TeamsService) CreateDiscussionByID(ctx context.Context, orgID, teamID int64, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions", orgID, teamID) + req, err := s.client.NewRequest("POST", u, discussion) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// CreateDiscussionBySlug creates a new discussion post on a team's page given Organization name and Team's slug. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#create-a-discussion +func (s *TeamsService) CreateDiscussionBySlug(ctx context.Context, org, slug string, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions", org, slug) + req, err := s.client.NewRequest("POST", u, discussion) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// EditDiscussionByID edits the title and body text of a discussion post given Organization and Team ID. +// Authenticated user must grant write:discussion scope. +// User is allowed to change Title and Body of a discussion only. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#update-a-discussion +func (s *TeamsService) EditDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) + req, err := s.client.NewRequest("PATCH", u, discussion) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// EditDiscussionBySlug edits the title and body text of a discussion post given Organization name and Team's slug. +// Authenticated user must grant write:discussion scope. +// User is allowed to change Title and Body of a discussion only. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#update-a-discussion +func (s *TeamsService) EditDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int, discussion TeamDiscussion) (*TeamDiscussion, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) + req, err := s.client.NewRequest("PATCH", u, discussion) + if err != nil { + return nil, nil, err + } + + teamDiscussion := &TeamDiscussion{} + resp, err := s.client.Do(ctx, req, teamDiscussion) + if err != nil { + return nil, resp, err + } + + return teamDiscussion, resp, nil +} + +// DeleteDiscussionByID deletes a discussion from team's page given Organization and Team ID. +// Authenticated user must grant write:discussion scope. 
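A sketch pairing CreateDiscussionBySlug with EditDiscussionBySlug (authenticated client assumed; the title and body are placeholders):

package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// openReleaseDiscussion opens a private discussion on a team's page and then
// retitles it; only the title and body of a discussion can be edited.
func openReleaseDiscussion(ctx context.Context, client *github.Client, org, slug string) error {
	d, _, err := client.Teams.CreateDiscussionBySlug(ctx, org, slug, github.TeamDiscussion{
		Title:   github.String("v0.22 release checklist"), // placeholder title
		Body:    github.String("Tracking thread for the next release."),
		Private: github.Bool(true),
	})
	if err != nil {
		return err
	}

	_, _, err = client.Teams.EditDiscussionBySlug(ctx, org, slug, d.GetNumber(),
		github.TeamDiscussion{Title: github.String("v0.22.0 release checklist")})
	if err != nil {
		return err
	}
	fmt.Println("updated discussion", d.GetNumber())
	return nil
}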
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#delete-a-discussion +func (s *TeamsService) DeleteDiscussionByID(ctx context.Context, orgID, teamID int64, discussionNumber int) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/discussions/%v", orgID, teamID, discussionNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeleteDiscussionBySlug deletes a discussion from team's page given Organization name and Team's slug. +// Authenticated user must grant write:discussion scope. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/discussions#delete-a-discussion +func (s *TeamsService) DeleteDiscussionBySlug(ctx context.Context, org, slug string, discussionNumber int) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/discussions/%v", org, slug, discussionNumber) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/teams_members.go b/vendor/github.com/google/go-github/v56/github/teams_members.go new file mode 100644 index 000000000..58cb79744 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/teams_members.go @@ -0,0 +1,243 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TeamListTeamMembersOptions specifies the optional parameters to the +// TeamsService.ListTeamMembers method. +type TeamListTeamMembersOptions struct { + // Role filters members returned by their role in the team. Possible + // values are "all", "member", "maintainer". Default is "all". + Role string `url:"role,omitempty"` + + ListOptions +} + +// ListTeamMembersByID lists all of the users who are members of a team, given a specified +// organization ID, by team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#list-team-members +func (s *TeamsService) ListTeamMembersByID(ctx context.Context, orgID, teamID int64, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/members", orgID, teamID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var members []*User + resp, err := s.client.Do(ctx, req, &members) + if err != nil { + return nil, resp, err + } + + return members, resp, nil +} + +// ListTeamMembersBySlug lists all of the users who are members of a team, given a specified +// organization name, by team slug. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#list-team-members +func (s *TeamsService) ListTeamMembersBySlug(ctx context.Context, org, slug string, opts *TeamListTeamMembersOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/members", org, slug) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var members []*User + resp, err := s.client.Do(ctx, req, &members) + if err != nil { + return nil, resp, err + } + + return members, resp, nil +} + +// GetTeamMembershipByID returns the membership status for a user in a team, given a specified +// organization ID, by team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#list-team-members +func (s *TeamsService) GetTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Membership, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// GetTeamMembershipBySlug returns the membership status for a user in a team, given a specified +// organization name, by team slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#get-team-membership-for-a-user +func (s *TeamsService) GetTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Membership, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// TeamAddTeamMembershipOptions specifies the optional +// parameters to the TeamsService.AddTeamMembership method. +type TeamAddTeamMembershipOptions struct { + // Role specifies the role the user should have in the team. Possible + // values are: + // member - a normal member of the team + // maintainer - a team maintainer. Able to add/remove other team + // members, promote other team members to team + // maintainer, and edit the team’s name and description + // + // Default value is "member". + Role string `json:"role,omitempty"` +} + +// AddTeamMembershipByID adds or invites a user to a team, given a specified +// organization ID, by team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#add-or-update-team-membership-for-a-user +func (s *TeamsService) AddTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// AddTeamMembershipBySlug adds or invites a user to a team, given a specified +// organization name, by team slug. 
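As a point of reference for the membership helpers above, here is a minimal sketch of calling AddTeamMembershipBySlug with the Role option. It assumes an already-authenticated *github.Client supplied by the caller; the addMaintainer wrapper and its parameter names are illustrative only, not part of the vendored package.

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// addMaintainer invites a user to a team as a maintainer via the
// slug-based membership helper from the vendored go-github v56 package.
// The caller is assumed to provide an authenticated *github.Client.
func addMaintainer(ctx context.Context, gh *github.Client, org, teamSlug, user string) error {
	opts := &github.TeamAddTeamMembershipOptions{Role: "maintainer"}
	membership, _, err := gh.Teams.AddTeamMembershipBySlug(ctx, org, teamSlug, user, opts)
	if err != nil {
		return err
	}
	// State is "active" for existing members and "pending" for invitations.
	fmt.Printf("membership state: %s\n", membership.GetState())
	return nil
}
```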
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#add-or-update-team-membership-for-a-user +func (s *TeamsService) AddTeamMembershipBySlug(ctx context.Context, org, slug, user string, opts *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// RemoveTeamMembershipByID removes a user from a team, given a specified +// organization ID, by team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#remove-team-membership-for-a-user +func (s *TeamsService) RemoveTeamMembershipByID(ctx context.Context, orgID, teamID int64, user string) (*Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/memberships/%v", orgID, teamID, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamMembershipBySlug removes a user from a team, given a specified +// organization name, by team slug. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#remove-team-membership-for-a-user +func (s *TeamsService) RemoveTeamMembershipBySlug(ctx context.Context, org, slug, user string) (*Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/memberships/%v", org, slug, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListPendingTeamInvitationsByID gets pending invitation list of a team, given a specified +// organization ID, by team ID. +// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#list-pending-team-invitations +func (s *TeamsService) ListPendingTeamInvitationsByID(ctx context.Context, orgID, teamID int64, opts *ListOptions) ([]*Invitation, *Response, error) { + u := fmt.Sprintf("organizations/%v/team/%v/invitations", orgID, teamID) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pendingInvitations []*Invitation + resp, err := s.client.Do(ctx, req, &pendingInvitations) + if err != nil { + return nil, resp, err + } + + return pendingInvitations, resp, nil +} + +// ListPendingTeamInvitationsBySlug get pending invitation list of a team, given a specified +// organization name, by team slug. 
+// +// GitHub API docs: https://docs.github.com/en/rest/teams/members#list-pending-team-invitations +func (s *TeamsService) ListPendingTeamInvitationsBySlug(ctx context.Context, org, slug string, opts *ListOptions) ([]*Invitation, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams/%v/invitations", org, slug) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pendingInvitations []*Invitation + resp, err := s.client.Do(ctx, req, &pendingInvitations) + if err != nil { + return nil, resp, err + } + + return pendingInvitations, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/timestamp.go b/vendor/github.com/google/go-github/v56/github/timestamp.go new file mode 100644 index 000000000..00c1235e9 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/timestamp.go @@ -0,0 +1,52 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "strconv" + "time" +) + +// Timestamp represents a time that can be unmarshalled from a JSON string +// formatted as either an RFC3339 or Unix timestamp. This is necessary for some +// fields since the GitHub API is inconsistent in how it represents times. All +// exported methods of time.Time can be called on Timestamp. +type Timestamp struct { + time.Time +} + +func (t Timestamp) String() string { + return t.Time.String() +} + +// GetTime returns std time.Time. +func (t *Timestamp) GetTime() *time.Time { + if t == nil { + return nil + } + return &t.Time +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// Time is expected in RFC3339 or Unix format. +func (t *Timestamp) UnmarshalJSON(data []byte) (err error) { + str := string(data) + i, err := strconv.ParseInt(str, 10, 64) + if err == nil { + t.Time = time.Unix(i, 0) + if t.Time.Year() > 3000 { + t.Time = time.Unix(0, i*1e6) + } + } else { + t.Time, err = time.Parse(`"`+time.RFC3339+`"`, str) + } + return +} + +// Equal reports whether t and u are equal based on time.Equal +func (t Timestamp) Equal(u Timestamp) bool { + return t.Time.Equal(u.Time) +} diff --git a/vendor/github.com/google/go-github/v56/github/users.go b/vendor/github.com/google/go-github/v56/github/users.go new file mode 100644 index 000000000..1b0670103 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users.go @@ -0,0 +1,277 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// UsersService handles communication with the user related +// methods of the GitHub API. +// +// GitHub API docs: https://docs.github.com/en/rest/users/ +type UsersService service + +// User represents a GitHub user. 
+type User struct { + Login *string `json:"login,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + AvatarURL *string `json:"avatar_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + GravatarID *string `json:"gravatar_id,omitempty"` + Name *string `json:"name,omitempty"` + Company *string `json:"company,omitempty"` + Blog *string `json:"blog,omitempty"` + Location *string `json:"location,omitempty"` + Email *string `json:"email,omitempty"` + Hireable *bool `json:"hireable,omitempty"` + Bio *string `json:"bio,omitempty"` + TwitterUsername *string `json:"twitter_username,omitempty"` + PublicRepos *int `json:"public_repos,omitempty"` + PublicGists *int `json:"public_gists,omitempty"` + Followers *int `json:"followers,omitempty"` + Following *int `json:"following,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + SuspendedAt *Timestamp `json:"suspended_at,omitempty"` + Type *string `json:"type,omitempty"` + SiteAdmin *bool `json:"site_admin,omitempty"` + TotalPrivateRepos *int64 `json:"total_private_repos,omitempty"` + OwnedPrivateRepos *int64 `json:"owned_private_repos,omitempty"` + PrivateGists *int `json:"private_gists,omitempty"` + DiskUsage *int `json:"disk_usage,omitempty"` + Collaborators *int `json:"collaborators,omitempty"` + TwoFactorAuthentication *bool `json:"two_factor_authentication,omitempty"` + Plan *Plan `json:"plan,omitempty"` + LdapDn *string `json:"ldap_dn,omitempty"` + + // API URLs + URL *string `json:"url,omitempty"` + EventsURL *string `json:"events_url,omitempty"` + FollowingURL *string `json:"following_url,omitempty"` + FollowersURL *string `json:"followers_url,omitempty"` + GistsURL *string `json:"gists_url,omitempty"` + OrganizationsURL *string `json:"organizations_url,omitempty"` + ReceivedEventsURL *string `json:"received_events_url,omitempty"` + ReposURL *string `json:"repos_url,omitempty"` + StarredURL *string `json:"starred_url,omitempty"` + SubscriptionsURL *string `json:"subscriptions_url,omitempty"` + + // TextMatches is only populated from search results that request text matches + // See: search.go and https://docs.github.com/en/rest/search/#text-match-metadata + TextMatches []*TextMatch `json:"text_matches,omitempty"` + + // Permissions and RoleName identify the permissions and role that a user has on a given + // repository. These are only populated when calling Repositories.ListCollaborators. + Permissions map[string]bool `json:"permissions,omitempty"` + RoleName *string `json:"role_name,omitempty"` +} + +func (u User) String() string { + return Stringify(u) +} + +// Get fetches a user. Passing the empty string will fetch the authenticated +// user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/users#get-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/users#get-a-user +func (s *UsersService) Get(ctx context.Context, user string) (*User, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v", user) + } else { + u = "user" + } + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + uResp := new(User) + resp, err := s.client.Do(ctx, req, uResp) + if err != nil { + return nil, resp, err + } + + return uResp, resp, nil +} + +// GetByID fetches a user. +// +// Note: GetByID uses the undocumented GitHub API endpoint /user/:id. 
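The Get method above accepts an empty login to mean the authenticated user, and every field on User is a pointer. A short sketch, assuming the caller provides an authenticated *github.Client, of reading a profile through the generated Get* accessors (which return zero values for nil pointers); the printProfile wrapper is illustrative:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// printProfile fetches a user by login (or the authenticated user when the
// login is empty) and reads a few pointer fields via the Get* accessors.
func printProfile(ctx context.Context, gh *github.Client, login string) error {
	u, _, err := gh.Users.Get(ctx, login)
	if err != nil {
		return err
	}
	fmt.Printf("%s (%s): %d public repos\n", u.GetLogin(), u.GetName(), u.GetPublicRepos())
	return nil
}
```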
+func (s *UsersService) GetByID(ctx context.Context, id int64) (*User, *Response, error) { + u := fmt.Sprintf("user/%d", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + user := new(User) + resp, err := s.client.Do(ctx, req, user) + if err != nil { + return nil, resp, err + } + + return user, resp, nil +} + +// Edit the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/users#update-the-authenticated-user +func (s *UsersService) Edit(ctx context.Context, user *User) (*User, *Response, error) { + u := "user" + req, err := s.client.NewRequest("PATCH", u, user) + if err != nil { + return nil, nil, err + } + + uResp := new(User) + resp, err := s.client.Do(ctx, req, uResp) + if err != nil { + return nil, resp, err + } + + return uResp, resp, nil +} + +// HovercardOptions specifies optional parameters to the UsersService.GetHovercard +// method. +type HovercardOptions struct { + // SubjectType specifies the additional information to be received about the hovercard. + // Possible values are: organization, repository, issue, pull_request. (Required when using subject_id.) + SubjectType string `url:"subject_type"` + + // SubjectID specifies the ID for the SubjectType. (Required when using subject_type.) + SubjectID string `url:"subject_id"` +} + +// Hovercard represents hovercard information about a user. +type Hovercard struct { + Contexts []*UserContext `json:"contexts,omitempty"` +} + +// UserContext represents the contextual information about user. +type UserContext struct { + Message *string `json:"message,omitempty"` + Octicon *string `json:"octicon,omitempty"` +} + +// GetHovercard fetches contextual information about user. It requires authentication +// via Basic Auth or via OAuth with the repo scope. +// +// GitHub API docs: https://docs.github.com/en/rest/users/users#get-contextual-information-for-a-user +func (s *UsersService) GetHovercard(ctx context.Context, user string, opts *HovercardOptions) (*Hovercard, *Response, error) { + u := fmt.Sprintf("users/%v/hovercard", user) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + hc := new(Hovercard) + resp, err := s.client.Do(ctx, req, hc) + if err != nil { + return nil, resp, err + } + + return hc, resp, nil +} + +// UserListOptions specifies optional parameters to the UsersService.ListAll +// method. +type UserListOptions struct { + // ID of the last user seen + Since int64 `url:"since,omitempty"` + + // Note: Pagination is powered exclusively by the Since parameter, + // ListOptions.Page has no effect. + // ListOptions.PerPage controls an undocumented GitHub API parameter. + ListOptions +} + +// ListAll lists all GitHub users. +// +// To paginate through all users, populate 'Since' with the ID of the last user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/users#list-users +func (s *UsersService) ListAll(ctx context.Context, opts *UserListOptions) ([]*User, *Response, error) { + u, err := addOptions("users", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// ListInvitations lists all currently-open repository invitations for the +// authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user +func (s *UsersService) ListInvitations(ctx context.Context, opts *ListOptions) ([]*RepositoryInvitation, *Response, error) { + u, err := addOptions("user/repository_invitations", opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + invites := []*RepositoryInvitation{} + resp, err := s.client.Do(ctx, req, &invites) + if err != nil { + return nil, resp, err + } + + return invites, resp, nil +} + +// AcceptInvitation accepts the currently-open repository invitation for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#accept-a-repository-invitation +func (s *UsersService) AcceptInvitation(ctx context.Context, invitationID int64) (*Response, error) { + u := fmt.Sprintf("user/repository_invitations/%v", invitationID) + req, err := s.client.NewRequest("PATCH", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DeclineInvitation declines the currently-open repository invitation for the +// authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/collaborators/invitations#decline-a-repository-invitation +func (s *UsersService) DeclineInvitation(ctx context.Context, invitationID int64) (*Response, error) { + u := fmt.Sprintf("user/repository_invitations/%v", invitationID) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_administration.go b/vendor/github.com/google/go-github/v56/github/users_administration.go new file mode 100644 index 000000000..aef947ec4 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_administration.go @@ -0,0 +1,72 @@ +// Copyright 2014 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// PromoteSiteAdmin promotes a user to a site administrator of a GitHub Enterprise instance. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#promote-an-ordinary-user-to-a-site-administrator +func (s *UsersService) PromoteSiteAdmin(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("users/%v/site_admin", user) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// DemoteSiteAdmin demotes a user from site administrator of a GitHub Enterprise instance. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#demote-a-site-administrator-to-an-ordinary-user +func (s *UsersService) DemoteSiteAdmin(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("users/%v/site_admin", user) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// UserSuspendOptions represents the reason a user is being suspended. +type UserSuspendOptions struct { + Reason *string `json:"reason,omitempty"` +} + +// Suspend a user on a GitHub Enterprise instance. 
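The invitation helpers above pair naturally: list the open repository invitations, then accept each one by ID. A sketch under the same assumption of a caller-provided authenticated client; pagination is left out for brevity and the wrapper name is illustrative:

```go
package example

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// acceptAllInvitations lists the open repository invitations for the
// authenticated user and accepts each one.
func acceptAllInvitations(ctx context.Context, gh *github.Client) error {
	invites, _, err := gh.Users.ListInvitations(ctx, nil) // nil opts: first page, API defaults
	if err != nil {
		return err
	}
	for _, inv := range invites {
		if _, err := gh.Users.AcceptInvitation(ctx, inv.GetID()); err != nil {
			return err
		}
	}
	return nil
}
```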
+// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#suspend-a-user +func (s *UsersService) Suspend(ctx context.Context, user string, opts *UserSuspendOptions) (*Response, error) { + u := fmt.Sprintf("users/%v/suspended", user) + + req, err := s.client.NewRequest("PUT", u, opts) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unsuspend a user on a GitHub Enterprise instance. +// +// GitHub API docs: https://developer.github.com/enterprise/v3/enterprise-admin/users/#unsuspend-a-user +func (s *UsersService) Unsuspend(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("users/%v/suspended", user) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_blocking.go b/vendor/github.com/google/go-github/v56/github/users_blocking.go new file mode 100644 index 000000000..3d38d9478 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_blocking.go @@ -0,0 +1,91 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListBlockedUsers lists all the blocked users by the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/blocking#list-users-blocked-by-the-authenticated-user +func (s *UsersService) ListBlockedUsers(ctx context.Context, opts *ListOptions) ([]*User, *Response, error) { + u := "user/blocks" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + var blockedUsers []*User + resp, err := s.client.Do(ctx, req, &blockedUsers) + if err != nil { + return nil, resp, err + } + + return blockedUsers, resp, nil +} + +// IsBlocked reports whether specified user is blocked by the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/blocking#check-if-a-user-is-blocked-by-the-authenticated-user +func (s *UsersService) IsBlocked(ctx context.Context, user string) (bool, *Response, error) { + u := fmt.Sprintf("user/blocks/%v", user) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + resp, err := s.client.Do(ctx, req, nil) + isBlocked, err := parseBoolResponse(err) + return isBlocked, resp, err +} + +// BlockUser blocks specified user for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/blocking#block-a-user +func (s *UsersService) BlockUser(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("user/blocks/%v", user) + + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + return s.client.Do(ctx, req, nil) +} + +// UnblockUser unblocks specified user for the authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/users/blocking#unblock-a-user +func (s *UsersService) UnblockUser(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("user/blocks/%v", user) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeBlockUsersPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_emails.go b/vendor/github.com/google/go-github/v56/github/users_emails.go new file mode 100644 index 000000000..67bd210e8 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_emails.go @@ -0,0 +1,97 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import "context" + +// UserEmail represents user's email address +type UserEmail struct { + Email *string `json:"email,omitempty"` + Primary *bool `json:"primary,omitempty"` + Verified *bool `json:"verified,omitempty"` + Visibility *string `json:"visibility,omitempty"` +} + +// ListEmails lists all email addresses for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/emails#list-email-addresses-for-the-authenticated-user +func (s *UsersService) ListEmails(ctx context.Context, opts *ListOptions) ([]*UserEmail, *Response, error) { + u := "user/emails" + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var emails []*UserEmail + resp, err := s.client.Do(ctx, req, &emails) + if err != nil { + return nil, resp, err + } + + return emails, resp, nil +} + +// AddEmails adds email addresses of the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/emails#add-an-email-address-for-the-authenticated-user +func (s *UsersService) AddEmails(ctx context.Context, emails []string) ([]*UserEmail, *Response, error) { + u := "user/emails" + req, err := s.client.NewRequest("POST", u, emails) + if err != nil { + return nil, nil, err + } + + var e []*UserEmail + resp, err := s.client.Do(ctx, req, &e) + if err != nil { + return nil, resp, err + } + + return e, resp, nil +} + +// DeleteEmails deletes email addresses from authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/emails#delete-an-email-address-for-the-authenticated-user +func (s *UsersService) DeleteEmails(ctx context.Context, emails []string) (*Response, error) { + u := "user/emails" + req, err := s.client.NewRequest("DELETE", u, emails) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// SetEmailVisibility sets the visibility for the primary email address of the authenticated user. +// `visibility` can be "private" or "public". 
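A sketch of the email helpers above, assuming an authenticated client whose token carries a sufficient user scope; the syncEmail wrapper and its arguments are illustrative:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// syncEmail adds an address for the authenticated user and then prints the
// verification state of every address on the account.
func syncEmail(ctx context.Context, gh *github.Client, address string) error {
	if _, _, err := gh.Users.AddEmails(ctx, []string{address}); err != nil {
		return err
	}
	emails, _, err := gh.Users.ListEmails(ctx, nil)
	if err != nil {
		return err
	}
	for _, e := range emails {
		fmt.Printf("%s verified=%t\n", e.GetEmail(), e.GetVerified())
	}
	return nil
}
```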
+// +// GitHub API docs: https://docs.github.com/en/rest/users/emails#set-primary-email-visibility-for-the-authenticated-user +func (s *UsersService) SetEmailVisibility(ctx context.Context, visibility string) ([]*UserEmail, *Response, error) { + u := "user/email/visibility" + + updateVisiblilityReq := &UserEmail{ + Visibility: &visibility, + } + + req, err := s.client.NewRequest("PATCH", u, updateVisiblilityReq) + if err != nil { + return nil, nil, err + } + + var e []*UserEmail + resp, err := s.client.Do(ctx, req, &e) + if err != nil { + return nil, resp, err + } + + return e, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/users_followers.go b/vendor/github.com/google/go-github/v56/github/users_followers.go new file mode 100644 index 000000000..1266e0e9e --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_followers.go @@ -0,0 +1,122 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListFollowers lists the followers for a user. Passing the empty string will +// fetch followers for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/followers#list-followers-of-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/followers#list-followers-of-a-user +func (s *UsersService) ListFollowers(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/followers", user) + } else { + u = "user/followers" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// ListFollowing lists the people that a user is following. Passing the empty +// string will list people the authenticated user is following. +// +// GitHub API docs: https://docs.github.com/en/rest/users/followers#list-the-people-the-authenticated-user-follows +// GitHub API docs: https://docs.github.com/en/rest/users/followers#list-the-people-a-user-follows +func (s *UsersService) ListFollowing(ctx context.Context, user string, opts *ListOptions) ([]*User, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/following", user) + } else { + u = "user/following" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var users []*User + resp, err := s.client.Do(ctx, req, &users) + if err != nil { + return nil, resp, err + } + + return users, resp, nil +} + +// IsFollowing checks if "user" is following "target". Passing the empty +// string for "user" will check if the authenticated user is following "target". 
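ListFollowers and ListFollowing both take the standard ListOptions, so results are paged like the rest of go-github: advance ListOptions.Page from Response.NextPage until it is zero. A sketch, again assuming a caller-provided authenticated client:

```go
package example

import (
	"context"

	"github.com/google/go-github/v56/github"
)

// allFollowers pages through every follower of the given user using the
// ListOptions / Response.NextPage pattern shared by go-github list calls.
func allFollowers(ctx context.Context, gh *github.Client, user string) ([]*github.User, error) {
	opts := &github.ListOptions{PerPage: 100}
	var all []*github.User
	for {
		followers, resp, err := gh.Users.ListFollowers(ctx, user, opts)
		if err != nil {
			return nil, err
		}
		all = append(all, followers...)
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
	return all, nil
}
```

The same loop shape applies to the other list endpoints in this file.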
+// +// GitHub API docs: https://docs.github.com/en/rest/users/followers#check-if-a-person-is-followed-by-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/followers#check-if-a-user-follows-another-user +func (s *UsersService) IsFollowing(ctx context.Context, user, target string) (bool, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/following/%v", user, target) + } else { + u = fmt.Sprintf("user/following/%v", target) + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + following, err := parseBoolResponse(err) + return following, resp, err +} + +// Follow will cause the authenticated user to follow the specified user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/followers#follow-a-user +func (s *UsersService) Follow(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("user/following/%v", user) + req, err := s.client.NewRequest("PUT", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Unfollow will cause the authenticated user to unfollow the specified user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/followers#unfollow-a-user +func (s *UsersService) Unfollow(ctx context.Context, user string) (*Response, error) { + u := fmt.Sprintf("user/following/%v", user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_gpg_keys.go b/vendor/github.com/google/go-github/v56/github/users_gpg_keys.go new file mode 100644 index 000000000..54189b830 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_gpg_keys.go @@ -0,0 +1,129 @@ +// Copyright 2016 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// GPGKey represents a GitHub user's public GPG key used to verify GPG signed commits and tags. +// +// https://developer.github.com/changes/2016-04-04-git-signing-api-preview/ +type GPGKey struct { + ID *int64 `json:"id,omitempty"` + PrimaryKeyID *int64 `json:"primary_key_id,omitempty"` + KeyID *string `json:"key_id,omitempty"` + RawKey *string `json:"raw_key,omitempty"` + PublicKey *string `json:"public_key,omitempty"` + Emails []*GPGEmail `json:"emails,omitempty"` + Subkeys []*GPGKey `json:"subkeys,omitempty"` + CanSign *bool `json:"can_sign,omitempty"` + CanEncryptComms *bool `json:"can_encrypt_comms,omitempty"` + CanEncryptStorage *bool `json:"can_encrypt_storage,omitempty"` + CanCertify *bool `json:"can_certify,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + ExpiresAt *Timestamp `json:"expires_at,omitempty"` +} + +// String stringifies a GPGKey. +func (k GPGKey) String() string { + return Stringify(k) +} + +// GPGEmail represents an email address associated to a GPG key. +type GPGEmail struct { + Email *string `json:"email,omitempty"` + Verified *bool `json:"verified,omitempty"` +} + +// ListGPGKeys lists the public GPG keys for a user. Passing the empty +// string will fetch keys for the authenticated user. It requires authentication +// via Basic Auth or via OAuth with at least read:gpg_key scope. 
+// +// GitHub API docs: https://docs.github.com/en/rest/users/gpg-keys#list-gpg-keys-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/gpg-keys#list-gpg-keys-for-a-user +func (s *UsersService) ListGPGKeys(ctx context.Context, user string, opts *ListOptions) ([]*GPGKey, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/gpg_keys", user) + } else { + u = "user/gpg_keys" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var keys []*GPGKey + resp, err := s.client.Do(ctx, req, &keys) + if err != nil { + return nil, resp, err + } + + return keys, resp, nil +} + +// GetGPGKey gets extended details for a single GPG key. It requires authentication +// via Basic Auth or via OAuth with at least read:gpg_key scope. +// +// GitHub API docs: https://docs.github.com/en/rest/users/gpg-keys#get-a-gpg-key-for-the-authenticated-user +func (s *UsersService) GetGPGKey(ctx context.Context, id int64) (*GPGKey, *Response, error) { + u := fmt.Sprintf("user/gpg_keys/%v", id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + key := &GPGKey{} + resp, err := s.client.Do(ctx, req, key) + if err != nil { + return nil, resp, err + } + + return key, resp, nil +} + +// CreateGPGKey creates a GPG key. It requires authenticatation via Basic Auth +// or OAuth with at least write:gpg_key scope. +// +// GitHub API docs: https://docs.github.com/en/rest/users/gpg-keys#create-a-gpg-key +func (s *UsersService) CreateGPGKey(ctx context.Context, armoredPublicKey string) (*GPGKey, *Response, error) { + gpgKey := &struct { + ArmoredPublicKey string `json:"armored_public_key"` + }{ArmoredPublicKey: armoredPublicKey} + req, err := s.client.NewRequest("POST", "user/gpg_keys", gpgKey) + if err != nil { + return nil, nil, err + } + + key := &GPGKey{} + resp, err := s.client.Do(ctx, req, key) + if err != nil { + return nil, resp, err + } + + return key, resp, nil +} + +// DeleteGPGKey deletes a GPG key. It requires authentication via Basic Auth or +// via OAuth with at least admin:gpg_key scope. +// +// GitHub API docs: https://docs.github.com/en/rest/users/gpg-keys#delete-a-gpg-key-for-the-authenticated-user +func (s *UsersService) DeleteGPGKey(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("user/gpg_keys/%v", id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_keys.go b/vendor/github.com/google/go-github/v56/github/users_keys.go new file mode 100644 index 000000000..b49b8e4b4 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_keys.go @@ -0,0 +1,113 @@ +// Copyright 2013 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// Key represents a public SSH key used to authenticate a user or deploy script. 
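A sketch of registering a key through CreateGPGKey above; it assumes the caller already holds an ASCII-armored public key and a token with the write:gpg_key scope mentioned in the doc comments, and the uploadGPGKey wrapper is illustrative:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// uploadGPGKey registers an ASCII-armored public key for the authenticated
// user and reports the identifiers GitHub assigned to it.
func uploadGPGKey(ctx context.Context, gh *github.Client, armored string) error {
	key, _, err := gh.Users.CreateGPGKey(ctx, armored)
	if err != nil {
		return err
	}
	fmt.Printf("created GPG key %d (%s)\n", key.GetID(), key.GetKeyID())
	return nil
}
```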
+type Key struct { + ID *int64 `json:"id,omitempty"` + Key *string `json:"key,omitempty"` + URL *string `json:"url,omitempty"` + Title *string `json:"title,omitempty"` + ReadOnly *bool `json:"read_only,omitempty"` + Verified *bool `json:"verified,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + AddedBy *string `json:"added_by,omitempty"` + LastUsed *Timestamp `json:"last_used,omitempty"` +} + +func (k Key) String() string { + return Stringify(k) +} + +// ListKeys lists the verified public keys for a user. Passing the empty +// string will fetch keys for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/keys#list-public-ssh-keys-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/keys#list-public-keys-for-a-user +func (s *UsersService) ListKeys(ctx context.Context, user string, opts *ListOptions) ([]*Key, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/keys", user) + } else { + u = "user/keys" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var keys []*Key + resp, err := s.client.Do(ctx, req, &keys) + if err != nil { + return nil, resp, err + } + + return keys, resp, nil +} + +// GetKey fetches a single public key. +// +// GitHub API docs: https://docs.github.com/en/rest/users/keys#get-a-public-ssh-key-for-the-authenticated-user +func (s *UsersService) GetKey(ctx context.Context, id int64) (*Key, *Response, error) { + u := fmt.Sprintf("user/keys/%v", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + key := new(Key) + resp, err := s.client.Do(ctx, req, key) + if err != nil { + return nil, resp, err + } + + return key, resp, nil +} + +// CreateKey adds a public key for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/keys#create-a-public-ssh-key-for-the-authenticated-user +func (s *UsersService) CreateKey(ctx context.Context, key *Key) (*Key, *Response, error) { + u := "user/keys" + + req, err := s.client.NewRequest("POST", u, key) + if err != nil { + return nil, nil, err + } + + k := new(Key) + resp, err := s.client.Do(ctx, req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, nil +} + +// DeleteKey deletes a public key. +// +// GitHub API docs: https://docs.github.com/en/rest/users/keys#delete-a-public-ssh-key-for-the-authenticated-user +func (s *UsersService) DeleteKey(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("user/keys/%v", id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_packages.go b/vendor/github.com/google/go-github/v56/github/users_packages.go new file mode 100644 index 000000000..da04919ec --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_packages.go @@ -0,0 +1,211 @@ +// Copyright 2021 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// List the packages for a user. Passing the empty string for "user" will +// list packages for the authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/packages#list-packages-for-the-authenticated-users-namespace +// GitHub API docs: https://docs.github.com/en/rest/packages#list-packages-for-a-user +func (s *UsersService) ListPackages(ctx context.Context, user string, opts *PackageListOptions) ([]*Package, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages", user) + } else { + u = "user/packages" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var packages []*Package + resp, err := s.client.Do(ctx, req, &packages) + if err != nil { + return nil, resp, err + } + + return packages, resp, nil +} + +// Get a package by name for a user. Passing the empty string for "user" will +// get the package for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-for-a-user +func (s *UsersService) GetPackage(ctx context.Context, user, packageType, packageName string) (*Package, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) + } else { + u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pack *Package + resp, err := s.client.Do(ctx, req, &pack) + if err != nil { + return nil, resp, err + } + + return pack, resp, nil +} + +// Delete a package from a user. Passing the empty string for "user" will +// delete the package for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-a-package-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-a-package-for-a-user +func (s *UsersService) DeletePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v", user, packageType, packageName) + } else { + u = fmt.Sprintf("user/packages/%v/%v", packageType, packageName) + } + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Restore a package to a user. Passing the empty string for "user" will +// restore the package for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-a-package-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-a-package-for-a-user +func (s *UsersService) RestorePackage(ctx context.Context, user, packageType, packageName string) (*Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v/restore", user, packageType, packageName) + } else { + u = fmt.Sprintf("user/packages/%v/%v/restore", packageType, packageName) + } + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Get all versions of a package for a user. Passing the empty string for "user" will +// get versions for the authenticated user. 
+// +// GitHub API docs: https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-a-user +func (s *UsersService) PackageGetAllVersions(ctx context.Context, user, packageType, packageName string, opts *PackageListOptions) ([]*PackageVersion, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v/versions", user, packageType, packageName) + } else { + u = fmt.Sprintf("user/packages/%v/%v/versions", packageType, packageName) + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var versions []*PackageVersion + resp, err := s.client.Do(ctx, req, &versions) + if err != nil { + return nil, resp, err + } + + return versions, resp, nil +} + +// Get a specific version of a package for a user. Passing the empty string for "user" will +// get the version for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-version-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#get-a-package-version-for-a-user +func (s *UsersService) PackageGetVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*PackageVersion, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) + } else { + u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var version *PackageVersion + resp, err := s.client.Do(ctx, req, &version) + if err != nil { + return nil, resp, err + } + + return version, resp, nil +} + +// Delete a package version for a user. Passing the empty string for "user" will +// delete the version for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#delete-package-version-for-a-user +func (s *UsersService) PackageDeleteVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v", user, packageType, packageName, packageVersionID) + } else { + u = fmt.Sprintf("user/packages/%v/%v/versions/%v", packageType, packageName, packageVersionID) + } + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// Restore a package version to a user. Passing the empty string for "user" will +// restore the version for the authenticated user. 
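A sketch of PackageGetAllVersions above; passing a nil *PackageListOptions keeps the API defaults, and packageType is the path parameter (for example "container"). The wrapper name is illustrative and the client is assumed to be authenticated:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// listPackageVersions prints the ID of every version of a user-owned package.
func listPackageVersions(ctx context.Context, gh *github.Client, user, packageType, name string) error {
	versions, _, err := gh.Users.PackageGetAllVersions(ctx, user, packageType, name, nil)
	if err != nil {
		return err
	}
	for _, v := range versions {
		fmt.Printf("version id: %d\n", v.GetID())
	}
	return nil
}
```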
+// +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-a-package-version-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/packages#restore-package-version-for-a-user +func (s *UsersService) PackageRestoreVersion(ctx context.Context, user, packageType, packageName string, packageVersionID int64) (*Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/packages/%v/%v/versions/%v/restore", user, packageType, packageName, packageVersionID) + } else { + u = fmt.Sprintf("user/packages/%v/%v/versions/%v/restore", packageType, packageName, packageVersionID) + } + + req, err := s.client.NewRequest("POST", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/users_projects.go b/vendor/github.com/google/go-github/v56/github/users_projects.go new file mode 100644 index 000000000..0cbd61f92 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_projects.go @@ -0,0 +1,68 @@ +// Copyright 2019 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// ListProjects lists the projects for the specified user. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#list-user-projects +func (s *UsersService) ListProjects(ctx context.Context, user string, opts *ProjectListOptions) ([]*Project, *Response, error) { + u := fmt.Sprintf("users/%v/projects", user) + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + var projects []*Project + resp, err := s.client.Do(ctx, req, &projects) + if err != nil { + return nil, resp, err + } + + return projects, resp, nil +} + +// CreateUserProjectOptions specifies the parameters to the UsersService.CreateProject method. +type CreateUserProjectOptions struct { + // The name of the project. (Required.) + Name string `json:"name"` + // The description of the project. (Optional.) + Body *string `json:"body,omitempty"` +} + +// CreateProject creates a GitHub Project for the current user. +// +// GitHub API docs: https://docs.github.com/en/rest/projects/projects#create-a-user-project +func (s *UsersService) CreateProject(ctx context.Context, opts *CreateUserProjectOptions) (*Project, *Response, error) { + u := "user/projects" + req, err := s.client.NewRequest("POST", u, opts) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeProjectsPreview) + + project := &Project{} + resp, err := s.client.Do(ctx, req, project) + if err != nil { + return nil, resp, err + } + + return project, resp, nil +} diff --git a/vendor/github.com/google/go-github/v56/github/users_ssh_signing_keys.go b/vendor/github.com/google/go-github/v56/github/users_ssh_signing_keys.go new file mode 100644 index 000000000..567623f88 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/users_ssh_signing_keys.go @@ -0,0 +1,108 @@ +// Copyright 2022 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package github + +import ( + "context" + "fmt" +) + +// SSHSigningKey represents a public SSH key used to sign git commits. +type SSHSigningKey struct { + ID *int64 `json:"id,omitempty"` + Key *string `json:"key,omitempty"` + Title *string `json:"title,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` +} + +func (k SSHSigningKey) String() string { + return Stringify(k) +} + +// ListSSHSigningKeys lists the SSH signing keys for a user. Passing an empty +// username string will fetch SSH signing keys for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-the-authenticated-user +// GitHub API docs: https://docs.github.com/en/rest/users/ssh-signing-keys#list-ssh-signing-keys-for-a-user +func (s *UsersService) ListSSHSigningKeys(ctx context.Context, user string, opts *ListOptions) ([]*SSHSigningKey, *Response, error) { + var u string + if user != "" { + u = fmt.Sprintf("users/%v/ssh_signing_keys", user) + } else { + u = "user/ssh_signing_keys" + } + u, err := addOptions(u, opts) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var keys []*SSHSigningKey + resp, err := s.client.Do(ctx, req, &keys) + if err != nil { + return nil, resp, err + } + + return keys, resp, nil +} + +// GetSSHSigningKey fetches a single SSH signing key for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/ssh-signing-keys#get-an-ssh-signing-key-for-the-authenticated-user +func (s *UsersService) GetSSHSigningKey(ctx context.Context, id int64) (*SSHSigningKey, *Response, error) { + u := fmt.Sprintf("user/ssh_signing_keys/%v", id) + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + key := new(SSHSigningKey) + resp, err := s.client.Do(ctx, req, key) + if err != nil { + return nil, resp, err + } + + return key, resp, nil +} + +// CreateSSHSigningKey adds a SSH signing key for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/ssh-signing-keys#create-a-ssh-signing-key-for-the-authenticated-user +func (s *UsersService) CreateSSHSigningKey(ctx context.Context, key *Key) (*SSHSigningKey, *Response, error) { + u := "user/ssh_signing_keys" + + req, err := s.client.NewRequest("POST", u, key) + if err != nil { + return nil, nil, err + } + + k := new(SSHSigningKey) + resp, err := s.client.Do(ctx, req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, nil +} + +// DeleteKey deletes a SSH signing key for the authenticated user. +// +// GitHub API docs: https://docs.github.com/en/rest/users/ssh-signing-keys#delete-an-ssh-signing-key-for-the-authenticated-user +func (s *UsersService) DeleteSSHSigningKey(ctx context.Context, id int64) (*Response, error) { + u := fmt.Sprintf("user/ssh_signing_keys/%v", id) + + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/v56/github/with_appengine.go b/vendor/github.com/google/go-github/v56/github/with_appengine.go new file mode 100644 index 000000000..9053ce105 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/with_appengine.go @@ -0,0 +1,21 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
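Rounding out the SSH signing key helpers in users_ssh_signing_keys.go above, a sketch of CreateSSHSigningKey, which reuses the Key request type from users_keys.go; only Title and Key are sent, the client is assumed to be authenticated, and the registerSigningKey wrapper is illustrative:

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/go-github/v56/github"
)

// registerSigningKey uploads a public SSH key to be used for commit signing.
func registerSigningKey(ctx context.Context, gh *github.Client, title, publicKey string) error {
	req := &github.Key{
		Title: github.String(title),
		Key:   github.String(publicKey),
	}
	created, _, err := gh.Users.CreateSSHSigningKey(ctx, req)
	if err != nil {
		return err
	}
	fmt.Printf("ssh signing key %d registered\n", created.GetID())
	return nil
}
```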
+ +//go:build appengine +// +build appengine + +// This file provides glue for making github work on App Engine. + +package github + +import ( + "context" + "net/http" +) + +func withContext(ctx context.Context, req *http.Request) *http.Request { + // No-op because App Engine adds context to a request differently. + return req +} diff --git a/vendor/github.com/google/go-github/v56/github/without_appengine.go b/vendor/github.com/google/go-github/v56/github/without_appengine.go new file mode 100644 index 000000000..0024ae415 --- /dev/null +++ b/vendor/github.com/google/go-github/v56/github/without_appengine.go @@ -0,0 +1,20 @@ +// Copyright 2017 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !appengine +// +build !appengine + +// This file provides glue for making github work without App Engine. + +package github + +import ( + "context" + "net/http" +) + +func withContext(ctx context.Context, req *http.Request) *http.Request { + return req.WithContext(ctx) +} diff --git a/vendor/github.com/google/uuid/.travis.yml b/vendor/github.com/google/uuid/.travis.yml deleted file mode 100644 index d8156a60b..000000000 --- a/vendor/github.com/google/uuid/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go - -go: - - 1.4.3 - - 1.5.3 - - tip - -script: - - go test -v ./... diff --git a/vendor/github.com/google/uuid/CHANGELOG.md b/vendor/github.com/google/uuid/CHANGELOG.md new file mode 100644 index 000000000..2bd78667a --- /dev/null +++ b/vendor/github.com/google/uuid/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## [1.3.1](https://github.com/google/uuid/compare/v1.3.0...v1.3.1) (2023-08-18) + + +### Bug Fixes + +* Use .EqualFold() to parse urn prefixed UUIDs ([#118](https://github.com/google/uuid/issues/118)) ([574e687](https://github.com/google/uuid/commit/574e6874943741fb99d41764c705173ada5293f0)) + +## Changelog diff --git a/vendor/github.com/google/uuid/CONTRIBUTING.md b/vendor/github.com/google/uuid/CONTRIBUTING.md index 04fdf09f1..556688872 100644 --- a/vendor/github.com/google/uuid/CONTRIBUTING.md +++ b/vendor/github.com/google/uuid/CONTRIBUTING.md @@ -2,6 +2,22 @@ We definitely welcome patches and contribution to this project! +### Tips + +Commits must be formatted according to the [Conventional Commits Specification](https://www.conventionalcommits.org). + +Always try to include a test case! If it is not possible or not necessary, +please explain why in the pull request description. + +### Releasing + +Commits that would precipitate a SemVer change, as desrcibed in the Conventional +Commits Specification, will trigger [`release-please`](https://github.com/google-github-actions/release-please-action) +to create a release candidate pull request. Once submitted, `release-please` +will create a release. + +For tips on how to work with `release-please`, see its documentation. 
+ ### Legal requirements In order to protect both you and ourselves, you will need to sign the diff --git a/vendor/github.com/google/uuid/README.md b/vendor/github.com/google/uuid/README.md index f765a46f9..3e9a61889 100644 --- a/vendor/github.com/google/uuid/README.md +++ b/vendor/github.com/google/uuid/README.md @@ -1,6 +1,6 @@ -# uuid ![build status](https://travis-ci.org/google/uuid.svg?branch=master) +# uuid The uuid package generates and inspects UUIDs based on -[RFC 4122](http://tools.ietf.org/html/rfc4122) +[RFC 4122](https://datatracker.ietf.org/doc/html/rfc4122) and DCE 1.1: Authentication and Security Services. This package is based on the github.com/pborman/uuid package (previously named @@ -9,10 +9,12 @@ a UUID is a 16 byte array rather than a byte slice. One loss due to this change is the ability to represent an invalid UUID (vs a NIL UUID). ###### Install -`go get github.com/google/uuid` +```sh +go get github.com/google/uuid +``` ###### Documentation -[![GoDoc](https://godoc.org/github.com/google/uuid?status.svg)](http://godoc.org/github.com/google/uuid) +[![Go Reference](https://pkg.go.dev/badge/github.com/google/uuid.svg)](https://pkg.go.dev/github.com/google/uuid) Full `go doc` style documentation for the package can be viewed online without installing this package by using the GoDoc site here: diff --git a/vendor/github.com/google/uuid/node_js.go b/vendor/github.com/google/uuid/node_js.go index 24b78edc9..b2a0bc871 100644 --- a/vendor/github.com/google/uuid/node_js.go +++ b/vendor/github.com/google/uuid/node_js.go @@ -7,6 +7,6 @@ package uuid // getHardwareInterface returns nil values for the JS version of the code. -// This remvoves the "net" dependency, because it is not used in the browser. +// This removes the "net" dependency, because it is not used in the browser. // Using the "net" library inflates the size of the transpiled JS code by 673k bytes. 
func getHardwareInterface(name string) (string, []byte) { return "", nil } diff --git a/vendor/github.com/google/uuid/uuid.go b/vendor/github.com/google/uuid/uuid.go index a57207aeb..a56138cc4 100644 --- a/vendor/github.com/google/uuid/uuid.go +++ b/vendor/github.com/google/uuid/uuid.go @@ -69,7 +69,7 @@ func Parse(s string) (UUID, error) { // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx case 36 + 9: - if strings.ToLower(s[:9]) != "urn:uuid:" { + if !strings.EqualFold(s[:9], "urn:uuid:") { return uuid, fmt.Errorf("invalid urn prefix: %q", s[:9]) } s = s[9:] @@ -101,7 +101,8 @@ func Parse(s string) (UUID, error) { 9, 11, 14, 16, 19, 21, - 24, 26, 28, 30, 32, 34} { + 24, 26, 28, 30, 32, 34, + } { v, ok := xtob(s[x], s[x+1]) if !ok { return uuid, errors.New("invalid UUID format") @@ -117,7 +118,7 @@ func ParseBytes(b []byte) (UUID, error) { switch len(b) { case 36: // xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx case 36 + 9: // urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - if !bytes.Equal(bytes.ToLower(b[:9]), []byte("urn:uuid:")) { + if !bytes.EqualFold(b[:9], []byte("urn:uuid:")) { return uuid, fmt.Errorf("invalid urn prefix: %q", b[:9]) } b = b[9:] @@ -145,7 +146,8 @@ func ParseBytes(b []byte) (UUID, error) { 9, 11, 14, 16, 19, 21, - 24, 26, 28, 30, 32, 34} { + 24, 26, 28, 30, 32, 34, + } { v, ok := xtob(b[x], b[x+1]) if !ok { return uuid, errors.New("invalid UUID format") diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/fieldmask.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/fieldmask.go index a03dd166b..19d9d37ff 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/fieldmask.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/fieldmask.go @@ -27,7 +27,7 @@ func FieldMaskFromRequestBody(r io.Reader, msg proto.Message) (*field_mask.Field var root interface{} if err := json.NewDecoder(r).Decode(&root); err != nil { - if err == io.EOF { + if errors.Is(err, io.EOF) { return fm, nil } return nil, err diff --git a/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/handler.go b/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/handler.go index 945f3a5eb..305b1458b 100644 --- a/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/handler.go +++ b/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/handler.go @@ -2,6 +2,7 @@ package runtime import ( "context" + "errors" "fmt" "io" "net/http" @@ -48,7 +49,7 @@ func ForwardResponseStream(ctx context.Context, mux *ServeMux, marshaler Marshal var wroteHeader bool for { resp, err := recv() - if err == io.EOF { + if errors.Is(err, io.EOF) { return } if err != nil { diff --git a/vendor/github.com/hashicorp/golang-lru/README.md b/vendor/github.com/hashicorp/golang-lru/README.md index 063bb1605..03bcfb5b7 100644 --- a/vendor/github.com/hashicorp/golang-lru/README.md +++ b/vendor/github.com/hashicorp/golang-lru/README.md @@ -1,25 +1,7 @@ golang-lru ========== -This provides the `lru` package which implements a fixed-size -thread safe LRU cache. It is based on the cache in Groupcache. - -Documentation -============= - -Full docs are available on [Godoc](https://pkg.go.dev/github.com/hashicorp/golang-lru) - -Example -======= - -Using the LRU is very simple: - -```go -l, _ := New(128) -for i := 0; i < 256; i++ { - l.Add(i, nil) -} -if l.Len() != 128 { - panic(fmt.Sprintf("bad len: %v", l.Len())) -} -``` +Please upgrade to github.com/hashicorp/golang-lru/v2 for all new code as v1 will +not be updated anymore. 
The v2 version supports generics and is faster; old code +can specify a specific tag, e.g. github.com/hashicorp/golang-lru/v1.0.2 for +backwards compatibility. diff --git a/vendor/github.com/ktrysmt/go-bitbucket/bitbucket.go b/vendor/github.com/ktrysmt/go-bitbucket/bitbucket.go index 8f13b1152..74fb07c2f 100644 --- a/vendor/github.com/ktrysmt/go-bitbucket/bitbucket.go +++ b/vendor/github.com/ktrysmt/go-bitbucket/bitbucket.go @@ -191,12 +191,18 @@ type RepositoryBlobOptions struct { Path string `json:"path"` } +type File struct { + Path string + Name string +} + // Based on https://developer.atlassian.com/bitbucket/api/2/reference/resource/repositories/%7Bworkspace%7D/%7Brepo_slug%7D/src#post type RepositoryBlobWriteOptions struct { Owner string `json:"owner"` RepoSlug string `json:"repo_slug"` FilePath string `json:"filepath"` FileName string `json:"filename"` + Files []File `json:"files"` Author string `json:"author"` Message string `json:"message"` Branch string `json:"branch"` @@ -455,6 +461,7 @@ type DownloadsOptions struct { RepoSlug string `json:"repo_slug"` FilePath string `json:"filepath"` FileName string `json:"filename"` + Files []File `json:"files"` } type PageRes struct { diff --git a/vendor/github.com/ktrysmt/go-bitbucket/client.go b/vendor/github.com/ktrysmt/go-bitbucket/client.go index 211341e60..2d8f6ddfc 100644 --- a/vendor/github.com/ktrysmt/go-bitbucket/client.go +++ b/vendor/github.com/ktrysmt/go-bitbucket/client.go @@ -279,28 +279,30 @@ func (c *Client) executePaginated(method string, urlStr string, text string, pag return result, nil } -func (c *Client) executeFileUpload(method string, urlStr string, filePath string, fileName string, fieldname string, params map[string]string) (interface{}, error) { - fileReader, err := os.Open(filePath) - if err != nil { - return nil, err - } - defer fileReader.Close() - +func (c *Client) executeFileUpload(method string, urlStr string, files []File, params map[string]string) (interface{}, error) { // Prepare a form that you will submit to that URL. 
var b bytes.Buffer w := multipart.NewWriter(&b) var fw io.Writer - if fw, err = w.CreateFormFile(fieldname, fileName); err != nil { - return nil, err - } + for _, file := range files { + fileReader, err := os.Open(file.Path) + if err != nil { + return nil, err + } + defer fileReader.Close() - if _, err = io.Copy(fw, fileReader); err != nil { - return nil, err + if fw, err = w.CreateFormFile(file.Name, file.Name); err != nil { + return nil, err + } + + if _, err = io.Copy(fw, fileReader); err != nil { + return nil, err + } } for key, value := range params { - err = w.WriteField(key, value) + err := w.WriteField(key, value) if err != nil { return nil, err } diff --git a/vendor/github.com/ktrysmt/go-bitbucket/downloads.go b/vendor/github.com/ktrysmt/go-bitbucket/downloads.go index b8b9939e0..468fc7ce3 100644 --- a/vendor/github.com/ktrysmt/go-bitbucket/downloads.go +++ b/vendor/github.com/ktrysmt/go-bitbucket/downloads.go @@ -1,12 +1,24 @@ package bitbucket +import "fmt" + type Downloads struct { c *Client } func (dl *Downloads) Create(do *DownloadsOptions) (interface{}, error) { urlStr := dl.c.requestUrl("/repositories/%s/%s/downloads", do.Owner, do.RepoSlug) - return dl.c.executeFileUpload("POST", urlStr, do.FilePath, do.FileName, "files", make(map[string]string)) + + if do.FileName != "" { + if len(do.Files) > 0 { + return nil, fmt.Errorf("can't specify both files and filename") + } + do.Files = []File{{ + Path: do.FileName, + Name: do.FileName, + }} + } + return dl.c.executeFileUpload("POST", urlStr, do.Files, make(map[string]string)) } func (dl *Downloads) List(do *DownloadsOptions) (interface{}, error) { diff --git a/vendor/github.com/ktrysmt/go-bitbucket/repository.go b/vendor/github.com/ktrysmt/go-bitbucket/repository.go index 44a85d9a7..757e5b0c2 100644 --- a/vendor/github.com/ktrysmt/go-bitbucket/repository.go +++ b/vendor/github.com/ktrysmt/go-bitbucket/repository.go @@ -387,9 +387,19 @@ func (r *Repository) WriteFileBlob(ro *RepositoryBlobWriteOptions) error { m["branch"] = ro.Branch } + if ro.FileName != "" { + if len(ro.Files) > 0 { + return fmt.Errorf("can't specify both files and filename") + } + ro.Files = []File{{ + Path: ro.FileName, + Name: ro.FileName, + }} + } + urlStr := r.c.requestUrl("/repositories/%s/%s/src", ro.Owner, ro.RepoSlug) - _, err := r.c.executeFileUpload("POST", urlStr, ro.FilePath, ro.FileName, ro.FileName, m) + _, err := r.c.executeFileUpload("POST", urlStr, ro.Files, m) return err } diff --git a/vendor/github.com/openzipkin/zipkin-go/.golangci.yml b/vendor/github.com/openzipkin/zipkin-go/.golangci.yml index 3d615b8b0..e990f027f 100644 --- a/vendor/github.com/openzipkin/zipkin-go/.golangci.yml +++ b/vendor/github.com/openzipkin/zipkin-go/.golangci.yml @@ -16,9 +16,8 @@ linters: - lll - misspell - nakedret - - structcheck - unparam - - varcheck + - unused linters-settings: dupl: diff --git a/vendor/github.com/openzipkin/zipkin-go/README.md b/vendor/github.com/openzipkin/zipkin-go/README.md index 68e7081e1..05000f80a 100644 --- a/vendor/github.com/openzipkin/zipkin-go/README.md +++ b/vendor/github.com/openzipkin/zipkin-go/README.md @@ -109,7 +109,7 @@ backend asynchronously. #### Kafka Reporter High performance Reporter transporting Spans to the Zipkin server using a Kafka Producer digesting JSON V2 Spans. The reporter uses the -[Sarama async producer](https://godoc.org/github.com/Shopify/sarama#AsyncProducer) +[Sarama async producer](https://pkg.go.dev/github.com/IBM/sarama#AsyncProducer) underneath. 
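To make the go-bitbucket change above concrete: RepositoryBlobWriteOptions (and DownloadsOptions) gain a Files slice, and the upload helper now opens each File.Path locally and uses File.Name as the multipart field name, with the old FilePath/FileName pair kept as a single-file shim. A hedged sketch of a multi-file commit via WriteFileBlob, assuming the usual c.Repositories.Repository accessor; credentials, workspace, and paths are placeholders:

```go
package main

import (
	"log"

	"github.com/ktrysmt/go-bitbucket"
)

func main() {
	// Placeholder credentials; Path is read from disk, Name becomes the
	// destination path sent to the /src endpoint's multipart form.
	c := bitbucket.NewBasicAuth("user", "app-password")

	err := c.Repositories.Repository.WriteFileBlob(&bitbucket.RepositoryBlobWriteOptions{
		Owner:    "my-workspace", // placeholder
		RepoSlug: "my-repo",      // placeholder
		Files: []bitbucket.File{
			{Path: "local/docs-a.md", Name: "docs/a.md"},
			{Path: "local/docs-b.md", Name: "docs/b.md"},
		},
		Message: "docs: add a and b",
		Branch:  "main",
	})
	if err != nil {
		log.Fatal(err)
	}
}
```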
## usage and examples diff --git a/vendor/github.com/prometheus/client_golang/prometheus/counter.go b/vendor/github.com/prometheus/client_golang/prometheus/counter.go index 62de4dc59..4ce84e7a8 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/counter.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/counter.go @@ -20,6 +20,7 @@ import ( "time" dto "github.com/prometheus/client_model/go" + "google.golang.org/protobuf/types/known/timestamppb" ) // Counter is a Metric that represents a single numerical value that only ever @@ -66,7 +67,7 @@ type CounterVecOpts struct { CounterOpts // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Contraint + // of labels. Each label value will be constrained with the optional Constraint // function, if provided. VariableLabels ConstrainableLabels } @@ -90,8 +91,12 @@ func NewCounter(opts CounterOpts) Counter { nil, opts.ConstLabels, ) - result := &counter{desc: desc, labelPairs: desc.constLabelPairs, now: time.Now} + if opts.now == nil { + opts.now = time.Now + } + result := &counter{desc: desc, labelPairs: desc.constLabelPairs, now: opts.now} result.init(result) // Init self-collection. + result.createdTs = timestamppb.New(opts.now()) return result } @@ -106,10 +111,12 @@ type counter struct { selfCollector desc *Desc + createdTs *timestamppb.Timestamp labelPairs []*dto.LabelPair exemplar atomic.Value // Containing nil or a *dto.Exemplar. - now func() time.Time // To mock out time.Now() for testing. + // now is for testing purposes, by default it's time.Now. + now func() time.Time } func (c *counter) Desc() *Desc { @@ -159,8 +166,7 @@ func (c *counter) Write(out *dto.Metric) error { exemplar = e.(*dto.Exemplar) } val := c.get() - - return populateMetric(CounterValue, val, c.labelPairs, exemplar, out) + return populateMetric(CounterValue, val, c.labelPairs, exemplar, out, c.createdTs) } func (c *counter) updateExemplar(v float64, l Labels) { @@ -200,13 +206,17 @@ func (v2) NewCounterVec(opts CounterVecOpts) *CounterVec { opts.VariableLabels, opts.ConstLabels, ) + if opts.now == nil { + opts.now = time.Now + } return &CounterVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - if len(lvs) != len(desc.variableLabels) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), lvs)) + if len(lvs) != len(desc.variableLabels.names) { + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, lvs)) } - result := &counter{desc: desc, labelPairs: MakeLabelPairs(desc, lvs), now: time.Now} + result := &counter{desc: desc, labelPairs: MakeLabelPairs(desc, lvs), now: opts.now} result.init(result) // Init self-collection. + result.createdTs = timestamppb.New(opts.now()) return result }), } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/desc.go b/vendor/github.com/prometheus/client_golang/prometheus/desc.go index deedc2dfb..68ffe3c24 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/desc.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/desc.go @@ -52,7 +52,7 @@ type Desc struct { constLabelPairs []*dto.LabelPair // variableLabels contains names of labels and normalization function for // which the metric maintains variable values. - variableLabels ConstrainedLabels + variableLabels *compiledLabels // id is a hash of the values of the ConstLabels and fqName. 
This // must be unique among all registered descriptors and can therefore be // used as an identifier of the descriptor. @@ -93,7 +93,7 @@ func (v2) NewDesc(fqName, help string, variableLabels ConstrainableLabels, const d := &Desc{ fqName: fqName, help: help, - variableLabels: variableLabels.constrainedLabels(), + variableLabels: variableLabels.compile(), } if !model.IsValidMetricName(model.LabelValue(fqName)) { d.err = fmt.Errorf("%q is not a valid metric name", fqName) @@ -103,7 +103,7 @@ func (v2) NewDesc(fqName, help string, variableLabels ConstrainableLabels, const // their sorted label names) plus the fqName (at position 0). labelValues := make([]string, 1, len(constLabels)+1) labelValues[0] = fqName - labelNames := make([]string, 0, len(constLabels)+len(d.variableLabels)) + labelNames := make([]string, 0, len(constLabels)+len(d.variableLabels.names)) labelNameSet := map[string]struct{}{} // First add only the const label names and sort them... for labelName := range constLabels { @@ -128,13 +128,13 @@ func (v2) NewDesc(fqName, help string, variableLabels ConstrainableLabels, const // Now add the variable label names, but prefix them with something that // cannot be in a regular label name. That prevents matching the label // dimension with a different mix between preset and variable labels. - for _, label := range d.variableLabels { - if !checkLabelName(label.Name) { - d.err = fmt.Errorf("%q is not a valid label name for metric %q", label.Name, fqName) + for _, label := range d.variableLabels.names { + if !checkLabelName(label) { + d.err = fmt.Errorf("%q is not a valid label name for metric %q", label, fqName) return d } - labelNames = append(labelNames, "$"+label.Name) - labelNameSet[label.Name] = struct{}{} + labelNames = append(labelNames, "$"+label) + labelNameSet[label] = struct{}{} } if len(labelNames) != len(labelNameSet) { d.err = fmt.Errorf("duplicate label names in constant and variable labels for metric %q", fqName) @@ -189,11 +189,19 @@ func (d *Desc) String() string { fmt.Sprintf("%s=%q", lp.GetName(), lp.GetValue()), ) } + vlStrings := make([]string, 0, len(d.variableLabels.names)) + for _, vl := range d.variableLabels.names { + if fn, ok := d.variableLabels.labelConstraints[vl]; ok && fn != nil { + vlStrings = append(vlStrings, fmt.Sprintf("c(%s)", vl)) + } else { + vlStrings = append(vlStrings, vl) + } + } return fmt.Sprintf( - "Desc{fqName: %q, help: %q, constLabels: {%s}, variableLabels: %v}", + "Desc{fqName: %q, help: %q, constLabels: {%s}, variableLabels: {%s}}", d.fqName, d.help, strings.Join(lpStrings, ","), - d.variableLabels, + strings.Join(vlStrings, ","), ) } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go index c41ab37f3..de5a85629 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go @@ -48,7 +48,7 @@ func (e *expvarCollector) Collect(ch chan<- Metric) { continue } var v interface{} - labels := make([]string, len(desc.variableLabels)) + labels := make([]string, len(desc.variableLabels.names)) if err := json.Unmarshal([]byte(expVar.String()), &v); err != nil { ch <- NewInvalidMetric(desc, err) continue diff --git a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go index f1ea6c76f..dd2eac940 100644 --- 
a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go @@ -62,7 +62,7 @@ type GaugeVecOpts struct { GaugeOpts // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Contraint + // of labels. Each label value will be constrained with the optional Constraint // function, if provided. VariableLabels ConstrainableLabels } @@ -135,7 +135,7 @@ func (g *gauge) Sub(val float64) { func (g *gauge) Write(out *dto.Metric) error { val := math.Float64frombits(atomic.LoadUint64(&g.valBits)) - return populateMetric(GaugeValue, val, g.labelPairs, nil, out) + return populateMetric(GaugeValue, val, g.labelPairs, nil, out, nil) } // GaugeVec is a Collector that bundles a set of Gauges that all share the same @@ -166,8 +166,8 @@ func (v2) NewGaugeVec(opts GaugeVecOpts) *GaugeVec { ) return &GaugeVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - if len(lvs) != len(desc.variableLabels) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), lvs)) + if len(lvs) != len(desc.variableLabels.names) { + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, lvs)) } result := &gauge{desc: desc, labelPairs: MakeLabelPairs(desc, lvs)} result.init(result) // Init self-collection. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go index 8d818afe9..1feba62c6 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go @@ -25,6 +25,7 @@ import ( dto "github.com/prometheus/client_model/go" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" ) // nativeHistogramBounds for the frac of observed values. Only relevant for @@ -391,7 +392,7 @@ type HistogramOpts struct { // zero, it is replaced by default buckets. The default buckets are // DefBuckets if no buckets for a native histogram (see below) are used, // otherwise the default is no buckets. (In other words, if you want to - // use both reguler buckets and buckets for a native histogram, you have + // use both regular buckets and buckets for a native histogram, you have // to define the regular buckets here explicitly.) Buckets []float64 @@ -413,8 +414,8 @@ type HistogramOpts struct { // and 2, same as between 2 and 4, and 4 and 8, etc.). // // Details about the actually used factor: The factor is calculated as - // 2^(2^n), where n is an integer number between (and including) -8 and - // 4. n is chosen so that the resulting factor is the largest that is + // 2^(2^-n), where n is an integer number between (and including) -4 and + // 8. n is chosen so that the resulting factor is the largest that is // still smaller or equal to NativeHistogramBucketFactor. Note that the // smallest possible factor is therefore approx. 1.00271 (i.e. 2^(2^-8) // ). If NativeHistogramBucketFactor is greater than 1 but smaller than @@ -428,12 +429,12 @@ type HistogramOpts struct { // a major version bump. NativeHistogramBucketFactor float64 // All observations with an absolute value of less or equal - // NativeHistogramZeroThreshold are accumulated into a “zero” - // bucket. For best results, this should be close to a bucket - // boundary. This is usually the case if picking a power of two. 
If + // NativeHistogramZeroThreshold are accumulated into a “zero” bucket. + // For best results, this should be close to a bucket boundary. This is + // usually the case if picking a power of two. If // NativeHistogramZeroThreshold is left at zero, - // DefNativeHistogramZeroThreshold is used as the threshold. To configure - // a zero bucket with an actual threshold of zero (i.e. only + // DefNativeHistogramZeroThreshold is used as the threshold. To + // configure a zero bucket with an actual threshold of zero (i.e. only // observations of precisely zero will go into the zero bucket), set // NativeHistogramZeroThreshold to the NativeHistogramZeroThresholdZero // constant (or any negative float value). @@ -446,26 +447,34 @@ type HistogramOpts struct { // Histogram are sufficiently wide-spread. In particular, this could be // used as a DoS attack vector. Where the observed values depend on // external inputs, it is highly recommended to set a - // NativeHistogramMaxBucketNumber.) Once the set + // NativeHistogramMaxBucketNumber.) Once the set // NativeHistogramMaxBucketNumber is exceeded, the following strategy is - // enacted: First, if the last reset (or the creation) of the histogram - // is at least NativeHistogramMinResetDuration ago, then the whole - // histogram is reset to its initial state (including regular - // buckets). If less time has passed, or if - // NativeHistogramMinResetDuration is zero, no reset is - // performed. Instead, the zero threshold is increased sufficiently to - // reduce the number of buckets to or below - // NativeHistogramMaxBucketNumber, but not to more than - // NativeHistogramMaxZeroThreshold. Thus, if - // NativeHistogramMaxZeroThreshold is already at or below the current - // zero threshold, nothing happens at this step. After that, if the - // number of buckets still exceeds NativeHistogramMaxBucketNumber, the - // resolution of the histogram is reduced by doubling the width of the - // sparse buckets (up to a growth factor between one bucket to the next - // of 2^(2^4) = 65536, see above). + // enacted: + // - First, if the last reset (or the creation) of the histogram is at + // least NativeHistogramMinResetDuration ago, then the whole + // histogram is reset to its initial state (including regular + // buckets). + // - If less time has passed, or if NativeHistogramMinResetDuration is + // zero, no reset is performed. Instead, the zero threshold is + // increased sufficiently to reduce the number of buckets to or below + // NativeHistogramMaxBucketNumber, but not to more than + // NativeHistogramMaxZeroThreshold. Thus, if + // NativeHistogramMaxZeroThreshold is already at or below the current + // zero threshold, nothing happens at this step. + // - After that, if the number of buckets still exceeds + // NativeHistogramMaxBucketNumber, the resolution of the histogram is + // reduced by doubling the width of the sparse buckets (up to a + // growth factor between one bucket to the next of 2^(2^4) = 65536, + // see above). + // - Any increased zero threshold or reduced resolution is reset back + // to their original values once NativeHistogramMinResetDuration has + // passed (since the last reset or the creation of the histogram). NativeHistogramMaxBucketNumber uint32 NativeHistogramMinResetDuration time.Duration NativeHistogramMaxZeroThreshold float64 + + // now is for testing purposes, by default it's time.Now. + now func() time.Time } // HistogramVecOpts bundles the options to create a HistogramVec metric. 
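The HistogramOpts documentation above describes the native-histogram knobs (bucket factor, max bucket number, min reset duration, zero threshold). A short sketch of wiring them together; the metric name and threshold values are illustrative only:

```go
package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Illustrative values: a bucket factor of 1.1 means roughly 10% growth per
	// bucket; the last three fields bound bucket growth using the strategy
	// described in the NativeHistogramMaxBucketNumber comment above.
	h := prometheus.NewHistogram(prometheus.HistogramOpts{
		Name:                            "request_duration_seconds", // placeholder name
		Help:                            "Request latency.",
		NativeHistogramBucketFactor:     1.1,
		NativeHistogramMaxBucketNumber:  100,
		NativeHistogramMinResetDuration: time.Hour,
		NativeHistogramMaxZeroThreshold: 0.001,
	})
	prometheus.MustRegister(h)
	h.Observe(0.42)
}
```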
@@ -475,7 +484,7 @@ type HistogramVecOpts struct { HistogramOpts // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Contraint + // of labels. Each label value will be constrained with the optional Constraint // function, if provided. VariableLabels ConstrainableLabels } @@ -499,12 +508,12 @@ func NewHistogram(opts HistogramOpts) Histogram { } func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogram { - if len(desc.variableLabels) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), labelValues)) + if len(desc.variableLabels.names) != len(labelValues) { + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, labelValues)) } - for _, n := range desc.variableLabels { - if n.Name == bucketLabel { + for _, n := range desc.variableLabels.names { + if n == bucketLabel { panic(errBucketLabelNotAllowed) } } @@ -514,6 +523,10 @@ func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogr } } + if opts.now == nil { + opts.now = time.Now + } + h := &histogram{ desc: desc, upperBounds: opts.Buckets, @@ -521,8 +534,8 @@ func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogr nativeHistogramMaxBuckets: opts.NativeHistogramMaxBucketNumber, nativeHistogramMaxZeroThreshold: opts.NativeHistogramMaxZeroThreshold, nativeHistogramMinResetDuration: opts.NativeHistogramMinResetDuration, - lastResetTime: time.Now(), - now: time.Now, + lastResetTime: opts.now(), + now: opts.now, } if len(h.upperBounds) == 0 && opts.NativeHistogramBucketFactor <= 1 { h.upperBounds = DefBuckets @@ -701,9 +714,11 @@ type histogram struct { nativeHistogramMaxZeroThreshold float64 nativeHistogramMaxBuckets uint32 nativeHistogramMinResetDuration time.Duration - lastResetTime time.Time // Protected by mtx. + // lastResetTime is protected by mtx. It is also used as created timestamp. + lastResetTime time.Time - now func() time.Time // To mock out time.Now() for testing. + // now is for testing purposes, by default it's time.Now. + now func() time.Time } func (h *histogram) Desc() *Desc { @@ -742,9 +757,10 @@ func (h *histogram) Write(out *dto.Metric) error { waitForCooldown(count, coldCounts) his := &dto.Histogram{ - Bucket: make([]*dto.Bucket, len(h.upperBounds)), - SampleCount: proto.Uint64(count), - SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), + Bucket: make([]*dto.Bucket, len(h.upperBounds)), + SampleCount: proto.Uint64(count), + SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), + CreatedTimestamp: timestamppb.New(h.lastResetTime), } out.Histogram = his out.Label = h.labelPairs @@ -782,6 +798,16 @@ func (h *histogram) Write(out *dto.Metric) error { his.ZeroCount = proto.Uint64(zeroBucket) his.NegativeSpan, his.NegativeDelta = makeBuckets(&coldCounts.nativeHistogramBucketsNegative) his.PositiveSpan, his.PositiveDelta = makeBuckets(&coldCounts.nativeHistogramBucketsPositive) + + // Add a no-op span to a histogram without observations and with + // a zero threshold of zero. Otherwise, a native histogram would + // look like a classic histogram to scrapers. 
+ if *his.ZeroThreshold == 0 && *his.ZeroCount == 0 && len(his.PositiveSpan) == 0 && len(his.NegativeSpan) == 0 { + his.PositiveSpan = []*dto.BucketSpan{{ + Offset: proto.Int32(0), + Length: proto.Uint32(0), + }} + } } addAndResetCounts(hotCounts, coldCounts) return nil @@ -854,20 +880,23 @@ func (h *histogram) limitBuckets(counts *histogramCounts, value float64, bucket h.doubleBucketWidth(hotCounts, coldCounts) } -// maybeReset resests the whole histogram if at least h.nativeHistogramMinResetDuration +// maybeReset resets the whole histogram if at least h.nativeHistogramMinResetDuration // has been passed. It returns true if the histogram has been reset. The caller // must have locked h.mtx. -func (h *histogram) maybeReset(hot, cold *histogramCounts, coldIdx uint64, value float64, bucket int) bool { +func (h *histogram) maybeReset( + hot, cold *histogramCounts, coldIdx uint64, value float64, bucket int, +) bool { // We are using the possibly mocked h.now() rather than // time.Since(h.lastResetTime) to enable testing. - if h.nativeHistogramMinResetDuration == 0 || h.now().Sub(h.lastResetTime) < h.nativeHistogramMinResetDuration { + if h.nativeHistogramMinResetDuration == 0 || + h.now().Sub(h.lastResetTime) < h.nativeHistogramMinResetDuration { return false } // Completely reset coldCounts. h.resetCounts(cold) // Repeat the latest observation to not lose it completely. cold.observe(value, bucket, true) - // Make coldCounts the new hot counts while ressetting countAndHotIdx. + // Make coldCounts the new hot counts while resetting countAndHotIdx. n := atomic.SwapUint64(&h.countAndHotIdx, (coldIdx<<63)+1) count := n & ((1 << 63) - 1) waitForCooldown(count, hot) @@ -1176,6 +1205,7 @@ type constHistogram struct { sum float64 buckets map[float64]uint64 labelPairs []*dto.LabelPair + createdTs *timestamppb.Timestamp } func (h *constHistogram) Desc() *Desc { @@ -1183,7 +1213,9 @@ func (h *constHistogram) Desc() *Desc { } func (h *constHistogram) Write(out *dto.Metric) error { - his := &dto.Histogram{} + his := &dto.Histogram{ + CreatedTimestamp: h.createdTs, + } buckets := make([]*dto.Bucket, 0, len(h.buckets)) @@ -1230,7 +1262,7 @@ func NewConstHistogram( if desc.err != nil { return nil, desc.err } - if err := validateLabelValues(labelValues, len(desc.variableLabels)); err != nil { + if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { return nil, err } return &constHistogram{ @@ -1324,7 +1356,7 @@ func makeBuckets(buckets *sync.Map) ([]*dto.BucketSpan, []int64) { // Multiple spans with only small gaps in between are probably // encoded more efficiently as one larger span with a few empty // buckets. Needs some research to find the sweet spot. For now, - // we assume that gaps of one ore two buckets should not create + // we assume that gaps of one or two buckets should not create // a new span. iDelta := int32(i - nextI) if n == 0 || iDelta > 2 { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go index fd0750f2c..a595a2036 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/internal/difflib.go @@ -14,7 +14,7 @@ // It provides tools to compare sequences of strings and generate textual diffs. // // Maintaining `GetUnifiedDiffString` here because original repository -// (https://github.com/pmezard/go-difflib) is no loger maintained. 
+// (https://github.com/pmezard/go-difflib) is no longer maintained. package internal import ( diff --git a/vendor/github.com/prometheus/client_golang/prometheus/labels.go b/vendor/github.com/prometheus/client_golang/prometheus/labels.go index 63ff8683c..b3c4eca2b 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/labels.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/labels.go @@ -32,19 +32,15 @@ import ( // create a Desc. type Labels map[string]string +// LabelConstraint normalizes label values. +type LabelConstraint func(string) string + // ConstrainedLabels represents a label name and its constrain function // to normalize label values. This type is commonly used when constructing // metric vector Collectors. type ConstrainedLabel struct { Name string - Constraint func(string) string -} - -func (cl ConstrainedLabel) Constrain(v string) string { - if cl.Constraint == nil { - return v - } - return cl.Constraint(v) + Constraint LabelConstraint } // ConstrainableLabels is an interface that allows creating of labels that can @@ -58,7 +54,7 @@ func (cl ConstrainedLabel) Constrain(v string) string { // }, // }) type ConstrainableLabels interface { - constrainedLabels() ConstrainedLabels + compile() *compiledLabels labelNames() []string } @@ -67,8 +63,20 @@ type ConstrainableLabels interface { // metric vector Collectors. type ConstrainedLabels []ConstrainedLabel -func (cls ConstrainedLabels) constrainedLabels() ConstrainedLabels { - return cls +func (cls ConstrainedLabels) compile() *compiledLabels { + compiled := &compiledLabels{ + names: make([]string, len(cls)), + labelConstraints: map[string]LabelConstraint{}, + } + + for i, label := range cls { + compiled.names[i] = label.Name + if label.Constraint != nil { + compiled.labelConstraints[label.Name] = label.Constraint + } + } + + return compiled } func (cls ConstrainedLabels) labelNames() []string { @@ -92,18 +100,36 @@ func (cls ConstrainedLabels) labelNames() []string { // } type UnconstrainedLabels []string -func (uls UnconstrainedLabels) constrainedLabels() ConstrainedLabels { - constrainedLabels := make([]ConstrainedLabel, len(uls)) - for i, l := range uls { - constrainedLabels[i] = ConstrainedLabel{Name: l} +func (uls UnconstrainedLabels) compile() *compiledLabels { + return &compiledLabels{ + names: uls, } - return constrainedLabels } func (uls UnconstrainedLabels) labelNames() []string { return uls } +type compiledLabels struct { + names []string + labelConstraints map[string]LabelConstraint +} + +func (cls *compiledLabels) compile() *compiledLabels { + return cls +} + +func (cls *compiledLabels) labelNames() []string { + return cls.names +} + +func (cls *compiledLabels) constrain(labelName, value string) string { + if fn, ok := cls.labelConstraints[labelName]; ok && fn != nil { + return fn(value) + } + return value +} + // reservedLabelPrefix is a prefix which is not legal in user-supplied // label names. const reservedLabelPrefix = "__" diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go index 07bbc9d76..f018e5723 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/metric.go @@ -92,6 +92,9 @@ type Opts struct { // machine_role metric). See also // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels ConstLabels Labels + + // now is for testing purposes, by default it's time.Now. 
+ now func() time.Time } // BuildFQName joins the given three name components by "_". Empty name diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go index 3793036ad..356edb786 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go @@ -389,15 +389,12 @@ func isLabelCurried(c prometheus.Collector, label string) bool { return true } -// emptyLabels is a one-time allocation for non-partitioned metrics to avoid -// unnecessary allocations on each request. -var emptyLabels = prometheus.Labels{} - func labels(code, method bool, reqMethod string, status int, extraMethods ...string) prometheus.Labels { + labels := prometheus.Labels{} + if !(code || method) { - return emptyLabels + return labels } - labels := prometheus.Labels{} if code { labels["code"] = sanitizeCode(status) diff --git a/vendor/github.com/prometheus/client_golang/prometheus/registry.go b/vendor/github.com/prometheus/client_golang/prometheus/registry.go index 44da9433b..5e2ced25a 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/registry.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/registry.go @@ -548,7 +548,7 @@ func (r *Registry) Gather() ([]*dto.MetricFamily, error) { goroutineBudget-- runtime.Gosched() } - // Once both checkedMetricChan and uncheckdMetricChan are closed + // Once both checkedMetricChan and uncheckedMetricChan are closed // and drained, the contraption above will nil out cmc and umc, // and then we can leave the collect loop here. if cmc == nil && umc == nil { @@ -963,9 +963,9 @@ func checkDescConsistency( // Is the desc consistent with the content of the metric? lpsFromDesc := make([]*dto.LabelPair, len(desc.constLabelPairs), len(dtoMetric.Label)) copy(lpsFromDesc, desc.constLabelPairs) - for _, l := range desc.variableLabels { + for _, l := range desc.variableLabels.names { lpsFromDesc = append(lpsFromDesc, &dto.LabelPair{ - Name: proto.String(l.Name), + Name: proto.String(l), }) } if len(lpsFromDesc) != len(dtoMetric.Label) { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/summary.go b/vendor/github.com/prometheus/client_golang/prometheus/summary.go index dd359264e..146270444 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/summary.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/summary.go @@ -26,6 +26,7 @@ import ( "github.com/beorn7/perks/quantile" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" ) // quantileLabel is used for the label that defines the quantile in a @@ -145,6 +146,9 @@ type SummaryOpts struct { // is the internal buffer size of the underlying package // "github.com/bmizerany/perks/quantile"). BufCap uint32 + + // now is for testing purposes, by default it's time.Now. + now func() time.Time } // SummaryVecOpts bundles the options to create a SummaryVec metric. @@ -154,7 +158,7 @@ type SummaryVecOpts struct { SummaryOpts // VariableLabels are used to partition the metric vector by the given set - // of labels. Each label value will be constrained with the optional Contraint + // of labels. Each label value will be constrained with the optional Constraint // function, if provided. 
VariableLabels ConstrainableLabels } @@ -188,12 +192,12 @@ func NewSummary(opts SummaryOpts) Summary { } func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { - if len(desc.variableLabels) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), labelValues)) + if len(desc.variableLabels.names) != len(labelValues) { + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.names, labelValues)) } - for _, n := range desc.variableLabels { - if n.Name == quantileLabel { + for _, n := range desc.variableLabels.names { + if n == quantileLabel { panic(errQuantileLabelNotAllowed) } } @@ -222,6 +226,9 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { opts.BufCap = DefBufCap } + if opts.now == nil { + opts.now = time.Now + } if len(opts.Objectives) == 0 { // Use the lock-free implementation of a Summary without objectives. s := &noObjectivesSummary{ @@ -230,6 +237,7 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { counts: [2]*summaryCounts{{}, {}}, } s.init(s) // Init self-collection. + s.createdTs = timestamppb.New(opts.now()) return s } @@ -245,7 +253,7 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { coldBuf: make([]float64, 0, opts.BufCap), streamDuration: opts.MaxAge / time.Duration(opts.AgeBuckets), } - s.headStreamExpTime = time.Now().Add(s.streamDuration) + s.headStreamExpTime = opts.now().Add(s.streamDuration) s.hotBufExpTime = s.headStreamExpTime for i := uint32(0); i < opts.AgeBuckets; i++ { @@ -259,6 +267,7 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { sort.Float64s(s.sortedObjectives) s.init(s) // Init self-collection. + s.createdTs = timestamppb.New(opts.now()) return s } @@ -286,6 +295,8 @@ type summary struct { headStream *quantile.Stream headStreamIdx int headStreamExpTime, hotBufExpTime time.Time + + createdTs *timestamppb.Timestamp } func (s *summary) Desc() *Desc { @@ -307,7 +318,9 @@ func (s *summary) Observe(v float64) { } func (s *summary) Write(out *dto.Metric) error { - sum := &dto.Summary{} + sum := &dto.Summary{ + CreatedTimestamp: s.createdTs, + } qs := make([]*dto.Quantile, 0, len(s.objectives)) s.bufMtx.Lock() @@ -440,6 +453,8 @@ type noObjectivesSummary struct { counts [2]*summaryCounts labelPairs []*dto.LabelPair + + createdTs *timestamppb.Timestamp } func (s *noObjectivesSummary) Desc() *Desc { @@ -490,8 +505,9 @@ func (s *noObjectivesSummary) Write(out *dto.Metric) error { } sum := &dto.Summary{ - SampleCount: proto.Uint64(count), - SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), + SampleCount: proto.Uint64(count), + SampleSum: proto.Float64(math.Float64frombits(atomic.LoadUint64(&coldCounts.sumBits))), + CreatedTimestamp: s.createdTs, } out.Summary = sum @@ -681,6 +697,7 @@ type constSummary struct { sum float64 quantiles map[float64]float64 labelPairs []*dto.LabelPair + createdTs *timestamppb.Timestamp } func (s *constSummary) Desc() *Desc { @@ -688,7 +705,9 @@ func (s *constSummary) Desc() *Desc { } func (s *constSummary) Write(out *dto.Metric) error { - sum := &dto.Summary{} + sum := &dto.Summary{ + CreatedTimestamp: s.createdTs, + } qs := make([]*dto.Quantile, 0, len(s.quantiles)) sum.SampleCount = proto.Uint64(s.count) @@ -737,7 +756,7 @@ func NewConstSummary( if desc.err != nil { return nil, desc.err } - if err := validateLabelValues(labelValues, len(desc.variableLabels)); err != nil { + if err 
:= validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { return nil, err } return &constSummary{ diff --git a/vendor/github.com/prometheus/client_golang/prometheus/value.go b/vendor/github.com/prometheus/client_golang/prometheus/value.go index 5f6bb8001..cc23011fa 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/value.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/value.go @@ -14,6 +14,7 @@ package prometheus import ( + "errors" "fmt" "sort" "time" @@ -91,7 +92,7 @@ func (v *valueFunc) Desc() *Desc { } func (v *valueFunc) Write(out *dto.Metric) error { - return populateMetric(v.valType, v.function(), v.labelPairs, nil, out) + return populateMetric(v.valType, v.function(), v.labelPairs, nil, out, nil) } // NewConstMetric returns a metric with one fixed value that cannot be @@ -105,12 +106,12 @@ func NewConstMetric(desc *Desc, valueType ValueType, value float64, labelValues if desc.err != nil { return nil, desc.err } - if err := validateLabelValues(labelValues, len(desc.variableLabels)); err != nil { + if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { return nil, err } metric := &dto.Metric{} - if err := populateMetric(valueType, value, MakeLabelPairs(desc, labelValues), nil, metric); err != nil { + if err := populateMetric(valueType, value, MakeLabelPairs(desc, labelValues), nil, metric, nil); err != nil { return nil, err } @@ -130,6 +131,43 @@ func MustNewConstMetric(desc *Desc, valueType ValueType, value float64, labelVal return m } +// NewConstMetricWithCreatedTimestamp does the same thing as NewConstMetric, but generates Counters +// with created timestamp set and returns an error for other metric types. +func NewConstMetricWithCreatedTimestamp(desc *Desc, valueType ValueType, value float64, ct time.Time, labelValues ...string) (Metric, error) { + if desc.err != nil { + return nil, desc.err + } + if err := validateLabelValues(labelValues, len(desc.variableLabels.names)); err != nil { + return nil, err + } + switch valueType { + case CounterValue: + break + default: + return nil, errors.New("created timestamps are only supported for counters") + } + + metric := &dto.Metric{} + if err := populateMetric(valueType, value, MakeLabelPairs(desc, labelValues), nil, metric, timestamppb.New(ct)); err != nil { + return nil, err + } + + return &constMetric{ + desc: desc, + metric: metric, + }, nil +} + +// MustNewConstMetricWithCreatedTimestamp is a version of NewConstMetricWithCreatedTimestamp that panics where +// NewConstMetricWithCreatedTimestamp would have returned an error. +func MustNewConstMetricWithCreatedTimestamp(desc *Desc, valueType ValueType, value float64, ct time.Time, labelValues ...string) Metric { + m, err := NewConstMetricWithCreatedTimestamp(desc, valueType, value, ct, labelValues...) + if err != nil { + panic(err) + } + return m +} + type constMetric struct { desc *Desc metric *dto.Metric @@ -153,11 +191,12 @@ func populateMetric( labelPairs []*dto.LabelPair, e *dto.Exemplar, m *dto.Metric, + ct *timestamppb.Timestamp, ) error { m.Label = labelPairs switch t { case CounterValue: - m.Counter = &dto.Counter{Value: proto.Float64(v), Exemplar: e} + m.Counter = &dto.Counter{Value: proto.Float64(v), Exemplar: e, CreatedTimestamp: ct} case GaugeValue: m.Gauge = &dto.Gauge{Value: proto.Float64(v)} case UntypedValue: @@ -176,19 +215,19 @@ func populateMetric( // This function is only needed for custom Metric implementations. See MetricVec // example. 
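The value.go hunk above adds NewConstMetricWithCreatedTimestamp and MustNewConstMetricWithCreatedTimestamp, which accept only CounterValue and thread the created timestamp through populateMetric. A hedged usage sketch (metric name, label, and timestamp are made up):

```go
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Hypothetical counter exposed by a custom collector: value 42, with a
	// created timestamp one hour in the past. Any value type other than
	// CounterValue would make the constructor return an error.
	desc := prometheus.NewDesc("jobs_processed_total", "Jobs processed.", []string{"queue"}, nil)
	m := prometheus.MustNewConstMetricWithCreatedTimestamp(
		desc, prometheus.CounterValue, 42, time.Now().Add(-time.Hour), "default",
	)
	fmt.Println(m.Desc())
}
```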
func MakeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair { - totalLen := len(desc.variableLabels) + len(desc.constLabelPairs) + totalLen := len(desc.variableLabels.names) + len(desc.constLabelPairs) if totalLen == 0 { // Super fast path. return nil } - if len(desc.variableLabels) == 0 { + if len(desc.variableLabels.names) == 0 { // Moderately fast path. return desc.constLabelPairs } labelPairs := make([]*dto.LabelPair, 0, totalLen) - for i, l := range desc.variableLabels { + for i, l := range desc.variableLabels.names { labelPairs = append(labelPairs, &dto.LabelPair{ - Name: proto.String(l.Name), + Name: proto.String(l), Value: proto.String(labelValues[i]), }) } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go index f0d0015a0..955cfd59f 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/vec.go @@ -20,24 +20,6 @@ import ( "github.com/prometheus/common/model" ) -var labelsPool = &sync.Pool{ - New: func() interface{} { - return make(Labels) - }, -} - -func getLabelsFromPool() Labels { - return labelsPool.Get().(Labels) -} - -func putLabelsToPool(labels Labels) { - for k := range labels { - delete(labels, k) - } - - labelsPool.Put(labels) -} - // MetricVec is a Collector to bundle metrics of the same name that differ in // their label values. MetricVec is not used directly but as a building block // for implementations of vectors of a given metric type, like GaugeVec, @@ -91,6 +73,7 @@ func NewMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *MetricVec { // See also the CounterVec example. func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { lvs = constrainLabelValues(m.desc, lvs, m.curry) + h, err := m.hashLabelValues(lvs) if err != nil { return false @@ -110,8 +93,8 @@ func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { // This method is used for the same purpose as DeleteLabelValues(...string). See // there for pros and cons of the two methods. func (m *MetricVec) Delete(labels Labels) bool { - labels = constrainLabels(m.desc, labels) - defer putLabelsToPool(labels) + labels, closer := constrainLabels(m.desc, labels) + defer closer() h, err := m.hashLabels(labels) if err != nil { @@ -128,8 +111,8 @@ func (m *MetricVec) Delete(labels Labels) bool { // Note that curried labels will never be matched if deleting from the curried vector. // To match curried labels with DeletePartialMatch, it must be called on the base vector. func (m *MetricVec) DeletePartialMatch(labels Labels) int { - labels = constrainLabels(m.desc, labels) - defer putLabelsToPool(labels) + labels, closer := constrainLabels(m.desc, labels) + defer closer() return m.metricMap.deleteByLabels(labels, m.curry) } @@ -169,11 +152,11 @@ func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { oldCurry = m.curry iCurry int ) - for i, label := range m.desc.variableLabels { - val, ok := labels[label.Name] + for i, labelName := range m.desc.variableLabels.names { + val, ok := labels[labelName] if iCurry < len(oldCurry) && oldCurry[iCurry].index == i { if ok { - return nil, fmt.Errorf("label name %q is already curried", label.Name) + return nil, fmt.Errorf("label name %q is already curried", labelName) } newCurry = append(newCurry, oldCurry[iCurry]) iCurry++ @@ -181,7 +164,10 @@ func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { if !ok { continue // Label stays uncurried. 
} - newCurry = append(newCurry, curriedLabelValue{i, label.Constrain(val)}) + newCurry = append(newCurry, curriedLabelValue{ + i, + m.desc.variableLabels.constrain(labelName, val), + }) } } if l := len(oldCurry) + len(labels) - len(newCurry); l > 0 { @@ -250,8 +236,8 @@ func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { // around MetricVec, implementing a vector for a specific Metric implementation, // for example GaugeVec. func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { - labels = constrainLabels(m.desc, labels) - defer putLabelsToPool(labels) + labels, closer := constrainLabels(m.desc, labels) + defer closer() h, err := m.hashLabels(labels) if err != nil { @@ -262,7 +248,7 @@ func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { } func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { - if err := validateLabelValues(vals, len(m.desc.variableLabels)-len(m.curry)); err != nil { + if err := validateLabelValues(vals, len(m.desc.variableLabels.names)-len(m.curry)); err != nil { return 0, err } @@ -271,7 +257,7 @@ func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { curry = m.curry iVals, iCurry int ) - for i := 0; i < len(m.desc.variableLabels); i++ { + for i := 0; i < len(m.desc.variableLabels.names); i++ { if iCurry < len(curry) && curry[iCurry].index == i { h = m.hashAdd(h, curry[iCurry].value) iCurry++ @@ -285,7 +271,7 @@ func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { } func (m *MetricVec) hashLabels(labels Labels) (uint64, error) { - if err := validateValuesInLabels(labels, len(m.desc.variableLabels)-len(m.curry)); err != nil { + if err := validateValuesInLabels(labels, len(m.desc.variableLabels.names)-len(m.curry)); err != nil { return 0, err } @@ -294,17 +280,17 @@ func (m *MetricVec) hashLabels(labels Labels) (uint64, error) { curry = m.curry iCurry int ) - for i, label := range m.desc.variableLabels { - val, ok := labels[label.Name] + for i, labelName := range m.desc.variableLabels.names { + val, ok := labels[labelName] if iCurry < len(curry) && curry[iCurry].index == i { if ok { - return 0, fmt.Errorf("label name %q is already curried", label.Name) + return 0, fmt.Errorf("label name %q is already curried", labelName) } h = m.hashAdd(h, curry[iCurry].value) iCurry++ } else { if !ok { - return 0, fmt.Errorf("label name %q missing in label map", label.Name) + return 0, fmt.Errorf("label name %q missing in label map", labelName) } h = m.hashAdd(h, val) } @@ -482,7 +468,7 @@ func valueMatchesVariableOrCurriedValue(targetValue string, index int, values [] func matchPartialLabels(desc *Desc, values []string, labels Labels, curry []curriedLabelValue) bool { for l, v := range labels { // Check if the target label exists in our metrics and get the index. - varLabelIndex, validLabel := indexOf(l, desc.variableLabels.labelNames()) + varLabelIndex, validLabel := indexOf(l, desc.variableLabels.names) if validLabel { // Check the value of that label against the target value. // We don't consider curried values in partial matches. 
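The vec.go changes above replace the ConstrainedLabels slice walk with desc.variableLabels.names plus a labelConstraints map, which is what CurryWith and the hashing paths now consult. A small sketch of currying through the public API, whose shape is unchanged by this refactor (names are illustrative):

```go
package main

import "github.com/prometheus/client_golang/prometheus"

func main() {
	// Two variable labels; "code" is fixed by currying, "handler" is supplied
	// per call. The refactor above only changes the internal label bookkeeping.
	reqs := prometheus.NewCounterVec(prometheus.CounterOpts{
		Name: "http_requests_total", // placeholder name
		Help: "HTTP requests.",
	}, []string{"handler", "code"})
	prometheus.MustRegister(reqs)

	okOnly := reqs.MustCurryWith(prometheus.Labels{"code": "200"})
	okOnly.WithLabelValues("/healthz").Inc()
}
```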
@@ -626,7 +612,7 @@ func matchLabels(desc *Desc, values []string, labels Labels, curry []curriedLabe return false } iCurry := 0 - for i, k := range desc.variableLabels { + for i, k := range desc.variableLabels.names { if iCurry < len(curry) && curry[iCurry].index == i { if values[i] != curry[iCurry].value { return false @@ -634,7 +620,7 @@ func matchLabels(desc *Desc, values []string, labels Labels, curry []curriedLabe iCurry++ continue } - if values[i] != labels[k.Name] { + if values[i] != labels[k] { return false } } @@ -644,13 +630,13 @@ func matchLabels(desc *Desc, values []string, labels Labels, curry []curriedLabe func extractLabelValues(desc *Desc, labels Labels, curry []curriedLabelValue) []string { labelValues := make([]string, len(labels)+len(curry)) iCurry := 0 - for i, k := range desc.variableLabels { + for i, k := range desc.variableLabels.names { if iCurry < len(curry) && curry[iCurry].index == i { labelValues[i] = curry[iCurry].value iCurry++ continue } - labelValues[i] = labels[k.Name] + labelValues[i] = labels[k] } return labelValues } @@ -670,20 +656,37 @@ func inlineLabelValues(lvs []string, curry []curriedLabelValue) []string { return labelValues } -func constrainLabels(desc *Desc, labels Labels) Labels { - constrainedLabels := getLabelsFromPool() - for l, v := range labels { - if i, ok := indexOf(l, desc.variableLabels.labelNames()); ok { - v = desc.variableLabels[i].Constrain(v) - } +var labelsPool = &sync.Pool{ + New: func() interface{} { + return make(Labels) + }, +} - constrainedLabels[l] = v +func constrainLabels(desc *Desc, labels Labels) (Labels, func()) { + if len(desc.variableLabels.labelConstraints) == 0 { + // Fast path when there's no constraints + return labels, func() {} } - return constrainedLabels + constrainedLabels := labelsPool.Get().(Labels) + for l, v := range labels { + constrainedLabels[l] = desc.variableLabels.constrain(l, v) + } + + return constrainedLabels, func() { + for k := range constrainedLabels { + delete(constrainedLabels, k) + } + labelsPool.Put(constrainedLabels) + } } func constrainLabelValues(desc *Desc, lvs []string, curry []curriedLabelValue) []string { + if len(desc.variableLabels.labelConstraints) == 0 { + // Fast path when there's no constraints + return lvs + } + constrainedValues := make([]string, len(lvs)) var iCurry, iLVs int for i := 0; i < len(lvs)+len(curry); i++ { @@ -692,8 +695,11 @@ func constrainLabelValues(desc *Desc, lvs []string, curry []curriedLabelValue) [ continue } - if i < len(desc.variableLabels) { - constrainedValues[iLVs] = desc.variableLabels[i].Constrain(lvs[iLVs]) + if i < len(desc.variableLabels.names) { + constrainedValues[iLVs] = desc.variableLabels.constrain( + desc.variableLabels.names[i], + lvs[iLVs], + ) } else { constrainedValues[iLVs] = lvs[iLVs] } diff --git a/vendor/github.com/prometheus/client_model/go/metrics.pb.go b/vendor/github.com/prometheus/client_model/go/metrics.pb.go index 2b5bca4b9..cee360db7 100644 --- a/vendor/github.com/prometheus/client_model/go/metrics.pb.go +++ b/vendor/github.com/prometheus/client_model/go/metrics.pb.go @@ -215,8 +215,9 @@ type Counter struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` - Exemplar *Exemplar `protobuf:"bytes,2,opt,name=exemplar" json:"exemplar,omitempty"` + Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + Exemplar *Exemplar `protobuf:"bytes,2,opt,name=exemplar" json:"exemplar,omitempty"` + CreatedTimestamp 
*timestamppb.Timestamp `protobuf:"bytes,3,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` } func (x *Counter) Reset() { @@ -265,6 +266,13 @@ func (x *Counter) GetExemplar() *Exemplar { return nil } +func (x *Counter) GetCreatedTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.CreatedTimestamp + } + return nil +} + type Quantile struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -325,9 +333,10 @@ type Summary struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` - SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` - Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile" json:"quantile,omitempty"` + SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` + SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` + Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile" json:"quantile,omitempty"` + CreatedTimestamp *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` } func (x *Summary) Reset() { @@ -383,6 +392,13 @@ func (x *Summary) GetQuantile() []*Quantile { return nil } +func (x *Summary) GetCreatedTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.CreatedTimestamp + } + return nil +} + type Untyped struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -439,7 +455,8 @@ type Histogram struct { SampleCountFloat *float64 `protobuf:"fixed64,4,opt,name=sample_count_float,json=sampleCountFloat" json:"sample_count_float,omitempty"` // Overrides sample_count if > 0. SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` // Buckets for the conventional histogram. - Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket" json:"bucket,omitempty"` // Ordered in increasing order of upper_bound, +Inf bucket is optional. + Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket" json:"bucket,omitempty"` // Ordered in increasing order of upper_bound, +Inf bucket is optional. + CreatedTimestamp *timestamppb.Timestamp `protobuf:"bytes,15,opt,name=created_timestamp,json=createdTimestamp" json:"created_timestamp,omitempty"` // schema defines the bucket schema. Currently, valid numbers are -4 <= n <= 8. // They are all for base-2 bucket schemas, where 1 is a bucket boundary in each case, and // then each power of two is divided into 2^n logarithmic buckets. @@ -457,6 +474,9 @@ type Histogram struct { NegativeDelta []int64 `protobuf:"zigzag64,10,rep,name=negative_delta,json=negativeDelta" json:"negative_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket). NegativeCount []float64 `protobuf:"fixed64,11,rep,name=negative_count,json=negativeCount" json:"negative_count,omitempty"` // Absolute count of each bucket. // Positive buckets for the native histogram. + // Use a no-op span (offset 0, length 0) for a native histogram without any + // observations yet and with a zero_threshold of 0. Otherwise, it would be + // indistinguishable from a classic histogram. 
PositiveSpan []*BucketSpan `protobuf:"bytes,12,rep,name=positive_span,json=positiveSpan" json:"positive_span,omitempty"` // Use either "positive_delta" or "positive_count", the former for // regular histograms with integer counts, the latter for float @@ -525,6 +545,13 @@ func (x *Histogram) GetBucket() []*Bucket { return nil } +func (x *Histogram) GetCreatedTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.CreatedTimestamp + } + return nil +} + func (x *Histogram) GetSchema() int32 { if x != nil && x.Schema != nil { return *x.Schema @@ -972,137 +999,151 @@ var file_io_prometheus_client_metrics_proto_rawDesc = []byte{ 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x1d, 0x0a, 0x05, 0x47, 0x61, 0x75, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x22, 0x5b, 0x0a, 0x07, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, - 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x22, 0x3c, - 0x0a, 0x08, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, - 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x71, 0x75, - 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x87, 0x01, 0x0a, - 0x07, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, + 0x65, 0x22, 0xa4, 0x01, 0x0a, 0x07, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, + 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, + 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x3c, 0x0a, 0x08, 0x51, 0x75, 0x61, 0x6e, + 0x74, 0x69, 0x6c, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xd0, 0x01, 0x0a, 0x07, 0x53, 0x75, 0x6d, 0x6d, 0x61, + 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 
0x5f, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, + 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, 0x61, 0x6d, 0x70, 0x6c, + 0x65, 0x53, 0x75, 0x6d, 0x12, 0x3a, 0x0a, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, + 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x51, 0x75, + 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x52, 0x08, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, + 0x12, 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x1f, 0x0a, 0x07, 0x55, 0x6e, 0x74, + 0x79, 0x70, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xac, 0x05, 0x0a, 0x09, 0x48, + 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, - 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, - 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, - 0x09, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x3a, 0x0a, 0x08, 0x71, 0x75, - 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, + 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x73, + 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x10, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, + 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, + 0x70, 0x6c, 0x65, 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, + 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x34, 0x0a, 0x06, 0x62, 0x75, 0x63, 0x6b, + 0x65, 0x74, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, + 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, + 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x47, + 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, + 0x25, 0x0a, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, + 
0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x7a, 0x65, 0x72, 0x6f, 0x54, 0x68, 0x72, + 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x7a, 0x65, 0x72, 0x6f, + 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x01, 0x52, + 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, + 0x45, 0x0a, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x73, 0x70, 0x61, 0x6e, + 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, + 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, + 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, + 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, + 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x12, 0x52, 0x0d, + 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x12, 0x25, 0x0a, + 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x0b, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, + 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, + 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, + 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, + 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0d, 0x20, + 0x03, 0x28, 0x12, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, + 0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xc6, 0x01, 0x0a, 0x06, 0x42, 0x75, + 0x63, 0x6b, 0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, + 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, + 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, + 0x34, 0x0a, 0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, + 0x14, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, 0x65, 0x72, 0x5f, 0x62, + 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x75, 0x70, 0x70, 0x65, + 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, + 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, + 0x45, 0x78, 0x65, 0x6d, 
0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x72, 0x22, 0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, + 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x11, + 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x22, 0x91, 0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, 0x35, 0x0a, + 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x2e, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x52, 0x08, 0x71, 0x75, - 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x22, 0x1f, 0x0a, 0x07, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, - 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xe3, 0x04, 0x0a, 0x09, 0x48, 0x69, 0x73, 0x74, - 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, - 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x73, 0x61, 0x6d, - 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x61, 0x6d, 0x70, - 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x01, 0x52, 0x10, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, - 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, - 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, 0x61, 0x6d, 0x70, - 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x34, 0x0a, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x18, - 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, - 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, - 0x6b, 0x65, 0x74, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x74, 0x68, 0x72, 0x65, - 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x7a, 0x65, 0x72, - 0x6f, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x7a, 0x65, - 0x72, 0x6f, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, - 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x7a, 0x65, 0x72, - 0x6f, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x01, 0x52, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, - 0x73, 0x70, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, + 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, + 0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, + 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, + 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, + 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x47, 0x61, 0x75, + 0x67, 0x65, 0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, - 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x6e, 0x65, - 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, - 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x03, - 0x28, 0x12, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, 0x74, - 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, - 0x75, 0x6e, 0x74, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, - 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x69, - 0x74, 0x69, 0x76, 0x65, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, - 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, - 0x6e, 0x52, 0x0c, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, - 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, - 0x61, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x12, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, - 0x65, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, - 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, - 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xc6, 0x01, - 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, - 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x04, 0x52, 0x0f, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, - 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, - 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x01, 0x52, 0x14, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, - 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x01, 0x52, 0x0a, - 0x75, 0x70, 0x70, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, + 0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x65, 0x72, 0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, + 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x75, 0x6d, 0x6d, 0x61, + 0x72, 0x79, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x37, 0x0a, 0x07, 0x75, + 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x22, 0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, - 0x53, 0x70, 0x61, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x22, 0x91, 0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x72, 0x12, 0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, - 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, - 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, - 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x12, 0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, - 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, - 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, - 0x75, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, - 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, - 0x2e, 0x47, 0x61, 0x75, 0x67, 0x65, 0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, - 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, - 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, - 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, - 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, - 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, - 
0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, - 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, - 0x37, 0x0a, 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, - 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, - 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, - 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, - 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x74, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4d, 0x73, 0x22, 0xa2, 0x01, 0x0a, 0x0c, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, - 0x12, 0x0a, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, - 0x65, 0x6c, 0x70, 0x12, 0x34, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, - 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, - 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, - 0x72, 0x69, 0x63, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, - 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, - 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2a, - 0x62, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, - 0x07, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, - 0x55, 0x47, 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 0x52, 0x59, - 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, - 0x0d, 0x0a, 0x09, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, - 0x0a, 0x0f, 0x47, 0x41, 0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, - 0x4d, 0x10, 0x05, 0x42, 0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, - 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, - 0x75, 0x73, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, - 0x67, 0x6f, 0x3b, 0x69, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, - 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, 0x07, 0x75, 0x6e, 0x74, + 0x79, 0x70, 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, + 0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 
0x70, 0x72, 0x6f, + 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x48, + 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, + 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x5f, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x4d, 0x73, 0x22, 0xa2, 0x01, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x72, 0x69, + 0x63, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, + 0x65, 0x6c, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x12, + 0x34, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, + 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, + 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, + 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2a, 0x62, 0x0a, 0x0a, 0x4d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x4f, 0x55, + 0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, 0x55, 0x47, 0x45, 0x10, + 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 0x52, 0x59, 0x10, 0x02, 0x12, 0x0b, + 0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x48, + 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, 0x47, 0x41, + 0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x05, 0x42, + 0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, + 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2f, 0x63, + 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x67, 0x6f, 0x3b, 0x69, + 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x5f, 0x63, 0x6c, 0x69, + 0x65, 0x6e, 0x74, } var ( @@ -1137,26 +1178,29 @@ var file_io_prometheus_client_metrics_proto_goTypes = []interface{}{ } var file_io_prometheus_client_metrics_proto_depIdxs = []int32{ 10, // 0: io.prometheus.client.Counter.exemplar:type_name -> io.prometheus.client.Exemplar - 4, // 1: io.prometheus.client.Summary.quantile:type_name -> io.prometheus.client.Quantile - 8, // 2: io.prometheus.client.Histogram.bucket:type_name -> io.prometheus.client.Bucket - 9, // 3: io.prometheus.client.Histogram.negative_span:type_name -> io.prometheus.client.BucketSpan - 9, // 4: io.prometheus.client.Histogram.positive_span:type_name -> io.prometheus.client.BucketSpan - 10, // 5: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar - 1, // 6: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair - 13, // 7: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp - 1, // 8: 
io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair - 2, // 9: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge - 3, // 10: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter - 5, // 11: io.prometheus.client.Metric.summary:type_name -> io.prometheus.client.Summary - 6, // 12: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped - 7, // 13: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram - 0, // 14: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType - 11, // 15: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric - 16, // [16:16] is the sub-list for method output_type - 16, // [16:16] is the sub-list for method input_type - 16, // [16:16] is the sub-list for extension type_name - 16, // [16:16] is the sub-list for extension extendee - 0, // [0:16] is the sub-list for field type_name + 13, // 1: io.prometheus.client.Counter.created_timestamp:type_name -> google.protobuf.Timestamp + 4, // 2: io.prometheus.client.Summary.quantile:type_name -> io.prometheus.client.Quantile + 13, // 3: io.prometheus.client.Summary.created_timestamp:type_name -> google.protobuf.Timestamp + 8, // 4: io.prometheus.client.Histogram.bucket:type_name -> io.prometheus.client.Bucket + 13, // 5: io.prometheus.client.Histogram.created_timestamp:type_name -> google.protobuf.Timestamp + 9, // 6: io.prometheus.client.Histogram.negative_span:type_name -> io.prometheus.client.BucketSpan + 9, // 7: io.prometheus.client.Histogram.positive_span:type_name -> io.prometheus.client.BucketSpan + 10, // 8: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar + 1, // 9: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair + 13, // 10: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp + 1, // 11: io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair + 2, // 12: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge + 3, // 13: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter + 5, // 14: io.prometheus.client.Metric.summary:type_name -> io.prometheus.client.Summary + 6, // 15: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped + 7, // 16: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram + 0, // 17: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType + 11, // 18: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric + 19, // [19:19] is the sub-list for method output_type + 19, // [19:19] is the sub-list for method input_type + 19, // [19:19] is the sub-list for extension type_name + 19, // [19:19] is the sub-list for extension extendee + 0, // [0:19] is the sub-list for field type_name } func init() { file_io_prometheus_client_metrics_proto_init() } diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index 0ce7ea461..062a28185 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -61,7 +61,7 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_ SKIP_GOLANGCI_LINT := GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.53.3 +GOLANGCI_LINT_VERSION ?= v1.54.2 # golangci-lint only supports linux, 
darwin and windows platforms on i386/amd64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_notype.go b/vendor/github.com/prometheus/procfs/fs_statfs_notype.go index 13d74e395..134767d69 100644 --- a/vendor/github.com/prometheus/procfs/fs_statfs_notype.go +++ b/vendor/github.com/prometheus/procfs/fs_statfs_notype.go @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -//go:build netbsd || openbsd || solaris || windows || nostatfs -// +build netbsd openbsd solaris windows nostatfs +//go:build !freebsd && !linux +// +build !freebsd,!linux package procfs diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_type.go b/vendor/github.com/prometheus/procfs/fs_statfs_type.go index bee151445..80df79c31 100644 --- a/vendor/github.com/prometheus/procfs/fs_statfs_type.go +++ b/vendor/github.com/prometheus/procfs/fs_statfs_type.go @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -//go:build !netbsd && !openbsd && !solaris && !windows && !nostatfs -// +build !netbsd,!openbsd,!solaris,!windows,!nostatfs +//go:build freebsd || linux +// +build freebsd linux package procfs diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go index 852c8c4a0..9d8af6db7 100644 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ b/vendor/github.com/prometheus/procfs/mountstats.go @@ -44,6 +44,14 @@ const ( fieldTransport11TCPLen = 13 fieldTransport11UDPLen = 10 + + // kernel version >= 4.14 MaxLen + // See: https://elixir.bootlin.com/linux/v6.4.8/source/net/sunrpc/xprtrdma/xprt_rdma.h#L393 + fieldTransport11RDMAMaxLen = 28 + + // kernel version <= 4.2 MinLen + // See: https://elixir.bootlin.com/linux/v4.2.8/source/net/sunrpc/xprtrdma/xprt_rdma.h#L331 + fieldTransport11RDMAMinLen = 20 ) // A Mount is a device mount parsed from /proc/[pid]/mountstats. @@ -233,6 +241,33 @@ type NFSTransportStats struct { // A running counter, incremented on each request as the current size of the // pending queue. CumulativePendingQueue uint64 + + // Stats below only available with stat version 1.1. 
+ // Transport over RDMA + + // accessed when sending a call + ReadChunkCount uint64 + WriteChunkCount uint64 + ReplyChunkCount uint64 + TotalRdmaRequest uint64 + + // rarely accessed error counters + PullupCopyCount uint64 + HardwayRegisterCount uint64 + FailedMarshalCount uint64 + BadReplyCount uint64 + MrsRecovered uint64 + MrsOrphaned uint64 + MrsAllocated uint64 + EmptySendctxQ uint64 + + // accessed when receiving a reply + TotalRdmaReply uint64 + FixupCopyCount uint64 + ReplyWaitsForSend uint64 + LocalInvNeeded uint64 + NomsgCallCount uint64 + BcallCount uint64 } // parseMountStats parses a /proc/[pid]/mountstats file and returns a slice @@ -587,14 +622,17 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats expectedLength = fieldTransport11TCPLen } else if protocol == "udp" { expectedLength = fieldTransport11UDPLen + } else if protocol == "rdma" { + expectedLength = fieldTransport11RDMAMinLen } else { return nil, fmt.Errorf("%w: invalid NFS protocol \"%s\" in stats 1.1 statement: %v", ErrFileParse, protocol, ss) } - if len(ss) != expectedLength { - return nil, fmt.Errorf("%w: invalid NFS transport stats 1.1 statement: %v", ErrFileParse, ss) + if (len(ss) != expectedLength && (protocol == "tcp" || protocol == "udp")) || + (protocol == "rdma" && len(ss) < expectedLength) { + return nil, fmt.Errorf("%w: invalid NFS transport stats 1.1 statement: %v, protocol: %v", ErrFileParse, ss, protocol) } default: - return nil, fmt.Errorf("%s: Unrecognized NFS transport stats version: %q", ErrFileParse, statVersion) + return nil, fmt.Errorf("%s: Unrecognized NFS transport stats version: %q, protocol: %v", ErrFileParse, statVersion, protocol) } // Allocate enough for v1.1 stats since zero value for v1.1 stats will be okay @@ -604,7 +642,9 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats // Note: slice length must be set to length of v1.1 stats to avoid a panic when // only v1.0 stats are present. // See: https://github.com/prometheus/node_exporter/issues/571. - ns := make([]uint64, fieldTransport11TCPLen) + // + // Note: NFS Over RDMA slice length is fieldTransport11RDMAMaxLen + ns := make([]uint64, fieldTransport11RDMAMaxLen+3) for i, s := range ss { n, err := strconv.ParseUint(s, 10, 64) if err != nil { @@ -622,9 +662,14 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats // we set them to 0 here. if protocol == "udp" { ns = append(ns[:2], append(make([]uint64, 3), ns[2:]...)...) + } else if protocol == "tcp" { + ns = append(ns[:fieldTransport11TCPLen], make([]uint64, fieldTransport11RDMAMaxLen-fieldTransport11TCPLen+3)...) + } else if protocol == "rdma" { + ns = append(ns[:fieldTransport10TCPLen], append(make([]uint64, 3), ns[fieldTransport10TCPLen:]...)...) 
} return &NFSTransportStats{ + // NFS xprt over tcp or udp Protocol: protocol, Port: ns[0], Bind: ns[1], @@ -636,8 +681,32 @@ func parseNFSTransportStats(ss []string, statVersion string) (*NFSTransportStats BadTransactionIDs: ns[7], CumulativeActiveRequests: ns[8], CumulativeBacklog: ns[9], - MaximumRPCSlotsUsed: ns[10], - CumulativeSendingQueue: ns[11], - CumulativePendingQueue: ns[12], + + // NFS xprt over tcp or udp + // And statVersion 1.1 + MaximumRPCSlotsUsed: ns[10], + CumulativeSendingQueue: ns[11], + CumulativePendingQueue: ns[12], + + // NFS xprt over rdma + // And stat Version 1.1 + ReadChunkCount: ns[13], + WriteChunkCount: ns[14], + ReplyChunkCount: ns[15], + TotalRdmaRequest: ns[16], + PullupCopyCount: ns[17], + HardwayRegisterCount: ns[18], + FailedMarshalCount: ns[19], + BadReplyCount: ns[20], + MrsRecovered: ns[21], + MrsOrphaned: ns[22], + MrsAllocated: ns[23], + EmptySendctxQ: ns[24], + TotalRdmaReply: ns[25], + FixupCopyCount: ns[26], + ReplyWaitsForSend: ns[27], + LocalInvNeeded: ns[28], + NomsgCallCount: ns[29], + BcallCount: ns[30], }, nil } diff --git a/vendor/github.com/prometheus/procfs/proc_fdinfo.go b/vendor/github.com/prometheus/procfs/proc_fdinfo.go index 4b7933e4f..fa761b352 100644 --- a/vendor/github.com/prometheus/procfs/proc_fdinfo.go +++ b/vendor/github.com/prometheus/procfs/proc_fdinfo.go @@ -26,6 +26,7 @@ var ( rPos = regexp.MustCompile(`^pos:\s+(\d+)$`) rFlags = regexp.MustCompile(`^flags:\s+(\d+)$`) rMntID = regexp.MustCompile(`^mnt_id:\s+(\d+)$`) + rIno = regexp.MustCompile(`^ino:\s+(\d+)$`) rInotify = regexp.MustCompile(`^inotify`) rInotifyParts = regexp.MustCompile(`^inotify\s+wd:([0-9a-f]+)\s+ino:([0-9a-f]+)\s+sdev:([0-9a-f]+)(?:\s+mask:([0-9a-f]+))?`) ) @@ -40,6 +41,8 @@ type ProcFDInfo struct { Flags string // Mount point ID MntID string + // Inode number + Ino string // List of inotify lines (structured) in the fdinfo file (kernel 3.8+ only) InotifyInfos []InotifyInfo } @@ -51,7 +54,7 @@ func (p Proc) FDInfo(fd string) (*ProcFDInfo, error) { return nil, err } - var text, pos, flags, mntid string + var text, pos, flags, mntid, ino string var inotify []InotifyInfo scanner := bufio.NewScanner(bytes.NewReader(data)) @@ -63,6 +66,8 @@ func (p Proc) FDInfo(fd string) (*ProcFDInfo, error) { flags = rFlags.FindStringSubmatch(text)[1] } else if rMntID.MatchString(text) { mntid = rMntID.FindStringSubmatch(text)[1] + } else if rIno.MatchString(text) { + ino = rIno.FindStringSubmatch(text)[1] } else if rInotify.MatchString(text) { newInotify, err := parseInotifyInfo(text) if err != nil { @@ -77,6 +82,7 @@ func (p Proc) FDInfo(fd string) (*ProcFDInfo, error) { Pos: pos, Flags: flags, MntID: mntid, + Ino: ino, InotifyInfos: inotify, } diff --git a/vendor/github.com/prometheus/procfs/proc_maps.go b/vendor/github.com/prometheus/procfs/proc_maps.go index 727549a13..7e75c286b 100644 --- a/vendor/github.com/prometheus/procfs/proc_maps.go +++ b/vendor/github.com/prometheus/procfs/proc_maps.go @@ -63,17 +63,17 @@ type ProcMap struct { // parseDevice parses the device token of a line and converts it to a dev_t // (mkdev) like structure. 
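The mountstats.go hunk above teaches parseNFSTransportStats about the rdma protocol and extends NFSTransportStats with the RDMA counters. A rough sketch of surfacing a few of those counters through the exported procfs API; the use of procfs.Self and the choice of fields to print are assumptions for illustration:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	p, err := procfs.Self()
	if err != nil {
		log.Fatal(err)
	}
	mounts, err := p.MountStats()
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range mounts {
		// Only NFS mounts expose the extended transport statistics.
		nfs, ok := m.Stats.(*procfs.MountStatsNFS)
		if !ok {
			continue
		}
		t := nfs.Transport
		fmt.Printf("%s (%s): rdma requests=%d replies=%d read chunks=%d\n",
			m.Mount, t.Protocol, t.TotalRdmaRequest, t.TotalRdmaReply, t.ReadChunkCount)
	}
}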
func parseDevice(s string) (uint64, error) { - toks := strings.Split(s, ":") - if len(toks) < 2 { - return 0, fmt.Errorf("%w: unexpected number of fields, expected: 2, got: %q", ErrFileParse, len(toks)) + i := strings.Index(s, ":") + if i == -1 { + return 0, fmt.Errorf("%w: expected separator `:` in %s", ErrFileParse, s) } - major, err := strconv.ParseUint(toks[0], 16, 0) + major, err := strconv.ParseUint(s[0:i], 16, 0) if err != nil { return 0, err } - minor, err := strconv.ParseUint(toks[1], 16, 0) + minor, err := strconv.ParseUint(s[i+1:], 16, 0) if err != nil { return 0, err } @@ -93,17 +93,17 @@ func parseAddress(s string) (uintptr, error) { // parseAddresses parses the start-end address. func parseAddresses(s string) (uintptr, uintptr, error) { - toks := strings.Split(s, "-") - if len(toks) < 2 { - return 0, 0, fmt.Errorf("%w: invalid address", ErrFileParse) + idx := strings.Index(s, "-") + if idx == -1 { + return 0, 0, fmt.Errorf("%w: expected separator `-` in %s", ErrFileParse, s) } - saddr, err := parseAddress(toks[0]) + saddr, err := parseAddress(s[0:idx]) if err != nil { return 0, 0, err } - eaddr, err := parseAddress(toks[1]) + eaddr, err := parseAddress(s[idx+1:]) if err != nil { return 0, 0, err } diff --git a/vendor/github.com/prometheus/procfs/proc_status.go b/vendor/github.com/prometheus/procfs/proc_status.go index c055d075d..46307f572 100644 --- a/vendor/github.com/prometheus/procfs/proc_status.go +++ b/vendor/github.com/prometheus/procfs/proc_status.go @@ -23,7 +23,7 @@ import ( ) // ProcStatus provides status information about the process, -// read from /proc/[pid]/stat. +// read from /proc/[pid]/status. type ProcStatus struct { // The process ID. PID int @@ -32,6 +32,8 @@ type ProcStatus struct { // Thread group ID. TGID int + // List of Pid namespace. + NSpids []uint64 // Peak virtual memory size. VmPeak uint64 // nolint:revive @@ -127,6 +129,8 @@ func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintByt copy(s.UIDs[:], strings.Split(vString, "\t")) case "Gid": copy(s.GIDs[:], strings.Split(vString, "\t")) + case "NSpid": + s.NSpids = calcNSPidsList(vString) case "VmPeak": s.VmPeak = vUintBytes case "VmSize": @@ -200,3 +204,18 @@ func calcCpusAllowedList(cpuString string) []uint64 { sort.Slice(g, func(i, j int) bool { return g[i] < g[j] }) return g } + +func calcNSPidsList(nspidsString string) []uint64 { + s := strings.Split(nspidsString, " ") + var nspids []uint64 + + for _, nspid := range s { + nspid, _ := strconv.ParseUint(nspid, 10, 64) + if nspid == 0 { + continue + } + nspids = append(nspids, nspid) + } + + return nspids +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/feature_flags.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/feature_flags.go index 8e0054daf..e3f43cc2d 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/feature_flags.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/feature_flags.go @@ -73,7 +73,7 @@ const ( // EnforceNonfalsifiabilityWithSpire is the value used for "enable-nonfalsifiability" when SPIRE is used to enable non-falsifiability. EnforceNonfalsifiabilityWithSpire = "spire" // EnforceNonfalsifiabilityNone is the value used for "enable-nonfalsifiability" when non-falsifiability is not enabled. - EnforceNonfalsifiabilityNone = "" + EnforceNonfalsifiabilityNone = "none" // DefaultEnforceNonfalsifiability is the default value for "enforce-nonfalsifiability". 
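The proc_status.go hunk above adds the NSpid list to ProcStatus via the new calcNSPidsList helper. A small sketch of reading it for the current process; calling procfs.Self and NewStatus is the usual pattern, assumed here rather than taken from this diff:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	p, err := procfs.Self()
	if err != nil {
		log.Fatal(err)
	}
	status, err := p.NewStatus()
	if err != nil {
		log.Fatal(err)
	}
	// NSpids holds the process ID in each nested PID namespace, outermost
	// first; it stays empty on kernels that do not report NSpid.
	fmt.Println("PID namespaces:", status.NSpids)
}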
DefaultEnforceNonfalsifiability = EnforceNonfalsifiabilityNone // DefaultNoMatchPolicyConfig is the default value for "trusted-resources-verification-no-match-policy". @@ -195,7 +195,7 @@ func NewFeatureFlagsFromMap(cfgMap map[string]string) (*FeatureFlags, error) { if err := setMaxResultSize(cfgMap, DefaultMaxResultSize, &tc.MaxResultSize); err != nil { return nil, err } - if err := setEnforceNonFalsifiability(cfgMap, tc.EnableAPIFields, &tc.EnforceNonfalsifiability); err != nil { + if err := setEnforceNonFalsifiability(cfgMap, &tc.EnforceNonfalsifiability); err != nil { return nil, err } if err := setFeature(setSecurityContextKey, DefaultSetSecurityContext, &tc.SetSecurityContext); err != nil { @@ -262,7 +262,7 @@ func setCoschedule(cfgMap map[string]string, defaultValue string, disabledAffini // setEnforceNonFalsifiability sets the "enforce-nonfalsifiability" flag based on the content of a given map. // If the feature gate is invalid, then an error is returned. -func setEnforceNonFalsifiability(cfgMap map[string]string, enableAPIFields string, feature *string) error { +func setEnforceNonFalsifiability(cfgMap map[string]string, feature *string) error { var value = DefaultEnforceNonfalsifiability if cfg, ok := cfgMap[enforceNonfalsifiability]; ok { value = strings.ToLower(cfg) diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/store.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/store.go index 763071969..a46de4114 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/store.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/store.go @@ -33,6 +33,7 @@ type Config struct { Metrics *Metrics SpireConfig *sc.SpireConfig Events *Events + Tracing *Tracing } // FromContext extracts a Config from the provided context. @@ -57,6 +58,7 @@ func FromContextOrDefaults(ctx context.Context) *Config { Metrics: DefaultMetrics.DeepCopy(), SpireConfig: DefaultSpire.DeepCopy(), Events: DefaultEvents.DeepCopy(), + Tracing: DefaultTracing.DeepCopy(), } } @@ -84,6 +86,7 @@ func NewStore(logger configmap.Logger, onAfterStore ...func(name string, value i GetMetricsConfigName(): NewMetricsFromConfigMap, GetSpireConfigName(): NewSpireConfigFromConfigMap, GetEventsConfigName(): NewEventsFromConfigMap, + GetTracingConfigName(): NewTracingFromConfigMap, }, onAfterStore..., ), @@ -111,6 +114,11 @@ func (s *Store) Load() *Config { if metrics == nil { metrics = DefaultMetrics.DeepCopy() } + tracing := s.UntypedLoad(GetTracingConfigName()) + if tracing == nil { + tracing = DefaultTracing.DeepCopy() + } + spireconfig := s.UntypedLoad(GetSpireConfigName()) if spireconfig == nil { spireconfig = DefaultSpire.DeepCopy() @@ -124,6 +132,7 @@ func (s *Store) Load() *Config { Defaults: defaults.(*Defaults).DeepCopy(), FeatureFlags: featureFlags.(*FeatureFlags).DeepCopy(), Metrics: metrics.(*Metrics).DeepCopy(), + Tracing: tracing.(*Tracing).DeepCopy(), SpireConfig: spireconfig.(*sc.SpireConfig).DeepCopy(), Events: events.(*Events).DeepCopy(), } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/tracing.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/tracing.go new file mode 100644 index 000000000..d8685b758 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/tracing.go @@ -0,0 +1,94 @@ +/* +Copyright 2023 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package config + +import ( + "fmt" + "os" + "strconv" + + corev1 "k8s.io/api/core/v1" +) + +const ( + // tracingEnabledKey is the configmap key which determines if tracing is enabled + tracingEnabledKey = "enabled" + // tracingEndpintKey is the configmap key for tracing api endpoint + tracingEndpointKey = "endpoint" + + // DefaultEndpoint is the default destination for sending traces + DefaultEndpoint = "http://jaeger-collector.jaeger.svc.cluster.local:14268/api/traces" +) + +// DefaultTracing holds all the default configurations for tracing +var DefaultTracing, _ = newTracingFromMap(map[string]string{}) + +// Tracing holds the configurations for tracing +// +k8s:deepcopy-gen=true +type Tracing struct { + Enabled bool + Endpoint string +} + +// Equals returns true if two Configs are identical +func (cfg *Tracing) Equals(other *Tracing) bool { + if cfg == nil && other == nil { + return true + } + + if cfg == nil || other == nil { + return false + } + + return other.Enabled == cfg.Enabled && + other.Endpoint == cfg.Endpoint +} + +// GetTracingConfigName returns the name of the configmap containing all +// customizations for tracing +func GetTracingConfigName() string { + if e := os.Getenv("CONFIG_TRACING_NAME"); e != "" { + return e + } + return "config-tracing" +} + +// newTracingFromMap returns a Config given a map from ConfigMap +func newTracingFromMap(config map[string]string) (*Tracing, error) { + t := Tracing{ + Enabled: false, + Endpoint: DefaultEndpoint, + } + + if endpoint, ok := config[tracingEndpointKey]; ok { + t.Endpoint = endpoint + } + + if enabled, ok := config[tracingEnabledKey]; ok { + e, err := strconv.ParseBool(enabled) + if err != nil { + return nil, fmt.Errorf("failed parsing tracing config %q: %w", enabled, err) + } + t.Enabled = e + } + return &t, nil +} + +// NewTracingFromConfigMap returns a Config given a ConfigMap +func NewTracingFromConfigMap(config *corev1.ConfigMap) (*Tracing, error) { + return newTracingFromMap(config.Data) +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/zz_generated.deepcopy.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/zz_generated.deepcopy.go index 0d0bff08f..c688b9ee9 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/config/zz_generated.deepcopy.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/config/zz_generated.deepcopy.go @@ -110,3 +110,19 @@ func (in *Metrics) DeepCopy() *Metrics { in.DeepCopyInto(out) return out } + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Tracing) DeepCopyInto(out *Tracing) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Tracing. 
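The new config/tracing.go above wires a config-tracing ConfigMap into Tekton's config store, with NewTracingFromConfigMap as the entry point. A brief sketch of parsing such a ConfigMap; the Data values are made-up examples, and the endpoint simply repeats the package default:

package main

import (
	"fmt"
	"log"

	"github.com/tektoncd/pipeline/pkg/apis/config"
	corev1 "k8s.io/api/core/v1"
)

func main() {
	cm := &corev1.ConfigMap{
		Data: map[string]string{
			"enabled":  "true",
			"endpoint": "http://jaeger-collector.jaeger.svc.cluster.local:14268/api/traces",
		},
	}
	tracing, err := config.NewTracingFromConfigMap(cm)
	if err != nil {
		log.Fatal(err) // returned when "enabled" is not a parseable bool
	}
	fmt.Printf("tracing enabled=%v endpoint=%s\n", tracing.Enabled, tracing.Endpoint)
}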
+func (in *Tracing) DeepCopy() *Tracing { + if in == nil { + return nil + } + out := new(Tracing) + in.DeepCopyInto(out) + return out +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/constant.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/constant.go index 5fceccbf5..19604205b 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/constant.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/constant.go @@ -18,5 +18,5 @@ package pipeline const ( // TektonReservedAnnotationExpr is the expression we use to filter out reserved key in annotation - TektonReservedAnnotationExpr = "(results.tekton.dev|chains.tekton.dev)/.*" + TektonReservedAnnotationExpr = "(chains.tekton.dev)/.*" ) diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum/checksum.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum/checksum.go new file mode 100644 index 000000000..29cca04f7 --- /dev/null +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum/checksum.go @@ -0,0 +1,57 @@ +package checksum + +import ( + "crypto/sha256" + "encoding/json" + "fmt" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +const ( + // SignatureAnnotation is the key of signature in annotation map + SignatureAnnotation = "tekton.dev/signature" +) + +// PrepareObjectMeta will remove annotations not configured from user side -- "kubectl-client-side-apply" and "kubectl.kubernetes.io/last-applied-configuration" +// (added when an object is created with `kubectl apply`) to avoid verification failure and extract the signature. +// Returns a copy of the input object metadata with the annotations removed and the object's signature, +// if it is present in the metadata. +func PrepareObjectMeta(in metav1.Object) metav1.ObjectMeta { + outMeta := metav1.ObjectMeta{} + + // exclude the fields populated by system. + outMeta.Name = in.GetName() + outMeta.GenerateName = in.GetGenerateName() + outMeta.Namespace = in.GetNamespace() + + if in.GetLabels() != nil { + outMeta.Labels = make(map[string]string) + for k, v := range in.GetLabels() { + outMeta.Labels[k] = v + } + } + + outMeta.Annotations = make(map[string]string) + for k, v := range in.GetAnnotations() { + outMeta.Annotations[k] = v + } + + // exclude the annotations added by other components + delete(outMeta.Annotations, "kubectl-client-side-apply") + delete(outMeta.Annotations, "kubectl.kubernetes.io/last-applied-configuration") + delete(outMeta.Annotations, SignatureAnnotation) + + return outMeta +} + +// ComputeSha256Checksum computes the sha256 checksum of the tekton object. 
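PrepareObjectMeta above keeps only the user-controlled metadata (name, namespace, generateName, labels, and annotations minus the kubectl bookkeeping keys and the signature) before hashing. The hash itself, shown in ComputeSha256Checksum just below, is plain SHA-256 over the JSON encoding; a standalone sketch of that step, using a stand-in map instead of a real Tekton object:

package main

import (
	"crypto/sha256"
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Stand-in for a preprocessed Pipeline or Task object.
	obj := map[string]string{"name": "example", "namespace": "default"}

	raw, err := json.Marshal(obj)
	if err != nil {
		log.Fatal(err)
	}
	sum := sha256.Sum256(raw)
	fmt.Printf("sha256: %x\n", sum)
}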
+func ComputeSha256Checksum(obj interface{}) ([]byte, error) { + ts, err := json.Marshal(obj) + if err != nil { + return nil, fmt.Errorf("failed to marshal the object: %w", err) + } + h := sha256.New() + h.Write(ts) + return h.Sum(nil), nil +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/openapi_generated.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/openapi_generated.go index abb49a962..c3011075a 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/openapi_generated.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/openapi_generated.go @@ -1879,11 +1879,23 @@ func schema_pkg_apis_pipeline_v1_PipelineTask(ref common.ReferenceCallback) comm Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.Duration"), }, }, + "pipelineRef": { + SchemaProps: spec.SchemaProps{ + Description: "PipelineRef is a reference to a pipeline definition Note: PipelineRef is in preview mode and not yet supported", + Ref: ref("github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.PipelineRef"), + }, + }, + "pipelineSpec": { + SchemaProps: spec.SchemaProps{ + Description: "PipelineSpec is a specification of a pipeline Note: PipelineSpec is in preview mode and not yet supported", + Ref: ref("github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.PipelineSpec"), + }, + }, }, }, }, Dependencies: []string{ - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.EmbeddedTask", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.Matrix", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.Param", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.TaskRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.WhenExpression", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.WorkspacePipelineTaskBinding", "k8s.io/apimachinery/pkg/apis/meta/v1.Duration"}, + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.EmbeddedTask", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.Matrix", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.Param", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.PipelineRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.PipelineSpec", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.TaskRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.WhenExpression", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1.WorkspacePipelineTaskBinding", "k8s.io/apimachinery/pkg/apis/meta/v1.Duration"}, } } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/param_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/param_types.go index b47e45f72..86fe6ce3e 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/param_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/param_types.go @@ -154,6 +154,29 @@ type Param struct { Value ParamValue `json:"value"` } +// GetVarSubstitutionExpressions extracts all the value between "$(" and ")"" for a Parameter +func (p Param) GetVarSubstitutionExpressions() ([]string, bool) { + var allExpressions []string + switch p.Value.Type { + case ParamTypeArray: + // array type + for _, value := range p.Value.ArrayVal { + allExpressions = append(allExpressions, validateString(value)...) + } + case ParamTypeString: + // string type + allExpressions = append(allExpressions, validateString(p.Value.StringVal)...) + case ParamTypeObject: + // object type + for _, value := range p.Value.ObjectVal { + allExpressions = append(allExpressions, validateString(value)...) 
+ } + default: + return nil, false + } + return allExpressions, len(allExpressions) != 0 +} + // ExtractNames returns a set of unique names func (ps Params) ExtractNames() sets.String { names := sets.String{} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_types.go index e0ce5ec67..25613f1a4 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_types.go @@ -18,6 +18,7 @@ package v1 import ( "github.com/tektoncd/pipeline/pkg/apis/pipeline" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum" "github.com/tektoncd/pipeline/pkg/reconciler/pipeline/dag" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" @@ -70,6 +71,27 @@ func (*Pipeline) GetGroupVersionKind() schema.GroupVersionKind { return SchemeGroupVersion.WithKind(pipeline.PipelineControllerName) } +// Checksum computes the sha256 checksum of the pipeline object. +// Prior to computing the checksum, it performs some preprocessing on the +// metadata of the object where it removes system provided annotations. +// Only the name, namespace, generateName, user-provided labels and annotations +// and the pipelineSpec are included for the checksum computation. +func (p *Pipeline) Checksum() ([]byte, error) { + objectMeta := checksum.PrepareObjectMeta(p) + preprocessedPipeline := Pipeline{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "tekton.dev/v1", + Kind: "Pipeline"}, + ObjectMeta: objectMeta, + Spec: p.Spec, + } + sha256Checksum, err := checksum.ComputeSha256Checksum(preprocessedPipeline) + if err != nil { + return nil, err + } + return sha256Checksum, nil +} + // PipelineSpec defines the desired state of Pipeline. type PipelineSpec struct { // DisplayName is a user-facing name of the pipeline that may be @@ -206,6 +228,16 @@ type PipelineTask struct { // Refer Go's ParseDuration documentation for expected format: https://golang.org/pkg/time/#ParseDuration // +optional Timeout *metav1.Duration `json:"timeout,omitempty"` + + // PipelineRef is a reference to a pipeline definition + // Note: PipelineRef is in preview mode and not yet supported + // +optional + PipelineRef *PipelineRef `json:"pipelineRef,omitempty"` + + // PipelineSpec is a specification of a pipeline + // Note: PipelineSpec is in preview mode and not yet supported + // +optional + PipelineSpec *PipelineSpec `json:"pipelineSpec,omitempty"` } // IsCustomTask checks whether an embedded TaskSpec is a Custom Task @@ -298,3 +330,15 @@ type PipelineList struct { metav1.ListMeta `json:"metadata,omitempty"` Items []Pipeline `json:"items"` } + +// GetVarSubstitutionExpressions extracts all the value between "$(" and ")"" for a PipelineResult +func (result PipelineResult) GetVarSubstitutionExpressions() ([]string, bool) { + allExpressions := validateString(result.Value.StringVal) + for _, v := range result.Value.ArrayVal { + allExpressions = append(allExpressions, validateString(v)...) + } + for _, v := range result.Value.ObjectVal { + allExpressions = append(allExpressions, validateString(v)...) 
+ } + return allExpressions, len(allExpressions) != 0 +} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_validation.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_validation.go index e1a7464c3..71b002d9b 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_validation.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipeline_validation.go @@ -37,6 +37,13 @@ import ( var _ apis.Validatable = (*Pipeline)(nil) var _ resourcesemantics.VerbLimited = (*Pipeline)(nil) +const ( + taskRef = "taskRef" + taskSpec = "taskSpec" + pipelineRef = "pipelineRef" + pipelineSpec = "pipelineSpec" +) + // SupportedVerbs returns the operations that validation should be called for func (p *Pipeline) SupportedVerbs() []admissionregistrationv1.OperationType { return []admissionregistrationv1.OperationType{admissionregistrationv1.Create, admissionregistrationv1.Update} @@ -195,7 +202,7 @@ func (pt PipelineTask) ValidateName() *apis.FieldError { // Validate classifies whether a task is a custom task or a regular task(dag/final) // calls the validation routine based on the type of the task func (pt PipelineTask) Validate(ctx context.Context) (errs *apis.FieldError) { - errs = errs.Also(pt.validateRefOrSpec()) + errs = errs.Also(pt.validateRefOrSpec(ctx)) errs = errs.Also(pt.validateEmbeddedOrType()) // taskKinds contains the kinds when the apiVersion is not set, they are not custom tasks, @@ -289,15 +296,39 @@ func (pt *PipelineTask) validateWorkspaces(workspaceNames sets.String) (errs *ap return errs } -// validateRefOrSpec validates at least one of taskRef or taskSpec is specified -func (pt PipelineTask) validateRefOrSpec() (errs *apis.FieldError) { - // can't have both taskRef and taskSpec at the same time - if pt.TaskRef != nil && pt.TaskSpec != nil { - errs = errs.Also(apis.ErrMultipleOneOf("taskRef", "taskSpec")) +// validateRefOrSpec validates at least one of taskRef or taskSpec or pipelineRef or pipelineSpec is specified +func (pt PipelineTask) validateRefOrSpec(ctx context.Context) (errs *apis.FieldError) { + // collect all the specified specifications + nonNilFields := []string{} + if pt.TaskRef != nil { + nonNilFields = append(nonNilFields, taskRef) + } + if pt.TaskSpec != nil { + nonNilFields = append(nonNilFields, taskSpec) + } + if pt.PipelineRef != nil { + errs = errs.Also(version.ValidateEnabledAPIFields(ctx, pipelineRef, config.AlphaAPIFields)) + nonNilFields = append(nonNilFields, pipelineRef) } - // Check that one of TaskRef and TaskSpec is present - if pt.TaskRef == nil && pt.TaskSpec == nil { - errs = errs.Also(apis.ErrMissingOneOf("taskRef", "taskSpec")) + if pt.PipelineSpec != nil { + errs = errs.Also(version.ValidateEnabledAPIFields(ctx, pipelineSpec, config.AlphaAPIFields)) + nonNilFields = append(nonNilFields, pipelineSpec) + } + + // check the length of nonNilFields + // if one of taskRef or taskSpec or pipelineRef or pipelineSpec is specified, + // the length of nonNilFields should exactly be 1 + if len(nonNilFields) > 1 { + errs = errs.Also(apis.ErrGeneric("expected exactly one, got multiple", nonNilFields...)) + } else if len(nonNilFields) == 0 { + cfg := config.FromContextOrDefaults(ctx) + // check for TaskRef or TaskSpec or PipelineRef or PipelineSpec with alpha feature flag + if cfg.FeatureFlags.EnableAPIFields == config.AlphaAPIFields { + errs = errs.Also(apis.ErrMissingOneOf(taskRef, taskSpec, pipelineRef, pipelineSpec)) + } else { + // check for taskRef and taskSpec with beta/stable 
feature flag + errs = errs.Also(apis.ErrMissingOneOf(taskRef, taskSpec)) + } } return errs } @@ -323,10 +354,16 @@ func (pt PipelineTask) validateCustomTask() (errs *apis.FieldError) { func (pt PipelineTask) validateTask(ctx context.Context) (errs *apis.FieldError) { // Validate TaskSpec if it's present if pt.TaskSpec != nil { - errs = errs.Also(pt.TaskSpec.Validate(ctx).ViaField("taskSpec")) + errs = errs.Also(pt.TaskSpec.Validate(ctx).ViaField(taskSpec)) } if pt.TaskRef != nil { - errs = errs.Also(pt.TaskRef.Validate(ctx).ViaField("taskRef")) + errs = errs.Also(pt.TaskRef.Validate(ctx).ViaField(taskRef)) + } + if pt.PipelineRef != nil { + errs = errs.Also(pt.PipelineRef.Validate(ctx).ViaField(pipelineRef)) + } + if pt.PipelineSpec != nil { + errs = errs.Also(pt.PipelineSpec.Validate(ctx).ViaField(pipelineSpec)) } return errs } @@ -493,7 +530,7 @@ func validateExecutionStatusVariablesInFinally(tasksNames sets.String, finally [ func (pt *PipelineTask) validateExecutionStatusVariablesDisallowed() (errs *apis.FieldError) { for _, param := range pt.Params { - if expressions, ok := GetVarSubstitutionExpressionsForParam(param); ok { + if expressions, ok := param.GetVarSubstitutionExpressions(); ok { errs = errs.Also(validateContainsExecutionStatusVariablesDisallowed(expressions, "value"). ViaFieldKey("params", param.Name)) } @@ -509,7 +546,7 @@ func (pt *PipelineTask) validateExecutionStatusVariablesDisallowed() (errs *apis func (pt *PipelineTask) validateExecutionStatusVariablesAllowed(ptNames sets.String) (errs *apis.FieldError) { for _, param := range pt.Params { - if expressions, ok := GetVarSubstitutionExpressionsForParam(param); ok { + if expressions, ok := param.GetVarSubstitutionExpressions(); ok { errs = errs.Also(validateExecutionStatusVariablesExpressions(expressions, ptNames, "value"). 
ViaFieldKey("params", param.Name)) } @@ -588,7 +625,7 @@ func validatePipelineResults(results []PipelineResult, tasks []PipelineTask, fin pipelineTaskNames := getPipelineTasksNames(tasks) pipelineFinallyTaskNames := getPipelineTasksNames(finally) for idx, result := range results { - expressions, ok := GetVarSubstitutionExpressionsForPipelineResult(result) + expressions, ok := result.GetVarSubstitutionExpressions() if !ok { errs = errs.Also(apis.ErrInvalidValue("expected pipeline results to be task result expressions but no expressions were found", "value").ViaFieldIndex("results", idx)) @@ -676,7 +713,7 @@ func validateFinalTasks(tasks []PipelineTask, finalTasks []PipelineTask) (errs * func validateTaskResultReferenceInFinallyTasks(finalTasks []PipelineTask, ts sets.String, fts sets.String) (errs *apis.FieldError) { for idx, t := range finalTasks { for _, p := range t.Params { - if expressions, ok := GetVarSubstitutionExpressionsForParam(p); ok { + if expressions, ok := p.GetVarSubstitutionExpressions(); ok { errs = errs.Also(validateResultsVariablesExpressionsInFinally(expressions, ts, fts, "value").ViaFieldKey( "params", p.Name).ViaFieldIndex("finally", idx)) } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_types.go index 88120472e..fda0168c0 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_types.go @@ -351,6 +351,62 @@ const ( // PipelineRunReasonStoppedRunningFinally indicates that pipeline has been gracefully stopped // and no new Tasks will be scheduled by the controller, but final tasks are now running PipelineRunReasonStoppedRunningFinally PipelineRunReason = "StoppedRunningFinally" + // ReasonCouldntGetPipeline indicates that the reason for the failure status is that the + // associated Pipeline couldn't be retrieved + PipelineRunReasonCouldntGetPipeline PipelineRunReason = "CouldntGetPipeline" + // ReasonInvalidBindings indicates that the reason for the failure status is that the + // PipelineResources bound in the PipelineRun didn't match those declared in the Pipeline + PipelineRunReasonInvalidBindings PipelineRunReason = "InvalidPipelineResourceBindings" + // ReasonInvalidWorkspaceBinding indicates that a Pipeline expects a workspace but a + // PipelineRun has provided an invalid binding. + PipelineRunReasonInvalidWorkspaceBinding PipelineRunReason = "InvalidWorkspaceBindings" + // ReasonInvalidTaskRunSpec indicates that PipelineRun.Spec.TaskRunSpecs[].PipelineTaskName is defined with + // a not exist taskName in pipelineSpec. + PipelineRunReasonInvalidTaskRunSpec PipelineRunReason = "InvalidTaskRunSpecs" + // ReasonParameterTypeMismatch indicates that the reason for the failure status is that + // parameter(s) declared in the PipelineRun do not have the some declared type as the + // parameters(s) declared in the Pipeline that they are supposed to override. + PipelineRunReasonParameterTypeMismatch PipelineRunReason = "ParameterTypeMismatch" + // ReasonObjectParameterMissKeys indicates that the object param value provided from PipelineRun spec + // misses some keys required for the object param declared in Pipeline spec. 
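The exported PipelineRunReason values above give downstream callers typed constants for classifying a finished PipelineRun instead of comparing raw strings. A minimal sketch of that kind of check, assuming the vendored tekton.dev/v1 and knative.dev/pkg APIs above; the helper name is illustrative:

package example

import (
	pipelinev1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1"
	"knative.dev/pkg/apis"
)

// failedBecausePipelineMissing reports whether a PipelineRun ended in failure
// because its referenced Pipeline could not be retrieved.
func failedBecausePipelineMissing(pr *pipelinev1.PipelineRun) bool {
	cond := pr.Status.GetCondition(apis.ConditionSucceeded)
	if cond == nil {
		return false
	}
	return cond.IsFalse() &&
		cond.Reason == pipelinev1.PipelineRunReasonCouldntGetPipeline.String()
}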
+ PipelineRunReasonObjectParameterMissKeys PipelineRunReason = "ObjectParameterMissKeys" + // ReasonParamArrayIndexingInvalid indicates that the use of param array indexing is not under correct api fields feature gate + // or the array is out of bound. + PipelineRunReasonParamArrayIndexingInvalid PipelineRunReason = "ParamArrayIndexingInvalid" + // ReasonCouldntGetTask indicates that the reason for the failure status is that the + // associated Pipeline's Tasks couldn't all be retrieved + PipelineRunReasonCouldntGetTask PipelineRunReason = "CouldntGetTask" + // ReasonParameterMissing indicates that the reason for the failure status is that the + // associated PipelineRun didn't provide all the required parameters + PipelineRunReasonParameterMissing PipelineRunReason = "ParameterMissing" + // ReasonFailedValidation indicates that the reason for failure status is + // that pipelinerun failed runtime validation + PipelineRunReasonFailedValidation PipelineRunReason = "PipelineValidationFailed" + // ReasonInvalidGraph indicates that the reason for the failure status is that the + // associated Pipeline is an invalid graph (a.k.a wrong order, cycle, …) + PipelineRunReasonInvalidGraph PipelineRunReason = "PipelineInvalidGraph" + // ReasonCouldntCancel indicates that a PipelineRun was cancelled but attempting to update + // all of the running TaskRuns as cancelled failed. + PipelineRunReasonCouldntCancel PipelineRunReason = "PipelineRunCouldntCancel" + // ReasonCouldntTimeOut indicates that a PipelineRun was timed out but attempting to update + // all of the running TaskRuns as timed out failed. + PipelineRunReasonCouldntTimeOut PipelineRunReason = "PipelineRunCouldntTimeOut" + // ReasonInvalidMatrixParameterTypes indicates a matrix contains invalid parameter types + PipelineRunReasonInvalidMatrixParameterTypes PipelineRunReason = "InvalidMatrixParameterTypes" + // ReasonInvalidTaskResultReference indicates a task result was declared + // but was not initialized by that task + PipelineRunReasonInvalidTaskResultReference PipelineRunReason = "InvalidTaskResultReference" + // ReasonRequiredWorkspaceMarkedOptional indicates an optional workspace + // has been passed to a Task that is expecting a non-optional workspace + PipelineRunReasonRequiredWorkspaceMarkedOptional PipelineRunReason = "RequiredWorkspaceMarkedOptional" + // ReasonResolvingPipelineRef indicates that the PipelineRun is waiting for + // its pipelineRef to be asynchronously resolved. 
+ PipelineRunReasonResolvingPipelineRef PipelineRunReason = "ResolvingPipelineRef" + // ReasonResourceVerificationFailed indicates that the pipeline fails the trusted resource verification, + // it could be the content has changed, signature is invalid or public key is invalid + PipelineRunReasonResourceVerificationFailed PipelineRunReason = "ResourceVerificationFailed" + // ReasonCreateRunFailed indicates that the pipeline fails to create the taskrun or other run resources + PipelineRunReasonCreateRunFailed PipelineRunReason = "CreateRunFailed" ) func (t PipelineRunReason) String() string { diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_validation.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_validation.go index e6c7299c8..dc95f47cc 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_validation.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/pipelinerun_validation.go @@ -67,6 +67,11 @@ func (ps *PipelineRunSpec) Validate(ctx context.Context) (errs *apis.FieldError) // Validate PipelineSpec if it's present if ps.PipelineSpec != nil { errs = errs.Also(ps.PipelineSpec.Validate(ctx).ViaField("pipelineSpec")) + // Validate beta fields separately for inline Pipeline definitions. + // This prevents validation from failing in the reconciler when a Pipeline is converted to a different API version. + // See https://github.com/tektoncd/pipeline/issues/6616 for more information. + // TODO(#6592): Decouple API versioning from feature versioning + errs = errs.Also(ps.PipelineSpec.ValidateBetaFields(ctx).ViaField("pipelineSpec")) } // Validate PipelineRun parameters @@ -109,6 +114,10 @@ func (ps *PipelineRunSpec) Validate(ctx context.Context) (errs *apis.FieldError) errs = errs.Also(validateTaskRunSpec(ctx, trs).ViaIndex(idx).ViaField("taskRunSpecs")) } + if ps.TaskRunTemplate.PodTemplate != nil { + errs = errs.Also(validatePodTemplateEnv(ctx, *ps.TaskRunTemplate.PodTemplate).ViaField("taskRunTemplate")) + } + return errs } @@ -122,7 +131,7 @@ func (ps *PipelineRunSpec) validatePipelineRunParameters(ctx context.Context) (e // Validate that task results aren't used in param values for _, param := range ps.Params { - expressions, ok := GetVarSubstitutionExpressionsForParam(param) + expressions, ok := param.GetVarSubstitutionExpressions() if ok { if LooksLikeContainsResultRefs(expressions) { expressions = filter(expressions, looksLikeResultRef) diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/resultref.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/resultref.go index 5a7610e03..14b0de17f 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/resultref.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/resultref.go @@ -105,41 +105,6 @@ func looksLikeResultRef(expression string) bool { return len(subExpressions) >= 4 && (subExpressions[0] == ResultTaskPart || subExpressions[0] == ResultFinallyPart) && subExpressions[2] == ResultResultPart } -// GetVarSubstitutionExpressionsForParam extracts all the value between "$(" and ")"" for a parameter -func GetVarSubstitutionExpressionsForParam(param Param) ([]string, bool) { - var allExpressions []string - switch param.Value.Type { - case ParamTypeArray: - // array type - for _, value := range param.Value.ArrayVal { - allExpressions = append(allExpressions, validateString(value)...) 
- } - case ParamTypeString: - // string type - allExpressions = append(allExpressions, validateString(param.Value.StringVal)...) - case ParamTypeObject: - // object type - for _, value := range param.Value.ObjectVal { - allExpressions = append(allExpressions, validateString(value)...) - } - default: - return nil, false - } - return allExpressions, len(allExpressions) != 0 -} - -// GetVarSubstitutionExpressionsForPipelineResult extracts all the value between "$(" and ")"" for a pipeline result -func GetVarSubstitutionExpressionsForPipelineResult(result PipelineResult) ([]string, bool) { - allExpressions := validateString(result.Value.StringVal) - for _, v := range result.Value.ArrayVal { - allExpressions = append(allExpressions, validateString(v)...) - } - for _, v := range result.Value.ObjectVal { - allExpressions = append(allExpressions, validateString(v)...) - } - return allExpressions, len(allExpressions) != 0 -} - func validateString(value string) []string { expressions := VariableSubstitutionRegex.FindAllString(value, -1) if expressions == nil { @@ -208,7 +173,7 @@ func ParseResultName(resultName string) (string, string) { func PipelineTaskResultRefs(pt *PipelineTask) []*ResultRef { refs := []*ResultRef{} for _, p := range pt.extractAllParams() { - expressions, _ := GetVarSubstitutionExpressionsForParam(p) + expressions, _ := p.GetVarSubstitutionExpressions() refs = append(refs, NewResultRefs(expressions)...) } for _, whenExpression := range pt.When { diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/swagger.json b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/swagger.json index 5a71f5f50..e2f82a647 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/swagger.json +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/swagger.json @@ -894,6 +894,14 @@ }, "x-kubernetes-list-type": "atomic" }, + "pipelineRef": { + "description": "PipelineRef is a reference to a pipeline definition Note: PipelineRef is in preview mode and not yet supported", + "$ref": "#/definitions/v1.PipelineRef" + }, + "pipelineSpec": { + "description": "PipelineSpec is a specification of a pipeline Note: PipelineSpec is in preview mode and not yet supported", + "$ref": "#/definitions/v1.PipelineSpec" + }, "retries": { "description": "Retries represents how many times this task should be retried in case of task failure: ConditionSucceeded set to False", "type": "integer", diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/task_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/task_types.go index 9a46de41b..894590508 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/task_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/task_types.go @@ -18,6 +18,7 @@ package v1 import ( "github.com/tektoncd/pipeline/pkg/apis/pipeline" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" @@ -52,6 +53,27 @@ func (*Task) GetGroupVersionKind() schema.GroupVersionKind { return SchemeGroupVersion.WithKind(pipeline.TaskControllerName) } +// Checksum computes the sha256 checksum of the task object. +// Prior to computing the checksum, it performs some preprocessing on the +// metadata of the object where it removes system provided annotations. 
+// Only the name, namespace, generateName, user-provided labels and annotations +// and the taskSpec are included for the checksum computation. +func (t *Task) Checksum() ([]byte, error) { + objectMeta := checksum.PrepareObjectMeta(t) + preprocessedTask := Task{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "tekton.dev/v1", + Kind: "Task"}, + ObjectMeta: objectMeta, + Spec: t.Spec, + } + sha256Checksum, err := checksum.ComputeSha256Checksum(preprocessedTask) + if err != nil { + return nil, err + } + return sha256Checksum, nil +} + // TaskSpec defines the desired state of Task. type TaskSpec struct { // Params is a list of input parameters required to run the task. Params diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/taskrun_validation.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/taskrun_validation.go index 7fb04691e..c818a5760 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/taskrun_validation.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/taskrun_validation.go @@ -63,6 +63,11 @@ func (ts *TaskRunSpec) Validate(ctx context.Context) (errs *apis.FieldError) { // Validate TaskSpec if it's present. if ts.TaskSpec != nil { errs = errs.Also(ts.TaskSpec.Validate(ctx).ViaField("taskSpec")) + // Validate beta fields separately for inline Task definitions. + // This prevents validation from failing in the reconciler when a Task is converted to a different API version. + // See https://github.com/tektoncd/pipeline/issues/6616 for more information. + // TODO(#6592): Decouple API versioning from feature versioning + errs = errs.Also(ts.TaskSpec.ValidateBetaFields(ctx).ViaField("taskSpec")) } errs = errs.Also(ValidateParameters(ctx, ts.Params).ViaField("params")) diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/zz_generated.deepcopy.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/zz_generated.deepcopy.go index bd2f039f6..484d08a43 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/zz_generated.deepcopy.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1/zz_generated.deepcopy.go @@ -777,6 +777,16 @@ func (in *PipelineTask) DeepCopyInto(out *PipelineTask) { *out = new(metav1.Duration) **out = **in } + if in.PipelineRef != nil { + in, out := &in.PipelineRef, &out.PipelineRef + *out = new(PipelineRef) + (*in).DeepCopyInto(*out) + } + if in.PipelineSpec != nil { + in, out := &in.PipelineSpec, &out.PipelineSpec + *out = new(PipelineSpec) + (*in).DeepCopyInto(*out) + } return } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/openapi_generated.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/openapi_generated.go index ce92c8d8c..71844dd01 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/openapi_generated.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/openapi_generated.go @@ -2636,11 +2636,23 @@ func schema_pkg_apis_pipeline_v1beta1_PipelineTask(ref common.ReferenceCallback) Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.Duration"), }, }, + "pipelineRef": { + SchemaProps: spec.SchemaProps{ + Description: "PipelineRef is a reference to a pipeline definition Note: PipelineRef is in preview mode and not yet supported", + Ref: ref("github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineRef"), + }, + }, + "pipelineSpec": { + SchemaProps: spec.SchemaProps{ + Description: "PipelineSpec is a specification of a pipeline Note: PipelineSpec is in preview mode and not yet 
supported", + Ref: ref("github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineSpec"), + }, + }, }, }, }, Dependencies: []string{ - "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.EmbeddedTask", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.Matrix", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.Param", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineTaskResources", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.TaskRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.WhenExpression", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.WorkspacePipelineTaskBinding", "k8s.io/apimachinery/pkg/apis/meta/v1.Duration"}, + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.EmbeddedTask", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.Matrix", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.Param", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineSpec", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.PipelineTaskResources", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.TaskRef", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.WhenExpression", "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1.WorkspacePipelineTaskBinding", "k8s.io/apimachinery/pkg/apis/meta/v1.Duration"}, } } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_types.go index 4426d5ddb..bbe837a94 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_types.go @@ -18,6 +18,7 @@ package v1beta1 import ( "github.com/tektoncd/pipeline/pkg/apis/pipeline" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum" "github.com/tektoncd/pipeline/pkg/reconciler/pipeline/dag" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" @@ -77,6 +78,27 @@ func (*Pipeline) GetGroupVersionKind() schema.GroupVersionKind { return SchemeGroupVersion.WithKind(pipeline.PipelineControllerName) } +// Checksum computes the sha256 checksum of the task object. +// Prior to computing the checksum, it performs some preprocessing on the +// metadata of the object where it removes system provided annotations. +// Only the name, namespace, generateName, user-provided labels and annotations +// and the pipelineSpec are included for the checksum computation. +func (p *Pipeline) Checksum() ([]byte, error) { + objectMeta := checksum.PrepareObjectMeta(p) + preprocessedPipeline := Pipeline{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "tekton.dev/v1beta1", + Kind: "Pipeline"}, + ObjectMeta: objectMeta, + Spec: p.Spec, + } + sha256Checksum, err := checksum.ComputeSha256Checksum(preprocessedPipeline) + if err != nil { + return nil, err + } + return sha256Checksum, nil +} + // PipelineSpec defines the desired state of Pipeline. 
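Both the v1 and v1beta1 types gain the same Checksum helper, which hashes only the identifying metadata (name, namespace, generateName, user-provided labels and annotations) plus the spec. A minimal sketch of computing and printing such a checksum, assuming the vendored v1beta1 API above; the pipeline and task names are illustrative:

package main

import (
	"encoding/hex"
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	p := &v1beta1.Pipeline{
		ObjectMeta: metav1.ObjectMeta{Name: "build", Namespace: "default"},
		Spec: v1beta1.PipelineSpec{
			Tasks: []v1beta1.PipelineTask{{
				Name:    "unit-test",
				TaskRef: &v1beta1.TaskRef{Name: "golang-test"},
			}},
		},
	}
	// System-provided annotations are stripped before hashing, so two objects
	// that differ only in controller-managed metadata produce the same digest.
	sum, err := p.Checksum()
	if err != nil {
		panic(err)
	}
	fmt.Println(hex.EncodeToString(sum))
}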
type PipelineSpec struct { // DisplayName is a user-facing name of the pipeline that may be @@ -220,6 +242,16 @@ type PipelineTask struct { // Refer Go's ParseDuration documentation for expected format: https://golang.org/pkg/time/#ParseDuration // +optional Timeout *metav1.Duration `json:"timeout,omitempty"` + + // PipelineRef is a reference to a pipeline definition + // Note: PipelineRef is in preview mode and not yet supported + // +optional + PipelineRef *PipelineRef `json:"pipelineRef,omitempty"` + + // PipelineSpec is a specification of a pipeline + // Note: PipelineSpec is in preview mode and not yet supported + // +optional + PipelineSpec *PipelineSpec `json:"pipelineSpec,omitempty"` } // IsCustomTask checks whether an embedded TaskSpec is a Custom Task diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_validation.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_validation.go index 6411d3f09..7529c273b 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_validation.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/pipeline_validation.go @@ -38,6 +38,13 @@ import ( var _ apis.Validatable = (*Pipeline)(nil) var _ resourcesemantics.VerbLimited = (*Pipeline)(nil) +const ( + taskRef = "taskRef" + taskSpec = "taskSpec" + pipelineRef = "pipelineRef" + pipelineSpec = "pipelineSpec" +) + // SupportedVerbs returns the operations that validation should be called for func (p *Pipeline) SupportedVerbs() []admissionregistrationv1.OperationType { return []admissionregistrationv1.OperationType{admissionregistrationv1.Create, admissionregistrationv1.Update} @@ -139,7 +146,7 @@ func (pt PipelineTask) ValidateName() *apis.FieldError { // Validate classifies whether a task is a custom task, bundle, or a regular task(dag/final) // calls the validation routine based on the type of the task func (pt PipelineTask) Validate(ctx context.Context) (errs *apis.FieldError) { - errs = errs.Also(pt.validateRefOrSpec()) + errs = errs.Also(pt.validateRefOrSpec(ctx)) errs = errs.Also(pt.validateEmbeddedOrType()) @@ -241,15 +248,39 @@ func (pt *PipelineTask) validateWorkspaces(workspaceNames sets.String) (errs *ap return errs } -// validateRefOrSpec validates at least one of taskRef or taskSpec is specified -func (pt PipelineTask) validateRefOrSpec() (errs *apis.FieldError) { - // can't have both taskRef and taskSpec at the same time - if pt.TaskRef != nil && pt.TaskSpec != nil { - errs = errs.Also(apis.ErrMultipleOneOf("taskRef", "taskSpec")) +// validateRefOrSpec validates at least one of taskRef or taskSpec or pipelineRef or pipelineSpec is specified +func (pt PipelineTask) validateRefOrSpec(ctx context.Context) (errs *apis.FieldError) { + // collect all the specified specifications + nonNilFields := []string{} + if pt.TaskRef != nil { + nonNilFields = append(nonNilFields, taskRef) } - // Check that one of TaskRef and TaskSpec is present - if pt.TaskRef == nil && pt.TaskSpec == nil { - errs = errs.Also(apis.ErrMissingOneOf("taskRef", "taskSpec")) + if pt.TaskSpec != nil { + nonNilFields = append(nonNilFields, taskSpec) + } + if pt.PipelineRef != nil { + errs = errs.Also(version.ValidateEnabledAPIFields(ctx, pipelineRef, config.AlphaAPIFields)) + nonNilFields = append(nonNilFields, pipelineRef) + } + if pt.PipelineSpec != nil { + errs = errs.Also(version.ValidateEnabledAPIFields(ctx, pipelineSpec, config.AlphaAPIFields)) + nonNilFields = append(nonNilFields, pipelineSpec) + } + + // check the length of 
nonNilFields + // if one of taskRef or taskSpec or pipelineRef or pipelineSpec is specified, + // the length of nonNilFields should exactly be 1 + if len(nonNilFields) > 1 { + errs = errs.Also(apis.ErrGeneric("expected exactly one, got multiple", nonNilFields...)) + } else if len(nonNilFields) == 0 { + cfg := config.FromContextOrDefaults(ctx) + // check for TaskRef or TaskSpec or PipelineRef or PipelineSpec with alpha feature flag + if cfg.FeatureFlags.EnableAPIFields == config.AlphaAPIFields { + errs = errs.Also(apis.ErrMissingOneOf(taskRef, taskSpec, pipelineRef, pipelineSpec)) + } else { + // check for taskRef and taskSpec with beta/stable feature flag + errs = errs.Also(apis.ErrMissingOneOf(taskRef, taskSpec)) + } } return errs } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/swagger.json b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/swagger.json index 2bbb8926d..47aafa9a0 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/swagger.json +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/swagger.json @@ -1287,6 +1287,14 @@ }, "x-kubernetes-list-type": "atomic" }, + "pipelineRef": { + "description": "PipelineRef is a reference to a pipeline definition Note: PipelineRef is in preview mode and not yet supported", + "$ref": "#/definitions/v1beta1.PipelineRef" + }, + "pipelineSpec": { + "description": "PipelineSpec is a specification of a pipeline Note: PipelineSpec is in preview mode and not yet supported", + "$ref": "#/definitions/v1beta1.PipelineSpec" + }, "resources": { "description": "Deprecated: Unused, preserved only for backwards compatibility", "$ref": "#/definitions/v1beta1.PipelineTaskResources" diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/task_types.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/task_types.go index 855309256..2961ade3b 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/task_types.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/task_types.go @@ -18,6 +18,7 @@ package v1beta1 import ( "github.com/tektoncd/pipeline/pkg/apis/pipeline" + "github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" @@ -68,6 +69,27 @@ func (*Task) GetGroupVersionKind() schema.GroupVersionKind { return SchemeGroupVersion.WithKind(pipeline.TaskControllerName) } +// Checksum computes the sha256 checksum of the task object. +// Prior to computing the checksum, it performs some preprocessing on the +// metadata of the object where it removes system provided annotations. +// Only the name, namespace, generateName, user-provided labels and annotations +// and the taskSpec are included for the checksum computation. +func (t *Task) Checksum() ([]byte, error) { + objectMeta := checksum.PrepareObjectMeta(t) + preprocessedTask := Task{ + TypeMeta: metav1.TypeMeta{ + APIVersion: "tekton.dev/v1beta1", + Kind: "Task"}, + ObjectMeta: objectMeta, + Spec: t.Spec, + } + sha256Checksum, err := checksum.ComputeSha256Checksum(preprocessedTask) + if err != nil { + return nil, err + } + return sha256Checksum, nil +} + // TaskSpec defines the desired state of Task. 
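validateRefOrSpec now requires exactly one of taskRef, taskSpec, pipelineRef or pipelineSpec per PipelineTask, and gates the two pipeline fields behind enable-api-fields: alpha. A minimal sketch of how that surfaces through PipelineTask.Validate, assuming the vendored config and v1beta1 packages above; the task names are illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/tektoncd/pipeline/pkg/apis/config"
	"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1"
)

func main() {
	// Enable alpha fields so pipelineRef itself is not rejected outright.
	ctx := config.ToContext(context.Background(), &config.Config{
		FeatureFlags: &config.FeatureFlags{EnableAPIFields: config.AlphaAPIFields},
	})

	pt := v1beta1.PipelineTask{
		Name:        "release",
		TaskRef:     &v1beta1.TaskRef{Name: "build"},
		PipelineRef: &v1beta1.PipelineRef{Name: "nested"}, // two refs set at once
	}
	if err := pt.Validate(ctx); err != nil {
		// Reports that only one of taskRef/taskSpec/pipelineRef/pipelineSpec may be set.
		fmt.Println(err)
	}
}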
type TaskSpec struct { // Resources is a list input and output resource to run the task diff --git a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go index fdd028257..c158be177 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1/zz_generated.deepcopy.go @@ -1175,6 +1175,16 @@ func (in *PipelineTask) DeepCopyInto(out *PipelineTask) { *out = new(v1.Duration) **out = **in } + if in.PipelineRef != nil { + in, out := &in.PipelineRef, &out.PipelineRef + *out = new(PipelineRef) + (*in).DeepCopyInto(*out) + } + if in.PipelineSpec != nil { + in, out := &in.PipelineSpec, &out.PipelineSpec + *out = new(PipelineSpec) + (*in).DeepCopyInto(*out) + } return } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/clientset/versioned/clientset.go b/vendor/github.com/tektoncd/pipeline/pkg/client/clientset/versioned/clientset.go index 081e93512..56848e8d5 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/clientset/versioned/clientset.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/clientset/versioned/clientset.go @@ -37,8 +37,7 @@ type Interface interface { TektonV1() tektonv1.TektonV1Interface } -// Clientset contains the clients for groups. Each group has exactly one -// version included in a Clientset. +// Clientset contains the clients for groups. type Clientset struct { *discovery.DiscoveryClient tektonV1alpha1 *tektonv1alpha1.TektonV1alpha1Client diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/informers/externalversions/factory.go b/vendor/github.com/tektoncd/pipeline/pkg/client/informers/externalversions/factory.go index 691cee85d..b0cd64175 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/informers/externalversions/factory.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/informers/externalversions/factory.go @@ -47,6 +47,11 @@ type sharedInformerFactory struct { // startedInformers is used for tracking which informers have been started. // This allows Start() to be called multiple times safely. startedInformers map[reflect.Type]bool + // wg tracks how many goroutines were started. + wg sync.WaitGroup + // shuttingDown is true when Shutdown has been called. It may still be running + // because it needs to wait for goroutines. + shuttingDown bool } // WithCustomResyncConfig sets a custom resync period for the specified informer types. @@ -107,20 +112,39 @@ func NewSharedInformerFactoryWithOptions(client versioned.Interface, defaultResy return factory } -// Start initializes all requested informers. func (f *sharedInformerFactory) Start(stopCh <-chan struct{}) { f.lock.Lock() defer f.lock.Unlock() + if f.shuttingDown { + return + } + for informerType, informer := range f.informers { if !f.startedInformers[informerType] { - go informer.Run(stopCh) + f.wg.Add(1) + // We need a new variable in each loop iteration, + // otherwise the goroutine would use the loop variable + // and that keeps changing. + informer := informer + go func() { + defer f.wg.Done() + informer.Run(stopCh) + }() f.startedInformers[informerType] = true } } } -// WaitForCacheSync waits for all started informers' cache were synced. +func (f *sharedInformerFactory) Shutdown() { + f.lock.Lock() + f.shuttingDown = true + f.lock.Unlock() + + // Will return immediately if there is nothing to wait for. 
+ f.wg.Wait() +} + func (f *sharedInformerFactory) WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool { informers := func() map[reflect.Type]cache.SharedIndexInformer { f.lock.Lock() @@ -167,11 +191,58 @@ func (f *sharedInformerFactory) InformerFor(obj runtime.Object, newFunc internal // SharedInformerFactory provides shared informers for resources in all known // API group versions. +// +// It is typically used like this: +// +// ctx, cancel := context.Background() +// defer cancel() +// factory := NewSharedInformerFactory(client, resyncPeriod) +// defer factory.WaitForStop() // Returns immediately if nothing was started. +// genericInformer := factory.ForResource(resource) +// typedInformer := factory.SomeAPIGroup().V1().SomeType() +// factory.Start(ctx.Done()) // Start processing these informers. +// synced := factory.WaitForCacheSync(ctx.Done()) +// for v, ok := range synced { +// if !ok { +// fmt.Fprintf(os.Stderr, "caches failed to sync: %v", v) +// return +// } +// } +// +// // Creating informers can also be created after Start, but then +// // Start must be called again: +// anotherGenericInformer := factory.ForResource(resource) +// factory.Start(ctx.Done()) type SharedInformerFactory interface { internalinterfaces.SharedInformerFactory - ForResource(resource schema.GroupVersionResource) (GenericInformer, error) + + // Start initializes all requested informers. They are handled in goroutines + // which run until the stop channel gets closed. + Start(stopCh <-chan struct{}) + + // Shutdown marks a factory as shutting down. At that point no new + // informers can be started anymore and Start will return without + // doing anything. + // + // In addition, Shutdown blocks until all goroutines have terminated. For that + // to happen, the close channel(s) that they were started with must be closed, + // either before Shutdown gets called or while it is waiting. + // + // Shutdown may be called multiple times, even concurrently. All such calls will + // block until all goroutines have terminated. + Shutdown() + + // WaitForCacheSync blocks until all started informers' caches were synced + // or the stop channel gets closed. WaitForCacheSync(stopCh <-chan struct{}) map[reflect.Type]bool + // ForResource gives generic access to a shared informer of the matching type. + ForResource(resource schema.GroupVersionResource) (GenericInformer, error) + + // InternalInformerFor returns the SharedIndexInformer for obj using an internal + // client. 
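The regenerated factory adds a Shutdown method backed by a WaitGroup, so callers can wait for informer goroutines to exit rather than leaking them after the stop channel closes. A minimal lifecycle sketch, assuming the vendored clientset and informer packages above; the resync period and error handling are illustrative:

package example

import (
	"context"
	"fmt"
	"time"

	"github.com/tektoncd/pipeline/pkg/client/clientset/versioned"
	"github.com/tektoncd/pipeline/pkg/client/informers/externalversions"
)

// watchPipelines starts the shared informers, waits for their caches, and
// shuts the factory down cleanly once ctx is cancelled.
func watchPipelines(ctx context.Context, client versioned.Interface) error {
	factory := externalversions.NewSharedInformerFactory(client, 10*time.Minute)
	defer factory.Shutdown() // blocks until all started informer goroutines return

	// Requesting an informer registers it with the factory; Start launches it.
	_ = factory.Tekton().V1().Pipelines().Informer()
	factory.Start(ctx.Done())

	for typ, ok := range factory.WaitForCacheSync(ctx.Done()) {
		if !ok {
			return fmt.Errorf("cache for %v failed to sync", typ)
		}
	}

	<-ctx.Done() // event handlers run until cancellation closes the stop channel
	return nil
}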
+ InformerFor(obj runtime.Object, newFunc internalinterfaces.NewInformerFunc) cache.SharedIndexInformer + Tekton() pipeline.Interface } diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/client/client.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/client/client.go index 67cd130ad..af3642aaa 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/client/client.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/client/client.go @@ -20,28 +20,10 @@ package client import ( context "context" - json "encoding/json" - errors "errors" - fmt "fmt" - pipelinev1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - v1alpha1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1" - v1beta1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1beta1" versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" - typedtektonv1 "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/typed/pipeline/v1" - typedtektonv1alpha1 "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/typed/pipeline/v1alpha1" - typedtektonv1beta1 "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/typed/pipeline/v1beta1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - unstructured "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - runtime "k8s.io/apimachinery/pkg/runtime" - schema "k8s.io/apimachinery/pkg/runtime/schema" - types "k8s.io/apimachinery/pkg/types" - watch "k8s.io/apimachinery/pkg/watch" - discovery "k8s.io/client-go/discovery" - dynamic "k8s.io/client-go/dynamic" rest "k8s.io/client-go/rest" injection "knative.dev/pkg/injection" - dynamicclient "knative.dev/pkg/injection/clients/dynamicclient" logging "knative.dev/pkg/logging" ) @@ -50,7 +32,6 @@ func init() { injection.Default.RegisterClientFetcher(func(ctx context.Context) interface{} { return Get(ctx) }) - injection.Dynamic.RegisterDynamicClient(withClientFromDynamic) } // Key is used as the key for associating information with a context.Context. @@ -60,10 +41,6 @@ func withClientFromConfig(ctx context.Context, cfg *rest.Config) context.Context return context.WithValue(ctx, Key{}, versioned.NewForConfigOrDie(cfg)) } -func withClientFromDynamic(ctx context.Context) context.Context { - return context.WithValue(ctx, Key{}, &wrapClient{dyn: dynamicclient.Get(ctx)}) -} - // Get extracts the versioned.Interface client from the context. 
func Get(ctx context.Context) versioned.Interface { untyped := ctx.Value(Key{}) @@ -78,1637 +55,3 @@ func Get(ctx context.Context) versioned.Interface { } return untyped.(versioned.Interface) } - -type wrapClient struct { - dyn dynamic.Interface -} - -var _ versioned.Interface = (*wrapClient)(nil) - -func (w *wrapClient) Discovery() discovery.DiscoveryInterface { - panic("Discovery called on dynamic client!") -} - -func convert(from interface{}, to runtime.Object) error { - bs, err := json.Marshal(from) - if err != nil { - return fmt.Errorf("Marshal() = %w", err) - } - if err := json.Unmarshal(bs, to); err != nil { - return fmt.Errorf("Unmarshal() = %w", err) - } - return nil -} - -// TektonV1alpha1 retrieves the TektonV1alpha1Client -func (w *wrapClient) TektonV1alpha1() typedtektonv1alpha1.TektonV1alpha1Interface { - return &wrapTektonV1alpha1{ - dyn: w.dyn, - } -} - -type wrapTektonV1alpha1 struct { - dyn dynamic.Interface -} - -func (w *wrapTektonV1alpha1) RESTClient() rest.Interface { - panic("RESTClient called on dynamic client!") -} - -func (w *wrapTektonV1alpha1) Runs(namespace string) typedtektonv1alpha1.RunInterface { - return &wrapTektonV1alpha1RunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1alpha1", - Resource: "runs", - }), - - namespace: namespace, - } -} - -type wrapTektonV1alpha1RunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1alpha1.RunInterface = (*wrapTektonV1alpha1RunImpl)(nil) - -func (w *wrapTektonV1alpha1RunImpl) Create(ctx context.Context, in *v1alpha1.Run, opts v1.CreateOptions) (*v1alpha1.Run, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "Run", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.Run{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1alpha1RunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1alpha1RunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.Run, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.Run{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.RunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.RunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.Run, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.Run{} - if err := convert(uo, out); err != nil { - return nil, err - } - return 
out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) Update(ctx context.Context, in *v1alpha1.Run, opts v1.UpdateOptions) (*v1alpha1.Run, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "Run", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.Run{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) UpdateStatus(ctx context.Context, in *v1alpha1.Run, opts v1.UpdateOptions) (*v1alpha1.Run, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "Run", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.Run{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1RunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1alpha1) VerificationPolicies(namespace string) typedtektonv1alpha1.VerificationPolicyInterface { - return &wrapTektonV1alpha1VerificationPolicyImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1alpha1", - Resource: "verificationpolicies", - }), - - namespace: namespace, - } -} - -type wrapTektonV1alpha1VerificationPolicyImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1alpha1.VerificationPolicyInterface = (*wrapTektonV1alpha1VerificationPolicyImpl)(nil) - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Create(ctx context.Context, in *v1alpha1.VerificationPolicy, opts v1.CreateOptions) (*v1alpha1.VerificationPolicy, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "VerificationPolicy", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.VerificationPolicy{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.VerificationPolicy, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.VerificationPolicy{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.VerificationPolicyList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - 
return nil, err - } - out := &v1alpha1.VerificationPolicyList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.VerificationPolicy, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.VerificationPolicy{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Update(ctx context.Context, in *v1alpha1.VerificationPolicy, opts v1.UpdateOptions) (*v1alpha1.VerificationPolicy, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "VerificationPolicy", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.VerificationPolicy{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) UpdateStatus(ctx context.Context, in *v1alpha1.VerificationPolicy, opts v1.UpdateOptions) (*v1alpha1.VerificationPolicy, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1alpha1", - Kind: "VerificationPolicy", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1alpha1.VerificationPolicy{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1alpha1VerificationPolicyImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -// TektonV1beta1 retrieves the TektonV1beta1Client -func (w *wrapClient) TektonV1beta1() typedtektonv1beta1.TektonV1beta1Interface { - return &wrapTektonV1beta1{ - dyn: w.dyn, - } -} - -type wrapTektonV1beta1 struct { - dyn dynamic.Interface -} - -func (w *wrapTektonV1beta1) RESTClient() rest.Interface { - panic("RESTClient called on dynamic client!") -} - -func (w *wrapTektonV1beta1) ClusterTasks() typedtektonv1beta1.ClusterTaskInterface { - return &wrapTektonV1beta1ClusterTaskImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "clustertasks", - }), - } -} - -type wrapTektonV1beta1ClusterTaskImpl struct { - dyn dynamic.NamespaceableResourceInterface -} - -var _ typedtektonv1beta1.ClusterTaskInterface = (*wrapTektonV1beta1ClusterTaskImpl)(nil) - -func (w *wrapTektonV1beta1ClusterTaskImpl) Create(ctx context.Context, in *v1beta1.ClusterTask, opts v1.CreateOptions) (*v1beta1.ClusterTask, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "ClusterTask", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTask{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) Delete(ctx context.Context, name 
string, opts v1.DeleteOptions) error { - return w.dyn.Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.ClusterTask, error) { - uo, err := w.dyn.Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTask{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.ClusterTaskList, error) { - uo, err := w.dyn.List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTaskList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.ClusterTask, err error) { - uo, err := w.dyn.Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTask{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) Update(ctx context.Context, in *v1beta1.ClusterTask, opts v1.UpdateOptions) (*v1beta1.ClusterTask, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "ClusterTask", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTask{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) UpdateStatus(ctx context.Context, in *v1beta1.ClusterTask, opts v1.UpdateOptions) (*v1beta1.ClusterTask, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "ClusterTask", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.ClusterTask{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1ClusterTaskImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1beta1) CustomRuns(namespace string) typedtektonv1beta1.CustomRunInterface { - return &wrapTektonV1beta1CustomRunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "customruns", - }), - - namespace: namespace, - } -} - -type wrapTektonV1beta1CustomRunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1beta1.CustomRunInterface = (*wrapTektonV1beta1CustomRunImpl)(nil) - -func (w *wrapTektonV1beta1CustomRunImpl) Create(ctx context.Context, in *v1beta1.CustomRun, opts v1.CreateOptions) (*v1beta1.CustomRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "CustomRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, 
err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1CustomRunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1CustomRunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.CustomRun, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.CustomRunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.CustomRun, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) Update(ctx context.Context, in *v1beta1.CustomRun, opts v1.UpdateOptions) (*v1beta1.CustomRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "CustomRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) UpdateStatus(ctx context.Context, in *v1beta1.CustomRun, opts v1.UpdateOptions) (*v1beta1.CustomRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "CustomRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.CustomRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1CustomRunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1beta1) Pipelines(namespace string) typedtektonv1beta1.PipelineInterface { - return &wrapTektonV1beta1PipelineImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "pipelines", - }), - - namespace: namespace, - } -} - -type wrapTektonV1beta1PipelineImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ 
typedtektonv1beta1.PipelineInterface = (*wrapTektonV1beta1PipelineImpl)(nil) - -func (w *wrapTektonV1beta1PipelineImpl) Create(ctx context.Context, in *v1beta1.Pipeline, opts v1.CreateOptions) (*v1beta1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1PipelineImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1PipelineImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.Pipeline, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.PipelineList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.Pipeline, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) Update(ctx context.Context, in *v1beta1.Pipeline, opts v1.UpdateOptions) (*v1beta1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) UpdateStatus(ctx context.Context, in *v1beta1.Pipeline, opts v1.UpdateOptions) (*v1beta1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1beta1) 
PipelineRuns(namespace string) typedtektonv1beta1.PipelineRunInterface { - return &wrapTektonV1beta1PipelineRunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "pipelineruns", - }), - - namespace: namespace, - } -} - -type wrapTektonV1beta1PipelineRunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1beta1.PipelineRunInterface = (*wrapTektonV1beta1PipelineRunImpl)(nil) - -func (w *wrapTektonV1beta1PipelineRunImpl) Create(ctx context.Context, in *v1beta1.PipelineRun, opts v1.CreateOptions) (*v1beta1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "PipelineRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1PipelineRunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1PipelineRunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.PipelineRun, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.PipelineRunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.PipelineRun, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) Update(ctx context.Context, in *v1beta1.PipelineRun, opts v1.UpdateOptions) (*v1beta1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "PipelineRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) UpdateStatus(ctx context.Context, in *v1beta1.PipelineRun, opts v1.UpdateOptions) (*v1beta1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "PipelineRun", - }) - uo := 
&unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1PipelineRunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1beta1) Tasks(namespace string) typedtektonv1beta1.TaskInterface { - return &wrapTektonV1beta1TaskImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "tasks", - }), - - namespace: namespace, - } -} - -type wrapTektonV1beta1TaskImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1beta1.TaskInterface = (*wrapTektonV1beta1TaskImpl)(nil) - -func (w *wrapTektonV1beta1TaskImpl) Create(ctx context.Context, in *v1beta1.Task, opts v1.CreateOptions) (*v1beta1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1TaskImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1TaskImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.Task, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.TaskList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.Task, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskImpl) Update(ctx context.Context, in *v1beta1.Task, opts v1.UpdateOptions) (*v1beta1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, 
nil -} - -func (w *wrapTektonV1beta1TaskImpl) UpdateStatus(ctx context.Context, in *v1beta1.Task, opts v1.UpdateOptions) (*v1beta1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1beta1) TaskRuns(namespace string) typedtektonv1beta1.TaskRunInterface { - return &wrapTektonV1beta1TaskRunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1beta1", - Resource: "taskruns", - }), - - namespace: namespace, - } -} - -type wrapTektonV1beta1TaskRunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1beta1.TaskRunInterface = (*wrapTektonV1beta1TaskRunImpl)(nil) - -func (w *wrapTektonV1beta1TaskRunImpl) Create(ctx context.Context, in *v1beta1.TaskRun, opts v1.CreateOptions) (*v1beta1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1beta1TaskRunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1beta1TaskRunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.TaskRun, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.TaskRunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.TaskRun, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) Update(ctx context.Context, in *v1beta1.TaskRun, opts v1.UpdateOptions) (*v1beta1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: 
"v1beta1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) UpdateStatus(ctx context.Context, in *v1beta1.TaskRun, opts v1.UpdateOptions) (*v1beta1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1beta1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &v1beta1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1beta1TaskRunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -// TektonV1 retrieves the TektonV1Client -func (w *wrapClient) TektonV1() typedtektonv1.TektonV1Interface { - return &wrapTektonV1{ - dyn: w.dyn, - } -} - -type wrapTektonV1 struct { - dyn dynamic.Interface -} - -func (w *wrapTektonV1) RESTClient() rest.Interface { - panic("RESTClient called on dynamic client!") -} - -func (w *wrapTektonV1) Pipelines(namespace string) typedtektonv1.PipelineInterface { - return &wrapTektonV1PipelineImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1", - Resource: "pipelines", - }), - - namespace: namespace, - } -} - -type wrapTektonV1PipelineImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1.PipelineInterface = (*wrapTektonV1PipelineImpl)(nil) - -func (w *wrapTektonV1PipelineImpl) Create(ctx context.Context, in *pipelinev1.Pipeline, opts v1.CreateOptions) (*pipelinev1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1PipelineImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1PipelineImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*pipelinev1.Pipeline, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineImpl) List(ctx context.Context, opts v1.ListOptions) (*pipelinev1.PipelineList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - 
-func (w *wrapTektonV1PipelineImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *pipelinev1.Pipeline, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineImpl) Update(ctx context.Context, in *pipelinev1.Pipeline, opts v1.UpdateOptions) (*pipelinev1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineImpl) UpdateStatus(ctx context.Context, in *pipelinev1.Pipeline, opts v1.UpdateOptions) (*pipelinev1.Pipeline, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Pipeline", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Pipeline{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1) PipelineRuns(namespace string) typedtektonv1.PipelineRunInterface { - return &wrapTektonV1PipelineRunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1", - Resource: "pipelineruns", - }), - - namespace: namespace, - } -} - -type wrapTektonV1PipelineRunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1.PipelineRunInterface = (*wrapTektonV1PipelineRunImpl)(nil) - -func (w *wrapTektonV1PipelineRunImpl) Create(ctx context.Context, in *pipelinev1.PipelineRun, opts v1.CreateOptions) (*pipelinev1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "PipelineRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1PipelineRunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1PipelineRunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*pipelinev1.PipelineRun, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRun{} - if err := convert(uo, out); err != 
nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) List(ctx context.Context, opts v1.ListOptions) (*pipelinev1.PipelineRunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *pipelinev1.PipelineRun, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) Update(ctx context.Context, in *pipelinev1.PipelineRun, opts v1.UpdateOptions) (*pipelinev1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "PipelineRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) UpdateStatus(ctx context.Context, in *pipelinev1.PipelineRun, opts v1.UpdateOptions) (*pipelinev1.PipelineRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "PipelineRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.PipelineRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1PipelineRunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1) Tasks(namespace string) typedtektonv1.TaskInterface { - return &wrapTektonV1TaskImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1", - Resource: "tasks", - }), - - namespace: namespace, - } -} - -type wrapTektonV1TaskImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1.TaskInterface = (*wrapTektonV1TaskImpl)(nil) - -func (w *wrapTektonV1TaskImpl) Create(ctx context.Context, in *pipelinev1.Task, opts v1.CreateOptions) (*pipelinev1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1TaskImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return 
w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1TaskImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*pipelinev1.Task, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) List(ctx context.Context, opts v1.ListOptions) (*pipelinev1.TaskList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *pipelinev1.Task, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) Update(ctx context.Context, in *pipelinev1.Task, opts v1.UpdateOptions) (*pipelinev1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) UpdateStatus(ctx context.Context, in *pipelinev1.Task, opts v1.UpdateOptions) (*pipelinev1.Task, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "Task", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.Task{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} - -func (w *wrapTektonV1) TaskRuns(namespace string) typedtektonv1.TaskRunInterface { - return &wrapTektonV1TaskRunImpl{ - dyn: w.dyn.Resource(schema.GroupVersionResource{ - Group: "tekton.dev", - Version: "v1", - Resource: "taskruns", - }), - - namespace: namespace, - } -} - -type wrapTektonV1TaskRunImpl struct { - dyn dynamic.NamespaceableResourceInterface - - namespace string -} - -var _ typedtektonv1.TaskRunInterface = (*wrapTektonV1TaskRunImpl)(nil) - -func (w *wrapTektonV1TaskRunImpl) Create(ctx context.Context, in *pipelinev1.TaskRun, opts v1.CreateOptions) (*pipelinev1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Create(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) Delete(ctx context.Context, 
name string, opts v1.DeleteOptions) error { - return w.dyn.Namespace(w.namespace).Delete(ctx, name, opts) -} - -func (w *wrapTektonV1TaskRunImpl) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { - return w.dyn.Namespace(w.namespace).DeleteCollection(ctx, opts, listOpts) -} - -func (w *wrapTektonV1TaskRunImpl) Get(ctx context.Context, name string, opts v1.GetOptions) (*pipelinev1.TaskRun, error) { - uo, err := w.dyn.Namespace(w.namespace).Get(ctx, name, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) List(ctx context.Context, opts v1.ListOptions) (*pipelinev1.TaskRunList, error) { - uo, err := w.dyn.Namespace(w.namespace).List(ctx, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRunList{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *pipelinev1.TaskRun, err error) { - uo, err := w.dyn.Namespace(w.namespace).Patch(ctx, name, pt, data, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) Update(ctx context.Context, in *pipelinev1.TaskRun, opts v1.UpdateOptions) (*pipelinev1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).Update(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) UpdateStatus(ctx context.Context, in *pipelinev1.TaskRun, opts v1.UpdateOptions) (*pipelinev1.TaskRun, error) { - in.SetGroupVersionKind(schema.GroupVersionKind{ - Group: "tekton.dev", - Version: "v1", - Kind: "TaskRun", - }) - uo := &unstructured.Unstructured{} - if err := convert(in, uo); err != nil { - return nil, err - } - uo, err := w.dyn.Namespace(w.namespace).UpdateStatus(ctx, uo, opts) - if err != nil { - return nil, err - } - out := &pipelinev1.TaskRun{} - if err := convert(uo, out); err != nil { - return nil, err - } - return out, nil -} - -func (w *wrapTektonV1TaskRunImpl) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { - return nil, errors.New("NYI: Watch") -} diff --git a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/pipelinerun.go b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/pipelinerun.go index 44b15d523..0e0850531 100644 --- a/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/pipelinerun.go +++ b/vendor/github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1/pipelinerun/pipelinerun.go @@ -21,15 +21,8 @@ package pipelinerun import ( context "context" - apispipelinev1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1" - versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" v1 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1" - client 
"github.com/tektoncd/pipeline/pkg/client/injection/client" factory "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory" - pipelinev1 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - labels "k8s.io/apimachinery/pkg/labels" - cache "k8s.io/client-go/tools/cache" controller "knative.dev/pkg/controller" injection "knative.dev/pkg/injection" logging "knative.dev/pkg/logging" @@ -37,7 +30,6 @@ import ( func init() { injection.Default.RegisterInformer(withInformer) - injection.Dynamic.RegisterDynamicInformer(withDynamicInformer) } // Key is used for associating the Informer inside the context.Context. @@ -49,11 +41,6 @@ func withInformer(ctx context.Context) (context.Context, controller.Informer) { return context.WithValue(ctx, Key{}, inf), inf.Informer() } -func withDynamicInformer(ctx context.Context) context.Context { - inf := &wrapper{client: client.Get(ctx), resourceVersion: injection.GetResourceVersion(ctx)} - return context.WithValue(ctx, Key{}, inf) -} - // Get extracts the typed informer from the context. func Get(ctx context.Context) v1.PipelineRunInformer { untyped := ctx.Value(Key{}) @@ -63,54 +50,3 @@ func Get(ctx context.Context) v1.PipelineRunInformer { } return untyped.(v1.PipelineRunInformer) } - -type wrapper struct { - client versioned.Interface - - namespace string - - resourceVersion string -} - -var _ v1.PipelineRunInformer = (*wrapper)(nil) -var _ pipelinev1.PipelineRunLister = (*wrapper)(nil) - -func (w *wrapper) Informer() cache.SharedIndexInformer { - return cache.NewSharedIndexInformer(nil, &apispipelinev1.PipelineRun{}, 0, nil) -} - -func (w *wrapper) Lister() pipelinev1.PipelineRunLister { - return w -} - -func (w *wrapper) PipelineRuns(namespace string) pipelinev1.PipelineRunNamespaceLister { - return &wrapper{client: w.client, namespace: namespace, resourceVersion: w.resourceVersion} -} - -// SetResourceVersion allows consumers to adjust the minimum resourceVersion -// used by the underlying client. It is not accessible via the standard -// lister interface, but can be accessed through a user-defined interface and -// an implementation check e.g. rvs, ok := foo.(ResourceVersionSetter) -func (w *wrapper) SetResourceVersion(resourceVersion string) { - w.resourceVersion = resourceVersion -} - -func (w *wrapper) List(selector labels.Selector) (ret []*apispipelinev1.PipelineRun, err error) { - lo, err := w.client.TektonV1().PipelineRuns(w.namespace).List(context.TODO(), metav1.ListOptions{ - LabelSelector: selector.String(), - ResourceVersion: w.resourceVersion, - }) - if err != nil { - return nil, err - } - for idx := range lo.Items { - ret = append(ret, &lo.Items[idx]) - } - return ret, nil -} - -func (w *wrapper) Get(name string) (*apispipelinev1.PipelineRun, error) { - return w.client.TektonV1().PipelineRuns(w.namespace).Get(context.TODO(), name, metav1.GetOptions{ - ResourceVersion: w.resourceVersion, - }) -} diff --git a/vendor/github.com/xanzy/go-gitlab/CONTRIBUTING.md b/vendor/github.com/xanzy/go-gitlab/CONTRIBUTING.md index ec4ed485d..32bd82274 100644 --- a/vendor/github.com/xanzy/go-gitlab/CONTRIBUTING.md +++ b/vendor/github.com/xanzy/go-gitlab/CONTRIBUTING.md @@ -4,8 +4,7 @@ We want to make contributing to this project as easy as possible. ## Reporting Issues -If you have an issue, please report it on the [issue tracker]( -https://github.com/xanzy/go-gitlab/issues). +If you have an issue, please report it on the [issue tracker](https://github.com/xanzy/go-gitlab/issues). 
When you are up for writing a PR to solve the issue you encountered, it's not needed to first open a separate issue. In that case only opening a PR with a @@ -13,8 +12,8 @@ description of the issue you are trying to solve is just fine. ## Contributing Code -Pull requests are always welcome. When in doubt if your contribution fits in with -the rest of the project, free to first open an issue to discuss your idea. +Pull requests are always welcome. When in doubt if your contribution fits within +the rest of the project, feel free to first open an issue to discuss your idea. This is not needed when fixing a bug or adding an enhancement, as long as the enhancement you are trying to add can be found in the public GitLab API docs as @@ -22,8 +21,10 @@ this project only supports what is in the public API docs. ## Coding style -We try to follow the Go best practices, where it makes sense, and use [`gofumpt`]( -https://github.com/mvdan/gofumpt) to format code in this project. +We try to follow the Go best practices, where it makes sense, and use +[`gofumpt`](https://github.com/mvdan/gofumpt) to format code in this project. +As a general rule of thumb we prefer to keep line width for comments below 80 +chars and for code (where possible and sensible) below 100 chars. Before making a PR, please look at the rest this package and try to make sure your contribution is consistent with the rest of the coding style. @@ -35,18 +36,18 @@ easier to find things. ### Setting up your local development environment to Contribute to `go-gitlab` 1. [Fork](https://github.com/xanzy/go-gitlab/fork), then clone the repository. - ```sh - git clone https://github.com//go-gitlab.git - # or via ssh - git clone git@github.com:/go-gitlab.git - ``` + ```sh + git clone https://github.com//go-gitlab.git + # or via ssh + git clone git@github.com:/go-gitlab.git + ``` 1. Install dependencies: - ```sh - make setup - ``` + ```sh + make setup + ``` 1. Make your changes on your feature branch 1. Run the tests and `gofumpt` - ```sh - make test && make fmt - ``` + ```sh + make test && make fmt + ``` 1. Open up your pull request diff --git a/vendor/github.com/xanzy/go-gitlab/commits.go b/vendor/github.com/xanzy/go-gitlab/commits.go index 7c724c2b0..cdb548b07 100644 --- a/vendor/github.com/xanzy/go-gitlab/commits.go +++ b/vendor/github.com/xanzy/go-gitlab/commits.go @@ -77,6 +77,7 @@ type ListCommitsOptions struct { Since *time.Time `url:"since,omitempty" json:"since,omitempty"` Until *time.Time `url:"until,omitempty" json:"until,omitempty"` Path *string `url:"path,omitempty" json:"path,omitempty"` + Author *string `url:"author,omitempty" json:"author,omitempty"` All *bool `url:"all,omitempty" json:"all,omitempty"` WithStats *bool `url:"with_stats,omitempty" json:"with_stats,omitempty"` FirstParent *bool `url:"first_parent,omitempty" json:"first_parent,omitempty"` @@ -402,6 +403,7 @@ type CommitStatus struct { Name string `json:"name"` AllowFailure bool `json:"allow_failure"` Coverage float64 `json:"coverage"` + PipelineId int `json:"pipeline_id"` Author Author `json:"author"` Description string `json:"description"` TargetURL string `json:"target_url"` @@ -571,10 +573,10 @@ type GPGSignature struct { KeySubkeyID int `json:"gpg_key_subkey_id"` } -// GetGPGSiganature gets a GPG signature of a commit. +// GetGPGSignature gets a GPG signature of a commit. 
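Besides the GetGPGSiganature → GetGPGSignature rename, this commits.go hunk adds an Author filter to ListCommitsOptions and a PipelineId field to CommitStatus. A minimal usage sketch of the new filter, assuming a placeholder token and project path:

```go
package main

import (
	"fmt"
	"log"

	"github.com/xanzy/go-gitlab"
)

func main() {
	// Placeholder token; replace with a real personal or project access token.
	git, err := gitlab.NewClient("glpat-xxxx")
	if err != nil {
		log.Fatal(err)
	}

	// Author is the field added in this bump: it filters commits by author name.
	commits, _, err := git.Commits.ListCommits("my-group/my-project", &gitlab.ListCommitsOptions{
		Author: gitlab.String("Jane Doe"),
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, c := range commits {
		fmt.Println(c.ShortID, c.Title)
	}
}
```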
// // GitLab API docs: https://docs.gitlab.com/ee/api/commits.html#get-gpg-signature-of-a-commit -func (s *CommitsService) GetGPGSiganature(pid interface{}, sha string, options ...RequestOptionFunc) (*GPGSignature, *Response, error) { +func (s *CommitsService) GetGPGSignature(pid interface{}, sha string, options ...RequestOptionFunc) (*GPGSignature, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err diff --git a/vendor/github.com/xanzy/go-gitlab/event_webhook_types.go b/vendor/github.com/xanzy/go-gitlab/event_webhook_types.go index 4c2dd8e04..fa1fe5c7d 100644 --- a/vendor/github.com/xanzy/go-gitlab/event_webhook_types.go +++ b/vendor/github.com/xanzy/go-gitlab/event_webhook_types.go @@ -652,6 +652,7 @@ type MergeEvent struct { } `json:"last_commit"` BlockingDiscussionsResolved bool `json:"blocking_discussions_resolved"` WorkInProgress bool `json:"work_in_progress"` + Draft bool `json:"draft"` TotalTimeSpent int `json:"total_time_spent"` TimeChange int `json:"time_change"` HumanTotalTimeSpent string `json:"human_total_time_spent"` @@ -679,6 +680,10 @@ type MergeEvent struct { Previous string `json:"previous"` Current string `json:"current"` } `json:"description"` + Draft struct { + Previous bool `json:"previous"` + Current bool `json:"current"` + } `json:"draft"` Labels struct { Previous []*EventLabel `json:"previous"` Current []*EventLabel `json:"current"` @@ -789,6 +794,7 @@ type PipelineEvent struct { ObjectAttributes struct { ID int `json:"id"` IID int `json:"iid"` + Name string `json:"name"` Ref string `json:"ref"` Tag bool `json:"tag"` SHA string `json:"sha"` diff --git a/vendor/github.com/xanzy/go-gitlab/gitlab.go b/vendor/github.com/xanzy/go-gitlab/gitlab.go index 322f1a4a0..0f882bf2d 100644 --- a/vendor/github.com/xanzy/go-gitlab/gitlab.go +++ b/vendor/github.com/xanzy/go-gitlab/gitlab.go @@ -144,6 +144,7 @@ type Client struct { GroupLabels *GroupLabelsService GroupMembers *GroupMembersService GroupMilestones *GroupMilestonesService + GroupProtectedEnvironments *GroupProtectedEnvironmentsService GroupRepositoryStorageMove *GroupRepositoryStorageMoveService GroupVariables *GroupVariablesService GroupWikis *GroupWikisService @@ -164,6 +165,7 @@ type Client struct { Markdown *MarkdownService MergeRequestApprovals *MergeRequestApprovalsService MergeRequests *MergeRequestsService + MergeTrains *MergeTrainsService Metadata *MetadataService Milestones *MilestonesService Namespaces *NamespacesService @@ -365,6 +367,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.GroupLabels = &GroupLabelsService{client: c} c.GroupMembers = &GroupMembersService{client: c} c.GroupMilestones = &GroupMilestonesService{client: c} + c.GroupProtectedEnvironments = &GroupProtectedEnvironmentsService{client: c} c.GroupRepositoryStorageMove = &GroupRepositoryStorageMoveService{client: c} c.GroupVariables = &GroupVariablesService{client: c} c.GroupWikis = &GroupWikisService{client: c} @@ -385,6 +388,7 @@ func newClient(options ...ClientOptionFunc) (*Client, error) { c.Markdown = &MarkdownService{client: c} c.MergeRequestApprovals = &MergeRequestApprovalsService{client: c} c.MergeRequests = &MergeRequestsService{client: c, timeStats: timeStats} + c.MergeTrains = &MergeTrainsService{client: c} c.Metadata = &MetadataService{client: c} c.Milestones = &MilestonesService{client: c} c.Namespaces = &NamespacesService{client: c} @@ -809,6 +813,7 @@ func (c *Client) Do(req *retryablehttp.Request, v interface{}) (*Response, error return c.Do(req, v) } defer 
resp.Body.Close() + defer io.Copy(io.Discard, resp.Body) // If not yet configured, try to configure the rate limiter // using the response headers we just received. Fail silently diff --git a/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go b/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go index 3d42ab531..15f303a98 100644 --- a/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go +++ b/vendor/github.com/xanzy/go-gitlab/group_access_tokens.go @@ -145,6 +145,31 @@ func (s *GroupAccessTokensService) CreateGroupAccessToken(gid interface{}, opt * return pat, resp, nil } +// RotateGroupAccessToken revokes a group access token and returns a new group +// access token that expires in one week. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_access_tokens.html#rotate-a-group-access-token +func (s *GroupAccessTokensService) RotateGroupAccessToken(gid interface{}, id int, options ...RequestOptionFunc) (*GroupAccessToken, *Response, error) { + groups, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/access_tokens/%d/rotate", PathEscape(groups), id) + req, err := s.client.NewRequest(http.MethodPost, u, nil, options) + if err != nil { + return nil, nil, err + } + + gat := new(GroupAccessToken) + resp, err := s.client.Do(req, gat) + if err != nil { + return nil, resp, err + } + + return gat, resp, nil +} + // RevokeGroupAccessToken revokes a group access token. // // GitLab API docs: diff --git a/vendor/github.com/xanzy/go-gitlab/group_protected_environments.go b/vendor/github.com/xanzy/go-gitlab/group_protected_environments.go new file mode 100644 index 000000000..53bee7079 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/group_protected_environments.go @@ -0,0 +1,278 @@ +// +// Copyright 2023, Sander van Harmelen +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/http" +) + +// GroupProtectedEnvironmentsService handles communication with the group-level +// protected environment methods of the GitLab API. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html +type GroupProtectedEnvironmentsService struct { + client *Client +} + +// GroupProtectedEnvironment represents a group-level protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html +type GroupProtectedEnvironment struct { + Name string `json:"name"` + DeployAccessLevels []*GroupEnvironmentAccessDescription `json:"deploy_access_levels"` + RequiredApprovalCount int `json:"required_approval_count"` + ApprovalRules []*GroupEnvironmentApprovalRule `json:"approval_rules"` +} + +// GroupEnvironmentAccessDescription represents the access decription for a +// group-level protected environment. 
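The RotateGroupAccessToken endpoint added above revokes an existing group access token and returns a replacement that expires in one week. A small sketch, assuming an already-constructed *gitlab.Client and made-up group and token IDs:

```go
package example

import (
	"fmt"

	"github.com/xanzy/go-gitlab"
)

// rotateToken rotates group access token tokenID in group groupID. The old
// token is revoked and the returned replacement expires in one week.
func rotateToken(git *gitlab.Client, groupID, tokenID int) error {
	gat, _, err := git.GroupAccessTokens.RotateGroupAccessToken(groupID, tokenID)
	if err != nil {
		return err
	}
	fmt.Println("rotated token:", gat.Name)
	return nil
}
```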
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html +type GroupEnvironmentAccessDescription struct { + ID int `json:"id"` + AccessLevel AccessLevelValue `json:"access_level"` + AccessLevelDescription string `json:"access_level_description"` + UserID int `json:"user_id"` + GroupID int `json:"group_id"` +} + +// GroupEnvironmentApprovalRule represents the approval rules for a group-level +// protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#protect-a-single-environment +type GroupEnvironmentApprovalRule struct { + ID int `json:"id"` + UserID int `json:"user_id"` + GroupID int `json:"group_id"` + AccessLevel AccessLevelValue `json:"access_level"` + AccessLevelDescription string `json:"access_level_description"` + RequiredApprovalCount int `json:"required_approvals"` + GroupInheritanceType int `json:"group_inheritance_type"` +} + +// ListGroupProtectedEnvironmentsOptions represents the available +// ListGroupProtectedEnvironments() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#list-group-level-protected-environments +type ListGroupProtectedEnvironmentsOptions ListOptions + +// ListGroupProtectedEnvironments returns a list of protected environments from +// a group. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#list-group-level-protected-environments +func (s *GroupProtectedEnvironmentsService) ListGroupProtectedEnvironments(gid interface{}, opt *ListGroupProtectedEnvironmentsOptions, options ...RequestOptionFunc) ([]*GroupProtectedEnvironment, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/protected_environments", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var pes []*GroupProtectedEnvironment + resp, err := s.client.Do(req, &pes) + if err != nil { + return nil, resp, err + } + + return pes, resp, nil +} + +// GetGroupProtectedEnvironment returns a single group-level protected +// environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#get-a-single-protected-environment +func (s *GroupProtectedEnvironmentsService) GetGroupProtectedEnvironment(gid interface{}, environment string, options ...RequestOptionFunc) (*GroupProtectedEnvironment, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/protected_environments/%s", PathEscape(group), environment) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + pe := new(GroupProtectedEnvironment) + resp, err := s.client.Do(req, pe) + if err != nil { + return nil, resp, err + } + + return pe, resp, nil +} + +// ProtectGroupEnvironmentOptions represents the available +// ProtectGroupEnvironment() options. 
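Together with the GroupProtectedEnvironments service registered on the client in the gitlab.go hunk earlier, the list call above can be used as in this sketch (the client is assumed to exist already; group ID and page size are illustrative):

```go
package example

import (
	"fmt"

	"github.com/xanzy/go-gitlab"
)

// listProtectedEnvs prints the first page of protected environments for a group.
func listProtectedEnvs(git *gitlab.Client, groupID int) error {
	envs, _, err := git.GroupProtectedEnvironments.ListGroupProtectedEnvironments(
		groupID,
		&gitlab.ListGroupProtectedEnvironmentsOptions{PerPage: 20},
	)
	if err != nil {
		return err
	}
	for _, e := range envs {
		fmt.Printf("%s: %d required approvals\n", e.Name, e.RequiredApprovalCount)
	}
	return nil
}
```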
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#protect-a-single-environment +type ProtectGroupEnvironmentOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + DeployAccessLevels *[]*GroupEnvironmentAccessOptions `url:"deploy_access_levels,omitempty" json:"deploy_access_levels,omitempty"` + RequiredApprovalCount *int `url:"required_approval_count,omitempty" json:"required_approval_count,omitempty"` + ApprovalRules *[]*GroupEnvironmentApprovalRuleOptions `url:"approval_rules,omitempty" json:"approval_rules,omitempty"` +} + +// GroupEnvironmentAccessOptions represents the options for an access decription +// for a group-level protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#protect-a-single-environment +type GroupEnvironmentAccessOptions struct { + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` +} + +// GroupEnvironmentApprovalRuleOptions represents the approval rules for a +// group-level protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#protect-a-single-environment +type GroupEnvironmentApprovalRuleOptions struct { + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + AccessLevelDescription *string `url:"access_level_description,omitempty" json:"access_level_description,omitempty"` + RequiredApprovalCount *int `url:"required_approvals,omitempty" json:"required_approvals,omitempty"` + GroupInheritanceType *int `url:"group_inheritance_type,omitempty" json:"group_inheritance_type,omitempty"` +} + +// ProtectGroupEnvironment protects a single group-level environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#protect-a-single-environment +func (s *GroupProtectedEnvironmentsService) ProtectGroupEnvironment(gid interface{}, opt *ProtectGroupEnvironmentOptions, options ...RequestOptionFunc) (*GroupProtectedEnvironment, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/protected_environments", PathEscape(group)) + + req, err := s.client.NewRequest(http.MethodPost, u, opt, options) + if err != nil { + return nil, nil, err + } + + pe := new(GroupProtectedEnvironment) + resp, err := s.client.Do(req, pe) + if err != nil { + return nil, resp, err + } + + return pe, resp, nil +} + +// UpdateGroupProtectedEnvironmentOptions represents the available +// UpdateGroupProtectedEnvironment() options. 
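ProtectGroupEnvironment combines the option structs just defined. A hedged sketch that protects a group's "production" environment so only maintainers may deploy, with one required approval (all values are illustrative):

```go
package example

import (
	"fmt"

	"github.com/xanzy/go-gitlab"
)

// protectProduction protects the "production" environment of a group so that
// only maintainers can deploy and one approval is required.
func protectProduction(git *gitlab.Client, groupID int) error {
	pe, _, err := git.GroupProtectedEnvironments.ProtectGroupEnvironment(groupID, &gitlab.ProtectGroupEnvironmentOptions{
		Name: gitlab.String("production"),
		DeployAccessLevels: &[]*gitlab.GroupEnvironmentAccessOptions{
			{AccessLevel: gitlab.AccessLevel(gitlab.MaintainerPermissions)},
		},
		RequiredApprovalCount: gitlab.Int(1),
	})
	if err != nil {
		return err
	}
	fmt.Println("protected:", pe.Name)
	return nil
}
```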
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#update-a-protected-environment +type UpdateGroupProtectedEnvironmentOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + DeployAccessLevels *[]*UpdateGroupEnvironmentAccessOptions `url:"deploy_access_levels,omitempty" json:"deploy_access_levels,omitempty"` + RequiredApprovalCount *int `url:"required_approval_count,omitempty" json:"required_approval_count,omitempty"` + ApprovalRules *[]*UpdateGroupEnvironmentApprovalRuleOptions `url:"approval_rules,omitempty" json:"approval_rules,omitempty"` +} + +// UpdateGroupEnvironmentAccessOptions represents the options for updates to the +// access decription for a group-level protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#update-a-protected-environment +type UpdateGroupEnvironmentAccessOptions struct { + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + ID *int `url:"id,omitempty" json:"id,omitempty"` + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` + Destroy *bool `url:"_destroy,omitempty" json:"_destroy,omitempty"` +} + +// UpdateGroupEnvironmentApprovalRuleOptions represents the updates to the +// approval rules for a group-level protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#update-a-protected-environment +type UpdateGroupEnvironmentApprovalRuleOptions struct { + ID *int `url:"id,omitempty" json:"id,omitempty"` + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + AccessLevelDescription *string `url:"access_level_description,omitempty" json:"access_level_description,omitempty"` + RequiredApprovalCount *int `url:"required_approvals,omitempty" json:"required_approvals,omitempty"` + GroupInheritanceType *int `url:"group_inheritance_type,omitempty" json:"group_inheritance_type,omitempty"` + Destroy *bool `url:"_destroy,omitempty" json:"_destroy,omitempty"` +} + +// UpdateGroupProtectedEnvironment updates a single group-level protected +// environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#update-a-protected-environment +func (s *GroupProtectedEnvironmentsService) UpdateGroupProtectedEnvironment(gid interface{}, environment string, opt *UpdateGroupProtectedEnvironmentOptions, options ...RequestOptionFunc) (*GroupProtectedEnvironment, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/protected_environments/%s", PathEscape(group), environment) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, nil, err + } + + pe := new(GroupProtectedEnvironment) + resp, err := s.client.Do(req, pe) + if err != nil { + return nil, resp, err + } + + return pe, resp, nil +} + +// UnprotectGroupEnvironment unprotects the given protected group-level +// environment. 
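The _destroy flag in UpdateGroupEnvironmentAccessOptions lets an update delete an existing rule in place rather than replace the whole list. A sketch that raises the approval count and removes one deploy access level by a hypothetical ID:

```go
package example

import (
	"fmt"

	"github.com/xanzy/go-gitlab"
)

// tightenApprovals raises the approval count on an already-protected
// environment and deletes one existing deploy access level by its ID.
func tightenApprovals(git *gitlab.Client, groupID int, env string, accessLevelID int) error {
	pe, _, err := git.GroupProtectedEnvironments.UpdateGroupProtectedEnvironment(groupID, env,
		&gitlab.UpdateGroupProtectedEnvironmentOptions{
			RequiredApprovalCount: gitlab.Int(2),
			DeployAccessLevels: &[]*gitlab.UpdateGroupEnvironmentAccessOptions{
				// _destroy removes the access level with this ID.
				{ID: gitlab.Int(accessLevelID), Destroy: gitlab.Bool(true)},
			},
		})
	if err != nil {
		return err
	}
	fmt.Println("now requires", pe.RequiredApprovalCount, "approvals")
	return nil
}
```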
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_protected_environments.html#unprotect-a-single-environment +func (s *GroupProtectedEnvironmentsService) UnprotectGroupEnvironment(gid interface{}, environment string, options ...RequestOptionFunc) (*Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("groups/%s/protected_environments/%s", PathEscape(group), environment) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/groups.go b/vendor/github.com/xanzy/go-gitlab/groups.go index 4a9a996d9..fcbd68de1 100644 --- a/vendor/github.com/xanzy/go-gitlab/groups.go +++ b/vendor/github.com/xanzy/go-gitlab/groups.go @@ -68,7 +68,7 @@ type Group struct { MentionsDisabled bool `json:"mentions_disabled"` RunnersToken string `json:"runners_token"` SharedProjects []*Project `json:"shared_projects"` - SharedRunnersEnabled bool `json:"shared_runners_enabled"` + SharedRunnersSetting SharedRunnersSettingValue `json:"shared_runners_setting"` SharedWithGroups []struct { GroupID int `json:"group_id"` GroupName string `json:"group_name"` diff --git a/vendor/github.com/xanzy/go-gitlab/job_token_scope.go b/vendor/github.com/xanzy/go-gitlab/job_token_scope.go index 82dc50819..ff96ba698 100644 --- a/vendor/github.com/xanzy/go-gitlab/job_token_scope.go +++ b/vendor/github.com/xanzy/go-gitlab/job_token_scope.go @@ -26,6 +26,67 @@ type JobTokenScopeService struct { client *Client } +// JobTokenAccessSettings represents job token access attributes for this project. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/project_job_token_scopes.html +type JobTokenAccessSettings struct { + InboundEnabled bool `json:"inbound_enabled"` + OutboundEnabled bool `json:"outbound_enabled"` +} + +// GetProjectJobTokenAccessSettings fetch the CI/CD job token access settings (job token scope) of a project. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_job_token_scopes.html#get-a-projects-cicd-job-token-access-settings +func (j *JobTokenScopeService) GetProjectJobTokenAccessSettings(pid interface{}, options ...RequestOptionFunc) (*JobTokenAccessSettings, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf(`projects/%s/job_token_scope`, PathEscape(project)) + + req, err := j.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + jt := new(JobTokenAccessSettings) + resp, err := j.client.Do(req, jt) + if err != nil { + return nil, resp, err + } + + return jt, resp, err +} + +// PatchProjectJobTokenAccessSettingsOptions represents the available +// PatchProjectJobTokenAccessSettings() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_job_token_scopes.html#patch-a-projects-cicd-job-token-access-settings +type PatchProjectJobTokenAccessSettingsOptions struct { + Enabled bool `json:"enabled"` +} + +// PatchProjectJobTokenAccessSettings patch the Limit access to this project setting (job token scope) of a project. 
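The job token scope additions expose a project's CI/CD job token settings through typed Get and Patch calls. A sketch that reads the settings and then switches off the inbound "Limit access to this project" restriction (the client and project identifier are assumed):

```go
package example

import (
	"fmt"

	"github.com/xanzy/go-gitlab"
)

// showAndRelaxJobTokenScope prints a project's CI/CD job token settings and
// then disables the inbound "limit access to this project" restriction.
func showAndRelaxJobTokenScope(git *gitlab.Client, pid interface{}) error {
	settings, _, err := git.JobTokenScope.GetProjectJobTokenAccessSettings(pid)
	if err != nil {
		return err
	}
	fmt.Println("inbound enabled:", settings.InboundEnabled, "outbound enabled:", settings.OutboundEnabled)

	_, err = git.JobTokenScope.PatchProjectJobTokenAccessSettings(pid,
		&gitlab.PatchProjectJobTokenAccessSettingsOptions{Enabled: false})
	return err
}
```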
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_job_token_scopes.html#patch-a-projects-cicd-job-token-access-settings +func (j *JobTokenScopeService) PatchProjectJobTokenAccessSettings(pid interface{}, opt *PatchProjectJobTokenAccessSettingsOptions, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf(`projects/%s/job_token_scope`, PathEscape(project)) + + req, err := j.client.NewRequest(http.MethodPatch, u, opt, options) + if err != nil { + return nil, err + } + + return j.client.Do(req, nil) +} + // JobTokenInboundAllowItem represents a single job token inbound allowlist item. // // GitLab API docs: https://docs.gitlab.com/ee/api/project_job_token_scopes.html @@ -95,13 +156,13 @@ func (j *JobTokenScopeService) AddProjectToJobScopeAllowList(pid interface{}, op return nil, nil, err } - ai := new(JobTokenInboundAllowItem) - resp, err := j.client.Do(req, ai) + jt := new(JobTokenInboundAllowItem) + resp, err := j.client.Do(req, jt) if err != nil { return nil, resp, err } - return ai, resp, nil + return jt, resp, nil } // RemoveProjectFromJobScopeAllowList removes a project from a project's job diff --git a/vendor/github.com/xanzy/go-gitlab/jobs.go b/vendor/github.com/xanzy/go-gitlab/jobs.go index 0feaee989..9ad006658 100644 --- a/vendor/github.com/xanzy/go-gitlab/jobs.go +++ b/vendor/github.com/xanzy/go-gitlab/jobs.go @@ -564,3 +564,22 @@ func (s *JobsService) DeleteArtifacts(pid interface{}, jobID int, options ...Req return s.client.Do(req, nil) } + +// DeleteProjectArtifacts delete artifacts eligible for deletion in a project +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/job_artifacts.html#delete-project-artifacts +func (s *JobsService) DeleteProjectArtifacts(pid interface{}, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/artifacts", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/merge_requests.go b/vendor/github.com/xanzy/go-gitlab/merge_requests.go index 34a244a1d..f5b68fdfa 100644 --- a/vendor/github.com/xanzy/go-gitlab/merge_requests.go +++ b/vendor/github.com/xanzy/go-gitlab/merge_requests.go @@ -17,6 +17,7 @@ package gitlab import ( + "encoding/json" "fmt" "net/http" "time" @@ -35,58 +36,50 @@ type MergeRequestsService struct { // // GitLab API docs: https://docs.gitlab.com/ee/api/merge_requests.html type MergeRequest struct { - ID int `json:"id"` - IID int `json:"iid"` - TargetBranch string `json:"target_branch"` - SourceBranch string `json:"source_branch"` - ProjectID int `json:"project_id"` - Title string `json:"title"` - State string `json:"state"` - CreatedAt *time.Time `json:"created_at"` - UpdatedAt *time.Time `json:"updated_at"` - Upvotes int `json:"upvotes"` - Downvotes int `json:"downvotes"` - Author *BasicUser `json:"author"` - Assignee *BasicUser `json:"assignee"` - Assignees []*BasicUser `json:"assignees"` - Reviewers []*BasicUser `json:"reviewers"` - SourceProjectID int `json:"source_project_id"` - TargetProjectID int `json:"target_project_id"` - Labels Labels `json:"labels"` - Description string `json:"description"` - Draft bool `json:"draft"` - WorkInProgress bool `json:"work_in_progress"` - Milestone *Milestone `json:"milestone"` - MergeWhenPipelineSucceeds bool 
`json:"merge_when_pipeline_succeeds"` - DetailedMergeStatus string `json:"detailed_merge_status"` - MergeError string `json:"merge_error"` - MergedBy *BasicUser `json:"merged_by"` - MergedAt *time.Time `json:"merged_at"` - ClosedBy *BasicUser `json:"closed_by"` - ClosedAt *time.Time `json:"closed_at"` - Subscribed bool `json:"subscribed"` - SHA string `json:"sha"` - MergeCommitSHA string `json:"merge_commit_sha"` - SquashCommitSHA string `json:"squash_commit_sha"` - UserNotesCount int `json:"user_notes_count"` - ChangesCount string `json:"changes_count"` - ShouldRemoveSourceBranch bool `json:"should_remove_source_branch"` - ForceRemoveSourceBranch bool `json:"force_remove_source_branch"` - AllowCollaboration bool `json:"allow_collaboration"` - WebURL string `json:"web_url"` - References *IssueReferences `json:"references"` - DiscussionLocked bool `json:"discussion_locked"` - Changes []struct { - OldPath string `json:"old_path"` - NewPath string `json:"new_path"` - AMode string `json:"a_mode"` - BMode string `json:"b_mode"` - Diff string `json:"diff"` - NewFile bool `json:"new_file"` - RenamedFile bool `json:"renamed_file"` - DeletedFile bool `json:"deleted_file"` - } `json:"changes"` - User struct { + ID int `json:"id"` + IID int `json:"iid"` + TargetBranch string `json:"target_branch"` + SourceBranch string `json:"source_branch"` + ProjectID int `json:"project_id"` + Title string `json:"title"` + State string `json:"state"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + Upvotes int `json:"upvotes"` + Downvotes int `json:"downvotes"` + Author *BasicUser `json:"author"` + Assignee *BasicUser `json:"assignee"` + Assignees []*BasicUser `json:"assignees"` + Reviewers []*BasicUser `json:"reviewers"` + SourceProjectID int `json:"source_project_id"` + TargetProjectID int `json:"target_project_id"` + Labels Labels `json:"labels"` + LabelDetails []*LabelDetails `json:"label_details"` + Description string `json:"description"` + Draft bool `json:"draft"` + WorkInProgress bool `json:"work_in_progress"` + Milestone *Milestone `json:"milestone"` + MergeWhenPipelineSucceeds bool `json:"merge_when_pipeline_succeeds"` + DetailedMergeStatus string `json:"detailed_merge_status"` + MergeError string `json:"merge_error"` + MergedBy *BasicUser `json:"merged_by"` + MergedAt *time.Time `json:"merged_at"` + ClosedBy *BasicUser `json:"closed_by"` + ClosedAt *time.Time `json:"closed_at"` + Subscribed bool `json:"subscribed"` + SHA string `json:"sha"` + MergeCommitSHA string `json:"merge_commit_sha"` + SquashCommitSHA string `json:"squash_commit_sha"` + UserNotesCount int `json:"user_notes_count"` + ChangesCount string `json:"changes_count"` + ShouldRemoveSourceBranch bool `json:"should_remove_source_branch"` + ForceRemoveSourceBranch bool `json:"force_remove_source_branch"` + AllowCollaboration bool `json:"allow_collaboration"` + WebURL string `json:"web_url"` + References *IssueReferences `json:"references"` + DiscussionLocked bool `json:"discussion_locked"` + Changes []*MergeRequestDiff `json:"changes"` + User struct { CanMerge bool `json:"can_merge"` } `json:"user"` TimeStats *TimeStats `json:"time_stats"` @@ -116,6 +109,55 @@ func (m MergeRequest) String() string { return Stringify(m) } +func (m *MergeRequest) UnmarshalJSON(data []byte) error { + type alias MergeRequest + + raw := make(map[string]interface{}) + err := json.Unmarshal(data, &raw) + if err != nil { + return err + } + + labelDetails, ok := raw["labels"].([]interface{}) + if ok && len(labelDetails) > 0 { + // 
We only want to change anything if we got label details. + if _, ok := labelDetails[0].(map[string]interface{}); !ok { + return json.Unmarshal(data, (*alias)(m)) + } + + labels := make([]interface{}, len(labelDetails)) + for i, details := range labelDetails { + labels[i] = details.(map[string]interface{})["name"] + } + + // Set the correct values + raw["labels"] = labels + raw["label_details"] = labelDetails + + data, err = json.Marshal(raw) + if err != nil { + return err + } + } + + return json.Unmarshal(data, (*alias)(m)) +} + +// MergeRequestDiff represents Gitlab merge request diff. +// +// Gitlab API docs: +// https://docs.gitlab.com/ee/api/merge_requests.html#list-merge-request-diffs +type MergeRequestDiff struct { + OldPath string `json:"old_path"` + NewPath string `json:"new_path"` + AMode string `json:"a_mode"` + BMode string `json:"b_mode"` + Diff string `json:"diff"` + NewFile bool `json:"new_file"` + RenamedFile bool `json:"renamed_file"` + DeletedFile bool `json:"deleted_file"` +} + // MergeRequestDiffVersion represents Gitlab merge request version. // // Gitlab API docs: @@ -425,6 +467,9 @@ type GetMergeRequestChangesOptions struct { // GetMergeRequestChanges shows information about the merge request including // its files and changes. // +// Deprecated: This endpoint has been replaced by +// MergeRequestsService.ListMergeRequestDiffs() +// // GitLab API docs: // https://docs.gitlab.com/ee/api/merge_requests.html#get-single-merge-request-changes func (s *MergeRequestsService) GetMergeRequestChanges(pid interface{}, mergeRequest int, opt *GetMergeRequestChangesOptions, options ...RequestOptionFunc) (*MergeRequest, *Response, error) { @@ -448,6 +493,38 @@ func (s *MergeRequestsService) GetMergeRequestChanges(pid interface{}, mergeRequ return m, resp, nil } +// ListMergeRequestDiffsOptions represents the available ListMergeRequestDiffs() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_requests.html#list-merge-request-diffs +type ListMergeRequestDiffsOptions ListOptions + +// ListMergeRequestDiffs List diffs of the files changed in a merge request +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_requests.html#list-merge-request-diffs +func (s *MergeRequestsService) ListMergeRequestDiffs(pid interface{}, mergeRequest int, opt *ListMergeRequestDiffsOptions, options ...RequestOptionFunc) ([]*MergeRequestDiff, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_requests/%d/diffs", PathEscape(project), mergeRequest) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var m []*MergeRequestDiff + resp, err := s.client.Do(req, &m) + if err != nil { + return nil, resp, err + } + + return m, resp, nil +} + // GetMergeRequestParticipants gets a list of merge request participants. // // GitLab API docs: @@ -464,13 +541,48 @@ func (s *MergeRequestsService) GetMergeRequestParticipants(pid interface{}, merg return nil, nil, err } - var ps []*BasicUser - resp, err := s.client.Do(req, &ps) + var bu []*BasicUser + resp, err := s.client.Do(req, &bu) + if err != nil { + return nil, resp, err + } + + return bu, resp, nil +} + +// MergeRequestReviewer represents a GitLab merge request reviewer. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_requests.html#get-single-merge-request-reviewers +type MergeRequestReviewer struct { + User *BasicUser `json:"user"` + State string `json:"state"` + CreatedAt *time.Time `json:"created_at"` +} + +// GetMergeRequestReviewers gets a list of merge request reviewers. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_requests.html#get-single-merge-request-reviewers +func (s *MergeRequestsService) GetMergeRequestReviewers(pid interface{}, mergeRequest int, options ...RequestOptionFunc) ([]*MergeRequestReviewer, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_requests/%d/reviewers", PathEscape(project), mergeRequest) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + var mrr []*MergeRequestReviewer + resp, err := s.client.Do(req, &mrr) if err != nil { return nil, resp, err } - return ps, resp, nil + return mrr, resp, nil } // ListMergeRequestPipelines gets all pipelines for the provided merge request. diff --git a/vendor/github.com/xanzy/go-gitlab/merge_trains.go b/vendor/github.com/xanzy/go-gitlab/merge_trains.go new file mode 100644 index 000000000..e55917fa0 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/merge_trains.go @@ -0,0 +1,170 @@ +package gitlab + +import ( + "fmt" + "net/http" + "time" +) + +// MergeTrainsService handles communication with the merge trains related +// methods of the GitLab API. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/merge_trains.html +type MergeTrainsService struct { + client *Client +} + +// MergeTrain represents a Gitlab merge train. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/merge_trains.html +type MergeTrain struct { + ID int `json:"id"` + MergeRequest *MergeTrainMergeRequest `json:"merge_request"` + User *BasicUser `json:"user"` + Pipeline *Pipeline `json:"pipeline"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + TargetBranch string `json:"target_branch"` + Status string `json:"status"` + MergedAt *time.Time `json:"merged_at"` + Duration int `json:"duration"` +} + +// MergeTrainMergeRequest represents a Gitlab merge request inside merge train. +// +// GitLab API docs: https://docs.gitlab.com/ee/api/merge_trains.html +type MergeTrainMergeRequest struct { + ID int `json:"id"` + IID int `json:"iid"` + ProjectID int `json:"project_id"` + Title string `json:"title"` + Description string `json:"description"` + State string `json:"state"` + CreatedAt *time.Time `json:"created_at"` + UpdatedAt *time.Time `json:"updated_at"` + WebURL string `json:"web_url"` +} + +// ListMergeTrainsOptions represents the available ListMergeTrain() options. +// +// Gitab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#list-merge-trains-for-a-project +type ListMergeTrainsOptions struct { + ListOptions + Scope *string `url:"scope,omitempty" json:"scope,omitempty"` + Sort *string `url:"sort,omitempty" json:"sort,omitempty"` +} + +// ListProjectMergeTrains get a list of merge trains in a project. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#list-merge-trains-for-a-project +func (s *MergeTrainsService) ListProjectMergeTrains(pid interface{}, opt *ListMergeTrainsOptions, options ...RequestOptionFunc) ([]*MergeTrain, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_trains", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var mts []*MergeTrain + resp, err := s.client.Do(req, &mts) + if err != nil { + return nil, resp, err + } + + return mts, resp, nil +} + +// ListMergeRequestInMergeTrain gets a list of merge requests added to a merge +// train for the requested target branch. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#list-merge-requests-in-a-merge-train +func (s *MergeTrainsService) ListMergeRequestInMergeTrain(pid interface{}, targetBranch string, opts *ListMergeTrainsOptions, options ...RequestOptionFunc) ([]*MergeTrain, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_trains/%s", PathEscape(project), targetBranch) + + req, err := s.client.NewRequest(http.MethodGet, u, opts, options) + if err != nil { + return nil, nil, err + } + + var mts []*MergeTrain + resp, err := s.client.Do(req, &mts) + if err != nil { + return nil, resp, err + } + + return mts, resp, nil +} + +// GetMergeRequestOnAMergeTrain Get merge train information for the requested +// merge request. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#get-the-status-of-a-merge-request-on-a-merge-train +func (s *MergeTrainsService) GetMergeRequestOnAMergeTrain(pid interface{}, mergeRequest int, options ...RequestOptionFunc) (*MergeTrain, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_trains/merge_requests/%d", PathEscape(project), mergeRequest) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + mt := new(MergeTrain) + resp, err := s.client.Do(req, mt) + if err != nil { + return nil, resp, err + } + + return mt, resp, nil +} + +// AddMergeRequestToMergeTrainOptions represents the available +// AddMergeRequestToMergeTrain() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#add-a-merge-request-to-a-merge-train +type AddMergeRequestToMergeTrainOptions struct { + WhenPipelineSucceeds *bool `url:"when_pipeline_succeeds,omitempty" json:"when_pipeline_succeeds,omitempty"` + SHA *string `url:"sha,omitempty" json:"sha,omitempty"` + Squash *bool `url:"squash,omitempty" json:"squash,omitempty"` +} + +// AddMergeRequestToMergeTrain Add a merge request to the merge train targeting +// the merge request’s target branch. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/merge_trains.html#add-a-merge-request-to-a-merge-train +func (s *MergeTrainsService) AddMergeRequestToMergeTrain(pid interface{}, mergeRequest int, opts *AddMergeRequestToMergeTrainOptions, options ...RequestOptionFunc) ([]*MergeTrain, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/merge_trains/merge_requests/%d", PathEscape(project), mergeRequest) + + req, err := s.client.NewRequest(http.MethodPost, u, opts, options) + if err != nil { + return nil, nil, err + } + + var mts []*MergeTrain + resp, err := s.client.Do(req, &mts) + if err != nil { + return nil, resp, err + } + + return mts, resp, nil +} diff --git a/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go b/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go index 9fa510d09..780e59a82 100644 --- a/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go +++ b/vendor/github.com/xanzy/go-gitlab/project_access_tokens.go @@ -146,6 +146,31 @@ func (s *ProjectAccessTokensService) CreateProjectAccessToken(pid interface{}, o return pat, resp, nil } +// RotateProjectAccessToken revokes a project access token and returns a new +// project access token that expires in one week. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_access_tokens.html#rotate-a-project-access-token +func (s *ProjectAccessTokensService) RotateProjectAccessToken(pid interface{}, id int, options ...RequestOptionFunc) (*ProjectAccessToken, *Response, error) { + projects, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/access_tokens/%d/rotate", PathEscape(projects), id) + req, err := s.client.NewRequest(http.MethodPost, u, nil, options) + if err != nil { + return nil, nil, err + } + + pat := new(ProjectAccessToken) + resp, err := s.client.Do(req, pat) + if err != nil { + return nil, resp, err + } + + return pat, resp, nil +} + // RevokeProjectAccessToken revokes a project access token. // // GitLab API docs: diff --git a/vendor/github.com/xanzy/go-gitlab/projects.go b/vendor/github.com/xanzy/go-gitlab/projects.go index 6d4cfe77a..6a2b3be01 100644 --- a/vendor/github.com/xanzy/go-gitlab/projects.go +++ b/vendor/github.com/xanzy/go-gitlab/projects.go @@ -197,13 +197,14 @@ type ContainerExpirationPolicy struct { // ForkParent represents the parent project when this is a fork. type ForkParent struct { - HTTPURLToRepo string `json:"http_url_to_repo"` ID int `json:"id"` Name string `json:"name"` NameWithNamespace string `json:"name_with_namespace"` Path string `json:"path"` PathWithNamespace string `json:"path_with_namespace"` + HTTPURLToRepo string `json:"http_url_to_repo"` WebURL string `json:"web_url"` + RepositoryStorage string `json:"repository_storage"` } // GroupAccess represents group access. @@ -390,6 +391,31 @@ func (s *ProjectsService) ListUserProjects(uid interface{}, opt *ListProjectsOpt return p, resp, nil } +// ListUserContributedProjects gets a list of visible projects a given user has contributed to. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/projects.html#list-projects-a-user-has-contributed-to +func (s *ProjectsService) ListUserContributedProjects(uid interface{}, opt *ListProjectsOptions, options ...RequestOptionFunc) ([]*Project, *Response, error) { + user, err := parseID(uid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("users/%s/contributed_projects", user) + + req, err := s.client.NewRequest(http.MethodGet, u, opt, options) + if err != nil { + return nil, nil, err + } + + var p []*Project + resp, err := s.client.Do(req, &p) + if err != nil { + return nil, resp, err + } + + return p, resp, nil +} + // ListUserStarredProjects gets a list of projects starred by the given user. // // GitLab API docs: @@ -1809,6 +1835,7 @@ func (s *ProjectsService) GetProjectApprovalRule(pid interface{}, ruleID int, op type CreateProjectLevelRuleOptions struct { Name *string `url:"name,omitempty" json:"name,omitempty"` ApprovalsRequired *int `url:"approvals_required,omitempty" json:"approvals_required,omitempty"` + ReportType *string `url:"report_type,omitempty" json:"report_type,omitempty"` RuleType *string `url:"rule_type,omitempty" json:"rule_type,omitempty"` UserIDs *[]int `url:"user_ids,omitempty" json:"user_ids,omitempty"` GroupIDs *[]int `url:"group_ids,omitempty" json:"group_ids,omitempty"` diff --git a/vendor/github.com/xanzy/go-gitlab/protected_environments.go b/vendor/github.com/xanzy/go-gitlab/protected_environments.go index 21820ab39..3dc7fbdfb 100644 --- a/vendor/github.com/xanzy/go-gitlab/protected_environments.go +++ b/vendor/github.com/xanzy/go-gitlab/protected_environments.go @@ -191,6 +191,73 @@ func (s *ProtectedEnvironmentsService) ProtectRepositoryEnvironments(pid interfa return pe, resp, nil } +// UpdateProtectedEnvironmentsOptions represents the available +// UpdateProtectedEnvironments() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/protected_environments.html#update-a-protected-environment +type UpdateProtectedEnvironmentsOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + DeployAccessLevels *[]*UpdateEnvironmentAccessOptions `url:"deploy_access_levels,omitempty" json:"deploy_access_levels,omitempty"` + RequiredApprovalCount *int `url:"required_approval_count,omitempty" json:"required_approval_count,omitempty"` + ApprovalRules *[]*UpdateEnvironmentApprovalRuleOptions `url:"approval_rules,omitempty" json:"approval_rules,omitempty"` +} + +// UpdateEnvironmentAccessOptions represents the options for updates to an +// access decription for a protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/protected_environments.html#update-a-protected-environment +type UpdateEnvironmentAccessOptions struct { + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + ID *int `url:"id,omitempty" json:"id,omitempty"` + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` + Destroy *bool `url:"_destroy,omitempty" json:"_destroy,omitempty"` +} + +// UpdateEnvironmentApprovalRuleOptions represents the updates to the approval +// rules for a protected environment. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/protected_environments.html#update-a-protected-environment +type UpdateEnvironmentApprovalRuleOptions struct { + ID *int `url:"id,omitempty" json:"id,omitempty"` + UserID *int `url:"user_id,omitempty" json:"user_id,omitempty"` + GroupID *int `url:"group_id,omitempty" json:"group_id,omitempty"` + AccessLevel *AccessLevelValue `url:"access_level,omitempty" json:"access_level,omitempty"` + AccessLevelDescription *string `url:"access_level_description,omitempty" json:"access_level_description,omitempty"` + RequiredApprovalCount *int `url:"required_approvals,omitempty" json:"required_approvals,omitempty"` + GroupInheritanceType *int `url:"group_inheritance_type,omitempty" json:"group_inheritance_type,omitempty"` + Destroy *bool `url:"_destroy,omitempty" json:"_destroy,omitempty"` +} + +// UpdateProtectedEnvironments updates a single repository environment or +// several project repository environments using wildcard protected environment. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/protected_environments.html#update-a-protected-environment +func (s *ProtectedEnvironmentsService) UpdateProtectedEnvironments(pid interface{}, environment string, opt *UpdateProtectedEnvironmentsOptions, options ...RequestOptionFunc) (*ProtectedEnvironment, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/protected_environments/%s", PathEscape(project), environment) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, nil, err + } + + pe := new(ProtectedEnvironment) + resp, err := s.client.Do(req, pe) + if err != nil { + return nil, resp, err + } + + return pe, resp, nil +} + // UnprotectEnvironment unprotects the given protected environment or wildcard // protected environment. // diff --git a/vendor/github.com/xanzy/go-gitlab/releases.go b/vendor/github.com/xanzy/go-gitlab/releases.go index 86308c5a9..3d6a89772 100644 --- a/vendor/github.com/xanzy/go-gitlab/releases.go +++ b/vendor/github.com/xanzy/go-gitlab/releases.go @@ -133,6 +133,31 @@ func (s *ReleasesService) GetRelease(pid interface{}, tagName string, options .. return r, resp, nil } +// GetLatestRelease returns the latest release for the project. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/releases/#get-the-latest-release +func (s *ReleasesService) GetLatestRelease(pid interface{}, options ...RequestOptionFunc) (*Release, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/releases/permalink/latest", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + r := new(Release) + resp, err := s.client.Do(req, r) + if err != nil { + return nil, resp, err + } + + return r, resp, err +} + // CreateReleaseOptions represents CreateRelease() options. 
// // GitLab API docs: diff --git a/vendor/github.com/xanzy/go-gitlab/repositories.go b/vendor/github.com/xanzy/go-gitlab/repositories.go index b89efc090..986dd001c 100644 --- a/vendor/github.com/xanzy/go-gitlab/repositories.go +++ b/vendor/github.com/xanzy/go-gitlab/repositories.go @@ -140,6 +140,7 @@ func (s *RepositoriesService) RawBlobContent(pid interface{}, sha string, option // https://docs.gitlab.com/ee/api/repositories.html#get-file-archive type ArchiveOptions struct { Format *string `url:"-" json:"-"` + Path *string `url:"path,omitempty" json:"path,omitempty"` SHA *string `url:"sha,omitempty" json:"sha,omitempty"` } diff --git a/vendor/github.com/xanzy/go-gitlab/services.go b/vendor/github.com/xanzy/go-gitlab/services.go index 69842aec1..0313eccdb 100644 --- a/vendor/github.com/xanzy/go-gitlab/services.go +++ b/vendor/github.com/xanzy/go-gitlab/services.go @@ -179,6 +179,106 @@ func (s *ServicesService) DeleteCustomIssueTrackerService(pid interface{}, optio return s.client.Do(req, nil) } +// DataDogService represents DataDog service settings. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#datadog +type DataDogService struct { + Service + Properties *DataDogServiceProperties `json:"properties"` +} + +// DataDogServiceProperties represents DataDog specific properties. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#datadog +type DataDogServiceProperties struct { + APIURL string `url:"api_url,omitempty" json:"api_url,omitempty"` + DataDogEnv string `url:"datadog_env,omitempty" json:"datadog_env,omitempty"` + DataDogService string `url:"datadog_service,omitempty" json:"datadog_service,omitempty"` + DataDogSite string `url:"datadog_site,omitempty" json:"datadog_site,omitempty"` + DataDogTags string `url:"datadog_tags,omitempty" json:"datadog_tags,omitempty"` + ArchiveTraceEvents bool `url:"archive_trace_events,omitempty" json:"archive_trace_events,omitempty"` +} + +// GetDataDogService gets DataDog service settings for a project. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#get-datadog-integration-settings +func (s *ServicesService) GetDataDogService(pid interface{}, options ...RequestOptionFunc) (*DataDogService, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/services/datadog", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodGet, u, nil, options) + if err != nil { + return nil, nil, err + } + + svc := new(DataDogService) + resp, err := s.client.Do(req, svc) + if err != nil { + return nil, resp, err + } + + return svc, resp, nil +} + +// SetDataDogServiceOptions represents the available SetDataDogService() +// options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#createedit-datadog-integration +type SetDataDogServiceOptions struct { + APIKey *string `url:"api_key,omitempty" json:"api_key,omitempty"` + APIURL *string `url:"api_url,omitempty" json:"api_url,omitempty"` + DataDogEnv *string `url:"datadog_env,omitempty" json:"datadog_env,omitempty"` + DataDogService *string `url:"datadog_service,omitempty" json:"datadog_service,omitempty"` + DataDogSite *string `url:"datadog_site,omitempty" json:"datadog_site,omitempty"` + DataDogTags *string `url:"datadog_tags,omitempty" json:"datadog_tags,omitempty"` + ArchiveTraceEvents *bool `url:"archive_trace_events,omitempty" json:"archive_trace_events,omitempty"` +} + +// SetDataDogService sets DataDog service settings for a project. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#createedit-datadog-integration +func (s *ServicesService) SetDataDogService(pid interface{}, opt *SetDataDogServiceOptions, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/services/datadog", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodPut, u, opt, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + +// DeleteDataDogService deletes the DataDog service settings for a project. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/services.html#disable-datadog-integration +func (s *ServicesService) DeleteDataDogService(pid interface{}, options ...RequestOptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/services/datadog", PathEscape(project)) + + req, err := s.client.NewRequest(http.MethodDelete, u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} + // DiscordService represents Discord service settings. // // GitLab API docs: diff --git a/vendor/github.com/xanzy/go-gitlab/snippets.go b/vendor/github.com/xanzy/go-gitlab/snippets.go index 12687c372..c25548b12 100644 --- a/vendor/github.com/xanzy/go-gitlab/snippets.go +++ b/vendor/github.com/xanzy/go-gitlab/snippets.go @@ -57,6 +57,7 @@ type Snippet struct { Path string `json:"path"` RawURL string `json:"raw_url"` } `json:"files"` + RepositoryStorage string `json:"repository_storage"` } func (s Snippet) String() string { @@ -281,3 +282,33 @@ func (s *SnippetsService) ExploreSnippets(opt *ExploreSnippetsOptions, options . return ps, resp, nil } + +// ListAllSnippetsOptions represents the available ListAllSnippets() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/snippets.html#list-all-snippets +type ListAllSnippetsOptions struct { + ListOptions + CreatedAfter *ISOTime `url:"created_after,omitempty" json:"created_after,omitempty"` + CreatedBefore *ISOTime `url:"created_before,omitempty" json:"created_before,omitempty"` + RepositoryStorage *string `url:"repository_storage,omitempty" json:"repository_storage,omitempty"` +} + +// ListAllSnippets gets all snippets the current user has access to. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/snippets.html#list-all-snippets +func (s *SnippetsService) ListAllSnippets(opt *ListAllSnippetsOptions, options ...RequestOptionFunc) ([]*Snippet, *Response, error) { + req, err := s.client.NewRequest(http.MethodGet, "snippets/all", opt, options) + if err != nil { + return nil, nil, err + } + + var ps []*Snippet + resp, err := s.client.Do(req, &ps) + if err != nil { + return nil, resp, err + } + + return ps, resp, nil +} diff --git a/vendor/github.com/xanzy/go-gitlab/time_stats.go b/vendor/github.com/xanzy/go-gitlab/time_stats.go index c1256b965..0ce2d6751 100644 --- a/vendor/github.com/xanzy/go-gitlab/time_stats.go +++ b/vendor/github.com/xanzy/go-gitlab/time_stats.go @@ -104,6 +104,7 @@ func (s *timeStatsService) resetTimeEstimate(pid interface{}, entity string, iss // GitLab docs: https://docs.gitlab.com/ee/workflow/time_tracking.html type AddSpentTimeOptions struct { Duration *string `url:"duration,omitempty" json:"duration,omitempty"` + Summary *string `url:"summary,omitempty" json:"summary,omitempty"` } // addSpentTime adds spent time for a single project issue. 
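Illustrative note (not part of the vendored diff): the go-gitlab bump in this change adds several new client methods, including MergeRequestsService.ListMergeRequestDiffs shown earlier in this diff. A minimal usage sketch, assuming a hypothetical token and project/merge-request IDs:

    package main

    import (
    	"fmt"
    	"log"

    	gitlab "github.com/xanzy/go-gitlab"
    )

    func main() {
    	// Hypothetical token, for illustration only.
    	client, err := gitlab.NewClient("glpat-example-token")
    	if err != nil {
    		log.Fatalf("failed to create client: %v", err)
    	}

    	// List the diffs of a merge request (hypothetical project ID 1234, MR IID 42)
    	// using the ListMergeRequestDiffs method added in this vendor bump.
    	opt := &gitlab.ListMergeRequestDiffsOptions{Page: 1, PerPage: 20}
    	diffs, _, err := client.MergeRequests.ListMergeRequestDiffs(1234, 42, opt)
    	if err != nil {
    		log.Fatalf("failed to list merge request diffs: %v", err)
    	}
    	for _, d := range diffs {
    		fmt.Printf("%s -> %s (new=%v deleted=%v)\n", d.OldPath, d.NewPath, d.NewFile, d.DeletedFile)
    	}
    }
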
diff --git a/vendor/github.com/xanzy/go-gitlab/types.go b/vendor/github.com/xanzy/go-gitlab/types.go index 9b2a96cf1..cd8aad768 100644 --- a/vendor/github.com/xanzy/go-gitlab/types.go +++ b/vendor/github.com/xanzy/go-gitlab/types.go @@ -607,8 +607,11 @@ type SharedRunnersSettingValue string // https://docs.gitlab.com/ee/api/groups.html#options-for-shared_runners_setting const ( EnabledSharedRunnersSettingValue SharedRunnersSettingValue = "enabled" - DisabledWithOverrideSharedRunnersSettingValue SharedRunnersSettingValue = "disabled_with_override" + DisabledAndOverridableSharedRunnersSettingValue SharedRunnersSettingValue = "disabled_and_overridable" DisabledAndUnoverridableSharedRunnersSettingValue SharedRunnersSettingValue = "disabled_and_unoverridable" + + // Deprecated: DisabledWithOverrideSharedRunnersSettingValue is deprecated in favor of DisabledAndOverridableSharedRunnersSettingValue + DisabledWithOverrideSharedRunnersSettingValue SharedRunnersSettingValue = "disabled_with_override" ) // SharedRunnersSetting is a helper routine that allocates a new SharedRunnersSettingValue diff --git a/vendor/github.com/xanzy/go-gitlab/users.go b/vendor/github.com/xanzy/go-gitlab/users.go index d3eff7615..4489034e1 100644 --- a/vendor/github.com/xanzy/go-gitlab/users.go +++ b/vendor/github.com/xanzy/go-gitlab/users.go @@ -1159,14 +1159,15 @@ func (s *UsersService) RejectUser(user int, options ...RequestOptionFunc) error // GitLab API docs: // https://docs.gitlab.com/ee/api/users.html#get-all-impersonation-tokens-of-a-user type ImpersonationToken struct { - ID int `json:"id"` - Name string `json:"name"` - Active bool `json:"active"` - Token string `json:"token"` - Scopes []string `json:"scopes"` - Revoked bool `json:"revoked"` - CreatedAt *time.Time `json:"created_at"` - ExpiresAt *ISOTime `json:"expires_at"` + ID int `json:"id"` + Name string `json:"name"` + Active bool `json:"active"` + Token string `json:"token"` + Scopes []string `json:"scopes"` + Revoked bool `json:"revoked"` + CreatedAt *time.Time `json:"created_at"` + ExpiresAt *ISOTime `json:"expires_at"` + LastUsedAt *time.Time `json:"last_used_at"` } // GetAllImpersonationTokensOptions represents the available @@ -1300,6 +1301,38 @@ func (s *UsersService) CreatePersonalAccessToken(user int, opt *CreatePersonalAc return t, resp, nil } +// CreatePersonalAccessTokenForCurrentUserOptions represents the available +// CreatePersonalAccessTokenForCurrentUser() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/users.html#create-a-personal-access-token-with-limited-scopes-for-the-currently-authenticated-user +type CreatePersonalAccessTokenForCurrentUserOptions struct { + Name *string `url:"name,omitempty" json:"name,omitempty"` + Scopes *[]string `url:"scopes,omitempty" json:"scopes,omitempty"` + ExpiresAt *ISOTime `url:"expires_at,omitempty" json:"expires_at,omitempty"` +} + +// CreatePersonalAccessTokenForCurrentUser creates a personal access token with limited scopes for the currently authenticated user. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/users.html#create-a-personal-access-token-with-limited-scopes-for-the-currently-authenticated-user +func (s *UsersService) CreatePersonalAccessTokenForCurrentUser(opt *CreatePersonalAccessTokenForCurrentUserOptions, options ...RequestOptionFunc) (*PersonalAccessToken, *Response, error) { + u := "user/personal_access_tokens" + + req, err := s.client.NewRequest(http.MethodPost, u, opt, options) + if err != nil { + return nil, nil, err + } + + t := new(PersonalAccessToken) + resp, err := s.client.Do(req, &t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + // UserActivity represents an entry in the user/activities response // // GitLab API docs: diff --git a/vendor/go.uber.org/atomic/CHANGELOG.md b/vendor/go.uber.org/atomic/CHANGELOG.md index 5fe03f21b..6f87f33fa 100644 --- a/vendor/go.uber.org/atomic/CHANGELOG.md +++ b/vendor/go.uber.org/atomic/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.11.0] - 2023-05-02 +### Fixed +- Fix initialization of `Value` wrappers. + +### Added +- Add `String` method to `atomic.Pointer[T]` type allowing users to safely print +underlying values of pointers. + +[1.11.0]: https://github.com/uber-go/atomic/compare/v1.10.0...v1.11.0 + ## [1.10.0] - 2022-08-11 ### Added - Add `atomic.Float32` type for atomic operations on `float32`. diff --git a/vendor/go.uber.org/atomic/bool.go b/vendor/go.uber.org/atomic/bool.go index dfa2085f4..f0a2ddd14 100644 --- a/vendor/go.uber.org/atomic/bool.go +++ b/vendor/go.uber.org/atomic/bool.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/duration.go b/vendor/go.uber.org/atomic/duration.go index 6f4157445..7c23868fc 100644 --- a/vendor/go.uber.org/atomic/duration.go +++ b/vendor/go.uber.org/atomic/duration.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/error.go b/vendor/go.uber.org/atomic/error.go index 27b23ea16..b7e3f1291 100644 --- a/vendor/go.uber.org/atomic/error.go +++ b/vendor/go.uber.org/atomic/error.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -52,7 +52,17 @@ func (x *Error) Store(val error) { // CompareAndSwap is an atomic compare-and-swap for error values. 
func (x *Error) CompareAndSwap(old, new error) (swapped bool) { - return x.v.CompareAndSwap(packError(old), packError(new)) + if x.v.CompareAndSwap(packError(old), packError(new)) { + return true + } + + if old == _zeroError { + // If the old value is the empty value, then it's possible the + // underlying Value hasn't been set and is nil, so retry with nil. + return x.v.CompareAndSwap(nil, packError(new)) + } + + return false } // Swap atomically stores the given error and returns the old diff --git a/vendor/go.uber.org/atomic/float32.go b/vendor/go.uber.org/atomic/float32.go index 5d535a6d2..62c36334f 100644 --- a/vendor/go.uber.org/atomic/float32.go +++ b/vendor/go.uber.org/atomic/float32.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/float64.go b/vendor/go.uber.org/atomic/float64.go index 11d5189a5..5bc11caab 100644 --- a/vendor/go.uber.org/atomic/float64.go +++ b/vendor/go.uber.org/atomic/float64.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/int32.go b/vendor/go.uber.org/atomic/int32.go index b9a68f42c..5320eac10 100644 --- a/vendor/go.uber.org/atomic/int32.go +++ b/vendor/go.uber.org/atomic/int32.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicint. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/int64.go b/vendor/go.uber.org/atomic/int64.go index 78d260976..460821d00 100644 --- a/vendor/go.uber.org/atomic/int64.go +++ b/vendor/go.uber.org/atomic/int64.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicint. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/pointer_go118.go b/vendor/go.uber.org/atomic/pointer_go118.go index e0f47dba4..1fb6c03b2 100644 --- a/vendor/go.uber.org/atomic/pointer_go118.go +++ b/vendor/go.uber.org/atomic/pointer_go118.go @@ -18,43 +18,14 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -//go:build go1.18 && !go1.19 -// +build go1.18,!go1.19 +//go:build go1.18 +// +build go1.18 package atomic -import "unsafe" +import "fmt" -type Pointer[T any] struct { - _ nocmp // disallow non-atomic comparison - p UnsafePointer -} - -// NewPointer creates a new Pointer. -func NewPointer[T any](v *T) *Pointer[T] { - var p Pointer[T] - if v != nil { - p.p.Store(unsafe.Pointer(v)) - } - return &p -} - -// Load atomically loads the wrapped value. -func (p *Pointer[T]) Load() *T { - return (*T)(p.p.Load()) -} - -// Store atomically stores the passed value. 
-func (p *Pointer[T]) Store(val *T) { - p.p.Store(unsafe.Pointer(val)) -} - -// Swap atomically swaps the wrapped pointer and returns the old value. -func (p *Pointer[T]) Swap(val *T) (old *T) { - return (*T)(p.p.Swap(unsafe.Pointer(val))) -} - -// CompareAndSwap is an atomic compare-and-swap. -func (p *Pointer[T]) CompareAndSwap(old, new *T) (swapped bool) { - return p.p.CompareAndSwap(unsafe.Pointer(old), unsafe.Pointer(new)) +// String returns a human readable representation of a Pointer's underlying value. +func (p *Pointer[T]) String() string { + return fmt.Sprint(p.Load()) } diff --git a/vendor/go.uber.org/atomic/pointer_go118_pre119.go b/vendor/go.uber.org/atomic/pointer_go118_pre119.go new file mode 100644 index 000000000..e0f47dba4 --- /dev/null +++ b/vendor/go.uber.org/atomic/pointer_go118_pre119.go @@ -0,0 +1,60 @@ +// Copyright (c) 2022 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//go:build go1.18 && !go1.19 +// +build go1.18,!go1.19 + +package atomic + +import "unsafe" + +type Pointer[T any] struct { + _ nocmp // disallow non-atomic comparison + p UnsafePointer +} + +// NewPointer creates a new Pointer. +func NewPointer[T any](v *T) *Pointer[T] { + var p Pointer[T] + if v != nil { + p.p.Store(unsafe.Pointer(v)) + } + return &p +} + +// Load atomically loads the wrapped value. +func (p *Pointer[T]) Load() *T { + return (*T)(p.p.Load()) +} + +// Store atomically stores the passed value. +func (p *Pointer[T]) Store(val *T) { + p.p.Store(unsafe.Pointer(val)) +} + +// Swap atomically swaps the wrapped pointer and returns the old value. +func (p *Pointer[T]) Swap(val *T) (old *T) { + return (*T)(p.p.Swap(unsafe.Pointer(val))) +} + +// CompareAndSwap is an atomic compare-and-swap. +func (p *Pointer[T]) CompareAndSwap(old, new *T) (swapped bool) { + return p.p.CompareAndSwap(unsafe.Pointer(old), unsafe.Pointer(new)) +} diff --git a/vendor/go.uber.org/atomic/string.go b/vendor/go.uber.org/atomic/string.go index c4bea70f4..061466c5b 100644 --- a/vendor/go.uber.org/atomic/string.go +++ b/vendor/go.uber.org/atomic/string.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -42,24 +42,31 @@ func NewString(val string) *String { // Load atomically loads the wrapped string. func (x *String) Load() string { - if v := x.v.Load(); v != nil { - return v.(string) - } - return _zeroString + return unpackString(x.v.Load()) } // Store atomically stores the passed string. func (x *String) Store(val string) { - x.v.Store(val) + x.v.Store(packString(val)) } // CompareAndSwap is an atomic compare-and-swap for string values. func (x *String) CompareAndSwap(old, new string) (swapped bool) { - return x.v.CompareAndSwap(old, new) + if x.v.CompareAndSwap(packString(old), packString(new)) { + return true + } + + if old == _zeroString { + // If the old value is the empty value, then it's possible the + // underlying Value hasn't been set and is nil, so retry with nil. + return x.v.CompareAndSwap(nil, packString(new)) + } + + return false } // Swap atomically stores the given string and returns the old // value. func (x *String) Swap(val string) (old string) { - return x.v.Swap(val).(string) + return unpackString(x.v.Swap(packString(val))) } diff --git a/vendor/go.uber.org/atomic/string_ext.go b/vendor/go.uber.org/atomic/string_ext.go index 1f63dfd5b..019109c86 100644 --- a/vendor/go.uber.org/atomic/string_ext.go +++ b/vendor/go.uber.org/atomic/string_ext.go @@ -1,4 +1,4 @@ -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -20,7 +20,18 @@ package atomic -//go:generate bin/gen-atomicwrapper -name=String -type=string -wrapped=Value -compareandswap -swap -file=string.go +//go:generate bin/gen-atomicwrapper -name=String -type=string -wrapped Value -pack packString -unpack unpackString -compareandswap -swap -file=string.go + +func packString(s string) interface{} { + return s +} + +func unpackString(v interface{}) string { + if s, ok := v.(string); ok { + return s + } + return "" +} // String returns the wrapped value. func (s *String) String() string { diff --git a/vendor/go.uber.org/atomic/time.go b/vendor/go.uber.org/atomic/time.go index 1660feb14..cc2a230c0 100644 --- a/vendor/go.uber.org/atomic/time.go +++ b/vendor/go.uber.org/atomic/time.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicwrapper. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/uint32.go b/vendor/go.uber.org/atomic/uint32.go index d6f04a96d..4adc294ac 100644 --- a/vendor/go.uber.org/atomic/uint32.go +++ b/vendor/go.uber.org/atomic/uint32.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicint. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/uint64.go b/vendor/go.uber.org/atomic/uint64.go index 2574bdd5e..0e2eddb30 100644 --- a/vendor/go.uber.org/atomic/uint64.go +++ b/vendor/go.uber.org/atomic/uint64.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicint. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/atomic/uintptr.go b/vendor/go.uber.org/atomic/uintptr.go index 81b275a7a..7d5b000d6 100644 --- a/vendor/go.uber.org/atomic/uintptr.go +++ b/vendor/go.uber.org/atomic/uintptr.go @@ -1,6 +1,6 @@ // @generated Code generated by gen-atomicint. -// Copyright (c) 2020-2022 Uber Technologies, Inc. +// Copyright (c) 2020-2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/go.uber.org/zap/.golangci.yml b/vendor/go.uber.org/zap/.golangci.yml new file mode 100644 index 000000000..fbc6df790 --- /dev/null +++ b/vendor/go.uber.org/zap/.golangci.yml @@ -0,0 +1,77 @@ +output: + # Make output more digestible with quickfix in vim/emacs/etc. + sort-results: true + print-issued-lines: false + +linters: + # We'll track the golangci-lint default linters manually + # instead of letting them change without our control. + disable-all: true + enable: + # golangci-lint defaults: + - errcheck + - gosimple + - govet + - ineffassign + - staticcheck + - unused + + # Our own extras: + - gofmt + - nolintlint # lints nolint directives + - revive + +linters-settings: + govet: + # These govet checks are disabled by default, but they're useful. + enable: + - niliness + - reflectvaluecompare + - sortslice + - unusedwrite + + errcheck: + exclude-functions: + # These methods can not fail. + # They operate on an in-memory buffer. + - (*go.uber.org/zap/buffer.Buffer).Write + - (*go.uber.org/zap/buffer.Buffer).WriteByte + - (*go.uber.org/zap/buffer.Buffer).WriteString + + - (*go.uber.org/zap/zapio.Writer).Close + - (*go.uber.org/zap/zapio.Writer).Sync + - (*go.uber.org/zap/zapio.Writer).Write + # Write to zapio.Writer cannot fail, + # so io.WriteString on it cannot fail. + - io.WriteString(*go.uber.org/zap/zapio.Writer) + + # Writing a plain string to a fmt.State cannot fail. + - io.WriteString(fmt.State) + +issues: + # Print all issues reported by all linters. + max-issues-per-linter: 0 + max-same-issues: 0 + + # Don't ignore some of the issues that golangci-lint considers okay. + # This includes documenting all exported entities. + exclude-use-default: false + + exclude-rules: + # Don't warn on unused parameters. + # Parameter names are useful; replacing them with '_' is undesirable. + - linters: [revive] + text: 'unused-parameter: parameter \S+ seems to be unused, consider removing or renaming it as _' + + # staticcheck already has smarter checks for empty blocks. + # revive's empty-block linter has false positives. + # For example, as of writing this, the following is not allowed. 
+ # for foo() { } + - linters: [revive] + text: 'empty-block: this block is empty, you can remove it' + + # Ignore logger.Sync() errcheck failures in example_test.go + # since those are intended to be uncomplicated examples. + - linters: [errcheck] + path: example_test.go + text: 'Error return value of `logger.Sync` is not checked' diff --git a/vendor/go.uber.org/zap/CHANGELOG.md b/vendor/go.uber.org/zap/CHANGELOG.md index fe57bc085..11b465976 100644 --- a/vendor/go.uber.org/zap/CHANGELOG.md +++ b/vendor/go.uber.org/zap/CHANGELOG.md @@ -1,7 +1,18 @@ # Changelog All notable changes to this project will be documented in this file. -This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). +This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## 1.26.0 (14 Sep 2023) +Enhancements: +* [#1319][]: Add `WithLazy` method to `Logger` which lazily evaluates the structured +context. +* [#1350][]: String encoding is much (~50%) faster now. + +Thanks to @jquirke, @cdvr1993 for their contributions to this release. + +[#1319]: https://github.com/uber-go/zap/pull/1319 +[#1350]: https://github.com/uber-go/zap/pull/1350 ## 1.25.0 (1 Aug 2023) @@ -48,7 +59,6 @@ Enhancements: [#1147]: https://github.com/uber-go/zap/pull/1147 [#1155]: https://github.com/uber-go/zap/pull/1155 - ## 1.22.0 (8 Aug 2022) Enhancements: @@ -197,6 +207,16 @@ Enhancements: Thanks to @ash2k, @FMLS, @jimmystewpot, @Oncilla, @tsoslow, @tylitianrui, @withshubh, and @wziww for their contributions to this release. +[#865]: https://github.com/uber-go/zap/pull/865 +[#867]: https://github.com/uber-go/zap/pull/867 +[#881]: https://github.com/uber-go/zap/pull/881 +[#903]: https://github.com/uber-go/zap/pull/903 +[#912]: https://github.com/uber-go/zap/pull/912 +[#913]: https://github.com/uber-go/zap/pull/913 +[#928]: https://github.com/uber-go/zap/pull/928 +[#931]: https://github.com/uber-go/zap/pull/931 +[#936]: https://github.com/uber-go/zap/pull/936 + ## 1.16.0 (1 Sep 2020) Bugfixes: @@ -218,6 +238,17 @@ Enhancements: Thanks to @SteelPhase, @tmshn, @lixingwang, @wyxloading, @moul, @segevfiner, @andy-retailnext and @jcorbin for their contributions to this release. +[#629]: https://github.com/uber-go/zap/pull/629 +[#697]: https://github.com/uber-go/zap/pull/697 +[#828]: https://github.com/uber-go/zap/pull/828 +[#835]: https://github.com/uber-go/zap/pull/835 +[#843]: https://github.com/uber-go/zap/pull/843 +[#844]: https://github.com/uber-go/zap/pull/844 +[#852]: https://github.com/uber-go/zap/pull/852 +[#854]: https://github.com/uber-go/zap/pull/854 +[#861]: https://github.com/uber-go/zap/pull/861 +[#862]: https://github.com/uber-go/zap/pull/862 + ## 1.15.0 (23 Apr 2020) Bugfixes: @@ -234,6 +265,11 @@ Enhancements: Thanks to @danielbprice for their contributions to this release. +[#804]: https://github.com/uber-go/zap/pull/804 +[#812]: https://github.com/uber-go/zap/pull/812 +[#806]: https://github.com/uber-go/zap/pull/806 +[#813]: https://github.com/uber-go/zap/pull/813 + ## 1.14.1 (14 Mar 2020) Bugfixes: @@ -246,6 +282,10 @@ Bugfixes: Thanks to @YashishDua for their contributions to this release. +[#791]: https://github.com/uber-go/zap/pull/791 +[#795]: https://github.com/uber-go/zap/pull/795 +[#799]: https://github.com/uber-go/zap/pull/799 + ## 1.14.0 (20 Feb 2020) Enhancements: @@ -256,6 +296,11 @@ Enhancements: Thanks to @caibirdme for their contributions to this release. 
+[#771]: https://github.com/uber-go/zap/pull/771 +[#773]: https://github.com/uber-go/zap/pull/773 +[#775]: https://github.com/uber-go/zap/pull/775 +[#786]: https://github.com/uber-go/zap/pull/786 + ## 1.13.0 (13 Nov 2019) Enhancements: @@ -264,11 +309,15 @@ Enhancements: Thanks to @jbizzle for their contributions to this release. +[#758]: https://github.com/uber-go/zap/pull/758 + ## 1.12.0 (29 Oct 2019) Enhancements: * [#751][]: Migrate to Go modules. +[#751]: https://github.com/uber-go/zap/pull/751 + ## 1.11.0 (21 Oct 2019) Enhancements: @@ -277,6 +326,9 @@ Enhancements: Thanks to @juicemia, @uhthomas for their contributions to this release. +[#725]: https://github.com/uber-go/zap/pull/725 +[#736]: https://github.com/uber-go/zap/pull/736 + ## 1.10.0 (29 Apr 2019) Bugfixes: @@ -294,12 +346,20 @@ Enhancements: Thanks to @iaroslav-ciupin, @lelenanam, @joa, @NWilson for their contributions to this release. +[#657]: https://github.com/uber-go/zap/pull/657 +[#706]: https://github.com/uber-go/zap/pull/706 +[#610]: https://github.com/uber-go/zap/pull/610 +[#675]: https://github.com/uber-go/zap/pull/675 +[#704]: https://github.com/uber-go/zap/pull/704 + ## v1.9.1 (06 Aug 2018) Bugfixes: * [#614][]: MapObjectEncoder should not ignore empty slices. +[#614]: https://github.com/uber-go/zap/pull/614 + ## v1.9.0 (19 Jul 2018) Enhancements: @@ -309,6 +369,10 @@ Enhancements: Thanks to @nfarah86, @AlekSi, @JeanMertz, @philippgille, @etsangsplk, and @dimroc for their contributions to this release. +[#602]: https://github.com/uber-go/zap/pull/602 +[#572]: https://github.com/uber-go/zap/pull/572 +[#606]: https://github.com/uber-go/zap/pull/606 + ## v1.8.0 (13 Apr 2018) Enhancements: @@ -322,11 +386,18 @@ Bugfixes: Thanks to @DiSiqueira and @djui for their contributions to this release. +[#508]: https://github.com/uber-go/zap/pull/508 +[#518]: https://github.com/uber-go/zap/pull/518 +[#577]: https://github.com/uber-go/zap/pull/577 +[#574]: https://github.com/uber-go/zap/pull/574 + ## v1.7.1 (25 Sep 2017) Bugfixes: * [#504][]: Store strings when using AddByteString with the map encoder. +[#504]: https://github.com/uber-go/zap/pull/504 + ## v1.7.0 (21 Sep 2017) Enhancements: @@ -334,6 +405,8 @@ Enhancements: * [#487][]: Add `NewStdLogAt`, which extends `NewStdLog` by allowing the user to specify the level of the logged messages. +[#487]: https://github.com/uber-go/zap/pull/487 + ## v1.6.0 (30 Aug 2017) Enhancements: @@ -342,6 +415,9 @@ Enhancements: * [#490][]: Add a `ContextMap` method to observer logs for simpler field validation in tests. +[#490]: https://github.com/uber-go/zap/pull/490 +[#491]: https://github.com/uber-go/zap/pull/491 + ## v1.5.0 (22 Jul 2017) Enhancements: @@ -355,6 +431,11 @@ Bugfixes: Thanks to @richard-tunein and @pavius for their contributions to this release. +[#477]: https://github.com/uber-go/zap/pull/477 +[#465]: https://github.com/uber-go/zap/pull/465 +[#460]: https://github.com/uber-go/zap/pull/460 +[#470]: https://github.com/uber-go/zap/pull/470 + ## v1.4.1 (08 Jun 2017) This release fixes two bugs. @@ -364,6 +445,9 @@ Bugfixes: * [#435][]: Support a variety of case conventions when unmarshaling levels. * [#444][]: Fix a panic in the observer. +[#435]: https://github.com/uber-go/zap/pull/435 +[#444]: https://github.com/uber-go/zap/pull/444 + ## v1.4.0 (12 May 2017) This release adds a few small features and is fully backward-compatible. 
@@ -376,6 +460,10 @@ Enhancements: * [#431][]: Make `zap.AtomicLevel` implement `fmt.Stringer`, which makes a variety of operations a bit simpler. +[#424]: https://github.com/uber-go/zap/pull/424 +[#425]: https://github.com/uber-go/zap/pull/425 +[#431]: https://github.com/uber-go/zap/pull/431 + ## v1.3.0 (25 Apr 2017) This release adds an enhancement to zap's testing helpers as well as the @@ -387,6 +475,9 @@ Enhancements: particularly useful when testing the `SugaredLogger`. * [#416][]: Make `AtomicLevel` implement `encoding.TextMarshaler`. +[#415]: https://github.com/uber-go/zap/pull/415 +[#416]: https://github.com/uber-go/zap/pull/416 + ## v1.2.0 (13 Apr 2017) This release adds a gRPC compatibility wrapper. It is fully backward-compatible. @@ -396,6 +487,8 @@ Enhancements: * [#402][]: Add a `zapgrpc` package that wraps zap's Logger and implements `grpclog.Logger`. +[#402]: https://github.com/uber-go/zap/pull/402 + ## v1.1.0 (31 Mar 2017) This release fixes two bugs and adds some enhancements to zap's testing helpers. @@ -413,6 +506,10 @@ Enhancements: Thanks to @moitias for contributing to this release. +[#385]: https://github.com/uber-go/zap/pull/385 +[#396]: https://github.com/uber-go/zap/pull/396 +[#386]: https://github.com/uber-go/zap/pull/386 + ## v1.0.0 (14 Mar 2017) This is zap's first stable release. All exported APIs are now final, and no @@ -458,6 +555,20 @@ Enhancements: Thanks to @suyash, @htrendev, @flisky, @Ulexus, and @skipor for their contributions to this release. +[#366]: https://github.com/uber-go/zap/pull/366 +[#364]: https://github.com/uber-go/zap/pull/364 +[#371]: https://github.com/uber-go/zap/pull/371 +[#362]: https://github.com/uber-go/zap/pull/362 +[#369]: https://github.com/uber-go/zap/pull/369 +[#347]: https://github.com/uber-go/zap/pull/347 +[#373]: https://github.com/uber-go/zap/pull/373 +[#348]: https://github.com/uber-go/zap/pull/348 +[#327]: https://github.com/uber-go/zap/pull/327 +[#376]: https://github.com/uber-go/zap/pull/376 +[#346]: https://github.com/uber-go/zap/pull/346 +[#365]: https://github.com/uber-go/zap/pull/365 +[#372]: https://github.com/uber-go/zap/pull/372 + ## v1.0.0-rc.3 (7 Mar 2017) This is the third release candidate for zap's stable release. There are no @@ -479,6 +590,11 @@ Enhancements: Thanks to @ansel1 and @suyash for their contributions to this release. +[#339]: https://github.com/uber-go/zap/pull/339 +[#307]: https://github.com/uber-go/zap/pull/307 +[#353]: https://github.com/uber-go/zap/pull/353 +[#311]: https://github.com/uber-go/zap/pull/311 + ## v1.0.0-rc.2 (21 Feb 2017) This is the second release candidate for zap's stable release. It includes two @@ -516,6 +632,15 @@ Enhancements: Thanks to @skipor and @chapsuk for their contributions to this release. +[#316]: https://github.com/uber-go/zap/pull/316 +[#309]: https://github.com/uber-go/zap/pull/309 +[#317]: https://github.com/uber-go/zap/pull/317 +[#321]: https://github.com/uber-go/zap/pull/321 +[#325]: https://github.com/uber-go/zap/pull/325 +[#333]: https://github.com/uber-go/zap/pull/333 +[#326]: https://github.com/uber-go/zap/pull/326 +[#300]: https://github.com/uber-go/zap/pull/300 + ## v1.0.0-rc.1 (14 Feb 2017) This is the first release candidate for zap's stable release. There are multiple @@ -544,95 +669,3 @@ backward compatibility concerns and all functionality is new. Early zap adopters should pin to the 0.1.x minor version until they're ready to upgrade to the upcoming stable release. 
- -[#316]: https://github.com/uber-go/zap/pull/316 -[#309]: https://github.com/uber-go/zap/pull/309 -[#317]: https://github.com/uber-go/zap/pull/317 -[#321]: https://github.com/uber-go/zap/pull/321 -[#325]: https://github.com/uber-go/zap/pull/325 -[#333]: https://github.com/uber-go/zap/pull/333 -[#326]: https://github.com/uber-go/zap/pull/326 -[#300]: https://github.com/uber-go/zap/pull/300 -[#339]: https://github.com/uber-go/zap/pull/339 -[#307]: https://github.com/uber-go/zap/pull/307 -[#353]: https://github.com/uber-go/zap/pull/353 -[#311]: https://github.com/uber-go/zap/pull/311 -[#366]: https://github.com/uber-go/zap/pull/366 -[#364]: https://github.com/uber-go/zap/pull/364 -[#371]: https://github.com/uber-go/zap/pull/371 -[#362]: https://github.com/uber-go/zap/pull/362 -[#369]: https://github.com/uber-go/zap/pull/369 -[#347]: https://github.com/uber-go/zap/pull/347 -[#373]: https://github.com/uber-go/zap/pull/373 -[#348]: https://github.com/uber-go/zap/pull/348 -[#327]: https://github.com/uber-go/zap/pull/327 -[#376]: https://github.com/uber-go/zap/pull/376 -[#346]: https://github.com/uber-go/zap/pull/346 -[#365]: https://github.com/uber-go/zap/pull/365 -[#372]: https://github.com/uber-go/zap/pull/372 -[#385]: https://github.com/uber-go/zap/pull/385 -[#396]: https://github.com/uber-go/zap/pull/396 -[#386]: https://github.com/uber-go/zap/pull/386 -[#402]: https://github.com/uber-go/zap/pull/402 -[#415]: https://github.com/uber-go/zap/pull/415 -[#416]: https://github.com/uber-go/zap/pull/416 -[#424]: https://github.com/uber-go/zap/pull/424 -[#425]: https://github.com/uber-go/zap/pull/425 -[#431]: https://github.com/uber-go/zap/pull/431 -[#435]: https://github.com/uber-go/zap/pull/435 -[#444]: https://github.com/uber-go/zap/pull/444 -[#477]: https://github.com/uber-go/zap/pull/477 -[#465]: https://github.com/uber-go/zap/pull/465 -[#460]: https://github.com/uber-go/zap/pull/460 -[#470]: https://github.com/uber-go/zap/pull/470 -[#487]: https://github.com/uber-go/zap/pull/487 -[#490]: https://github.com/uber-go/zap/pull/490 -[#491]: https://github.com/uber-go/zap/pull/491 -[#504]: https://github.com/uber-go/zap/pull/504 -[#508]: https://github.com/uber-go/zap/pull/508 -[#518]: https://github.com/uber-go/zap/pull/518 -[#577]: https://github.com/uber-go/zap/pull/577 -[#574]: https://github.com/uber-go/zap/pull/574 -[#602]: https://github.com/uber-go/zap/pull/602 -[#572]: https://github.com/uber-go/zap/pull/572 -[#606]: https://github.com/uber-go/zap/pull/606 -[#614]: https://github.com/uber-go/zap/pull/614 -[#657]: https://github.com/uber-go/zap/pull/657 -[#706]: https://github.com/uber-go/zap/pull/706 -[#610]: https://github.com/uber-go/zap/pull/610 -[#675]: https://github.com/uber-go/zap/pull/675 -[#704]: https://github.com/uber-go/zap/pull/704 -[#725]: https://github.com/uber-go/zap/pull/725 -[#736]: https://github.com/uber-go/zap/pull/736 -[#751]: https://github.com/uber-go/zap/pull/751 -[#758]: https://github.com/uber-go/zap/pull/758 -[#771]: https://github.com/uber-go/zap/pull/771 -[#773]: https://github.com/uber-go/zap/pull/773 -[#775]: https://github.com/uber-go/zap/pull/775 -[#786]: https://github.com/uber-go/zap/pull/786 -[#791]: https://github.com/uber-go/zap/pull/791 -[#795]: https://github.com/uber-go/zap/pull/795 -[#799]: https://github.com/uber-go/zap/pull/799 -[#804]: https://github.com/uber-go/zap/pull/804 -[#812]: https://github.com/uber-go/zap/pull/812 -[#806]: https://github.com/uber-go/zap/pull/806 -[#813]: https://github.com/uber-go/zap/pull/813 -[#629]: 
https://github.com/uber-go/zap/pull/629 -[#697]: https://github.com/uber-go/zap/pull/697 -[#828]: https://github.com/uber-go/zap/pull/828 -[#835]: https://github.com/uber-go/zap/pull/835 -[#843]: https://github.com/uber-go/zap/pull/843 -[#844]: https://github.com/uber-go/zap/pull/844 -[#852]: https://github.com/uber-go/zap/pull/852 -[#854]: https://github.com/uber-go/zap/pull/854 -[#861]: https://github.com/uber-go/zap/pull/861 -[#862]: https://github.com/uber-go/zap/pull/862 -[#865]: https://github.com/uber-go/zap/pull/865 -[#867]: https://github.com/uber-go/zap/pull/867 -[#881]: https://github.com/uber-go/zap/pull/881 -[#903]: https://github.com/uber-go/zap/pull/903 -[#912]: https://github.com/uber-go/zap/pull/912 -[#913]: https://github.com/uber-go/zap/pull/913 -[#928]: https://github.com/uber-go/zap/pull/928 -[#931]: https://github.com/uber-go/zap/pull/931 -[#936]: https://github.com/uber-go/zap/pull/936 diff --git a/vendor/go.uber.org/zap/Makefile b/vendor/go.uber.org/zap/Makefile index 518c3fa6b..eb1cee53b 100644 --- a/vendor/go.uber.org/zap/Makefile +++ b/vendor/go.uber.org/zap/Makefile @@ -1,64 +1,62 @@ -export GOBIN ?= $(shell pwd)/bin +# Directory containing the Makefile. +PROJECT_ROOT = $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) + +export GOBIN ?= $(PROJECT_ROOT)/bin +export PATH := $(GOBIN):$(PATH) -REVIVE = $(GOBIN)/revive -STATICCHECK = $(GOBIN)/staticcheck GOVULNCHECK = $(GOBIN)/govulncheck BENCH_FLAGS ?= -cpuprofile=cpu.pprof -memprofile=mem.pprof -benchmem # Directories containing independent Go modules. -# -# We track coverage only for the main module. MODULE_DIRS = . ./exp ./benchmarks ./zapgrpc/internal/test -# Many Go tools take file globs or directories as arguments instead of packages. -GO_FILES := $(shell \ - find . '(' -path '*/.*' -o -path './vendor' ')' -prune \ - -o -name '*.go' -print | cut -b3-) +# Directories that we want to track coverage for. +COVER_DIRS = . ./exp .PHONY: all all: lint test .PHONY: lint -lint: $(REVIVE) $(STATICCHECK) - @rm -rf lint.log - @echo "Checking formatting..." - @gofmt -d -s $(GO_FILES) 2>&1 | tee lint.log - @echo "Checking vet..." - @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && go vet ./... 2>&1) &&) true | tee -a lint.log - @echo "Checking lint..." - @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && \ - $(REVIVE) -set_exit_status ./... 2>&1) &&) true | tee -a lint.log - @echo "Checking staticcheck..." - @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && $(STATICCHECK) ./... 2>&1) &&) true | tee -a lint.log - @echo "Checking for unresolved FIXMEs..." - @git grep -i fixme | grep -v -e Makefile | tee -a lint.log - @echo "Checking for license headers..." - @./checklicense.sh | tee -a lint.log - @[ ! -s lint.log ] - @echo "Checking 'go mod tidy'..." - @make tidy - @if ! 
git diff --quiet; then \ - echo "'go mod tidy' resulted in changes or working tree is dirty:"; \ - git --no-pager diff; \ - fi - -$(REVIVE): - cd tools && go install github.com/mgechev/revive +lint: golangci-lint tidy-lint license-lint + +.PHONY: golangci-lint +golangci-lint: + @$(foreach mod,$(MODULE_DIRS), \ + (cd $(mod) && \ + echo "[lint] golangci-lint: $(mod)" && \ + golangci-lint run --path-prefix $(mod)) &&) true + +.PHONY: tidy +tidy: + @$(foreach dir,$(MODULE_DIRS), \ + (cd $(dir) && go mod tidy) &&) true + +.PHONY: tidy-lint +tidy-lint: + @$(foreach mod,$(MODULE_DIRS), \ + (cd $(mod) && \ + echo "[lint] tidy: $(mod)" && \ + go mod tidy && \ + git diff --exit-code -- go.mod go.sum) &&) true + + +.PHONY: license-lint +license-lint: + ./checklicense.sh $(GOVULNCHECK): cd tools && go install golang.org/x/vuln/cmd/govulncheck -$(STATICCHECK): - cd tools && go install honnef.co/go/tools/cmd/staticcheck - .PHONY: test test: @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && go test -race ./...) &&) true .PHONY: cover cover: - go test -race -coverprofile=cover.out -coverpkg=./... ./... - go tool cover -html=cover.out -o cover.html + @$(foreach dir,$(COVER_DIRS), ( \ + cd $(dir) && \ + go test -race -coverprofile=cover.out -coverpkg=./... ./... \ + && go tool cover -html=cover.out -o cover.html) &&) true .PHONY: bench BENCH ?= . @@ -73,10 +71,6 @@ updatereadme: rm -f README.md cat .readme.tmpl | go run internal/readme/readme.go > README.md -.PHONY: tidy -tidy: - @$(foreach dir,$(MODULE_DIRS),(cd $(dir) && go mod tidy) &&) true - .PHONY: vulncheck vulncheck: $(GOVULNCHECK) - $(GOVULNCHECK) ./... \ No newline at end of file + $(GOVULNCHECK) ./... diff --git a/vendor/go.uber.org/zap/array.go b/vendor/go.uber.org/zap/array.go index 5be3704a3..abfccb566 100644 --- a/vendor/go.uber.org/zap/array.go +++ b/vendor/go.uber.org/zap/array.go @@ -21,6 +21,7 @@ package zap import ( + "fmt" "time" "go.uber.org/zap/zapcore" @@ -94,11 +95,137 @@ func Int8s(key string, nums []int8) Field { return Array(key, int8s(nums)) } +// Objects constructs a field with the given key, holding a list of the +// provided objects that can be marshaled by Zap. +// +// Note that these objects must implement zapcore.ObjectMarshaler directly. +// That is, if you're trying to marshal a []Request, the MarshalLogObject +// method must be declared on the Request type, not its pointer (*Request). +// If it's on the pointer, use ObjectValues. +// +// Given an object that implements MarshalLogObject on the value receiver, you +// can log a slice of those objects with Objects like so: +// +// type Author struct{ ... } +// func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var authors []Author = ... +// logger.Info("loading article", zap.Objects("authors", authors)) +// +// Similarly, given a type that implements MarshalLogObject on its pointer +// receiver, you can log a slice of pointers to that object with Objects like +// so: +// +// type Request struct{ ... } +// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var requests []*Request = ... +// logger.Info("sending requests", zap.Objects("requests", requests)) +// +// If instead, you have a slice of values of such an object, use the +// ObjectValues constructor. +// +// var requests []Request = ... 
+// logger.Info("sending requests", zap.ObjectValues("requests", requests)) +func Objects[T zapcore.ObjectMarshaler](key string, values []T) Field { + return Array(key, objects[T](values)) +} + +type objects[T zapcore.ObjectMarshaler] []T + +func (os objects[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for _, o := range os { + if err := arr.AppendObject(o); err != nil { + return err + } + } + return nil +} + +// ObjectMarshalerPtr is a constraint that specifies that the given type +// implements zapcore.ObjectMarshaler on a pointer receiver. +type ObjectMarshalerPtr[T any] interface { + *T + zapcore.ObjectMarshaler +} + +// ObjectValues constructs a field with the given key, holding a list of the +// provided objects, where pointers to these objects can be marshaled by Zap. +// +// Note that pointers to these objects must implement zapcore.ObjectMarshaler. +// That is, if you're trying to marshal a []Request, the MarshalLogObject +// method must be declared on the *Request type, not the value (Request). +// If it's on the value, use Objects. +// +// Given an object that implements MarshalLogObject on the pointer receiver, +// you can log a slice of those objects with ObjectValues like so: +// +// type Request struct{ ... } +// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var requests []Request = ... +// logger.Info("sending requests", zap.ObjectValues("requests", requests)) +// +// If instead, you have a slice of pointers of such an object, use the Objects +// field constructor. +// +// var requests []*Request = ... +// logger.Info("sending requests", zap.Objects("requests", requests)) +func ObjectValues[T any, P ObjectMarshalerPtr[T]](key string, values []T) Field { + return Array(key, objectValues[T, P](values)) +} + +type objectValues[T any, P ObjectMarshalerPtr[T]] []T + +func (os objectValues[T, P]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for i := range os { + // It is necessary for us to explicitly reference the "P" type. + // We cannot simply pass "&os[i]" to AppendObject because its type + // is "*T", which the type system does not consider as + // implementing ObjectMarshaler. + // Only the type "P" satisfies ObjectMarshaler, which we have + // to convert "*T" to explicitly. + var p P = &os[i] + if err := arr.AppendObject(p); err != nil { + return err + } + } + return nil +} + // Strings constructs a field that carries a slice of strings. func Strings(key string, ss []string) Field { return Array(key, stringArray(ss)) } +// Stringers constructs a field with the given key, holding a list of the +// output provided by the value's String method +// +// Given an object that implements String on the value receiver, you +// can log a slice of those objects with Objects like so: +// +// type Request struct{ ... } +// func (a Request) String() string +// +// var requests []Request = ... +// logger.Info("sending requests", zap.Stringers("requests", requests)) +// +// Note that these objects must implement fmt.Stringer directly. +// That is, if you're trying to marshal a []Request, the String method +// must be declared on the Request type, not its pointer (*Request). +func Stringers[T fmt.Stringer](key string, values []T) Field { + return Array(key, stringers[T](values)) +} + +type stringers[T fmt.Stringer] []T + +func (os stringers[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for _, o := range os { + arr.AppendString(o.String()) + } + return nil +} + // Times constructs a field that carries a slice of time.Times. 
func Times(key string, ts []time.Time) Field { return Array(key, times(ts)) diff --git a/vendor/go.uber.org/zap/array_go118.go b/vendor/go.uber.org/zap/array_go118.go deleted file mode 100644 index d0d2c49d6..000000000 --- a/vendor/go.uber.org/zap/array_go118.go +++ /dev/null @@ -1,156 +0,0 @@ -// Copyright (c) 2022 Uber Technologies, Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -//go:build go1.18 -// +build go1.18 - -package zap - -import ( - "fmt" - - "go.uber.org/zap/zapcore" -) - -// Objects constructs a field with the given key, holding a list of the -// provided objects that can be marshaled by Zap. -// -// Note that these objects must implement zapcore.ObjectMarshaler directly. -// That is, if you're trying to marshal a []Request, the MarshalLogObject -// method must be declared on the Request type, not its pointer (*Request). -// If it's on the pointer, use ObjectValues. -// -// Given an object that implements MarshalLogObject on the value receiver, you -// can log a slice of those objects with Objects like so: -// -// type Author struct{ ... } -// func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var authors []Author = ... -// logger.Info("loading article", zap.Objects("authors", authors)) -// -// Similarly, given a type that implements MarshalLogObject on its pointer -// receiver, you can log a slice of pointers to that object with Objects like -// so: -// -// type Request struct{ ... } -// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var requests []*Request = ... -// logger.Info("sending requests", zap.Objects("requests", requests)) -// -// If instead, you have a slice of values of such an object, use the -// ObjectValues constructor. -// -// var requests []Request = ... -// logger.Info("sending requests", zap.ObjectValues("requests", requests)) -func Objects[T zapcore.ObjectMarshaler](key string, values []T) Field { - return Array(key, objects[T](values)) -} - -type objects[T zapcore.ObjectMarshaler] []T - -func (os objects[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for _, o := range os { - if err := arr.AppendObject(o); err != nil { - return err - } - } - return nil -} - -// ObjectMarshalerPtr is a constraint that specifies that the given type -// implements zapcore.ObjectMarshaler on a pointer receiver. 
-type ObjectMarshalerPtr[T any] interface { - *T - zapcore.ObjectMarshaler -} - -// ObjectValues constructs a field with the given key, holding a list of the -// provided objects, where pointers to these objects can be marshaled by Zap. -// -// Note that pointers to these objects must implement zapcore.ObjectMarshaler. -// That is, if you're trying to marshal a []Request, the MarshalLogObject -// method must be declared on the *Request type, not the value (Request). -// If it's on the value, use Objects. -// -// Given an object that implements MarshalLogObject on the pointer receiver, -// you can log a slice of those objects with ObjectValues like so: -// -// type Request struct{ ... } -// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error -// -// var requests []Request = ... -// logger.Info("sending requests", zap.ObjectValues("requests", requests)) -// -// If instead, you have a slice of pointers of such an object, use the Objects -// field constructor. -// -// var requests []*Request = ... -// logger.Info("sending requests", zap.Objects("requests", requests)) -func ObjectValues[T any, P ObjectMarshalerPtr[T]](key string, values []T) Field { - return Array(key, objectValues[T, P](values)) -} - -type objectValues[T any, P ObjectMarshalerPtr[T]] []T - -func (os objectValues[T, P]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for i := range os { - // It is necessary for us to explicitly reference the "P" type. - // We cannot simply pass "&os[i]" to AppendObject because its type - // is "*T", which the type system does not consider as - // implementing ObjectMarshaler. - // Only the type "P" satisfies ObjectMarshaler, which we have - // to convert "*T" to explicitly. - var p P = &os[i] - if err := arr.AppendObject(p); err != nil { - return err - } - } - return nil -} - -// Stringers constructs a field with the given key, holding a list of the -// output provided by the value's String method -// -// Given an object that implements String on the value receiver, you -// can log a slice of those objects with Objects like so: -// -// type Request struct{ ... } -// func (a Request) String() string -// -// var requests []Request = ... -// logger.Info("sending requests", zap.Stringers("requests", requests)) -// -// Note that these objects must implement fmt.Stringer directly. -// That is, if you're trying to marshal a []Request, the String method -// must be declared on the Request type, not its pointer (*Request). -func Stringers[T fmt.Stringer](key string, values []T) Field { - return Array(key, stringers[T](values)) -} - -type stringers[T fmt.Stringer] []T - -func (os stringers[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { - for _, o := range os { - arr.AppendString(o.String()) - } - return nil -} diff --git a/vendor/go.uber.org/zap/buffer/buffer.go b/vendor/go.uber.org/zap/buffer/buffer.go index 9e929cd98..27fb5cd5d 100644 --- a/vendor/go.uber.org/zap/buffer/buffer.go +++ b/vendor/go.uber.org/zap/buffer/buffer.go @@ -42,6 +42,11 @@ func (b *Buffer) AppendByte(v byte) { b.bs = append(b.bs, v) } +// AppendBytes writes a single byte to the Buffer. +func (b *Buffer) AppendBytes(v []byte) { + b.bs = append(b.bs, v...) +} + // AppendString writes a string to the Buffer. func (b *Buffer) AppendString(s string) { b.bs = append(b.bs, s...) 
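The array.go hunk above, together with the removal of array_go118.go, makes the generic `Objects`, `ObjectValues`, and `Stringers` constructors unconditional now that Go 1.18 generics are always available. A small usage sketch; the `Author` and `Request` types are illustrative and not part of this diff:

```go
package main

import (
	"fmt"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// Author implements zapcore.ObjectMarshaler on the value receiver,
// so a []Author can be logged with zap.Objects.
type Author struct {
	Name string
}

func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddString("name", a.Name)
	return nil
}

// Request implements fmt.Stringer, so a []Request can be logged with zap.Stringers.
type Request struct{ ID int }

func (r Request) String() string { return fmt.Sprintf("req-%d", r.ID) }

func main() {
	logger := zap.NewExample()

	authors := []Author{{Name: "alice"}, {Name: "bob"}}
	requests := []Request{{ID: 1}, {ID: 2}}

	logger.Info("loading article", zap.Objects("authors", authors))
	logger.Info("sending requests", zap.Stringers("requests", requests))
	// For types whose MarshalLogObject is declared on the pointer receiver,
	// use zap.ObjectValues instead of zap.Objects.
}
```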
diff --git a/vendor/go.uber.org/zap/error.go b/vendor/go.uber.org/zap/error.go index 38cb768de..45f7b838d 100644 --- a/vendor/go.uber.org/zap/error.go +++ b/vendor/go.uber.org/zap/error.go @@ -61,9 +61,12 @@ func (errs errArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { // allocating, pool the wrapper type. elem := _errArrayElemPool.Get() elem.error = errs[i] - arr.AppendObject(elem) + err := arr.AppendObject(elem) elem.error = nil _errArrayElemPool.Put(elem) + if err != nil { + return err + } } return nil } diff --git a/vendor/go.uber.org/zap/field.go b/vendor/go.uber.org/zap/field.go index 7f22c5349..c8dd3358a 100644 --- a/vendor/go.uber.org/zap/field.go +++ b/vendor/go.uber.org/zap/field.go @@ -25,6 +25,7 @@ import ( "math" "time" + "go.uber.org/zap/internal/stacktrace" "go.uber.org/zap/zapcore" ) @@ -374,7 +375,7 @@ func StackSkip(key string, skip int) Field { // from expanding the zapcore.Field union struct to include a byte slice. Since // taking a stacktrace is already so expensive (~10us), the extra allocation // is okay. - return String(key, takeStacktrace(skip+1)) // skip StackSkip + return String(key, stacktrace.Take(skip+1)) // skip StackSkip } // Duration constructs a field with the given key and value. The encoder @@ -410,6 +411,26 @@ func Inline(val zapcore.ObjectMarshaler) Field { } } +// Dict constructs a field containing the provided key-value pairs. +// It acts similar to [Object], but with the fields specified as arguments. +func Dict(key string, val ...Field) Field { + return dictField(key, val) +} + +// We need a function with the signature (string, T) for zap.Any. +func dictField(key string, val []Field) Field { + return Object(key, dictObject(val)) +} + +type dictObject []Field + +func (d dictObject) MarshalLogObject(enc zapcore.ObjectEncoder) error { + for _, f := range d { + f.AddTo(enc) + } + return nil +} + // We discovered an issue where zap.Any can cause a performance degradation // when used in new goroutines. 
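The field.go hunk above adds `zap.Dict`, which groups fields under a single key without defining a `zapcore.ObjectMarshaler`, and teaches `zap.Any` to route `[]Field` values through it. A brief sketch (the key and field names are illustrative):

```go
package main

import "go.uber.org/zap"

func main() {
	logger := zap.NewExample()

	// Dict acts like Object, but the nested fields are passed inline.
	logger.Info("request finished",
		zap.Dict("http",
			zap.String("method", "GET"),
			zap.Int("status", 200),
		),
	)
}
```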
// @@ -462,6 +483,8 @@ func Any(key string, value interface{}) Field { c = anyFieldC[zapcore.ObjectMarshaler](Object) case zapcore.ArrayMarshaler: c = anyFieldC[zapcore.ArrayMarshaler](Array) + case []Field: + c = anyFieldC[[]Field](dictField) case bool: c = anyFieldC[bool](Bool) case *bool: diff --git a/vendor/go.uber.org/zap/http_handler.go b/vendor/go.uber.org/zap/http_handler.go index 632b6831a..2be8f6515 100644 --- a/vendor/go.uber.org/zap/http_handler.go +++ b/vendor/go.uber.org/zap/http_handler.go @@ -69,6 +69,13 @@ import ( // // curl -X PUT localhost:8080/log/level -H "Content-Type: application/json" -d '{"level":"debug"}' func (lvl AtomicLevel) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if err := lvl.serveHTTP(w, r); err != nil { + w.WriteHeader(http.StatusInternalServerError) + fmt.Fprintf(w, "internal error: %v", err) + } +} + +func (lvl AtomicLevel) serveHTTP(w http.ResponseWriter, r *http.Request) error { type errorResponse struct { Error string `json:"error"` } @@ -80,19 +87,20 @@ func (lvl AtomicLevel) ServeHTTP(w http.ResponseWriter, r *http.Request) { switch r.Method { case http.MethodGet: - enc.Encode(payload{Level: lvl.Level()}) + return enc.Encode(payload{Level: lvl.Level()}) + case http.MethodPut: requestedLvl, err := decodePutRequest(r.Header.Get("Content-Type"), r) if err != nil { w.WriteHeader(http.StatusBadRequest) - enc.Encode(errorResponse{Error: err.Error()}) - return + return enc.Encode(errorResponse{Error: err.Error()}) } lvl.SetLevel(requestedLvl) - enc.Encode(payload{Level: lvl.Level()}) + return enc.Encode(payload{Level: lvl.Level()}) + default: w.WriteHeader(http.StatusMethodNotAllowed) - enc.Encode(errorResponse{ + return enc.Encode(errorResponse{ Error: "Only GET and PUT are supported.", }) } @@ -129,5 +137,4 @@ func decodePutJSON(body io.Reader) (zapcore.Level, error) { return 0, errors.New("must specify logging level") } return *pld.Level, nil - } diff --git a/vendor/go.uber.org/zap/stacktrace.go b/vendor/go.uber.org/zap/internal/stacktrace/stack.go similarity index 75% rename from vendor/go.uber.org/zap/stacktrace.go rename to vendor/go.uber.org/zap/internal/stacktrace/stack.go index 1f152eb1a..82af7551f 100644 --- a/vendor/go.uber.org/zap/stacktrace.go +++ b/vendor/go.uber.org/zap/internal/stacktrace/stack.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -18,7 +18,9 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -package zap +// Package stacktrace provides support for gathering stack traces +// efficiently. +package stacktrace import ( "runtime" @@ -28,13 +30,14 @@ import ( "go.uber.org/zap/internal/pool" ) -var _stacktracePool = pool.New(func() *stacktrace { - return &stacktrace{ +var _stackPool = pool.New(func() *Stack { + return &Stack{ storage: make([]uintptr, 64), } }) -type stacktrace struct { +// Stack is a captured stack trace. +type Stack struct { pcs []uintptr // program counters; always a subslice of storage frames *runtime.Frames @@ -48,30 +51,30 @@ type stacktrace struct { storage []uintptr } -// stacktraceDepth specifies how deep of a stack trace should be captured. -type stacktraceDepth int +// Depth specifies how deep of a stack trace should be captured. +type Depth int const ( - // stacktraceFirst captures only the first frame. 
- stacktraceFirst stacktraceDepth = iota + // First captures only the first frame. + First Depth = iota - // stacktraceFull captures the entire call stack, allocating more + // Full captures the entire call stack, allocating more // storage for it if needed. - stacktraceFull + Full ) -// captureStacktrace captures a stack trace of the specified depth, skipping +// Capture captures a stack trace of the specified depth, skipping // the provided number of frames. skip=0 identifies the caller of -// captureStacktrace. +// Capture. // // The caller must call Free on the returned stacktrace after using it. -func captureStacktrace(skip int, depth stacktraceDepth) *stacktrace { - stack := _stacktracePool.Get() +func Capture(skip int, depth Depth) *Stack { + stack := _stackPool.Get() switch depth { - case stacktraceFirst: + case First: stack.pcs = stack.storage[:1] - case stacktraceFull: + case Full: stack.pcs = stack.storage } @@ -85,7 +88,7 @@ func captureStacktrace(skip int, depth stacktraceDepth) *stacktrace { // runtime.Callers truncates the recorded stacktrace if there is no // room in the provided slice. For the full stack trace, keep expanding // storage until there are fewer frames than there is room. - if depth == stacktraceFull { + if depth == Full { pcs := stack.pcs for numFrames == len(pcs) { pcs = make([]uintptr, len(pcs)*2) @@ -107,50 +110,54 @@ func captureStacktrace(skip int, depth stacktraceDepth) *stacktrace { // Free releases resources associated with this stacktrace // and returns it back to the pool. -func (st *stacktrace) Free() { +func (st *Stack) Free() { st.frames = nil st.pcs = nil - _stacktracePool.Put(st) + _stackPool.Put(st) } // Count reports the total number of frames in this stacktrace. // Count DOES NOT change as Next is called. -func (st *stacktrace) Count() int { +func (st *Stack) Count() int { return len(st.pcs) } // Next returns the next frame in the stack trace, // and a boolean indicating whether there are more after it. -func (st *stacktrace) Next() (_ runtime.Frame, more bool) { +func (st *Stack) Next() (_ runtime.Frame, more bool) { return st.frames.Next() } -func takeStacktrace(skip int) string { - stack := captureStacktrace(skip+1, stacktraceFull) +// Take returns a string representation of the current stacktrace. +// +// skip is the number of frames to skip before recording the stack trace. +// skip=0 identifies the caller of Take. +func Take(skip int) string { + stack := Capture(skip+1, Full) defer stack.Free() buffer := bufferpool.Get() defer buffer.Free() - stackfmt := newStackFormatter(buffer) + stackfmt := NewFormatter(buffer) stackfmt.FormatStack(stack) return buffer.String() } -// stackFormatter formats a stack trace into a readable string representation. -type stackFormatter struct { +// Formatter formats a stack trace into a readable string representation. +type Formatter struct { b *buffer.Buffer nonEmpty bool // whehther we've written at least one frame already } -// newStackFormatter builds a new stackFormatter. -func newStackFormatter(b *buffer.Buffer) stackFormatter { - return stackFormatter{b: b} +// NewFormatter builds a new Formatter. +func NewFormatter(b *buffer.Buffer) Formatter { + return Formatter{b: b} } // FormatStack formats all remaining frames in the provided stacktrace -- minus // the final runtime.main/runtime.goexit frame. 
-func (sf *stackFormatter) FormatStack(stack *stacktrace) { +func (sf *Formatter) FormatStack(stack *Stack) { // Note: On the last iteration, frames.Next() returns false, with a valid // frame, but we ignore this frame. The last frame is a runtime frame which // adds noise, since it's only either runtime.main or runtime.goexit. @@ -160,7 +167,7 @@ func (sf *stackFormatter) FormatStack(stack *stacktrace) { } // FormatFrame formats the given frame. -func (sf *stackFormatter) FormatFrame(frame runtime.Frame) { +func (sf *Formatter) FormatFrame(frame runtime.Frame) { if sf.nonEmpty { sf.b.AppendByte('\n') } diff --git a/vendor/go.uber.org/zap/internal/ztest/clock.go b/vendor/go.uber.org/zap/internal/ztest/clock.go index fe8026d94..47b0b7f96 100644 --- a/vendor/go.uber.org/zap/internal/ztest/clock.go +++ b/vendor/go.uber.org/zap/internal/ztest/clock.go @@ -1,4 +1,4 @@ -// Copyright (c) 2021 Uber Technologies, Inc. +// Copyright (c) 2023 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -21,30 +21,133 @@ package ztest import ( + "sort" + "sync" "time" - - "github.com/benbjohnson/clock" ) -// MockClock provides control over the time. -type MockClock struct{ m *clock.Mock } +// MockClock is a fake source of time. +// It implements standard time operations, +// but allows the user to control the passage of time. +// +// Use the [Add] method to progress time. +type MockClock struct { + mu sync.RWMutex + now time.Time + + // The MockClock works by maintaining a list of waiters. + // Each waiter knows the time at which it should be resolved. + // When the clock advances, all waiters that are in range are resolved + // in chronological order. + waiters []waiter +} -// NewMockClock builds a new mock clock that provides control of time. +// NewMockClock builds a new mock clock +// using the current actual time as the initial time. func NewMockClock() *MockClock { - return &MockClock{clock.NewMock()} + return &MockClock{ + now: time.Now(), + } } // Now reports the current time. func (c *MockClock) Now() time.Time { - return c.m.Now() + c.mu.RLock() + defer c.mu.RUnlock() + return c.now } // NewTicker returns a time.Ticker that ticks at the specified frequency. +// +// As with [time.NewTicker], +// the ticker will drop ticks if the receiver is slow, +// and the channel is never closed. +// +// Calling Stop on the returned ticker is a no-op. +// The ticker only runs when the clock is advanced. func (c *MockClock) NewTicker(d time.Duration) *time.Ticker { - return &time.Ticker{C: c.m.Ticker(d).C} + ch := make(chan time.Time, 1) + + var tick func(time.Time) + tick = func(now time.Time) { + next := now.Add(d) + c.runAt(next, func() { + defer tick(next) + + select { + case ch <- next: + // ok + default: + // The receiver is slow. + // Drop the tick and continue. + } + }) + } + tick(c.Now()) + + return &time.Ticker{C: ch} +} + +// runAt schedules the given function to be run at the given time. +// The function runs without a lock held, so it may schedule more work. +func (c *MockClock) runAt(t time.Time, fn func()) { + c.mu.Lock() + defer c.mu.Unlock() + c.waiters = append(c.waiters, waiter{until: t, fn: fn}) +} + +type waiter struct { + until time.Time + fn func() } // Add progresses time by the given duration. +// Other operations waiting for the time to advance +// will be resolved if they are within range. 
+// +// Side effects of operations waiting for the time to advance +// will take effect on a best-effort basis. +// Avoid racing with operations that have side effects. +// +// Panics if the duration is negative. func (c *MockClock) Add(d time.Duration) { - c.m.Add(d) + if d < 0 { + panic("cannot add negative duration") + } + + c.mu.Lock() + defer c.mu.Unlock() + + sort.Slice(c.waiters, func(i, j int) bool { + return c.waiters[i].until.Before(c.waiters[j].until) + }) + + newTime := c.now.Add(d) + // newTime won't be recorded until the end of this method. + // This ensures that any waiters that are resolved + // are resolved at the time they were expecting. + + for len(c.waiters) > 0 { + w := c.waiters[0] + if w.until.After(newTime) { + break + } + c.waiters[0] = waiter{} // avoid memory leak + c.waiters = c.waiters[1:] + + // The waiter is within range. + // Travel to the time of the waiter and resolve it. + c.now = w.until + + // The waiter may schedule more work + // so we must release the lock. + c.mu.Unlock() + w.fn() + // Sleeping here is necessary to let the side effects of waiters + // take effect before we continue. + time.Sleep(1 * time.Millisecond) + c.mu.Lock() + } + + c.now = newTime } diff --git a/vendor/go.uber.org/zap/logger.go b/vendor/go.uber.org/zap/logger.go index 0e9548011..6205fe48a 100644 --- a/vendor/go.uber.org/zap/logger.go +++ b/vendor/go.uber.org/zap/logger.go @@ -27,6 +27,7 @@ import ( "strings" "go.uber.org/zap/internal/bufferpool" + "go.uber.org/zap/internal/stacktrace" "go.uber.org/zap/zapcore" ) @@ -173,7 +174,8 @@ func (log *Logger) WithOptions(opts ...Option) *Logger { } // With creates a child logger and adds structured context to it. Fields added -// to the child don't affect the parent, and vice versa. +// to the child don't affect the parent, and vice versa. Any fields that +// require evaluation (such as Objects) are evaluated upon invocation of With. func (log *Logger) With(fields ...Field) *Logger { if len(fields) == 0 { return log @@ -183,6 +185,28 @@ func (log *Logger) With(fields ...Field) *Logger { return l } +// WithLazy creates a child logger and adds structured context to it lazily. +// +// The fields are evaluated only if the logger is further chained with [With] +// or is written to with any of the log level methods. +// Until that occurs, the logger may retain references to objects inside the fields, +// and logging will reflect the state of an object at the time of logging, +// not the time of WithLazy(). +// +// WithLazy provides a worthwhile performance optimization for contextual loggers +// when the likelihood of using the child logger is low, +// such as error paths and rarely taken branches. +// +// Similar to [With], fields added to the child don't affect the parent, and vice versa. +func (log *Logger) WithLazy(fields ...Field) *Logger { + if len(fields) == 0 { + return log + } + return log.WithOptions(WrapCore(func(core zapcore.Core) zapcore.Core { + return zapcore.NewLazyWith(core, fields) + })) +} + // Level reports the minimum enabled level for this logger. // // For NopLoggers, this is [zapcore.InvalidLevel]. @@ -199,6 +223,8 @@ func (log *Logger) Check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { // Log logs a message at the specified level. The message includes any fields // passed at the log site, as well as any fields accumulated on the logger. +// Any Fields that require evaluation (such as Objects) are evaluated upon +// invocation of Log. 
func (log *Logger) Log(lvl zapcore.Level, msg string, fields ...Field) { if ce := log.check(lvl, msg); ce != nil { ce.Write(fields...) @@ -288,8 +314,8 @@ func (log *Logger) Name() string { } func (log *Logger) clone() *Logger { - copy := *log - return © + clone := *log + return &clone } func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { @@ -360,17 +386,17 @@ func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { // Adding the caller or stack trace requires capturing the callers of // this function. We'll share information between these two. - stackDepth := stacktraceFirst + stackDepth := stacktrace.First if addStack { - stackDepth = stacktraceFull + stackDepth = stacktrace.Full } - stack := captureStacktrace(log.callerSkip+callerSkipOffset, stackDepth) + stack := stacktrace.Capture(log.callerSkip+callerSkipOffset, stackDepth) defer stack.Free() if stack.Count() == 0 { if log.addCaller { fmt.Fprintf(log.errorOutput, "%v Logger.check error: failed to get caller\n", ent.Time.UTC()) - log.errorOutput.Sync() + _ = log.errorOutput.Sync() } return ce } @@ -391,7 +417,7 @@ func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { buffer := bufferpool.Get() defer buffer.Free() - stackfmt := newStackFormatter(buffer) + stackfmt := stacktrace.NewFormatter(buffer) // We've already extracted the first frame, so format that // separately and defer to stackfmt for the rest. diff --git a/vendor/go.uber.org/zap/sink.go b/vendor/go.uber.org/zap/sink.go index 478c9a10f..499772a00 100644 --- a/vendor/go.uber.org/zap/sink.go +++ b/vendor/go.uber.org/zap/sink.go @@ -66,7 +66,8 @@ func newSinkRegistry() *sinkRegistry { factories: make(map[string]func(*url.URL) (Sink, error)), openFile: os.OpenFile, } - sr.RegisterSink(schemeFile, sr.newFileSinkFromURL) + // Infallible operation: the registry is empty, so we can't have a conflict. + _ = sr.RegisterSink(schemeFile, sr.newFileSinkFromURL) return sr } @@ -154,7 +155,7 @@ func (sr *sinkRegistry) newFileSinkFromPath(path string) (Sink, error) { case "stderr": return nopCloserSink{os.Stderr}, nil } - return sr.openFile(path, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666) + return sr.openFile(path, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0o666) } func normalizeScheme(s string) (string, error) { diff --git a/vendor/go.uber.org/zap/writer.go b/vendor/go.uber.org/zap/writer.go index f08728e1e..06768c679 100644 --- a/vendor/go.uber.org/zap/writer.go +++ b/vendor/go.uber.org/zap/writer.go @@ -48,21 +48,21 @@ import ( // os.Stdout and os.Stderr. When specified without a scheme, relative file // paths also work. func Open(paths ...string) (zapcore.WriteSyncer, func(), error) { - writers, close, err := open(paths) + writers, closeAll, err := open(paths) if err != nil { return nil, nil, err } writer := CombineWriteSyncers(writers...) 
- return writer, close, nil + return writer, closeAll, nil } func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { writers := make([]zapcore.WriteSyncer, 0, len(paths)) closers := make([]io.Closer, 0, len(paths)) - close := func() { + closeAll := func() { for _, c := range closers { - c.Close() + _ = c.Close() } } @@ -77,11 +77,11 @@ func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { closers = append(closers, sink) } if openErr != nil { - close() + closeAll() return nil, nil, openErr } - return writers, close, nil + return writers, closeAll, nil } // CombineWriteSyncers is a utility that combines multiple WriteSyncers into a diff --git a/vendor/go.uber.org/zap/zapcore/core.go b/vendor/go.uber.org/zap/zapcore/core.go index 9dfd64051..776e93f6f 100644 --- a/vendor/go.uber.org/zap/zapcore/core.go +++ b/vendor/go.uber.org/zap/zapcore/core.go @@ -102,9 +102,9 @@ func (c *ioCore) Write(ent Entry, fields []Field) error { return err } if ent.Level > ErrorLevel { - // Since we may be crashing the program, sync the output. Ignore Sync - // errors, pending a clean solution to issue #370. - c.Sync() + // Since we may be crashing the program, sync the output. + // Ignore Sync errors, pending a clean solution to issue #370. + _ = c.Sync() } return nil } diff --git a/vendor/go.uber.org/zap/zapcore/entry.go b/vendor/go.uber.org/zap/zapcore/entry.go index 059844f92..459a5d7ce 100644 --- a/vendor/go.uber.org/zap/zapcore/entry.go +++ b/vendor/go.uber.org/zap/zapcore/entry.go @@ -242,7 +242,7 @@ func (ce *CheckedEntry) Write(fields ...Field) { // CheckedEntry is being used after it was returned to the pool, // the message may be an amalgamation from multiple call sites. fmt.Fprintf(ce.ErrorOutput, "%v Unsafe CheckedEntry re-use near Entry %+v.\n", ce.Time, ce.Entry) - ce.ErrorOutput.Sync() + _ = ce.ErrorOutput.Sync() // ignore error } return } @@ -254,7 +254,7 @@ func (ce *CheckedEntry) Write(fields ...Field) { } if err != nil && ce.ErrorOutput != nil { fmt.Fprintf(ce.ErrorOutput, "%v write error: %v\n", ce.Time, err) - ce.ErrorOutput.Sync() + _ = ce.ErrorOutput.Sync() // ignore error } hook := ce.after diff --git a/vendor/go.uber.org/zap/zapcore/error.go b/vendor/go.uber.org/zap/zapcore/error.go index c67dd71df..c40df1326 100644 --- a/vendor/go.uber.org/zap/zapcore/error.go +++ b/vendor/go.uber.org/zap/zapcore/error.go @@ -98,8 +98,11 @@ func (errs errArray) MarshalLogArray(arr ArrayEncoder) error { } el := newErrArrayElem(errs[i]) - arr.AppendObject(el) + err := arr.AppendObject(el) el.Free() + if err != nil { + return err + } } return nil } diff --git a/vendor/go.uber.org/zap/zapcore/json_encoder.go b/vendor/go.uber.org/zap/zapcore/json_encoder.go index ce6838de2..c8ab86979 100644 --- a/vendor/go.uber.org/zap/zapcore/json_encoder.go +++ b/vendor/go.uber.org/zap/zapcore/json_encoder.go @@ -486,73 +486,98 @@ func (enc *jsonEncoder) appendFloat(val float64, bitSize int) { // Unlike the standard library's encoder, it doesn't attempt to protect the // user from browser vulnerabilities or JSONP-related problems. func (enc *jsonEncoder) safeAddString(s string) { - for i := 0; i < len(s); { - if enc.tryAddRuneSelf(s[i]) { - i++ - continue - } - r, size := utf8.DecodeRuneInString(s[i:]) - if enc.tryAddRuneError(r, size) { - i++ - continue - } - enc.buf.AppendString(s[i : i+size]) - i += size - } + safeAppendStringLike( + (*buffer.Buffer).AppendString, + utf8.DecodeRuneInString, + enc.buf, + s, + ) } // safeAddByteString is no-alloc equivalent of safeAddString(string(s)) for s []byte. 
func (enc *jsonEncoder) safeAddByteString(s []byte) { + safeAppendStringLike( + (*buffer.Buffer).AppendBytes, + utf8.DecodeRune, + enc.buf, + s, + ) +} + +// safeAppendStringLike is a generic implementation of safeAddString and safeAddByteString. +// It appends a string or byte slice to the buffer, escaping all special characters. +func safeAppendStringLike[S []byte | string]( + // appendTo appends this string-like object to the buffer. + appendTo func(*buffer.Buffer, S), + // decodeRune decodes the next rune from the string-like object + // and returns its value and width in bytes. + decodeRune func(S) (rune, int), + buf *buffer.Buffer, + s S, +) { + // The encoding logic below works by skipping over characters + // that can be safely copied as-is, + // until a character is found that needs special handling. + // At that point, we copy everything we've seen so far, + // and then handle that special character. + // + // last is the index of the last byte that was copied to the buffer. + last := 0 for i := 0; i < len(s); { - if enc.tryAddRuneSelf(s[i]) { + if s[i] >= utf8.RuneSelf { + // Character >= RuneSelf may be part of a multi-byte rune. + // They need to be decoded before we can decide how to handle them. + r, size := decodeRune(s[i:]) + if r != utf8.RuneError || size != 1 { + // No special handling required. + // Skip over this rune and continue. + i += size + continue + } + + // Invalid UTF-8 sequence. + // Replace it with the Unicode replacement character. + appendTo(buf, s[last:i]) + buf.AppendString(`\ufffd`) + i++ - continue - } - r, size := utf8.DecodeRune(s[i:]) - if enc.tryAddRuneError(r, size) { + last = i + } else { + // Character < RuneSelf is a single-byte UTF-8 rune. + if s[i] >= 0x20 && s[i] != '\\' && s[i] != '"' { + // No escaping necessary. + // Skip over this character and continue. + i++ + continue + } + + // This character needs to be escaped. + appendTo(buf, s[last:i]) + switch s[i] { + case '\\', '"': + buf.AppendByte('\\') + buf.AppendByte(s[i]) + case '\n': + buf.AppendByte('\\') + buf.AppendByte('n') + case '\r': + buf.AppendByte('\\') + buf.AppendByte('r') + case '\t': + buf.AppendByte('\\') + buf.AppendByte('t') + default: + // Encode bytes < 0x20, except for the escape sequences above. + buf.AppendString(`\u00`) + buf.AppendByte(_hex[s[i]>>4]) + buf.AppendByte(_hex[s[i]&0xF]) + } + i++ - continue + last = i } - enc.buf.Write(s[i : i+size]) - i += size } -} -// tryAddRuneSelf appends b if it is valid UTF-8 character represented in a single byte. -func (enc *jsonEncoder) tryAddRuneSelf(b byte) bool { - if b >= utf8.RuneSelf { - return false - } - if b >= 0x20 && b != '\\' && b != '"' { - enc.buf.AppendByte(b) - return true - } - switch b { - case '\\', '"': - enc.buf.AppendByte('\\') - enc.buf.AppendByte(b) - case '\n': - enc.buf.AppendByte('\\') - enc.buf.AppendByte('n') - case '\r': - enc.buf.AppendByte('\\') - enc.buf.AppendByte('r') - case '\t': - enc.buf.AppendByte('\\') - enc.buf.AppendByte('t') - default: - // Encode bytes < 0x20, except for the escape sequences above. 
- enc.buf.AppendString(`\u00`) - enc.buf.AppendByte(_hex[b>>4]) - enc.buf.AppendByte(_hex[b&0xF]) - } - return true -} - -func (enc *jsonEncoder) tryAddRuneError(r rune, size int) bool { - if r == utf8.RuneError && size == 1 { - enc.buf.AppendString(`\ufffd`) - return true - } - return false + // add remaining + appendTo(buf, s[last:]) } diff --git a/vendor/go.uber.org/zap/zapcore/lazy_with.go b/vendor/go.uber.org/zap/zapcore/lazy_with.go new file mode 100644 index 000000000..05288d6a8 --- /dev/null +++ b/vendor/go.uber.org/zap/zapcore/lazy_with.go @@ -0,0 +1,54 @@ +// Copyright (c) 2023 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package zapcore + +import "sync" + +type lazyWithCore struct { + Core + sync.Once + fields []Field +} + +// NewLazyWith wraps a Core with a "lazy" Core that will only encode fields if +// the logger is written to (or is further chained in a lon-lazy manner). +func NewLazyWith(core Core, fields []Field) Core { + return &lazyWithCore{ + Core: core, + fields: fields, + } +} + +func (d *lazyWithCore) initOnce() { + d.Once.Do(func() { + d.Core = d.Core.With(d.fields) + }) +} + +func (d *lazyWithCore) With(fields []Field) Core { + d.initOnce() + return d.Core.With(fields) +} + +func (d *lazyWithCore) Check(e Entry, ce *CheckedEntry) *CheckedEntry { + d.initOnce() + return d.Core.Check(e, ce) +} diff --git a/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go index 7a0b9ed10..2459d069f 100644 --- a/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go +++ b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go @@ -47,6 +47,10 @@ type Config struct { // client ID & client secret sent. The zero value means to // auto-detect. AuthStyle oauth2.AuthStyle + + // authStyleCache caches which auth style to use when Endpoint.AuthStyle is + // the zero value (AuthStyleAutoDetect). + authStyleCache internal.LazyAuthStyleCache } // Token uses client credentials to retrieve a token. 
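The json_encoder.go hunk above replaces `tryAddRuneSelf`/`tryAddRuneError` with a single generic `safeAppendStringLike` that copies safe spans in batches; this is the string-encoding speedup noted in the 1.26.0 changelog. The escaping behavior itself is unchanged and can be observed through the public encoder API, roughly as in this sketch (not part of the diff):

```go
package main

import (
	"fmt"
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	enc := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())

	entry := zapcore.Entry{
		Time:    time.Now(),
		Level:   zapcore.InfoLevel,
		Message: "line1\nline2 \xff", // a newline and an invalid UTF-8 byte
	}

	buf, err := enc.EncodeEntry(entry, nil)
	if err != nil {
		panic(err)
	}
	// The newline is escaped as \n and the invalid byte becomes \ufffd.
	fmt.Print(buf.String())
}
```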
@@ -103,7 +107,7 @@ func (c *tokenSource) Token() (*oauth2.Token, error) { v[k] = p } - tk, err := internal.RetrieveToken(c.ctx, c.conf.ClientID, c.conf.ClientSecret, c.conf.TokenURL, v, internal.AuthStyle(c.conf.AuthStyle)) + tk, err := internal.RetrieveToken(c.ctx, c.conf.ClientID, c.conf.ClientSecret, c.conf.TokenURL, v, internal.AuthStyle(c.conf.AuthStyle), c.conf.authStyleCache.Get()) if err != nil { if rErr, ok := err.(*internal.RetrieveError); ok { return nil, (*oauth2.RetrieveError)(rErr) diff --git a/vendor/golang.org/x/oauth2/deviceauth.go b/vendor/golang.org/x/oauth2/deviceauth.go new file mode 100644 index 000000000..e99c92f39 --- /dev/null +++ b/vendor/golang.org/x/oauth2/deviceauth.go @@ -0,0 +1,198 @@ +package oauth2 + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "golang.org/x/oauth2/internal" +) + +// https://datatracker.ietf.org/doc/html/rfc8628#section-3.5 +const ( + errAuthorizationPending = "authorization_pending" + errSlowDown = "slow_down" + errAccessDenied = "access_denied" + errExpiredToken = "expired_token" +) + +// DeviceAuthResponse describes a successful RFC 8628 Device Authorization Response +// https://datatracker.ietf.org/doc/html/rfc8628#section-3.2 +type DeviceAuthResponse struct { + // DeviceCode + DeviceCode string `json:"device_code"` + // UserCode is the code the user should enter at the verification uri + UserCode string `json:"user_code"` + // VerificationURI is where user should enter the user code + VerificationURI string `json:"verification_uri"` + // VerificationURIComplete (if populated) includes the user code in the verification URI. This is typically shown to the user in non-textual form, such as a QR code. + VerificationURIComplete string `json:"verification_uri_complete,omitempty"` + // Expiry is when the device code and user code expire + Expiry time.Time `json:"expires_in,omitempty"` + // Interval is the duration in seconds that Poll should wait between requests + Interval int64 `json:"interval,omitempty"` +} + +func (d DeviceAuthResponse) MarshalJSON() ([]byte, error) { + type Alias DeviceAuthResponse + var expiresIn int64 + if !d.Expiry.IsZero() { + expiresIn = int64(time.Until(d.Expiry).Seconds()) + } + return json.Marshal(&struct { + ExpiresIn int64 `json:"expires_in,omitempty"` + *Alias + }{ + ExpiresIn: expiresIn, + Alias: (*Alias)(&d), + }) + +} + +func (c *DeviceAuthResponse) UnmarshalJSON(data []byte) error { + type Alias DeviceAuthResponse + aux := &struct { + ExpiresIn int64 `json:"expires_in"` + // workaround misspelling of verification_uri + VerificationURL string `json:"verification_url"` + *Alias + }{ + Alias: (*Alias)(c), + } + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + if aux.ExpiresIn != 0 { + c.Expiry = time.Now().UTC().Add(time.Second * time.Duration(aux.ExpiresIn)) + } + if c.VerificationURI == "" { + c.VerificationURI = aux.VerificationURL + } + return nil +} + +// DeviceAuth returns a device auth struct which contains a device code +// and authorization information provided for users to enter on another device. 
+func (c *Config) DeviceAuth(ctx context.Context, opts ...AuthCodeOption) (*DeviceAuthResponse, error) { + // https://datatracker.ietf.org/doc/html/rfc8628#section-3.1 + v := url.Values{ + "client_id": {c.ClientID}, + } + if len(c.Scopes) > 0 { + v.Set("scope", strings.Join(c.Scopes, " ")) + } + for _, opt := range opts { + opt.setValue(v) + } + return retrieveDeviceAuth(ctx, c, v) +} + +func retrieveDeviceAuth(ctx context.Context, c *Config, v url.Values) (*DeviceAuthResponse, error) { + if c.Endpoint.DeviceAuthURL == "" { + return nil, errors.New("endpoint missing DeviceAuthURL") + } + + req, err := http.NewRequest("POST", c.Endpoint.DeviceAuthURL, strings.NewReader(v.Encode())) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set("Accept", "application/json") + + t := time.Now() + r, err := internal.ContextClient(ctx).Do(req) + if err != nil { + return nil, err + } + + body, err := io.ReadAll(io.LimitReader(r.Body, 1<<20)) + if err != nil { + return nil, fmt.Errorf("oauth2: cannot auth device: %v", err) + } + if code := r.StatusCode; code < 200 || code > 299 { + return nil, &RetrieveError{ + Response: r, + Body: body, + } + } + + da := &DeviceAuthResponse{} + err = json.Unmarshal(body, &da) + if err != nil { + return nil, fmt.Errorf("unmarshal %s", err) + } + + if !da.Expiry.IsZero() { + // Make a small adjustment to account for time taken by the request + da.Expiry = da.Expiry.Add(-time.Since(t)) + } + + return da, nil +} + +// DeviceAccessToken polls the server to exchange a device code for a token. +func (c *Config) DeviceAccessToken(ctx context.Context, da *DeviceAuthResponse, opts ...AuthCodeOption) (*Token, error) { + if !da.Expiry.IsZero() { + var cancel context.CancelFunc + ctx, cancel = context.WithDeadline(ctx, da.Expiry) + defer cancel() + } + + // https://datatracker.ietf.org/doc/html/rfc8628#section-3.4 + v := url.Values{ + "client_id": {c.ClientID}, + "grant_type": {"urn:ietf:params:oauth:grant-type:device_code"}, + "device_code": {da.DeviceCode}, + } + if len(c.Scopes) > 0 { + v.Set("scope", strings.Join(c.Scopes, " ")) + } + for _, opt := range opts { + opt.setValue(v) + } + + // "If no value is provided, clients MUST use 5 as the default." + // https://datatracker.ietf.org/doc/html/rfc8628#section-3.2 + interval := da.Interval + if interval == 0 { + interval = 5 + } + + ticker := time.NewTicker(time.Duration(interval) * time.Second) + defer ticker.Stop() + for { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-ticker.C: + tok, err := retrieveToken(ctx, c, v) + if err == nil { + return tok, nil + } + + e, ok := err.(*RetrieveError) + if !ok { + return nil, err + } + switch e.ErrorCode { + case errSlowDown: + // https://datatracker.ietf.org/doc/html/rfc8628#section-3.5 + // "the interval MUST be increased by 5 seconds for this and all subsequent requests" + interval += 5 + ticker.Reset(time.Duration(interval) * time.Second) + case errAuthorizationPending: + // Do nothing. 
+ case errAccessDenied, errExpiredToken: + fallthrough + default: + return tok, err + } + } + } +} diff --git a/vendor/golang.org/x/oauth2/internal/token.go b/vendor/golang.org/x/oauth2/internal/token.go index 58901bda5..e83ddeef0 100644 --- a/vendor/golang.org/x/oauth2/internal/token.go +++ b/vendor/golang.org/x/oauth2/internal/token.go @@ -18,6 +18,7 @@ import ( "strconv" "strings" "sync" + "sync/atomic" "time" ) @@ -115,41 +116,60 @@ const ( AuthStyleInHeader AuthStyle = 2 ) -// authStyleCache is the set of tokenURLs we've successfully used via +// LazyAuthStyleCache is a backwards compatibility compromise to let Configs +// have a lazily-initialized AuthStyleCache. +// +// The two users of this, oauth2.Config and oauth2/clientcredentials.Config, +// both would ideally just embed an unexported AuthStyleCache but because both +// were historically allowed to be copied by value we can't retroactively add an +// uncopyable Mutex to them. +// +// We could use an atomic.Pointer, but that was added recently enough (in Go +// 1.18) that we'd break Go 1.17 users where the tests as of 2023-08-03 +// still pass. By using an atomic.Value, it supports both Go 1.17 and +// copying by value, even if that's not ideal. +type LazyAuthStyleCache struct { + v atomic.Value // of *AuthStyleCache +} + +func (lc *LazyAuthStyleCache) Get() *AuthStyleCache { + if c, ok := lc.v.Load().(*AuthStyleCache); ok { + return c + } + c := new(AuthStyleCache) + if !lc.v.CompareAndSwap(nil, c) { + c = lc.v.Load().(*AuthStyleCache) + } + return c +} + +// AuthStyleCache is the set of tokenURLs we've successfully used via // RetrieveToken and which style auth we ended up using. // It's called a cache, but it doesn't (yet?) shrink. It's expected that // the set of OAuth2 servers a program contacts over time is fixed and // small. -var authStyleCache struct { - sync.Mutex - m map[string]AuthStyle // keyed by tokenURL -} - -// ResetAuthCache resets the global authentication style cache used -// for AuthStyleUnknown token requests. -func ResetAuthCache() { - authStyleCache.Lock() - defer authStyleCache.Unlock() - authStyleCache.m = nil +type AuthStyleCache struct { + mu sync.Mutex + m map[string]AuthStyle // keyed by tokenURL } // lookupAuthStyle reports which auth style we last used with tokenURL // when calling RetrieveToken and whether we have ever done so. -func lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) { - authStyleCache.Lock() - defer authStyleCache.Unlock() - style, ok = authStyleCache.m[tokenURL] +func (c *AuthStyleCache) lookupAuthStyle(tokenURL string) (style AuthStyle, ok bool) { + c.mu.Lock() + defer c.mu.Unlock() + style, ok = c.m[tokenURL] return } // setAuthStyle adds an entry to authStyleCache, documented above. 
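The new `deviceauth.go` above adds the RFC 8628 device authorization grant to `golang.org/x/oauth2`. A minimal sketch of how a CLI might drive it; the endpoint URLs and client ID are placeholders, not values from this repository:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"golang.org/x/oauth2"
)

func main() {
	// Placeholder provider configuration; substitute your provider's real
	// device-authorization and token endpoints.
	conf := &oauth2.Config{
		ClientID: "my-client-id",
		Scopes:   []string{"profile"},
		Endpoint: oauth2.Endpoint{
			DeviceAuthURL: "https://example.com/oauth/device/code",
			TokenURL:      "https://example.com/oauth/token",
		},
	}

	ctx := context.Background()

	// Step 1: request a device code and tell the user where to enter it.
	da, err := conf.DeviceAuth(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("Visit %s and enter code %s\n", da.VerificationURI, da.UserCode)

	// Step 2: poll the token endpoint until the user approves or the code expires.
	tok, err := conf.DeviceAccessToken(ctx, da)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("token acquired, expires:", tok.Expiry)
}
```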
-func setAuthStyle(tokenURL string, v AuthStyle) { - authStyleCache.Lock() - defer authStyleCache.Unlock() - if authStyleCache.m == nil { - authStyleCache.m = make(map[string]AuthStyle) +func (c *AuthStyleCache) setAuthStyle(tokenURL string, v AuthStyle) { + c.mu.Lock() + defer c.mu.Unlock() + if c.m == nil { + c.m = make(map[string]AuthStyle) } - authStyleCache.m[tokenURL] = v + c.m[tokenURL] = v } // newTokenRequest returns a new *http.Request to retrieve a new token @@ -189,10 +209,10 @@ func cloneURLValues(v url.Values) url.Values { return v2 } -func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle) (*Token, error) { +func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, v url.Values, authStyle AuthStyle, styleCache *AuthStyleCache) (*Token, error) { needsAuthStyleProbe := authStyle == 0 if needsAuthStyleProbe { - if style, ok := lookupAuthStyle(tokenURL); ok { + if style, ok := styleCache.lookupAuthStyle(tokenURL); ok { authStyle = style needsAuthStyleProbe = false } else { @@ -222,7 +242,7 @@ func RetrieveToken(ctx context.Context, clientID, clientSecret, tokenURL string, token, err = doTokenRoundTrip(ctx, req) } if needsAuthStyleProbe && err == nil { - setAuthStyle(tokenURL, authStyle) + styleCache.setAuthStyle(tokenURL, authStyle) } // Don't overwrite `RefreshToken` with an empty value // if this was a token refreshing request. diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go index 9085fabe3..90a2c3d6d 100644 --- a/vendor/golang.org/x/oauth2/oauth2.go +++ b/vendor/golang.org/x/oauth2/oauth2.go @@ -58,6 +58,10 @@ type Config struct { // Scope specifies optional requested permissions. Scopes []string + + // authStyleCache caches which auth style to use when Endpoint.AuthStyle is + // the zero value (AuthStyleAutoDetect). + authStyleCache internal.LazyAuthStyleCache } // A TokenSource is anything that can return a token. @@ -71,8 +75,9 @@ type TokenSource interface { // Endpoint represents an OAuth 2.0 provider's authorization and token // endpoint URLs. type Endpoint struct { - AuthURL string - TokenURL string + AuthURL string + DeviceAuthURL string + TokenURL string // AuthStyle optionally specifies how the endpoint wants the // client ID & client secret sent. The zero value means to @@ -139,15 +144,19 @@ func SetAuthURLParam(key, value string) AuthCodeOption { // AuthCodeURL returns a URL to OAuth 2.0 provider's consent page // that asks for permissions for the required scopes explicitly. // -// State is a token to protect the user from CSRF attacks. You must -// always provide a non-empty string and validate that it matches the -// state query parameter on your redirect callback. -// See http://tools.ietf.org/html/rfc6749#section-10.12 for more info. +// State is an opaque value used by the client to maintain state between the +// request and callback. The authorization server includes this value when +// redirecting the user agent back to the client. // // Opts may include AccessTypeOnline or AccessTypeOffline, as well // as ApprovalForce. -// It can also be used to pass the PKCE challenge. -// See https://www.oauth.com/oauth2-servers/pkce/ for more info. +// +// To protect against CSRF attacks, opts should include a PKCE challenge +// (S256ChallengeOption). Not all servers support PKCE. An alternative is to +// generate a random state parameter and verify it after exchange. 
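The `LazyAuthStyleCache` comment above explains the design choice: the cache is installed lazily through an `atomic.Value` because `oauth2.Config` and `clientcredentials.Config` have historically been copyable by value, so an uncopyable mutex cannot be embedded retroactively. A generic sketch of that lazy-initialization pattern, with hypothetical names:

```go
package lazycache

import "sync/atomic"

// Cache stands in for whatever shared state needs lazy initialization.
type Cache struct {
	byTokenURL map[string]int
}

// Lazy mirrors the idea above: the zero value is usable, the wrapper stays
// copyable because it holds no mutex, and whichever caller races first
// installs the shared *Cache for everyone else.
type Lazy struct {
	v atomic.Value // holds a *Cache
}

func (l *Lazy) Get() *Cache {
	if c, ok := l.v.Load().(*Cache); ok {
		return c
	}
	c := &Cache{byTokenURL: make(map[string]int)}
	if !l.v.CompareAndSwap(nil, c) {
		// Another goroutine installed a cache first; use that one.
		c = l.v.Load().(*Cache)
	}
	return c
}
```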
+// See https://datatracker.ietf.org/doc/html/rfc6749#section-10.12 (predating +// PKCE), https://www.oauth.com/oauth2-servers/pkce/ and +// https://www.ietf.org/archive/id/draft-ietf-oauth-v2-1-09.html#name-cross-site-request-forgery (describing both approaches) func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string { var buf bytes.Buffer buf.WriteString(c.Endpoint.AuthURL) @@ -162,7 +171,6 @@ func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string { v.Set("scope", strings.Join(c.Scopes, " ")) } if state != "" { - // TODO(light): Docs say never to omit state; don't allow empty. v.Set("state", state) } for _, opt := range opts { @@ -207,10 +215,11 @@ func (c *Config) PasswordCredentialsToken(ctx context.Context, username, passwor // The provided context optionally controls which HTTP client is used. See the HTTPClient variable. // // The code will be in the *http.Request.FormValue("code"). Before -// calling Exchange, be sure to validate FormValue("state"). +// calling Exchange, be sure to validate FormValue("state") if you are +// using it to protect against CSRF attacks. // -// Opts may include the PKCE verifier code if previously used in AuthCodeURL. -// See https://www.oauth.com/oauth2-servers/pkce/ for more info. +// If using PKCE to protect against CSRF attacks, opts should include a +// VerifierOption. func (c *Config) Exchange(ctx context.Context, code string, opts ...AuthCodeOption) (*Token, error) { v := url.Values{ "grant_type": {"authorization_code"}, diff --git a/vendor/golang.org/x/oauth2/pkce.go b/vendor/golang.org/x/oauth2/pkce.go new file mode 100644 index 000000000..50593b6df --- /dev/null +++ b/vendor/golang.org/x/oauth2/pkce.go @@ -0,0 +1,68 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +package oauth2 + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/base64" + "net/url" +) + +const ( + codeChallengeKey = "code_challenge" + codeChallengeMethodKey = "code_challenge_method" + codeVerifierKey = "code_verifier" +) + +// GenerateVerifier generates a PKCE code verifier with 32 octets of randomness. +// This follows recommendations in RFC 7636. +// +// A fresh verifier should be generated for each authorization. +// S256ChallengeOption(verifier) should then be passed to Config.AuthCodeURL +// (or Config.DeviceAccess) and VerifierOption(verifier) to Config.Exchange +// (or Config.DeviceAccessToken). +func GenerateVerifier() string { + // "RECOMMENDED that the output of a suitable random number generator be + // used to create a 32-octet sequence. The octet sequence is then + // base64url-encoded to produce a 43-octet URL-safe string to use as the + // code verifier." + // https://datatracker.ietf.org/doc/html/rfc7636#section-4.1 + data := make([]byte, 32) + if _, err := rand.Read(data); err != nil { + panic(err) + } + return base64.RawURLEncoding.EncodeToString(data) +} + +// VerifierOption returns a PKCE code verifier AuthCodeOption. It should be +// passed to Config.Exchange or Config.DeviceAccessToken only. +func VerifierOption(verifier string) AuthCodeOption { + return setParam{k: codeVerifierKey, v: verifier} +} + +// S256ChallengeFromVerifier returns a PKCE code challenge derived from verifier with method S256. +// +// Prefer to use S256ChallengeOption where possible. 
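The new `pkce.go` helpers (`GenerateVerifier`, `S256ChallengeOption`, `VerifierOption`) replace hand-rolled `code_challenge` handling. A hedged sketch of the intended call pattern; `conf` and `code` are assumed to come from the application's own OAuth configuration and redirect handler:

```go
package example

import (
	"context"
	"fmt"

	"golang.org/x/oauth2"
)

// exchangeWithPKCE sketches the intended flow: the same verifier derives the
// challenge passed to AuthCodeURL and is later sent verbatim to Exchange.
func exchangeWithPKCE(ctx context.Context, conf *oauth2.Config, code string) (*oauth2.Token, error) {
	verifier := oauth2.GenerateVerifier()

	// In a real app the URL is produced before the redirect and the verifier
	// is persisted (for example in the session) until the callback arrives.
	authURL := conf.AuthCodeURL("state", oauth2.S256ChallengeOption(verifier))
	fmt.Println("visit:", authURL)

	return conf.Exchange(ctx, code, oauth2.VerifierOption(verifier))
}
```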
+func S256ChallengeFromVerifier(verifier string) string { + sha := sha256.Sum256([]byte(verifier)) + return base64.RawURLEncoding.EncodeToString(sha[:]) +} + +// S256ChallengeOption derives a PKCE code challenge derived from verifier with +// method S256. It should be passed to Config.AuthCodeURL or Config.DeviceAccess +// only. +func S256ChallengeOption(verifier string) AuthCodeOption { + return challengeOption{ + challenge_method: "S256", + challenge: S256ChallengeFromVerifier(verifier), + } +} + +type challengeOption struct{ challenge_method, challenge string } + +func (p challengeOption) setValue(m url.Values) { + m.Set(codeChallengeMethodKey, p.challenge_method) + m.Set(codeChallengeKey, p.challenge) +} diff --git a/vendor/golang.org/x/oauth2/token.go b/vendor/golang.org/x/oauth2/token.go index 5ffce9764..5bbb33217 100644 --- a/vendor/golang.org/x/oauth2/token.go +++ b/vendor/golang.org/x/oauth2/token.go @@ -164,7 +164,7 @@ func tokenFromInternal(t *internal.Token) *Token { // This token is then mapped from *internal.Token into an *oauth2.Token which is returned along // with an error.. func retrieveToken(ctx context.Context, c *Config, v url.Values) (*Token, error) { - tk, err := internal.RetrieveToken(ctx, c.ClientID, c.ClientSecret, c.Endpoint.TokenURL, v, internal.AuthStyle(c.Endpoint.AuthStyle)) + tk, err := internal.RetrieveToken(ctx, c.ClientID, c.ClientSecret, c.Endpoint.TokenURL, v, internal.AuthStyle(c.Endpoint.AuthStyle), c.authStyleCache.Get()) if err != nil { if rErr, ok := err.(*internal.RetrieveError); ok { return nil, (*RetrieveError)(rErr) diff --git a/vendor/gomodules.xyz/jsonpatch/v2/jsonpatch.go b/vendor/gomodules.xyz/jsonpatch/v2/jsonpatch.go index a411d542c..0d7823b3c 100644 --- a/vendor/gomodules.xyz/jsonpatch/v2/jsonpatch.go +++ b/vendor/gomodules.xyz/jsonpatch/v2/jsonpatch.go @@ -1,6 +1,7 @@ package jsonpatch import ( + "bytes" "encoding/json" "fmt" "reflect" @@ -64,6 +65,9 @@ func NewOperation(op, path string, value interface{}) Operation { // // An error will be returned if any of the two documents are invalid. func CreatePatch(a, b []byte) ([]Operation, error) { + if bytes.Equal(a, b) { + return []Operation{}, nil + } var aI interface{} var bI interface{} err := json.Unmarshal(a, &aI) diff --git a/vendor/google.golang.org/appengine/internal/api.go b/vendor/google.golang.org/appengine/internal/api.go index 721053c20..0569f5dd4 100644 --- a/vendor/google.golang.org/appengine/internal/api.go +++ b/vendor/google.golang.org/appengine/internal/api.go @@ -2,12 +2,14 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. 
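The `jsonpatch` change above makes `CreatePatch` short-circuit when the two documents are byte-identical, skipping the unmarshal entirely. A small usage sketch with made-up documents:

```go
package example

import (
	"fmt"

	"gomodules.xyz/jsonpatch/v2"
)

func diffDocs() error {
	a := []byte(`{"name":"pipeline","replicas":1}`)
	b := []byte(`{"name":"pipeline","replicas":3}`)

	// Byte-identical inputs now return an empty patch without unmarshalling
	// either document; differing inputs are diffed as before.
	ops, err := jsonpatch.CreatePatch(a, b)
	if err != nil {
		return err
	}
	for _, op := range ops {
		fmt.Printf("%+v\n", op) // one replace operation on /replicas
	}
	return nil
}
```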
+//go:build !appengine // +build !appengine package internal import ( "bytes" + "context" "errors" "fmt" "io/ioutil" @@ -24,7 +26,6 @@ import ( "time" "github.com/golang/protobuf/proto" - netcontext "golang.org/x/net/context" basepb "google.golang.org/appengine/internal/base" logpb "google.golang.org/appengine/internal/log" @@ -32,8 +33,7 @@ import ( ) const ( - apiPath = "/rpc_http" - defaultTicketSuffix = "/default.20150612t184001.0" + apiPath = "/rpc_http" ) var ( @@ -65,21 +65,22 @@ var ( IdleConnTimeout: 90 * time.Second, }, } - - defaultTicketOnce sync.Once - defaultTicket string - backgroundContextOnce sync.Once - backgroundContext netcontext.Context ) -func apiURL() *url.URL { +func apiURL(ctx context.Context) *url.URL { host, port := "appengine.googleapis.internal", "10001" if h := os.Getenv("API_HOST"); h != "" { host = h } + if hostOverride := ctx.Value(apiHostOverrideKey); hostOverride != nil { + host = hostOverride.(string) + } if p := os.Getenv("API_PORT"); p != "" { port = p } + if portOverride := ctx.Value(apiPortOverrideKey); portOverride != nil { + port = portOverride.(string) + } return &url.URL{ Scheme: "http", Host: host + ":" + port, @@ -87,82 +88,97 @@ func apiURL() *url.URL { } } -func handleHTTP(w http.ResponseWriter, r *http.Request) { - c := &context{ - req: r, - outHeader: w.Header(), - apiURL: apiURL(), - } - r = r.WithContext(withContext(r.Context(), c)) - c.req = r - - stopFlushing := make(chan int) +// Middleware wraps an http handler so that it can make GAE API calls +func Middleware(next http.Handler) http.Handler { + return handleHTTPMiddleware(executeRequestSafelyMiddleware(next)) +} - // Patch up RemoteAddr so it looks reasonable. - if addr := r.Header.Get(userIPHeader); addr != "" { - r.RemoteAddr = addr - } else if addr = r.Header.Get(remoteAddrHeader); addr != "" { - r.RemoteAddr = addr - } else { - // Should not normally reach here, but pick a sensible default anyway. - r.RemoteAddr = "127.0.0.1" - } - // The address in the headers will most likely be of these forms: - // 123.123.123.123 - // 2001:db8::1 - // net/http.Request.RemoteAddr is specified to be in "IP:port" form. - if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil { - // Assume the remote address is only a host; add a default port. - r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80") - } +func handleHTTPMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + c := &aeContext{ + req: r, + outHeader: w.Header(), + } + r = r.WithContext(withContext(r.Context(), c)) + c.req = r + + stopFlushing := make(chan int) + + // Patch up RemoteAddr so it looks reasonable. + if addr := r.Header.Get(userIPHeader); addr != "" { + r.RemoteAddr = addr + } else if addr = r.Header.Get(remoteAddrHeader); addr != "" { + r.RemoteAddr = addr + } else { + // Should not normally reach here, but pick a sensible default anyway. + r.RemoteAddr = "127.0.0.1" + } + // The address in the headers will most likely be of these forms: + // 123.123.123.123 + // 2001:db8::1 + // net/http.Request.RemoteAddr is specified to be in "IP:port" form. + if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil { + // Assume the remote address is only a host; add a default port. + r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80") + } - // Start goroutine responsible for flushing app logs. - // This is done after adding c to ctx.m (and stopped before removing it) - // because flushing logs requires making an API call. 
- go c.logFlusher(stopFlushing) + if logToLogservice() { + // Start goroutine responsible for flushing app logs. + // This is done after adding c to ctx.m (and stopped before removing it) + // because flushing logs requires making an API call. + go c.logFlusher(stopFlushing) + } - executeRequestSafely(c, r) - c.outHeader = nil // make sure header changes aren't respected any more + next.ServeHTTP(c, r) + c.outHeader = nil // make sure header changes aren't respected any more - stopFlushing <- 1 // any logging beyond this point will be dropped + flushed := make(chan struct{}) + if logToLogservice() { + stopFlushing <- 1 // any logging beyond this point will be dropped - // Flush any pending logs asynchronously. - c.pendingLogs.Lock() - flushes := c.pendingLogs.flushes - if len(c.pendingLogs.lines) > 0 { - flushes++ - } - c.pendingLogs.Unlock() - flushed := make(chan struct{}) - go func() { - defer close(flushed) - // Force a log flush, because with very short requests we - // may not ever flush logs. - c.flushLog(true) - }() - w.Header().Set(logFlushHeader, strconv.Itoa(flushes)) + // Flush any pending logs asynchronously. + c.pendingLogs.Lock() + flushes := c.pendingLogs.flushes + if len(c.pendingLogs.lines) > 0 { + flushes++ + } + c.pendingLogs.Unlock() + go func() { + defer close(flushed) + // Force a log flush, because with very short requests we + // may not ever flush logs. + c.flushLog(true) + }() + w.Header().Set(logFlushHeader, strconv.Itoa(flushes)) + } - // Avoid nil Write call if c.Write is never called. - if c.outCode != 0 { - w.WriteHeader(c.outCode) - } - if c.outBody != nil { - w.Write(c.outBody) - } - // Wait for the last flush to complete before returning, - // otherwise the security ticket will not be valid. - <-flushed + // Avoid nil Write call if c.Write is never called. + if c.outCode != 0 { + w.WriteHeader(c.outCode) + } + if c.outBody != nil { + w.Write(c.outBody) + } + if logToLogservice() { + // Wait for the last flush to complete before returning, + // otherwise the security ticket will not be valid. + <-flushed + } + }) } -func executeRequestSafely(c *context, r *http.Request) { - defer func() { - if x := recover(); x != nil { - logf(c, 4, "%s", renderPanic(x)) // 4 == critical - c.outCode = 500 - } - }() +func executeRequestSafelyMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + defer func() { + if x := recover(); x != nil { + c := w.(*aeContext) + logf(c, 4, "%s", renderPanic(x)) // 4 == critical + c.outCode = 500 + } + }() - http.DefaultServeMux.ServeHTTP(c, r) + next.ServeHTTP(w, r) + }) } func renderPanic(x interface{}) string { @@ -204,9 +220,9 @@ func renderPanic(x interface{}) string { return string(buf) } -// context represents the context of an in-flight HTTP request. +// aeContext represents the aeContext of an in-flight HTTP request. // It implements the appengine.Context and http.ResponseWriter interfaces. -type context struct { +type aeContext struct { req *http.Request outCode int @@ -218,8 +234,6 @@ type context struct { lines []*logpb.UserAppLogLine flushes int } - - apiURL *url.URL } var contextKey = "holds a *context" @@ -227,8 +241,8 @@ var contextKey = "holds a *context" // jointContext joins two contexts in a superficial way. // It takes values and timeouts from a base context, and only values from another context. 
type jointContext struct { - base netcontext.Context - valuesOnly netcontext.Context + base context.Context + valuesOnly context.Context } func (c jointContext) Deadline() (time.Time, bool) { @@ -252,94 +266,54 @@ func (c jointContext) Value(key interface{}) interface{} { // fromContext returns the App Engine context or nil if ctx is not // derived from an App Engine context. -func fromContext(ctx netcontext.Context) *context { - c, _ := ctx.Value(&contextKey).(*context) +func fromContext(ctx context.Context) *aeContext { + c, _ := ctx.Value(&contextKey).(*aeContext) return c } -func withContext(parent netcontext.Context, c *context) netcontext.Context { - ctx := netcontext.WithValue(parent, &contextKey, c) +func withContext(parent context.Context, c *aeContext) context.Context { + ctx := context.WithValue(parent, &contextKey, c) if ns := c.req.Header.Get(curNamespaceHeader); ns != "" { ctx = withNamespace(ctx, ns) } return ctx } -func toContext(c *context) netcontext.Context { - return withContext(netcontext.Background(), c) +func toContext(c *aeContext) context.Context { + return withContext(context.Background(), c) } -func IncomingHeaders(ctx netcontext.Context) http.Header { +func IncomingHeaders(ctx context.Context) http.Header { if c := fromContext(ctx); c != nil { return c.req.Header } return nil } -func ReqContext(req *http.Request) netcontext.Context { +func ReqContext(req *http.Request) context.Context { return req.Context() } -func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context { +func WithContext(parent context.Context, req *http.Request) context.Context { return jointContext{ base: parent, valuesOnly: req.Context(), } } -// DefaultTicket returns a ticket used for background context or dev_appserver. -func DefaultTicket() string { - defaultTicketOnce.Do(func() { - if IsDevAppServer() { - defaultTicket = "testapp" + defaultTicketSuffix - return - } - appID := partitionlessAppID() - escAppID := strings.Replace(strings.Replace(appID, ":", "_", -1), ".", "_", -1) - majVersion := VersionID(nil) - if i := strings.Index(majVersion, "."); i > 0 { - majVersion = majVersion[:i] - } - defaultTicket = fmt.Sprintf("%s/%s.%s.%s", escAppID, ModuleName(nil), majVersion, InstanceID()) - }) - return defaultTicket -} - -func BackgroundContext() netcontext.Context { - backgroundContextOnce.Do(func() { - // Compute background security ticket. - ticket := DefaultTicket() - - c := &context{ - req: &http.Request{ - Header: http.Header{ - ticketHeader: []string{ticket}, - }, - }, - apiURL: apiURL(), - } - backgroundContext = toContext(c) - - // TODO(dsymonds): Wire up the shutdown handler to do a final flush. - go c.logFlusher(make(chan int)) - }) - - return backgroundContext -} - // RegisterTestRequest registers the HTTP request req for testing, such that -// any API calls are sent to the provided URL. It returns a closure to delete -// the registration. +// any API calls are sent to the provided URL. // It should only be used by aetest package. 
-func RegisterTestRequest(req *http.Request, apiURL *url.URL, decorate func(netcontext.Context) netcontext.Context) (*http.Request, func()) { - c := &context{ - req: req, - apiURL: apiURL, - } - ctx := withContext(decorate(req.Context()), c) - req = req.WithContext(ctx) - c.req = req - return req, func() {} +func RegisterTestRequest(req *http.Request, apiURL *url.URL, appID string) *http.Request { + ctx := req.Context() + ctx = withAPIHostOverride(ctx, apiURL.Hostname()) + ctx = withAPIPortOverride(ctx, apiURL.Port()) + ctx = WithAppIDOverride(ctx, appID) + + // use the unregistered request as a placeholder so that withContext can read the headers + c := &aeContext{req: req} + c.req = req.WithContext(withContext(ctx, c)) + return c.req } var errTimeout = &CallError{ @@ -348,7 +322,7 @@ var errTimeout = &CallError{ Timeout: true, } -func (c *context) Header() http.Header { return c.outHeader } +func (c *aeContext) Header() http.Header { return c.outHeader } // Copied from $GOROOT/src/pkg/net/http/transfer.go. Some response status // codes do not permit a response body (nor response entity headers such as @@ -365,7 +339,7 @@ func bodyAllowedForStatus(status int) bool { return true } -func (c *context) Write(b []byte) (int, error) { +func (c *aeContext) Write(b []byte) (int, error) { if c.outCode == 0 { c.WriteHeader(http.StatusOK) } @@ -376,7 +350,7 @@ func (c *context) Write(b []byte) (int, error) { return len(b), nil } -func (c *context) WriteHeader(code int) { +func (c *aeContext) WriteHeader(code int) { if c.outCode != 0 { logf(c, 3, "WriteHeader called multiple times on request.") // error level return @@ -384,10 +358,11 @@ func (c *context) WriteHeader(code int) { c.outCode = code } -func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) { +func post(ctx context.Context, body []byte, timeout time.Duration) (b []byte, err error) { + apiURL := apiURL(ctx) hreq := &http.Request{ Method: "POST", - URL: c.apiURL, + URL: apiURL, Header: http.Header{ apiEndpointHeader: apiEndpointHeaderValue, apiMethodHeader: apiMethodHeaderValue, @@ -396,13 +371,16 @@ func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) }, Body: ioutil.NopCloser(bytes.NewReader(body)), ContentLength: int64(len(body)), - Host: c.apiURL.Host, - } - if info := c.req.Header.Get(dapperHeader); info != "" { - hreq.Header.Set(dapperHeader, info) + Host: apiURL.Host, } - if info := c.req.Header.Get(traceHeader); info != "" { - hreq.Header.Set(traceHeader, info) + c := fromContext(ctx) + if c != nil { + if info := c.req.Header.Get(dapperHeader); info != "" { + hreq.Header.Set(dapperHeader, info) + } + if info := c.req.Header.Get(traceHeader); info != "" { + hreq.Header.Set(traceHeader, info) + } } tr := apiHTTPClient.Transport.(*http.Transport) @@ -444,7 +422,7 @@ func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) return hrespBody, nil } -func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error { +func Call(ctx context.Context, service, method string, in, out proto.Message) error { if ns := NamespaceFromContext(ctx); ns != "" { if fn, ok := NamespaceMods[service]; ok { fn(in, ns) @@ -463,15 +441,11 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message) } c := fromContext(ctx) - if c == nil { - // Give a good error message rather than a panic lower down. - return errNotAppEngineContext - } // Apply transaction modifications if we're in a transaction. 
if t := transactionFromContext(ctx); t != nil { if t.finished { - return errors.New("transaction context has expired") + return errors.New("transaction aeContext has expired") } applyTransaction(in, &t.transaction) } @@ -487,20 +461,13 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message) return err } - ticket := c.req.Header.Get(ticketHeader) - // Use a test ticket under test environment. - if ticket == "" { - if appid := ctx.Value(&appIDOverrideKey); appid != nil { - ticket = appid.(string) + defaultTicketSuffix + ticket := "" + if c != nil { + ticket = c.req.Header.Get(ticketHeader) + if dri := c.req.Header.Get(devRequestIdHeader); IsDevAppServer() && dri != "" { + ticket = dri } } - // Fall back to use background ticket when the request ticket is not available in Flex or dev_appserver. - if ticket == "" { - ticket = DefaultTicket() - } - if dri := c.req.Header.Get(devRequestIdHeader); IsDevAppServer() && dri != "" { - ticket = dri - } req := &remotepb.Request{ ServiceName: &service, Method: &method, @@ -512,7 +479,7 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message) return err } - hrespBody, err := c.post(hreqBody, timeout) + hrespBody, err := post(ctx, hreqBody, timeout) if err != nil { return err } @@ -549,11 +516,11 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message) return proto.Unmarshal(res.Response, out) } -func (c *context) Request() *http.Request { +func (c *aeContext) Request() *http.Request { return c.req } -func (c *context) addLogLine(ll *logpb.UserAppLogLine) { +func (c *aeContext) addLogLine(ll *logpb.UserAppLogLine) { // Truncate long log lines. // TODO(dsymonds): Check if this is still necessary. const lim = 8 << 10 @@ -575,18 +542,20 @@ var logLevelName = map[int64]string{ 4: "CRITICAL", } -func logf(c *context, level int64, format string, args ...interface{}) { +func logf(c *aeContext, level int64, format string, args ...interface{}) { if c == nil { - panic("not an App Engine context") + panic("not an App Engine aeContext") } s := fmt.Sprintf(format, args...) s = strings.TrimRight(s, "\n") // Remove any trailing newline characters. - c.addLogLine(&logpb.UserAppLogLine{ - TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3), - Level: &level, - Message: &s, - }) - // Only duplicate log to stderr if not running on App Engine second generation + if logToLogservice() { + c.addLogLine(&logpb.UserAppLogLine{ + TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3), + Level: &level, + Message: &s, + }) + } + // Log to stdout if not deployed if !IsSecondGen() { log.Print(logLevelName[level] + ": " + s) } @@ -594,7 +563,7 @@ func logf(c *context, level int64, format string, args ...interface{}) { // flushLog attempts to flush any pending logs to the appserver. // It should not be called concurrently. -func (c *context) flushLog(force bool) (flushed bool) { +func (c *aeContext) flushLog(force bool) (flushed bool) { c.pendingLogs.Lock() // Grab up to 30 MB. We can get away with up to 32 MB, but let's be cautious. 
n, rem := 0, 30<<20 @@ -655,7 +624,7 @@ const ( forceFlushInterval = 60 * time.Second ) -func (c *context) logFlusher(stop <-chan int) { +func (c *aeContext) logFlusher(stop <-chan int) { lastFlush := time.Now() tick := time.NewTicker(flushInterval) for { @@ -673,6 +642,12 @@ func (c *context) logFlusher(stop <-chan int) { } } -func ContextForTesting(req *http.Request) netcontext.Context { - return toContext(&context{req: req}) +func ContextForTesting(req *http.Request) context.Context { + return toContext(&aeContext{req: req}) +} + +func logToLogservice() bool { + // TODO: replace logservice with json structured logs to $LOG_DIR/app.log.json + // where $LOG_DIR is /var/log in prod and some tmpdir in dev + return os.Getenv("LOG_TO_LOGSERVICE") != "0" } diff --git a/vendor/google.golang.org/appengine/internal/api_classic.go b/vendor/google.golang.org/appengine/internal/api_classic.go index f0f40b2e3..87c33c798 100644 --- a/vendor/google.golang.org/appengine/internal/api_classic.go +++ b/vendor/google.golang.org/appengine/internal/api_classic.go @@ -2,11 +2,13 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. +//go:build appengine // +build appengine package internal import ( + "context" "errors" "fmt" "net/http" @@ -17,20 +19,19 @@ import ( basepb "appengine_internal/base" "github.com/golang/protobuf/proto" - netcontext "golang.org/x/net/context" ) var contextKey = "holds an appengine.Context" // fromContext returns the App Engine context or nil if ctx is not // derived from an App Engine context. -func fromContext(ctx netcontext.Context) appengine.Context { +func fromContext(ctx context.Context) appengine.Context { c, _ := ctx.Value(&contextKey).(appengine.Context) return c } // This is only for classic App Engine adapters. 
-func ClassicContextFromContext(ctx netcontext.Context) (appengine.Context, error) { +func ClassicContextFromContext(ctx context.Context) (appengine.Context, error) { c := fromContext(ctx) if c == nil { return nil, errNotAppEngineContext @@ -38,8 +39,8 @@ func ClassicContextFromContext(ctx netcontext.Context) (appengine.Context, error return c, nil } -func withContext(parent netcontext.Context, c appengine.Context) netcontext.Context { - ctx := netcontext.WithValue(parent, &contextKey, c) +func withContext(parent context.Context, c appengine.Context) context.Context { + ctx := context.WithValue(parent, &contextKey, c) s := &basepb.StringProto{} c.Call("__go__", "GetNamespace", &basepb.VoidProto{}, s, nil) @@ -50,7 +51,7 @@ func withContext(parent netcontext.Context, c appengine.Context) netcontext.Cont return ctx } -func IncomingHeaders(ctx netcontext.Context) http.Header { +func IncomingHeaders(ctx context.Context) http.Header { if c := fromContext(ctx); c != nil { if req, ok := c.Request().(*http.Request); ok { return req.Header @@ -59,11 +60,11 @@ func IncomingHeaders(ctx netcontext.Context) http.Header { return nil } -func ReqContext(req *http.Request) netcontext.Context { - return WithContext(netcontext.Background(), req) +func ReqContext(req *http.Request) context.Context { + return WithContext(context.Background(), req) } -func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context { +func WithContext(parent context.Context, req *http.Request) context.Context { c := appengine.NewContext(req) return withContext(parent, c) } @@ -83,11 +84,11 @@ func (t *testingContext) Call(service, method string, _, _ appengine_internal.Pr } func (t *testingContext) Request() interface{} { return t.req } -func ContextForTesting(req *http.Request) netcontext.Context { - return withContext(netcontext.Background(), &testingContext{req: req}) +func ContextForTesting(req *http.Request) context.Context { + return withContext(context.Background(), &testingContext{req: req}) } -func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error { +func Call(ctx context.Context, service, method string, in, out proto.Message) error { if ns := NamespaceFromContext(ctx); ns != "" { if fn, ok := NamespaceMods[service]; ok { fn(in, ns) @@ -144,8 +145,8 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message) return err } -func handleHTTP(w http.ResponseWriter, r *http.Request) { - panic("handleHTTP called; this should be impossible") +func Middleware(next http.Handler) http.Handler { + panic("Middleware called; this should be impossible") } func logf(c appengine.Context, level int64, format string, args ...interface{}) { diff --git a/vendor/google.golang.org/appengine/internal/api_common.go b/vendor/google.golang.org/appengine/internal/api_common.go index e0c0b214b..5b95c13d9 100644 --- a/vendor/google.golang.org/appengine/internal/api_common.go +++ b/vendor/google.golang.org/appengine/internal/api_common.go @@ -5,20 +5,26 @@ package internal import ( + "context" "errors" "os" "github.com/golang/protobuf/proto" - netcontext "golang.org/x/net/context" ) +type ctxKey string + +func (c ctxKey) String() string { + return "appengine context key: " + string(c) +} + var errNotAppEngineContext = errors.New("not an App Engine context") -type CallOverrideFunc func(ctx netcontext.Context, service, method string, in, out proto.Message) error +type CallOverrideFunc func(ctx context.Context, service, method string, in, out proto.Message) error var 
callOverrideKey = "holds []CallOverrideFunc" -func WithCallOverride(ctx netcontext.Context, f CallOverrideFunc) netcontext.Context { +func WithCallOverride(ctx context.Context, f CallOverrideFunc) context.Context { // We avoid appending to any existing call override // so we don't risk overwriting a popped stack below. var cofs []CallOverrideFunc @@ -26,10 +32,10 @@ func WithCallOverride(ctx netcontext.Context, f CallOverrideFunc) netcontext.Con cofs = append(cofs, uf...) } cofs = append(cofs, f) - return netcontext.WithValue(ctx, &callOverrideKey, cofs) + return context.WithValue(ctx, &callOverrideKey, cofs) } -func callOverrideFromContext(ctx netcontext.Context) (CallOverrideFunc, netcontext.Context, bool) { +func callOverrideFromContext(ctx context.Context) (CallOverrideFunc, context.Context, bool) { cofs, _ := ctx.Value(&callOverrideKey).([]CallOverrideFunc) if len(cofs) == 0 { return nil, nil, false @@ -37,7 +43,7 @@ func callOverrideFromContext(ctx netcontext.Context) (CallOverrideFunc, netconte // We found a list of overrides; grab the last, and reconstitute a // context that will hide it. f := cofs[len(cofs)-1] - ctx = netcontext.WithValue(ctx, &callOverrideKey, cofs[:len(cofs)-1]) + ctx = context.WithValue(ctx, &callOverrideKey, cofs[:len(cofs)-1]) return f, ctx, true } @@ -45,23 +51,35 @@ type logOverrideFunc func(level int64, format string, args ...interface{}) var logOverrideKey = "holds a logOverrideFunc" -func WithLogOverride(ctx netcontext.Context, f logOverrideFunc) netcontext.Context { - return netcontext.WithValue(ctx, &logOverrideKey, f) +func WithLogOverride(ctx context.Context, f logOverrideFunc) context.Context { + return context.WithValue(ctx, &logOverrideKey, f) } var appIDOverrideKey = "holds a string, being the full app ID" -func WithAppIDOverride(ctx netcontext.Context, appID string) netcontext.Context { - return netcontext.WithValue(ctx, &appIDOverrideKey, appID) +func WithAppIDOverride(ctx context.Context, appID string) context.Context { + return context.WithValue(ctx, &appIDOverrideKey, appID) +} + +var apiHostOverrideKey = ctxKey("holds a string, being the alternate API_HOST") + +func withAPIHostOverride(ctx context.Context, apiHost string) context.Context { + return context.WithValue(ctx, apiHostOverrideKey, apiHost) +} + +var apiPortOverrideKey = ctxKey("holds a string, being the alternate API_PORT") + +func withAPIPortOverride(ctx context.Context, apiPort string) context.Context { + return context.WithValue(ctx, apiPortOverrideKey, apiPort) } var namespaceKey = "holds the namespace string" -func withNamespace(ctx netcontext.Context, ns string) netcontext.Context { - return netcontext.WithValue(ctx, &namespaceKey, ns) +func withNamespace(ctx context.Context, ns string) context.Context { + return context.WithValue(ctx, &namespaceKey, ns) } -func NamespaceFromContext(ctx netcontext.Context) string { +func NamespaceFromContext(ctx context.Context) string { // If there's no namespace, return the empty string. ns, _ := ctx.Value(&namespaceKey).(string) return ns @@ -70,14 +88,14 @@ func NamespaceFromContext(ctx netcontext.Context) string { // FullyQualifiedAppID returns the fully-qualified application ID. // This may contain a partition prefix (e.g. "s~" for High Replication apps), // or a domain prefix (e.g. "example.com:"). 
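`api_common.go` above drops `golang.org/x/net/context` for the standard library and introduces a named `ctxKey` type for the new API host/port overrides, so those values cannot collide with keys set by other packages. A generic sketch of that pattern, with hypothetical names:

```go
package example

import "context"

// ctxKey mirrors the pattern above: an unexported named type means these
// context values cannot collide with keys defined by other packages.
type ctxKey string

const apiHostOverride = ctxKey("api-host-override")

func withAPIHost(ctx context.Context, host string) context.Context {
	return context.WithValue(ctx, apiHostOverride, host)
}

func apiHost(ctx context.Context) (string, bool) {
	host, ok := ctx.Value(apiHostOverride).(string)
	return host, ok
}
```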
-func FullyQualifiedAppID(ctx netcontext.Context) string { +func FullyQualifiedAppID(ctx context.Context) string { if id, ok := ctx.Value(&appIDOverrideKey).(string); ok { return id } return fullyQualifiedAppID(ctx) } -func Logf(ctx netcontext.Context, level int64, format string, args ...interface{}) { +func Logf(ctx context.Context, level int64, format string, args ...interface{}) { if f, ok := ctx.Value(&logOverrideKey).(logOverrideFunc); ok { f(level, format, args...) return @@ -90,7 +108,7 @@ func Logf(ctx netcontext.Context, level int64, format string, args ...interface{ } // NamespacedContext wraps a Context to support namespaces. -func NamespacedContext(ctx netcontext.Context, namespace string) netcontext.Context { +func NamespacedContext(ctx context.Context, namespace string) context.Context { return withNamespace(ctx, namespace) } diff --git a/vendor/google.golang.org/appengine/internal/identity.go b/vendor/google.golang.org/appengine/internal/identity.go index 9b4134e42..0f95aa91d 100644 --- a/vendor/google.golang.org/appengine/internal/identity.go +++ b/vendor/google.golang.org/appengine/internal/identity.go @@ -5,9 +5,8 @@ package internal import ( + "context" "os" - - netcontext "golang.org/x/net/context" ) var ( @@ -23,7 +22,7 @@ var ( // AppID is the implementation of the wrapper function of the same name in // ../identity.go. See that file for commentary. -func AppID(c netcontext.Context) string { +func AppID(c context.Context) string { return appID(FullyQualifiedAppID(c)) } @@ -35,7 +34,7 @@ func IsStandard() bool { return appengineStandard || IsSecondGen() } -// IsStandard is the implementation of the wrapper function of the same name in +// IsSecondGen is the implementation of the wrapper function of the same name in // ../appengine.go. See that file for commentary. func IsSecondGen() bool { // Second-gen runtimes set $GAE_ENV so we use that to check if we're on a second-gen runtime. diff --git a/vendor/google.golang.org/appengine/internal/identity_classic.go b/vendor/google.golang.org/appengine/internal/identity_classic.go index 4e979f45e..5ad3548bf 100644 --- a/vendor/google.golang.org/appengine/internal/identity_classic.go +++ b/vendor/google.golang.org/appengine/internal/identity_classic.go @@ -2,21 +2,22 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. 
+//go:build appengine // +build appengine package internal import ( - "appengine" + "context" - netcontext "golang.org/x/net/context" + "appengine" ) func init() { appengineStandard = true } -func DefaultVersionHostname(ctx netcontext.Context) string { +func DefaultVersionHostname(ctx context.Context) string { c := fromContext(ctx) if c == nil { panic(errNotAppEngineContext) @@ -24,12 +25,12 @@ func DefaultVersionHostname(ctx netcontext.Context) string { return appengine.DefaultVersionHostname(c) } -func Datacenter(_ netcontext.Context) string { return appengine.Datacenter() } -func ServerSoftware() string { return appengine.ServerSoftware() } -func InstanceID() string { return appengine.InstanceID() } -func IsDevAppServer() bool { return appengine.IsDevAppServer() } +func Datacenter(_ context.Context) string { return appengine.Datacenter() } +func ServerSoftware() string { return appengine.ServerSoftware() } +func InstanceID() string { return appengine.InstanceID() } +func IsDevAppServer() bool { return appengine.IsDevAppServer() } -func RequestID(ctx netcontext.Context) string { +func RequestID(ctx context.Context) string { c := fromContext(ctx) if c == nil { panic(errNotAppEngineContext) @@ -37,14 +38,14 @@ func RequestID(ctx netcontext.Context) string { return appengine.RequestID(c) } -func ModuleName(ctx netcontext.Context) string { +func ModuleName(ctx context.Context) string { c := fromContext(ctx) if c == nil { panic(errNotAppEngineContext) } return appengine.ModuleName(c) } -func VersionID(ctx netcontext.Context) string { +func VersionID(ctx context.Context) string { c := fromContext(ctx) if c == nil { panic(errNotAppEngineContext) @@ -52,7 +53,7 @@ func VersionID(ctx netcontext.Context) string { return appengine.VersionID(c) } -func fullyQualifiedAppID(ctx netcontext.Context) string { +func fullyQualifiedAppID(ctx context.Context) string { c := fromContext(ctx) if c == nil { panic(errNotAppEngineContext) diff --git a/vendor/google.golang.org/appengine/internal/identity_flex.go b/vendor/google.golang.org/appengine/internal/identity_flex.go index d5e2e7b5e..4201b6b58 100644 --- a/vendor/google.golang.org/appengine/internal/identity_flex.go +++ b/vendor/google.golang.org/appengine/internal/identity_flex.go @@ -2,6 +2,7 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. +//go:build appenginevm // +build appenginevm package internal diff --git a/vendor/google.golang.org/appengine/internal/identity_vm.go b/vendor/google.golang.org/appengine/internal/identity_vm.go index 5d8067263..18ddda3a4 100644 --- a/vendor/google.golang.org/appengine/internal/identity_vm.go +++ b/vendor/google.golang.org/appengine/internal/identity_vm.go @@ -2,17 +2,17 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. 
+//go:build !appengine // +build !appengine package internal import ( + "context" "log" "net/http" "os" "strings" - - netcontext "golang.org/x/net/context" ) // These functions are implementations of the wrapper functions @@ -24,7 +24,7 @@ const ( hDatacenter = "X-AppEngine-Datacenter" ) -func ctxHeaders(ctx netcontext.Context) http.Header { +func ctxHeaders(ctx context.Context) http.Header { c := fromContext(ctx) if c == nil { return nil @@ -32,15 +32,15 @@ func ctxHeaders(ctx netcontext.Context) http.Header { return c.Request().Header } -func DefaultVersionHostname(ctx netcontext.Context) string { +func DefaultVersionHostname(ctx context.Context) string { return ctxHeaders(ctx).Get(hDefaultVersionHostname) } -func RequestID(ctx netcontext.Context) string { +func RequestID(ctx context.Context) string { return ctxHeaders(ctx).Get(hRequestLogId) } -func Datacenter(ctx netcontext.Context) string { +func Datacenter(ctx context.Context) string { if dc := ctxHeaders(ctx).Get(hDatacenter); dc != "" { return dc } @@ -71,7 +71,7 @@ func ServerSoftware() string { // TODO(dsymonds): Remove the metadata fetches. -func ModuleName(_ netcontext.Context) string { +func ModuleName(_ context.Context) string { if s := os.Getenv("GAE_MODULE_NAME"); s != "" { return s } @@ -81,7 +81,7 @@ func ModuleName(_ netcontext.Context) string { return string(mustGetMetadata("instance/attributes/gae_backend_name")) } -func VersionID(_ netcontext.Context) string { +func VersionID(_ context.Context) string { if s1, s2 := os.Getenv("GAE_MODULE_VERSION"), os.Getenv("GAE_MINOR_VERSION"); s1 != "" && s2 != "" { return s1 + "." + s2 } @@ -112,7 +112,7 @@ func partitionlessAppID() string { return string(mustGetMetadata("instance/attributes/gae_project")) } -func fullyQualifiedAppID(_ netcontext.Context) string { +func fullyQualifiedAppID(_ context.Context) string { if s := os.Getenv("GAE_APPLICATION"); s != "" { return s } @@ -130,5 +130,5 @@ func fullyQualifiedAppID(_ netcontext.Context) string { } func IsDevAppServer() bool { - return os.Getenv("RUN_WITH_DEVAPPSERVER") != "" + return os.Getenv("RUN_WITH_DEVAPPSERVER") != "" || os.Getenv("GAE_ENV") == "localdev" } diff --git a/vendor/google.golang.org/appengine/internal/main.go b/vendor/google.golang.org/appengine/internal/main.go index 1e765312f..afd0ae84f 100644 --- a/vendor/google.golang.org/appengine/internal/main.go +++ b/vendor/google.golang.org/appengine/internal/main.go @@ -2,6 +2,7 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. +//go:build appengine // +build appengine package internal diff --git a/vendor/google.golang.org/appengine/internal/main_vm.go b/vendor/google.golang.org/appengine/internal/main_vm.go index ddb79a333..86a8caf06 100644 --- a/vendor/google.golang.org/appengine/internal/main_vm.go +++ b/vendor/google.golang.org/appengine/internal/main_vm.go @@ -2,6 +2,7 @@ // Use of this source code is governed by the Apache 2.0 // license that can be found in the LICENSE file. 
+//go:build !appengine // +build !appengine package internal @@ -29,7 +30,7 @@ func Main() { if IsDevAppServer() { host = "127.0.0.1" } - if err := http.ListenAndServe(host+":"+port, http.HandlerFunc(handleHTTP)); err != nil { + if err := http.ListenAndServe(host+":"+port, Middleware(http.DefaultServeMux)); err != nil { log.Fatalf("http.ListenAndServe: %v", err) } } diff --git a/vendor/google.golang.org/appengine/internal/transaction.go b/vendor/google.golang.org/appengine/internal/transaction.go index 9006ae653..2ae8ab9fa 100644 --- a/vendor/google.golang.org/appengine/internal/transaction.go +++ b/vendor/google.golang.org/appengine/internal/transaction.go @@ -7,11 +7,11 @@ package internal // This file implements hooks for applying datastore transactions. import ( + "context" "errors" "reflect" "github.com/golang/protobuf/proto" - netcontext "golang.org/x/net/context" basepb "google.golang.org/appengine/internal/base" pb "google.golang.org/appengine/internal/datastore" @@ -38,13 +38,13 @@ func applyTransaction(pb proto.Message, t *pb.Transaction) { var transactionKey = "used for *Transaction" -func transactionFromContext(ctx netcontext.Context) *transaction { +func transactionFromContext(ctx context.Context) *transaction { t, _ := ctx.Value(&transactionKey).(*transaction) return t } -func withTransaction(ctx netcontext.Context, t *transaction) netcontext.Context { - return netcontext.WithValue(ctx, &transactionKey, t) +func withTransaction(ctx context.Context, t *transaction) context.Context { + return context.WithValue(ctx, &transactionKey, t) } type transaction struct { @@ -54,7 +54,7 @@ type transaction struct { var ErrConcurrentTransaction = errors.New("internal: concurrent transaction") -func RunTransactionOnce(c netcontext.Context, f func(netcontext.Context) error, xg bool, readOnly bool, previousTransaction *pb.Transaction) (*pb.Transaction, error) { +func RunTransactionOnce(c context.Context, f func(context.Context) error, xg bool, readOnly bool, previousTransaction *pb.Transaction) (*pb.Transaction, error) { if transactionFromContext(c) != nil { return nil, errors.New("nested transactions are not supported") } diff --git a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go index 6ffe1e6d9..6c0d72418 100644 --- a/vendor/google.golang.org/appengine/urlfetch/urlfetch.go +++ b/vendor/google.golang.org/appengine/urlfetch/urlfetch.go @@ -7,6 +7,7 @@ package urlfetch // import "google.golang.org/appengine/urlfetch" import ( + "context" "errors" "fmt" "io" @@ -18,7 +19,6 @@ import ( "time" "github.com/golang/protobuf/proto" - "golang.org/x/net/context" "google.golang.org/appengine/internal" pb "google.golang.org/appengine/internal/urlfetch" @@ -44,11 +44,10 @@ type Transport struct { var _ http.RoundTripper = (*Transport)(nil) // Client returns an *http.Client using a default urlfetch Transport. This -// client will have the default deadline of 5 seconds, and will check the -// validity of SSL certificates. +// client will check the validity of SSL certificates. // -// Any deadline of the provided context will be used for requests through this client; -// if the client does not have a deadline then a 5 second default is used. +// Any deadline of the provided context will be used for requests through this client. +// If the client does not have a deadline, then an App Engine default of 60 second is used. 
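The `urlfetch` doc change above drops the old fixed 5-second default in favour of the context deadline, falling back to an App Engine default when none is set. A sketch of passing an explicit deadline; the 30-second timeout and the URL handling are illustrative only:

```go
package example

import (
	"context"
	"net/http"
	"time"

	"google.golang.org/appengine/urlfetch"
)

// fetchWithDeadline bounds the request with an explicit 30-second deadline;
// without one, the documented App Engine default applies instead of the old
// hard-coded 5 seconds.
func fetchWithDeadline(ctx context.Context, url string) (*http.Response, error) {
	ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
	defer cancel()

	client := urlfetch.Client(ctx)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	return client.Do(req)
}
```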
func Client(ctx context.Context) *http.Client { return &http.Client{ Transport: &Transport{ diff --git a/vendor/google.golang.org/grpc/README.md b/vendor/google.golang.org/grpc/README.md index 0e6ae69a5..1bc92248c 100644 --- a/vendor/google.golang.org/grpc/README.md +++ b/vendor/google.golang.org/grpc/README.md @@ -14,21 +14,14 @@ RPC framework that puts mobile and HTTP/2 first. For more information see the ## Installation -With [Go module][] support (Go 1.11+), simply add the following import +Simply add the following import to your code, and then `go [build|run|test]` +will automatically fetch the necessary dependencies: + ```go import "google.golang.org/grpc" ``` -to your code, and then `go [build|run|test]` will automatically fetch the -necessary dependencies. - -Otherwise, to install the `grpc-go` package, run the following command: - -```console -$ go get -u google.golang.org/grpc -``` - > **Note:** If you are trying to access `grpc-go` from **China**, see the > [FAQ](#FAQ) below. @@ -56,15 +49,6 @@ To build Go code, there are several options: - Set up a VPN and access google.golang.org through that. -- Without Go module support: `git clone` the repo manually: - - ```sh - git clone https://github.com/grpc/grpc-go.git $GOPATH/src/google.golang.org/grpc - ``` - - You will need to do the same for all of grpc's dependencies in `golang.org`, - e.g. `golang.org/x/net`. - - With Go module support: it is possible to use the `replace` feature of `go mod` to create aliases for golang.org packages. In your project's directory: @@ -76,33 +60,13 @@ To build Go code, there are several options: ``` Again, this will need to be done for all transitive dependencies hosted on - golang.org as well. For details, refer to [golang/go issue #28652](https://github.com/golang/go/issues/28652). + golang.org as well. For details, refer to [golang/go issue + #28652](https://github.com/golang/go/issues/28652). ### Compiling error, undefined: grpc.SupportPackageIsVersion -#### If you are using Go modules: - -Ensure your gRPC-Go version is `require`d at the appropriate version in -the same module containing the generated `.pb.go` files. For example, -`SupportPackageIsVersion6` needs `v1.27.0`, so in your `go.mod` file: - -```go -module - -require ( - google.golang.org/grpc v1.27.0 -) -``` - -#### If you are *not* using Go modules: - -Update the `proto` package, gRPC package, and rebuild the `.proto` files: - -```sh -go get -u github.com/golang/protobuf/{proto,protoc-gen-go} -go get -u google.golang.org/grpc -protoc --go_out=plugins=grpc:. *.proto -``` +Please update to the latest version of gRPC-Go using +`go get google.golang.org/grpc`. ### How to turn on logging @@ -121,9 +85,11 @@ possible reasons, including: 1. mis-configured transport credentials, connection failed on handshaking 1. bytes disrupted, possibly by a proxy in between 1. server shutdown - 1. Keepalive parameters caused connection shutdown, for example if you have configured - your server to terminate connections regularly to [trigger DNS lookups](https://github.com/grpc/grpc-go/issues/3170#issuecomment-552517779). - If this is the case, you may want to increase your [MaxConnectionAgeGrace](https://pkg.go.dev/google.golang.org/grpc/keepalive?tab=doc#ServerParameters), + 1. Keepalive parameters caused connection shutdown, for example if you have + configured your server to terminate connections regularly to [trigger DNS + lookups](https://github.com/grpc/grpc-go/issues/3170#issuecomment-552517779). 
+ If this is the case, you may want to increase your + [MaxConnectionAgeGrace](https://pkg.go.dev/google.golang.org/grpc/keepalive?tab=doc#ServerParameters), to allow longer RPC calls to finish. It can be tricky to debug this because the error happens on the client side but diff --git a/vendor/google.golang.org/grpc/attributes/attributes.go b/vendor/google.golang.org/grpc/attributes/attributes.go index 49712aca3..712fef4d0 100644 --- a/vendor/google.golang.org/grpc/attributes/attributes.go +++ b/vendor/google.golang.org/grpc/attributes/attributes.go @@ -34,26 +34,26 @@ import ( // key/value pairs. Keys must be hashable, and users should define their own // types for keys. Values should not be modified after they are added to an // Attributes or if they were received from one. If values implement 'Equal(o -// interface{}) bool', it will be called by (*Attributes).Equal to determine -// whether two values with the same key should be considered equal. +// any) bool', it will be called by (*Attributes).Equal to determine whether +// two values with the same key should be considered equal. type Attributes struct { - m map[interface{}]interface{} + m map[any]any } // New returns a new Attributes containing the key/value pair. -func New(key, value interface{}) *Attributes { - return &Attributes{m: map[interface{}]interface{}{key: value}} +func New(key, value any) *Attributes { + return &Attributes{m: map[any]any{key: value}} } // WithValue returns a new Attributes containing the previous keys and values // and the new key/value pair. If the same key appears multiple times, the // last value overwrites all previous values for that key. To remove an // existing key, use a nil value. value should not be modified later. -func (a *Attributes) WithValue(key, value interface{}) *Attributes { +func (a *Attributes) WithValue(key, value any) *Attributes { if a == nil { return New(key, value) } - n := &Attributes{m: make(map[interface{}]interface{}, len(a.m)+1)} + n := &Attributes{m: make(map[any]any, len(a.m)+1)} for k, v := range a.m { n.m[k] = v } @@ -63,20 +63,19 @@ func (a *Attributes) WithValue(key, value interface{}) *Attributes { // Value returns the value associated with these attributes for key, or nil if // no value is associated with key. The returned value should not be modified. -func (a *Attributes) Value(key interface{}) interface{} { +func (a *Attributes) Value(key any) any { if a == nil { return nil } return a.m[key] } -// Equal returns whether a and o are equivalent. If 'Equal(o interface{}) -// bool' is implemented for a value in the attributes, it is called to -// determine if the value matches the one stored in the other attributes. If -// Equal is not implemented, standard equality is used to determine if the two -// values are equal. Note that some types (e.g. maps) aren't comparable by -// default, so they must be wrapped in a struct, or in an alias type, with Equal -// defined. +// Equal returns whether a and o are equivalent. If 'Equal(o any) bool' is +// implemented for a value in the attributes, it is called to determine if the +// value matches the one stored in the other attributes. If Equal is not +// implemented, standard equality is used to determine if the two values are +// equal. Note that some types (e.g. maps) aren't comparable by default, so +// they must be wrapped in a struct, or in an alias type, with Equal defined. 
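The `attributes` change above is part of gRPC's `interface{}` → `any` migration; values stored in an `*Attributes` may implement `Equal(o any) bool` to opt into semantic comparison. A hedged sketch with a hypothetical value type:

```go
package example

import "google.golang.org/grpc/attributes"

// zoneKey is an unexported key type, as the package comment recommends.
type zoneKey struct{}

// zoneInfo implements Equal(o any) bool, so (*Attributes).Equal consults it
// when comparing two attribute sets that carry this value.
type zoneInfo struct{ zone string }

func (z zoneInfo) Equal(o any) bool {
	other, ok := o.(zoneInfo)
	return ok && other.zone == z.zone
}

func sameZone() bool {
	a := attributes.New(zoneKey{}, zoneInfo{zone: "us-east1-b"})
	b := attributes.New(zoneKey{}, zoneInfo{zone: "us-east1-b"})
	return a.Equal(b) // true: the values' Equal method is used
}
```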
func (a *Attributes) Equal(o *Attributes) bool { if a == nil && o == nil { return true @@ -93,7 +92,7 @@ func (a *Attributes) Equal(o *Attributes) bool { // o missing element of a return false } - if eq, ok := v.(interface{ Equal(o interface{}) bool }); ok { + if eq, ok := v.(interface{ Equal(o any) bool }); ok { if !eq.Equal(ov) { return false } @@ -122,7 +121,7 @@ func (a *Attributes) String() string { return sb.String() } -func str(x interface{}) string { +func str(x any) string { if v, ok := x.(fmt.Stringer); ok { return v.String() } else if v, ok := x.(string); ok { diff --git a/vendor/google.golang.org/grpc/balancer/balancer.go b/vendor/google.golang.org/grpc/balancer/balancer.go index 8f00523c0..b6377f445 100644 --- a/vendor/google.golang.org/grpc/balancer/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/balancer.go @@ -105,8 +105,8 @@ type SubConn interface { // // This will trigger a state transition for the SubConn. // - // Deprecated: This method is now part of the ClientConn interface and will - // eventually be removed from here. + // Deprecated: this method will be removed. Create new SubConns for new + // addresses instead. UpdateAddresses([]resolver.Address) // Connect starts the connecting for this SubConn. Connect() @@ -115,6 +115,13 @@ type SubConn interface { // creates a new one and returns it. Returns a close function which must // be called when the Producer is no longer needed. GetOrBuildProducer(ProducerBuilder) (p Producer, close func()) + // Shutdown shuts down the SubConn gracefully. Any started RPCs will be + // allowed to complete. No future calls should be made on the SubConn. + // One final state update will be delivered to the StateListener (or + // UpdateSubConnState; deprecated) with ConnectivityState of Shutdown to + // indicate the shutdown operation. This may be delivered before + // in-progress RPCs are complete and the actual connection is closed. + Shutdown() } // NewSubConnOptions contains options to create new SubConn. @@ -129,6 +136,11 @@ type NewSubConnOptions struct { // HealthCheckEnabled indicates whether health check service should be // enabled on this SubConn HealthCheckEnabled bool + // StateListener is called when the state of the subconn changes. If nil, + // Balancer.UpdateSubConnState will be called instead. Will never be + // invoked until after Connect() is called on the SubConn created with + // these options. + StateListener func(SubConnState) } // State contains the balancer's state relevant to the gRPC ClientConn. @@ -150,16 +162,24 @@ type ClientConn interface { // NewSubConn is called by balancer to create a new SubConn. // It doesn't block and wait for the connections to be established. // Behaviors of the SubConn can be controlled by options. + // + // Deprecated: please be aware that in a future version, SubConns will only + // support one address per SubConn. NewSubConn([]resolver.Address, NewSubConnOptions) (SubConn, error) // RemoveSubConn removes the SubConn from ClientConn. // The SubConn will be shutdown. + // + // Deprecated: use SubConn.Shutdown instead. RemoveSubConn(SubConn) // UpdateAddresses updates the addresses used in the passed in SubConn. // gRPC checks if the currently connected address is still in the new list. // If so, the connection will be kept. Else, the connection will be // gracefully closed, and a new connection will be created. // - // This will trigger a state transition for the SubConn. + // This may trigger a state transition for the SubConn. 
+ // + // Deprecated: this method will be removed. Create new SubConns for new + // addresses instead. UpdateAddresses(SubConn, []resolver.Address) // UpdateState notifies gRPC that the balancer's internal state has @@ -250,7 +270,7 @@ type DoneInfo struct { // trailing metadata. // // The only supported type now is *orca_v3.LoadReport. - ServerLoad interface{} + ServerLoad any } var ( @@ -343,9 +363,13 @@ type Balancer interface { ResolverError(error) // UpdateSubConnState is called by gRPC when the state of a SubConn // changes. + // + // Deprecated: Use NewSubConnOptions.StateListener when creating the + // SubConn instead. UpdateSubConnState(SubConn, SubConnState) - // Close closes the balancer. The balancer is not required to call - // ClientConn.RemoveSubConn for its existing SubConns. + // Close closes the balancer. The balancer is not currently required to + // call SubConn.Shutdown for its existing SubConns; however, this will be + // required in a future release, so it is recommended. Close() } @@ -390,15 +414,14 @@ var ErrBadResolverState = errors.New("bad resolver state") type ProducerBuilder interface { // Build creates a Producer. The first parameter is always a // grpc.ClientConnInterface (a type to allow creating RPCs/streams on the - // associated SubConn), but is declared as interface{} to avoid a - // dependency cycle. Should also return a close function that will be - // called when all references to the Producer have been given up. - Build(grpcClientConnInterface interface{}) (p Producer, close func()) + // associated SubConn), but is declared as `any` to avoid a dependency + // cycle. Should also return a close function that will be called when all + // references to the Producer have been given up. + Build(grpcClientConnInterface any) (p Producer, close func()) } // A Producer is a type shared among potentially many consumers. It is // associated with a SubConn, and an implementation will typically contain // other methods to provide additional functionality, e.g. configuration or // subscription registration. -type Producer interface { -} +type Producer any diff --git a/vendor/google.golang.org/grpc/balancer/base/balancer.go b/vendor/google.golang.org/grpc/balancer/base/balancer.go index 3929c26d3..a7f1eeec8 100644 --- a/vendor/google.golang.org/grpc/balancer/base/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/base/balancer.go @@ -105,7 +105,12 @@ func (b *baseBalancer) UpdateClientConnState(s balancer.ClientConnState) error { addrsSet.Set(a, nil) if _, ok := b.subConns.Get(a); !ok { // a is a new address (not existing in b.subConns). - sc, err := b.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{HealthCheckEnabled: b.config.HealthCheck}) + var sc balancer.SubConn + opts := balancer.NewSubConnOptions{ + HealthCheckEnabled: b.config.HealthCheck, + StateListener: func(scs balancer.SubConnState) { b.updateSubConnState(sc, scs) }, + } + sc, err := b.cc.NewSubConn([]resolver.Address{a}, opts) if err != nil { logger.Warningf("base.baseBalancer: failed to create new SubConn: %v", err) continue @@ -121,10 +126,10 @@ func (b *baseBalancer) UpdateClientConnState(s balancer.ClientConnState) error { sc := sci.(balancer.SubConn) // a was removed by resolver. if _, ok := addrsSet.Get(a); !ok { - b.cc.RemoveSubConn(sc) + sc.Shutdown() b.subConns.Delete(a) // Keep the state of this sc in b.scStates until sc's state becomes Shutdown. - // The entry will be deleted in UpdateSubConnState. + // The entry will be deleted in updateSubConnState. 
} } // If resolver state contains no addresses, return an error so ClientConn @@ -177,7 +182,12 @@ func (b *baseBalancer) regeneratePicker() { b.picker = b.pickerBuilder.Build(PickerBuildInfo{ReadySCs: readySCs}) } +// UpdateSubConnState is a nop because a StateListener is always set in NewSubConn. func (b *baseBalancer) UpdateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { + logger.Errorf("base.baseBalancer: UpdateSubConnState(%v, %+v) called unexpectedly", sc, state) +} + +func (b *baseBalancer) updateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { s := state.ConnectivityState if logger.V(2) { logger.Infof("base.baseBalancer: handle SubConn state change: %p, %v", sc, s) @@ -204,8 +214,8 @@ func (b *baseBalancer) UpdateSubConnState(sc balancer.SubConn, state balancer.Su case connectivity.Idle: sc.Connect() case connectivity.Shutdown: - // When an address was removed by resolver, b called RemoveSubConn but - // kept the sc's state in scStates. Remove state for this sc here. + // When an address was removed by resolver, b called Shutdown but kept + // the sc's state in scStates. Remove state for this sc here. delete(b.scStates, sc) case connectivity.TransientFailure: // Save error to be reported via picker. @@ -226,7 +236,7 @@ func (b *baseBalancer) UpdateSubConnState(sc balancer.SubConn, state balancer.Su } // Close is a nop because base balancer doesn't have internal state to clean up, -// and it doesn't need to call RemoveSubConn for the SubConns. +// and it doesn't need to call Shutdown for the SubConns. func (b *baseBalancer) Close() { } diff --git a/vendor/google.golang.org/grpc/balancer_conn_wrappers.go b/vendor/google.golang.org/grpc/balancer_conn_wrappers.go index 04b9ad411..a4411c22b 100644 --- a/vendor/google.golang.org/grpc/balancer_conn_wrappers.go +++ b/vendor/google.golang.org/grpc/balancer_conn_wrappers.go @@ -99,20 +99,6 @@ func (ccb *ccBalancerWrapper) updateClientConnState(ccs *balancer.ClientConnStat // lock held. But the lock guards only the scheduling part. The actual // callback is called asynchronously without the lock being held. ok := ccb.serializer.Schedule(func(_ context.Context) { - // If the addresses specified in the update contain addresses of type - // "grpclb" and the selected LB policy is not "grpclb", these addresses - // will be filtered out and ccs will be modified with the updated - // address list. - if ccb.curBalancerName != grpclbName { - var addrs []resolver.Address - for _, addr := range ccs.ResolverState.Addresses { - if addr.Type == resolver.GRPCLB { - continue - } - addrs = append(addrs, addr) - } - ccs.ResolverState.Addresses = addrs - } errCh <- ccb.balancer.UpdateClientConnState(*ccs) }) if !ok { @@ -139,7 +125,9 @@ func (ccb *ccBalancerWrapper) updateClientConnState(ccs *balancer.ClientConnStat func (ccb *ccBalancerWrapper) updateSubConnState(sc balancer.SubConn, s connectivity.State, err error) { ccb.mu.Lock() ccb.serializer.Schedule(func(_ context.Context) { - ccb.balancer.UpdateSubConnState(sc, balancer.SubConnState{ConnectivityState: s, ConnectionError: err}) + // Even though it is optional for balancers, gracefulswitch ensures + // opts.StateListener is set, so this cannot ever be nil. 
+ sc.(*acBalancerWrapper).stateListener(balancer.SubConnState{ConnectivityState: s, ConnectionError: err}) }) ccb.mu.Unlock() } @@ -221,7 +209,7 @@ func (ccb *ccBalancerWrapper) closeBalancer(m ccbMode) { } ccb.mode = m - done := ccb.serializer.Done + done := ccb.serializer.Done() b := ccb.balancer ok := ccb.serializer.Schedule(func(_ context.Context) { // Close the serializer to ensure that no more calls from gRPC are sent @@ -238,11 +226,9 @@ func (ccb *ccBalancerWrapper) closeBalancer(m ccbMode) { } ccb.mu.Unlock() - // Give enqueued callbacks a chance to finish. + // Give enqueued callbacks a chance to finish before closing the balancer. <-done - // Spawn a goroutine to close the balancer (since it may block trying to - // cleanup all allocated resources) and return early. - go b.Close() + b.Close() } // exitIdleMode is invoked by grpc when the channel exits idle mode either @@ -314,29 +300,19 @@ func (ccb *ccBalancerWrapper) NewSubConn(addrs []resolver.Address, opts balancer channelz.Warningf(logger, ccb.cc.channelzID, "acBalancerWrapper: NewSubConn: failed to newAddrConn: %v", err) return nil, err } - acbw := &acBalancerWrapper{ac: ac, producers: make(map[balancer.ProducerBuilder]*refCountedProducer)} + acbw := &acBalancerWrapper{ + ccb: ccb, + ac: ac, + producers: make(map[balancer.ProducerBuilder]*refCountedProducer), + stateListener: opts.StateListener, + } ac.acbw = acbw return acbw, nil } func (ccb *ccBalancerWrapper) RemoveSubConn(sc balancer.SubConn) { - if ccb.isIdleOrClosed() { - // It it safe to ignore this call when the balancer is closed or in idle - // because the ClientConn takes care of closing the connections. - // - // Not returning early from here when the balancer is closed or in idle - // leads to a deadlock though, because of the following sequence of - // calls when holding cc.mu: - // cc.exitIdleMode --> ccb.enterIdleMode --> gsw.Close --> - // ccb.RemoveAddrConn --> cc.removeAddrConn - return - } - - acbw, ok := sc.(*acBalancerWrapper) - if !ok { - return - } - ccb.cc.removeAddrConn(acbw.ac, errConnDrain) + // The graceful switch balancer will never call this. + logger.Errorf("ccb RemoveSubConn(%v) called unexpectedly", sc) } func (ccb *ccBalancerWrapper) UpdateAddresses(sc balancer.SubConn, addrs []resolver.Address) { @@ -380,7 +356,9 @@ func (ccb *ccBalancerWrapper) Target() string { // acBalancerWrapper is a wrapper on top of ac for balancers. // It implements balancer.SubConn interface. type acBalancerWrapper struct { - ac *addrConn // read-only + ac *addrConn // read-only + ccb *ccBalancerWrapper // read-only + stateListener func(balancer.SubConnState) mu sync.Mutex producers map[balancer.ProducerBuilder]*refCountedProducer @@ -398,6 +376,23 @@ func (acbw *acBalancerWrapper) Connect() { go acbw.ac.connect() } +func (acbw *acBalancerWrapper) Shutdown() { + ccb := acbw.ccb + if ccb.isIdleOrClosed() { + // It is safe to ignore this call when the balancer is closed or in idle + // because the ClientConn takes care of closing the connections. + // + // Not returning early from here when the balancer is closed or in idle + // leads to a deadlock though, because of the following sequence of + // calls when holding cc.mu: + // cc.exitIdleMode --> ccb.enterIdleMode --> gsw.Close --> + // ccb.RemoveAddrConn --> cc.removeAddrConn + return + } + + ccb.cc.removeAddrConn(acbw.ac, errConnDrain) +} + // NewStream begins a streaming RPC on the addrConn. If the addrConn is not // ready, blocks until it is or ctx expires.
Returns an error when the context // expires or the addrConn is shut down. @@ -411,7 +406,7 @@ func (acbw *acBalancerWrapper) NewStream(ctx context.Context, desc *StreamDesc, // Invoke performs a unary RPC. If the addrConn is not ready, returns // errSubConnNotReady. -func (acbw *acBalancerWrapper) Invoke(ctx context.Context, method string, args interface{}, reply interface{}, opts ...CallOption) error { +func (acbw *acBalancerWrapper) Invoke(ctx context.Context, method string, args any, reply any, opts ...CallOption) error { cs, err := acbw.NewStream(ctx, unaryStreamDesc, method, opts...) if err != nil { return err diff --git a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go index ec2c2fa14..595480112 100644 --- a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go +++ b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go @@ -18,7 +18,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.30.0 +// protoc-gen-go v1.31.0 // protoc v4.22.0 // source: grpc/binlog/v1/binarylog.proto diff --git a/vendor/google.golang.org/grpc/call.go b/vendor/google.golang.org/grpc/call.go index e6a1dc5d7..788c89c16 100644 --- a/vendor/google.golang.org/grpc/call.go +++ b/vendor/google.golang.org/grpc/call.go @@ -26,12 +26,7 @@ import ( // received. This is typically called by generated code. // // All errors returned by Invoke are compatible with the status package. -func (cc *ClientConn) Invoke(ctx context.Context, method string, args, reply interface{}, opts ...CallOption) error { - if err := cc.idlenessMgr.onCallBegin(); err != nil { - return err - } - defer cc.idlenessMgr.onCallEnd() - +func (cc *ClientConn) Invoke(ctx context.Context, method string, args, reply any, opts ...CallOption) error { // allow interceptor to see all applicable call options, which means those // configured as defaults from dial option as well as per-call options opts = combine(cc.dopts.callOptions, opts) @@ -61,13 +56,13 @@ func combine(o1 []CallOption, o2 []CallOption) []CallOption { // received. This is typically called by generated code. // // DEPRECATED: Use ClientConn.Invoke instead. -func Invoke(ctx context.Context, method string, args, reply interface{}, cc *ClientConn, opts ...CallOption) error { +func Invoke(ctx context.Context, method string, args, reply any, cc *ClientConn, opts ...CallOption) error { return cc.Invoke(ctx, method, args, reply, opts...) } var unaryStreamDesc = &StreamDesc{ServerStreams: false, ClientStreams: false} -func invoke(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error { +func invoke(ctx context.Context, method string, req, reply any, cc *ClientConn, opts ...CallOption) error { cs, err := newClientStream(ctx, unaryStreamDesc, cc, method, opts...) 
if err != nil { return err diff --git a/vendor/google.golang.org/grpc/clientconn.go b/vendor/google.golang.org/grpc/clientconn.go index bfd7555a8..ff7fea102 100644 --- a/vendor/google.golang.org/grpc/clientconn.go +++ b/vendor/google.golang.org/grpc/clientconn.go @@ -34,9 +34,11 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/credentials" + "google.golang.org/grpc/internal" "google.golang.org/grpc/internal/backoff" "google.golang.org/grpc/internal/channelz" "google.golang.org/grpc/internal/grpcsync" + "google.golang.org/grpc/internal/idle" "google.golang.org/grpc/internal/pretty" iresolver "google.golang.org/grpc/internal/resolver" "google.golang.org/grpc/internal/transport" @@ -54,8 +56,6 @@ import ( const ( // minimum time to give a connection to complete minConnectTimeout = 20 * time.Second - // must match grpclbName in grpclb/grpclb.go - grpclbName = "grpclb" ) var ( @@ -138,7 +138,6 @@ func (dcs *defaultConfigSelector) SelectConfig(rpcInfo iresolver.RPCInfo) (*ires func DialContext(ctx context.Context, target string, opts ...DialOption) (conn *ClientConn, err error) { cc := &ClientConn{ target: target, - csMgr: &connectivityStateManager{}, conns: make(map[*addrConn]struct{}), dopts: defaultDialOptions(), czData: new(channelzData), @@ -191,6 +190,8 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * // Register ClientConn with channelz. cc.channelzRegistration(target) + cc.csMgr = newConnectivityStateManager(cc.ctx, cc.channelzID) + if err := cc.validateTransportCredentials(); err != nil { return nil, err } @@ -266,7 +267,7 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * // Configure idleness support with configured idle timeout or default idle // timeout duration. Idleness can be explicitly disabled by the user, by // setting the dial option to 0. - cc.idlenessMgr = newIdlenessManager(cc, cc.dopts.idleTimeout) + cc.idlenessMgr = idle.NewManager(idle.ManagerOptions{Enforcer: (*idler)(cc), Timeout: cc.dopts.idleTimeout, Logger: logger}) // Return early for non-blocking dials. if !cc.dopts.block { @@ -317,6 +318,16 @@ func (cc *ClientConn) addTraceEvent(msg string) { channelz.AddTraceEvent(logger, cc.channelzID, 0, ted) } +type idler ClientConn + +func (i *idler) EnterIdleMode() error { + return (*ClientConn)(i).enterIdleMode() +} + +func (i *idler) ExitIdleMode() error { + return (*ClientConn)(i).exitIdleMode() +} + // exitIdleMode moves the channel out of idle mode by recreating the name // resolver and load balancer. 
func (cc *ClientConn) exitIdleMode() error { @@ -327,7 +338,7 @@ func (cc *ClientConn) exitIdleMode() error { } if cc.idlenessState != ccIdlenessStateIdle { cc.mu.Unlock() - logger.Info("ClientConn asked to exit idle mode when not in idle mode") + channelz.Infof(logger, cc.channelzID, "ClientConn asked to exit idle mode, current mode is %v", cc.idlenessState) return nil } @@ -350,7 +361,7 @@ func (cc *ClientConn) exitIdleMode() error { cc.idlenessState = ccIdlenessStateExitingIdle exitedIdle := false if cc.blockingpicker == nil { - cc.blockingpicker = newPickerWrapper() + cc.blockingpicker = newPickerWrapper(cc.dopts.copts.StatsHandlers) } else { cc.blockingpicker.exitIdleMode() exitedIdle = true @@ -398,7 +409,8 @@ func (cc *ClientConn) enterIdleMode() error { return ErrClientConnClosing } if cc.idlenessState != ccIdlenessStateActive { - logger.Error("ClientConn asked to enter idle mode when not active") + channelz.Errorf(logger, cc.channelzID, "ClientConn asked to enter idle mode, current mode is %v", cc.idlenessState) + cc.mu.Unlock() return nil } @@ -475,7 +487,6 @@ func (cc *ClientConn) validateTransportCredentials() error { func (cc *ClientConn) channelzRegistration(target string) { cc.channelzID = channelz.RegisterChannel(&channelzChannel{cc}, cc.dopts.channelzParentID, target) cc.addTraceEvent("created") - cc.csMgr.channelzID = cc.channelzID } // chainUnaryClientInterceptors chains all unary client interceptors into one. @@ -492,7 +503,7 @@ func chainUnaryClientInterceptors(cc *ClientConn) { } else if len(interceptors) == 1 { chainedInt = interceptors[0] } else { - chainedInt = func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error { + chainedInt = func(ctx context.Context, method string, req, reply any, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error { return interceptors[0](ctx, method, req, reply, cc, getChainUnaryInvoker(interceptors, 0, invoker), opts...) } } @@ -504,7 +515,7 @@ func getChainUnaryInvoker(interceptors []UnaryClientInterceptor, curr int, final if curr == len(interceptors)-1 { return finalInvoker } - return func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error { + return func(ctx context.Context, method string, req, reply any, cc *ClientConn, opts ...CallOption) error { return interceptors[curr+1](ctx, method, req, reply, cc, getChainUnaryInvoker(interceptors, curr+1, finalInvoker), opts...) } } @@ -540,13 +551,27 @@ func getChainStreamer(interceptors []StreamClientInterceptor, curr int, finalStr } } +// newConnectivityStateManager creates an connectivityStateManager with +// the specified id. +func newConnectivityStateManager(ctx context.Context, id *channelz.Identifier) *connectivityStateManager { + return &connectivityStateManager{ + channelzID: id, + pubSub: grpcsync.NewPubSub(ctx), + } +} + // connectivityStateManager keeps the connectivity.State of ClientConn. // This struct will eventually be exported so the balancers can access it. +// +// TODO: If possible, get rid of the `connectivityStateManager` type, and +// provide this functionality using the `PubSub`, to avoid keeping track of +// the connectivity state at two places. type connectivityStateManager struct { mu sync.Mutex state connectivity.State notifyChan chan struct{} channelzID *channelz.Identifier + pubSub *grpcsync.PubSub } // updateState updates the connectivity.State of ClientConn. 
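The hunks above route connectivity-state changes through a grpcsync.PubSub inside the connectivityStateManager. From application code the same information is reachable through the public (experimental) ClientConn API. A minimal watcher sketch, with an illustrative target and timeout:

```go
package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	conn, err := grpc.Dial("localhost:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	for {
		s := conn.GetState()
		log.Printf("connectivity: %v", s)
		// WaitForStateChange blocks until the state leaves s or ctx expires.
		if !conn.WaitForStateChange(ctx, s) {
			return // context expired
		}
	}
}
```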
@@ -562,6 +587,8 @@ func (csm *connectivityStateManager) updateState(state connectivity.State) { return } csm.state = state + csm.pubSub.Publish(state) + channelz.Infof(logger, csm.channelzID, "Channel Connectivity change to %v", state) if csm.notifyChan != nil { // There are other goroutines waiting on this channel. @@ -591,7 +618,7 @@ func (csm *connectivityStateManager) getNotifyChan() <-chan struct{} { type ClientConnInterface interface { // Invoke performs a unary RPC and returns after the response is received // into reply. - Invoke(ctx context.Context, method string, args interface{}, reply interface{}, opts ...CallOption) error + Invoke(ctx context.Context, method string, args any, reply any, opts ...CallOption) error // NewStream begins a streaming RPC. NewStream(ctx context.Context, desc *StreamDesc, method string, opts ...CallOption) (ClientStream, error) } @@ -623,7 +650,7 @@ type ClientConn struct { channelzID *channelz.Identifier // Channelz identifier for the channel. resolverBuilder resolver.Builder // See parseTargetAndFindResolver(). balancerWrapper *ccBalancerWrapper // Uses gracefulswitch.balancer underneath. - idlenessMgr idlenessManager + idlenessMgr idle.Manager // The following provide their own synchronization, and therefore don't // require cc.mu to be held to access them. @@ -669,6 +696,19 @@ const ( ccIdlenessStateExitingIdle ) +func (s ccIdlenessState) String() string { + switch s { + case ccIdlenessStateActive: + return "active" + case ccIdlenessStateIdle: + return "idle" + case ccIdlenessStateExitingIdle: + return "exitingIdle" + default: + return "unknown" + } +} + // WaitForStateChange waits until the connectivity.State of ClientConn changes from sourceState or // ctx expires. A true value is returned in former case and false in latter. // @@ -760,6 +800,10 @@ func init() { panic(fmt.Sprintf("impossible error parsing empty service config: %v", cfg.Err)) } emptyServiceConfig = cfg.Config.(*ServiceConfig) + + internal.SubscribeToConnectivityStateChanges = func(cc *ClientConn, s grpcsync.Subscriber) func() { + return cc.csMgr.pubSub.Subscribe(s) + } } func (cc *ClientConn) maybeApplyDefaultServiceConfig(addrs []resolver.Address) { @@ -1047,8 +1091,8 @@ func (ac *addrConn) updateAddrs(addrs []resolver.Address) { ac.cancel() ac.ctx, ac.cancel = context.WithCancel(ac.cc.ctx) - // We have to defer here because GracefulClose => Close => onClose, which - // requires locking ac.mu. + // We have to defer here because GracefulClose => onClose, which requires + // locking ac.mu. if ac.transport != nil { defer ac.transport.GracefulClose() ac.transport = nil @@ -1153,23 +1197,13 @@ func (cc *ClientConn) applyServiceConfigAndBalancer(sc *ServiceConfig, configSel } var newBalancerName string - if cc.sc != nil && cc.sc.lbConfig != nil { + if cc.sc == nil || (cc.sc.lbConfig == nil && cc.sc.LB == nil) { + // No service config or no LB policy specified in config. 
+ newBalancerName = PickFirstBalancerName + } else if cc.sc.lbConfig != nil { newBalancerName = cc.sc.lbConfig.name - } else { - var isGRPCLB bool - for _, a := range addrs { - if a.Type == resolver.GRPCLB { - isGRPCLB = true - break - } - } - if isGRPCLB { - newBalancerName = grpclbName - } else if cc.sc != nil && cc.sc.LB != nil { - newBalancerName = *cc.sc.LB - } else { - newBalancerName = PickFirstBalancerName - } + } else { // cc.sc.LB != nil + newBalancerName = *cc.sc.LB } cc.balancerWrapper.switchTo(newBalancerName) } @@ -1208,7 +1242,10 @@ func (cc *ClientConn) ResetConnectBackoff() { // Close tears down the ClientConn and all underlying connections. func (cc *ClientConn) Close() error { - defer cc.cancel() + defer func() { + cc.cancel() + <-cc.csMgr.pubSub.Done() + }() cc.mu.Lock() if cc.conns == nil { @@ -1242,7 +1279,7 @@ func (cc *ClientConn) Close() error { rWrapper.close() } if idlenessMgr != nil { - idlenessMgr.close() + idlenessMgr.Close() } for ac := range conns { @@ -1352,12 +1389,14 @@ func (ac *addrConn) resetTransport() { if err := ac.tryAllAddrs(acCtx, addrs, connectDeadline); err != nil { ac.cc.resolveNow(resolver.ResolveNowOptions{}) - // After exhausting all addresses, the addrConn enters - // TRANSIENT_FAILURE. + ac.mu.Lock() if acCtx.Err() != nil { + // addrConn was torn down. + ac.mu.Unlock() return } - ac.mu.Lock() + // After exhausting all addresses, the addrConn enters + // TRANSIENT_FAILURE. ac.updateConnectivityState(connectivity.TransientFailure, err) // Backoff. @@ -1553,7 +1592,7 @@ func (ac *addrConn) startHealthCheck(ctx context.Context) { // Set up the health check helper functions. currentTr := ac.transport - newStream := func(method string) (interface{}, error) { + newStream := func(method string) (any, error) { ac.mu.Lock() if ac.transport != currentTr { ac.mu.Unlock() @@ -1641,16 +1680,7 @@ func (ac *addrConn) tearDown(err error) { ac.updateConnectivityState(connectivity.Shutdown, nil) ac.cancel() ac.curAddr = resolver.Address{} - if err == errConnDrain && curTr != nil { - // GracefulClose(...) may be executed multiple times when - // i) receiving multiple GoAway frames from the server; or - // ii) there are concurrent name resolver/Balancer triggered - // address removal and GoAway. - // We have to unlock and re-lock here because GracefulClose => Close => onClose, which requires locking ac.mu. - ac.mu.Unlock() - curTr.GracefulClose() - ac.mu.Lock() - } + channelz.AddTraceEvent(logger, ac.channelzID, 0, &channelz.TraceEventDesc{ Desc: "Subchannel deleted", Severity: channelz.CtInfo, @@ -1664,6 +1694,29 @@ func (ac *addrConn) tearDown(err error) { // being deleted right away. channelz.RemoveEntry(ac.channelzID) ac.mu.Unlock() + + // We have to release the lock before the call to GracefulClose/Close here + // because both of them call onClose(), which requires locking ac.mu. + if curTr != nil { + if err == errConnDrain { + // Close the transport gracefully when the subConn is being shutdown. + // + // GracefulClose() may be executed multiple times if: + // - multiple GoAway frames are received from the server + // - there are concurrent name resolver or balancer triggered + // address removal and GoAway + curTr.GracefulClose() + } else { + // Hard close the transport when the channel is entering idle or is + // being shutdown. In the case where the channel is being shutdown, + // closing of transports is also taken care of by cancelation of cc.ctx. 
+ // But in the case where the channel is entering idle, we need to + // explicitly close the transports here. Instead of distinguishing + // between these two cases, it is simpler to close the transport + // unconditionally here. + curTr.Close(err) + } + } } func (ac *addrConn) getState() connectivity.State { diff --git a/vendor/google.golang.org/grpc/codec.go b/vendor/google.golang.org/grpc/codec.go index 129776547..411e3dfd4 100644 --- a/vendor/google.golang.org/grpc/codec.go +++ b/vendor/google.golang.org/grpc/codec.go @@ -27,8 +27,8 @@ import ( // omits the name/string, which vary between the two and are not needed for // anything besides the registry in the encoding package. type baseCodec interface { - Marshal(v interface{}) ([]byte, error) - Unmarshal(data []byte, v interface{}) error + Marshal(v any) ([]byte, error) + Unmarshal(data []byte, v any) error } var _ baseCodec = Codec(nil) @@ -41,9 +41,9 @@ var _ baseCodec = encoding.Codec(nil) // Deprecated: use encoding.Codec instead. type Codec interface { // Marshal returns the wire format of v. - Marshal(v interface{}) ([]byte, error) + Marshal(v any) ([]byte, error) // Unmarshal parses the wire format into v. - Unmarshal(data []byte, v interface{}) error + Unmarshal(data []byte, v any) error // String returns the name of the Codec implementation. This is unused by // gRPC. String() string diff --git a/vendor/google.golang.org/grpc/dialoptions.go b/vendor/google.golang.org/grpc/dialoptions.go index 23ea95237..1fd0d5c12 100644 --- a/vendor/google.golang.org/grpc/dialoptions.go +++ b/vendor/google.golang.org/grpc/dialoptions.go @@ -139,6 +139,20 @@ func newJoinDialOption(opts ...DialOption) DialOption { return &joinDialOption{opts: opts} } +// WithSharedWriteBuffer allows reusing per-connection transport write buffer. +// If this option is set to true every connection will release the buffer after +// flushing the data on the wire. +// +// # Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. +func WithSharedWriteBuffer(val bool) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.SharedWriteBuffer = val + }) +} + // WithWriteBufferSize determines how much data can be batched before doing a // write on the wire. The corresponding memory allocation for this buffer will // be twice the size to keep syscalls low. The default value for this buffer is diff --git a/vendor/google.golang.org/grpc/encoding/encoding.go b/vendor/google.golang.org/grpc/encoding/encoding.go index 07a586135..69d5580b6 100644 --- a/vendor/google.golang.org/grpc/encoding/encoding.go +++ b/vendor/google.golang.org/grpc/encoding/encoding.go @@ -90,9 +90,9 @@ func GetCompressor(name string) Compressor { // methods can be called from concurrent goroutines. type Codec interface { // Marshal returns the wire format of v. - Marshal(v interface{}) ([]byte, error) + Marshal(v any) ([]byte, error) // Unmarshal parses the wire format into v. - Unmarshal(data []byte, v interface{}) error + Unmarshal(data []byte, v any) error // Name returns the name of the Codec implementation. The returned string // will be used as part of content type in transmission. The result must be // static; the result cannot change between calls. 
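With the encoding.Codec interface now expressed in terms of `any`, a custom codec still only needs Marshal, Unmarshal, and Name. A minimal JSON codec sketch (not part of this patch; the name "json" is illustrative), registered so callers can select it per call with grpc.CallContentSubtype:

```go
package jsoncodec

import (
	"encoding/json"

	"google.golang.org/grpc/encoding"
)

type codec struct{}

func (codec) Marshal(v any) ([]byte, error)      { return json.Marshal(v) }
func (codec) Unmarshal(data []byte, v any) error { return json.Unmarshal(data, v) }
func (codec) Name() string                       { return "json" }

func init() {
	// Makes the codec selectable via grpc.CallContentSubtype("json").
	encoding.RegisterCodec(codec{})
}
```

This mirrors the shape of the default proto codec updated in the next file.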
diff --git a/vendor/google.golang.org/grpc/encoding/proto/proto.go b/vendor/google.golang.org/grpc/encoding/proto/proto.go index 3009b35af..0ee3d3bae 100644 --- a/vendor/google.golang.org/grpc/encoding/proto/proto.go +++ b/vendor/google.golang.org/grpc/encoding/proto/proto.go @@ -37,7 +37,7 @@ func init() { // codec is a Codec implementation with protobuf. It is the default codec for gRPC. type codec struct{} -func (codec) Marshal(v interface{}) ([]byte, error) { +func (codec) Marshal(v any) ([]byte, error) { vv, ok := v.(proto.Message) if !ok { return nil, fmt.Errorf("failed to marshal, message is %T, want proto.Message", v) @@ -45,7 +45,7 @@ func (codec) Marshal(v interface{}) ([]byte, error) { return proto.Marshal(vv) } -func (codec) Unmarshal(data []byte, v interface{}) error { +func (codec) Unmarshal(data []byte, v any) error { vv, ok := v.(proto.Message) if !ok { return fmt.Errorf("failed to unmarshal, message is %T, want proto.Message", v) diff --git a/vendor/google.golang.org/grpc/grpclog/component.go b/vendor/google.golang.org/grpc/grpclog/component.go index 8358dd6e2..ac73c9ced 100644 --- a/vendor/google.golang.org/grpc/grpclog/component.go +++ b/vendor/google.golang.org/grpc/grpclog/component.go @@ -31,71 +31,71 @@ type componentData struct { var cache = map[string]*componentData{} -func (c *componentData) InfoDepth(depth int, args ...interface{}) { - args = append([]interface{}{"[" + string(c.name) + "]"}, args...) +func (c *componentData) InfoDepth(depth int, args ...any) { + args = append([]any{"[" + string(c.name) + "]"}, args...) grpclog.InfoDepth(depth+1, args...) } -func (c *componentData) WarningDepth(depth int, args ...interface{}) { - args = append([]interface{}{"[" + string(c.name) + "]"}, args...) +func (c *componentData) WarningDepth(depth int, args ...any) { + args = append([]any{"[" + string(c.name) + "]"}, args...) grpclog.WarningDepth(depth+1, args...) } -func (c *componentData) ErrorDepth(depth int, args ...interface{}) { - args = append([]interface{}{"[" + string(c.name) + "]"}, args...) +func (c *componentData) ErrorDepth(depth int, args ...any) { + args = append([]any{"[" + string(c.name) + "]"}, args...) grpclog.ErrorDepth(depth+1, args...) } -func (c *componentData) FatalDepth(depth int, args ...interface{}) { - args = append([]interface{}{"[" + string(c.name) + "]"}, args...) +func (c *componentData) FatalDepth(depth int, args ...any) { + args = append([]any{"[" + string(c.name) + "]"}, args...) grpclog.FatalDepth(depth+1, args...) } -func (c *componentData) Info(args ...interface{}) { +func (c *componentData) Info(args ...any) { c.InfoDepth(1, args...) } -func (c *componentData) Warning(args ...interface{}) { +func (c *componentData) Warning(args ...any) { c.WarningDepth(1, args...) } -func (c *componentData) Error(args ...interface{}) { +func (c *componentData) Error(args ...any) { c.ErrorDepth(1, args...) } -func (c *componentData) Fatal(args ...interface{}) { +func (c *componentData) Fatal(args ...any) { c.FatalDepth(1, args...) 
} -func (c *componentData) Infof(format string, args ...interface{}) { +func (c *componentData) Infof(format string, args ...any) { c.InfoDepth(1, fmt.Sprintf(format, args...)) } -func (c *componentData) Warningf(format string, args ...interface{}) { +func (c *componentData) Warningf(format string, args ...any) { c.WarningDepth(1, fmt.Sprintf(format, args...)) } -func (c *componentData) Errorf(format string, args ...interface{}) { +func (c *componentData) Errorf(format string, args ...any) { c.ErrorDepth(1, fmt.Sprintf(format, args...)) } -func (c *componentData) Fatalf(format string, args ...interface{}) { +func (c *componentData) Fatalf(format string, args ...any) { c.FatalDepth(1, fmt.Sprintf(format, args...)) } -func (c *componentData) Infoln(args ...interface{}) { +func (c *componentData) Infoln(args ...any) { c.InfoDepth(1, args...) } -func (c *componentData) Warningln(args ...interface{}) { +func (c *componentData) Warningln(args ...any) { c.WarningDepth(1, args...) } -func (c *componentData) Errorln(args ...interface{}) { +func (c *componentData) Errorln(args ...any) { c.ErrorDepth(1, args...) } -func (c *componentData) Fatalln(args ...interface{}) { +func (c *componentData) Fatalln(args ...any) { c.FatalDepth(1, args...) } diff --git a/vendor/google.golang.org/grpc/grpclog/grpclog.go b/vendor/google.golang.org/grpc/grpclog/grpclog.go index c8bb2be34..16928c9cb 100644 --- a/vendor/google.golang.org/grpc/grpclog/grpclog.go +++ b/vendor/google.golang.org/grpc/grpclog/grpclog.go @@ -42,53 +42,53 @@ func V(l int) bool { } // Info logs to the INFO log. -func Info(args ...interface{}) { +func Info(args ...any) { grpclog.Logger.Info(args...) } // Infof logs to the INFO log. Arguments are handled in the manner of fmt.Printf. -func Infof(format string, args ...interface{}) { +func Infof(format string, args ...any) { grpclog.Logger.Infof(format, args...) } // Infoln logs to the INFO log. Arguments are handled in the manner of fmt.Println. -func Infoln(args ...interface{}) { +func Infoln(args ...any) { grpclog.Logger.Infoln(args...) } // Warning logs to the WARNING log. -func Warning(args ...interface{}) { +func Warning(args ...any) { grpclog.Logger.Warning(args...) } // Warningf logs to the WARNING log. Arguments are handled in the manner of fmt.Printf. -func Warningf(format string, args ...interface{}) { +func Warningf(format string, args ...any) { grpclog.Logger.Warningf(format, args...) } // Warningln logs to the WARNING log. Arguments are handled in the manner of fmt.Println. -func Warningln(args ...interface{}) { +func Warningln(args ...any) { grpclog.Logger.Warningln(args...) } // Error logs to the ERROR log. -func Error(args ...interface{}) { +func Error(args ...any) { grpclog.Logger.Error(args...) } // Errorf logs to the ERROR log. Arguments are handled in the manner of fmt.Printf. -func Errorf(format string, args ...interface{}) { +func Errorf(format string, args ...any) { grpclog.Logger.Errorf(format, args...) } // Errorln logs to the ERROR log. Arguments are handled in the manner of fmt.Println. -func Errorln(args ...interface{}) { +func Errorln(args ...any) { grpclog.Logger.Errorln(args...) } // Fatal logs to the FATAL log. Arguments are handled in the manner of fmt.Print. // It calls os.Exit() with exit code 1. -func Fatal(args ...interface{}) { +func Fatal(args ...any) { grpclog.Logger.Fatal(args...) // Make sure fatal logs will exit. os.Exit(1) @@ -96,7 +96,7 @@ func Fatal(args ...interface{}) { // Fatalf logs to the FATAL log. 
Arguments are handled in the manner of fmt.Printf. // It calls os.Exit() with exit code 1. -func Fatalf(format string, args ...interface{}) { +func Fatalf(format string, args ...any) { grpclog.Logger.Fatalf(format, args...) // Make sure fatal logs will exit. os.Exit(1) @@ -104,7 +104,7 @@ func Fatalf(format string, args ...interface{}) { // Fatalln logs to the FATAL log. Arguments are handled in the manner of fmt.Println. // It calle os.Exit()) with exit code 1. -func Fatalln(args ...interface{}) { +func Fatalln(args ...any) { grpclog.Logger.Fatalln(args...) // Make sure fatal logs will exit. os.Exit(1) @@ -113,20 +113,20 @@ func Fatalln(args ...interface{}) { // Print prints to the logger. Arguments are handled in the manner of fmt.Print. // // Deprecated: use Info. -func Print(args ...interface{}) { +func Print(args ...any) { grpclog.Logger.Info(args...) } // Printf prints to the logger. Arguments are handled in the manner of fmt.Printf. // // Deprecated: use Infof. -func Printf(format string, args ...interface{}) { +func Printf(format string, args ...any) { grpclog.Logger.Infof(format, args...) } // Println prints to the logger. Arguments are handled in the manner of fmt.Println. // // Deprecated: use Infoln. -func Println(args ...interface{}) { +func Println(args ...any) { grpclog.Logger.Infoln(args...) } diff --git a/vendor/google.golang.org/grpc/grpclog/logger.go b/vendor/google.golang.org/grpc/grpclog/logger.go index ef06a4822..b1674d826 100644 --- a/vendor/google.golang.org/grpc/grpclog/logger.go +++ b/vendor/google.golang.org/grpc/grpclog/logger.go @@ -24,12 +24,12 @@ import "google.golang.org/grpc/internal/grpclog" // // Deprecated: use LoggerV2. type Logger interface { - Fatal(args ...interface{}) - Fatalf(format string, args ...interface{}) - Fatalln(args ...interface{}) - Print(args ...interface{}) - Printf(format string, args ...interface{}) - Println(args ...interface{}) + Fatal(args ...any) + Fatalf(format string, args ...any) + Fatalln(args ...any) + Print(args ...any) + Printf(format string, args ...any) + Println(args ...any) } // SetLogger sets the logger that is used in grpc. Call only from @@ -45,39 +45,39 @@ type loggerWrapper struct { Logger } -func (g *loggerWrapper) Info(args ...interface{}) { +func (g *loggerWrapper) Info(args ...any) { g.Logger.Print(args...) } -func (g *loggerWrapper) Infoln(args ...interface{}) { +func (g *loggerWrapper) Infoln(args ...any) { g.Logger.Println(args...) } -func (g *loggerWrapper) Infof(format string, args ...interface{}) { +func (g *loggerWrapper) Infof(format string, args ...any) { g.Logger.Printf(format, args...) } -func (g *loggerWrapper) Warning(args ...interface{}) { +func (g *loggerWrapper) Warning(args ...any) { g.Logger.Print(args...) } -func (g *loggerWrapper) Warningln(args ...interface{}) { +func (g *loggerWrapper) Warningln(args ...any) { g.Logger.Println(args...) } -func (g *loggerWrapper) Warningf(format string, args ...interface{}) { +func (g *loggerWrapper) Warningf(format string, args ...any) { g.Logger.Printf(format, args...) } -func (g *loggerWrapper) Error(args ...interface{}) { +func (g *loggerWrapper) Error(args ...any) { g.Logger.Print(args...) } -func (g *loggerWrapper) Errorln(args ...interface{}) { +func (g *loggerWrapper) Errorln(args ...any) { g.Logger.Println(args...) } -func (g *loggerWrapper) Errorf(format string, args ...interface{}) { +func (g *loggerWrapper) Errorf(format string, args ...any) { g.Logger.Printf(format, args...) 
} diff --git a/vendor/google.golang.org/grpc/grpclog/loggerv2.go b/vendor/google.golang.org/grpc/grpclog/loggerv2.go index 5de66e40d..ecfd36d71 100644 --- a/vendor/google.golang.org/grpc/grpclog/loggerv2.go +++ b/vendor/google.golang.org/grpc/grpclog/loggerv2.go @@ -33,35 +33,35 @@ import ( // LoggerV2 does underlying logging work for grpclog. type LoggerV2 interface { // Info logs to INFO log. Arguments are handled in the manner of fmt.Print. - Info(args ...interface{}) + Info(args ...any) // Infoln logs to INFO log. Arguments are handled in the manner of fmt.Println. - Infoln(args ...interface{}) + Infoln(args ...any) // Infof logs to INFO log. Arguments are handled in the manner of fmt.Printf. - Infof(format string, args ...interface{}) + Infof(format string, args ...any) // Warning logs to WARNING log. Arguments are handled in the manner of fmt.Print. - Warning(args ...interface{}) + Warning(args ...any) // Warningln logs to WARNING log. Arguments are handled in the manner of fmt.Println. - Warningln(args ...interface{}) + Warningln(args ...any) // Warningf logs to WARNING log. Arguments are handled in the manner of fmt.Printf. - Warningf(format string, args ...interface{}) + Warningf(format string, args ...any) // Error logs to ERROR log. Arguments are handled in the manner of fmt.Print. - Error(args ...interface{}) + Error(args ...any) // Errorln logs to ERROR log. Arguments are handled in the manner of fmt.Println. - Errorln(args ...interface{}) + Errorln(args ...any) // Errorf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. - Errorf(format string, args ...interface{}) + Errorf(format string, args ...any) // Fatal logs to ERROR log. Arguments are handled in the manner of fmt.Print. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatal(args ...interface{}) + Fatal(args ...any) // Fatalln logs to ERROR log. Arguments are handled in the manner of fmt.Println. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatalln(args ...interface{}) + Fatalln(args ...any) // Fatalf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatalf(format string, args ...interface{}) + Fatalf(format string, args ...any) // V reports whether verbosity level l is at least the requested verbose level. 
V(l int) bool } @@ -182,53 +182,53 @@ func (g *loggerT) output(severity int, s string) { g.m[severity].Output(2, string(b)) } -func (g *loggerT) Info(args ...interface{}) { +func (g *loggerT) Info(args ...any) { g.output(infoLog, fmt.Sprint(args...)) } -func (g *loggerT) Infoln(args ...interface{}) { +func (g *loggerT) Infoln(args ...any) { g.output(infoLog, fmt.Sprintln(args...)) } -func (g *loggerT) Infof(format string, args ...interface{}) { +func (g *loggerT) Infof(format string, args ...any) { g.output(infoLog, fmt.Sprintf(format, args...)) } -func (g *loggerT) Warning(args ...interface{}) { +func (g *loggerT) Warning(args ...any) { g.output(warningLog, fmt.Sprint(args...)) } -func (g *loggerT) Warningln(args ...interface{}) { +func (g *loggerT) Warningln(args ...any) { g.output(warningLog, fmt.Sprintln(args...)) } -func (g *loggerT) Warningf(format string, args ...interface{}) { +func (g *loggerT) Warningf(format string, args ...any) { g.output(warningLog, fmt.Sprintf(format, args...)) } -func (g *loggerT) Error(args ...interface{}) { +func (g *loggerT) Error(args ...any) { g.output(errorLog, fmt.Sprint(args...)) } -func (g *loggerT) Errorln(args ...interface{}) { +func (g *loggerT) Errorln(args ...any) { g.output(errorLog, fmt.Sprintln(args...)) } -func (g *loggerT) Errorf(format string, args ...interface{}) { +func (g *loggerT) Errorf(format string, args ...any) { g.output(errorLog, fmt.Sprintf(format, args...)) } -func (g *loggerT) Fatal(args ...interface{}) { +func (g *loggerT) Fatal(args ...any) { g.output(fatalLog, fmt.Sprint(args...)) os.Exit(1) } -func (g *loggerT) Fatalln(args ...interface{}) { +func (g *loggerT) Fatalln(args ...any) { g.output(fatalLog, fmt.Sprintln(args...)) os.Exit(1) } -func (g *loggerT) Fatalf(format string, args ...interface{}) { +func (g *loggerT) Fatalf(format string, args ...any) { g.output(fatalLog, fmt.Sprintf(format, args...)) os.Exit(1) } @@ -248,11 +248,11 @@ func (g *loggerT) V(l int) bool { type DepthLoggerV2 interface { LoggerV2 // InfoDepth logs to INFO log at the specified depth. Arguments are handled in the manner of fmt.Println. - InfoDepth(depth int, args ...interface{}) + InfoDepth(depth int, args ...any) // WarningDepth logs to WARNING log at the specified depth. Arguments are handled in the manner of fmt.Println. - WarningDepth(depth int, args ...interface{}) + WarningDepth(depth int, args ...any) // ErrorDepth logs to ERROR log at the specified depth. Arguments are handled in the manner of fmt.Println. - ErrorDepth(depth int, args ...interface{}) + ErrorDepth(depth int, args ...any) // FatalDepth logs to FATAL log at the specified depth. Arguments are handled in the manner of fmt.Println. - FatalDepth(depth int, args ...interface{}) + FatalDepth(depth int, args ...any) } diff --git a/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go b/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go index 142d35f75..24299efd6 100644 --- a/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go +++ b/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go @@ -17,7 +17,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.30.0 +// protoc-gen-go v1.31.0 // protoc v4.22.0 // source: grpc/health/v1/health.proto diff --git a/vendor/google.golang.org/grpc/interceptor.go b/vendor/google.golang.org/grpc/interceptor.go index bb96ef57b..877d78fc3 100644 --- a/vendor/google.golang.org/grpc/interceptor.go +++ b/vendor/google.golang.org/grpc/interceptor.go @@ -23,7 +23,7 @@ import ( ) // UnaryInvoker is called by UnaryClientInterceptor to complete RPCs. -type UnaryInvoker func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error +type UnaryInvoker func(ctx context.Context, method string, req, reply any, cc *ClientConn, opts ...CallOption) error // UnaryClientInterceptor intercepts the execution of a unary RPC on the client. // Unary interceptors can be specified as a DialOption, using @@ -40,7 +40,7 @@ type UnaryInvoker func(ctx context.Context, method string, req, reply interface{ // defaults from the ClientConn as well as per-call options. // // The returned error must be compatible with the status package. -type UnaryClientInterceptor func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error +type UnaryClientInterceptor func(ctx context.Context, method string, req, reply any, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error // Streamer is called by StreamClientInterceptor to create a ClientStream. type Streamer func(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (ClientStream, error) @@ -66,7 +66,7 @@ type StreamClientInterceptor func(ctx context.Context, desc *StreamDesc, cc *Cli // server side. All per-rpc information may be mutated by the interceptor. type UnaryServerInfo struct { // Server is the service implementation the user provides. This is read-only. - Server interface{} + Server any // FullMethod is the full RPC method string, i.e., /package.service/method. FullMethod string } @@ -78,13 +78,13 @@ type UnaryServerInfo struct { // status package, or be one of the context errors. Otherwise, gRPC will use // codes.Unknown as the status code and err.Error() as the status message of the // RPC. -type UnaryHandler func(ctx context.Context, req interface{}) (interface{}, error) +type UnaryHandler func(ctx context.Context, req any) (any, error) // UnaryServerInterceptor provides a hook to intercept the execution of a unary RPC on the server. info // contains all the information of this RPC the interceptor can operate on. And handler is the wrapper // of the service method implementation. It is the responsibility of the interceptor to invoke handler // to complete the RPC. -type UnaryServerInterceptor func(ctx context.Context, req interface{}, info *UnaryServerInfo, handler UnaryHandler) (resp interface{}, err error) +type UnaryServerInterceptor func(ctx context.Context, req any, info *UnaryServerInfo, handler UnaryHandler) (resp any, err error) // StreamServerInfo consists of various information about a streaming RPC on // server side. All per-rpc information may be mutated by the interceptor. @@ -101,4 +101,4 @@ type StreamServerInfo struct { // info contains all the information of this RPC the interceptor can operate on. And handler is the // service method implementation. It is the responsibility of the interceptor to invoke handler to // complete the RPC. 
-type StreamServerInterceptor func(srv interface{}, ss ServerStream, info *StreamServerInfo, handler StreamHandler) error +type StreamServerInterceptor func(srv any, ss ServerStream, info *StreamServerInfo, handler StreamHandler) error diff --git a/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/gracefulswitch.go b/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/gracefulswitch.go index 08666f62a..3c594e6e4 100644 --- a/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/gracefulswitch.go +++ b/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/gracefulswitch.go @@ -200,8 +200,8 @@ func (gsb *Balancer) ExitIdle() { } } -// UpdateSubConnState forwards the update to the appropriate child. -func (gsb *Balancer) UpdateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { +// updateSubConnState forwards the update to the appropriate child. +func (gsb *Balancer) updateSubConnState(sc balancer.SubConn, state balancer.SubConnState, cb func(balancer.SubConnState)) { gsb.currentMu.Lock() defer gsb.currentMu.Unlock() gsb.mu.Lock() @@ -214,13 +214,26 @@ func (gsb *Balancer) UpdateSubConnState(sc balancer.SubConn, state balancer.SubC } else if gsb.balancerPending != nil && gsb.balancerPending.subconns[sc] { balToUpdate = gsb.balancerPending } - gsb.mu.Unlock() if balToUpdate == nil { // SubConn belonged to a stale lb policy that has not yet fully closed, // or the balancer was already closed. + gsb.mu.Unlock() return } - balToUpdate.UpdateSubConnState(sc, state) + if state.ConnectivityState == connectivity.Shutdown { + delete(balToUpdate.subconns, sc) + } + gsb.mu.Unlock() + if cb != nil { + cb(state) + } else { + balToUpdate.UpdateSubConnState(sc, state) + } +} + +// UpdateSubConnState forwards the update to the appropriate child. +func (gsb *Balancer) UpdateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { + gsb.updateSubConnState(sc, state, nil) } // Close closes any active child balancers. @@ -242,7 +255,7 @@ func (gsb *Balancer) Close() { // // It implements the balancer.ClientConn interface and is passed down in that // capacity to the wrapped balancer. It maintains a set of subConns created by -// the wrapped balancer and calls from the latter to create/update/remove +// the wrapped balancer and calls from the latter to create/update/shutdown // SubConns update this set before being forwarded to the parent ClientConn. // State updates from the wrapped balancer can result in invocation of the // graceful switch logic. @@ -254,21 +267,10 @@ type balancerWrapper struct { subconns map[balancer.SubConn]bool // subconns created by this balancer } -func (bw *balancerWrapper) UpdateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { - if state.ConnectivityState == connectivity.Shutdown { - bw.gsb.mu.Lock() - delete(bw.subconns, sc) - bw.gsb.mu.Unlock() - } - // There is no need to protect this read with a mutex, as the write to the - // Balancer field happens in SwitchTo, which completes before this can be - // called. - bw.Balancer.UpdateSubConnState(sc, state) -} - -// Close closes the underlying LB policy and removes the subconns it created. bw -// must not be referenced via balancerCurrent or balancerPending in gsb when -// called. gsb.mu must not be held. Does not panic with a nil receiver. +// Close closes the underlying LB policy and shuts down the subconns it +// created. bw must not be referenced via balancerCurrent or balancerPending in +// gsb when called. gsb.mu must not be held. 
Does not panic with a nil +// receiver. func (bw *balancerWrapper) Close() { // before Close is called. if bw == nil { @@ -281,7 +283,7 @@ func (bw *balancerWrapper) Close() { bw.Balancer.Close() bw.gsb.mu.Lock() for sc := range bw.subconns { - bw.gsb.cc.RemoveSubConn(sc) + sc.Shutdown() } bw.gsb.mu.Unlock() } @@ -335,13 +337,16 @@ func (bw *balancerWrapper) NewSubConn(addrs []resolver.Address, opts balancer.Ne } bw.gsb.mu.Unlock() + var sc balancer.SubConn + oldListener := opts.StateListener + opts.StateListener = func(state balancer.SubConnState) { bw.gsb.updateSubConnState(sc, state, oldListener) } sc, err := bw.gsb.cc.NewSubConn(addrs, opts) if err != nil { return nil, err } bw.gsb.mu.Lock() if !bw.gsb.balancerCurrentOrPending(bw) { // balancer was closed during this call - bw.gsb.cc.RemoveSubConn(sc) + sc.Shutdown() bw.gsb.mu.Unlock() return nil, fmt.Errorf("%T at address %p that called NewSubConn is deleted", bw, bw) } @@ -360,13 +365,9 @@ func (bw *balancerWrapper) ResolveNow(opts resolver.ResolveNowOptions) { } func (bw *balancerWrapper) RemoveSubConn(sc balancer.SubConn) { - bw.gsb.mu.Lock() - if !bw.gsb.balancerCurrentOrPending(bw) { - bw.gsb.mu.Unlock() - return - } - bw.gsb.mu.Unlock() - bw.gsb.cc.RemoveSubConn(sc) + // Note: existing third party balancers may call this, so it must remain + // until RemoveSubConn is fully removed. + sc.Shutdown() } func (bw *balancerWrapper) UpdateAddresses(sc balancer.SubConn, addrs []resolver.Address) { diff --git a/vendor/google.golang.org/grpc/internal/balancerload/load.go b/vendor/google.golang.org/grpc/internal/balancerload/load.go index 3a905d966..94a08d687 100644 --- a/vendor/google.golang.org/grpc/internal/balancerload/load.go +++ b/vendor/google.golang.org/grpc/internal/balancerload/load.go @@ -25,7 +25,7 @@ import ( // Parser converts loads from metadata into a concrete type. type Parser interface { // Parse parses loads from metadata. - Parse(md metadata.MD) interface{} + Parse(md metadata.MD) any } var parser Parser @@ -38,7 +38,7 @@ func SetParser(lr Parser) { } // Parse calls parser.Read(). -func Parse(md metadata.MD) interface{} { +func Parse(md metadata.MD) any { if parser == nil { return nil } diff --git a/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go index 6c3f63221..0f31274a3 100644 --- a/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go +++ b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go @@ -230,7 +230,7 @@ type ClientMessage struct { OnClientSide bool // Message can be a proto.Message or []byte. Other messages formats are not // supported. - Message interface{} + Message any } func (c *ClientMessage) toProto() *binlogpb.GrpcLogEntry { @@ -270,7 +270,7 @@ type ServerMessage struct { OnClientSide bool // Message can be a proto.Message or []byte. Other messages formats are not // supported. - Message interface{} + Message any } func (c *ServerMessage) toProto() *binlogpb.GrpcLogEntry { diff --git a/vendor/google.golang.org/grpc/internal/buffer/unbounded.go b/vendor/google.golang.org/grpc/internal/buffer/unbounded.go index 81c2f5fd7..4399c3df4 100644 --- a/vendor/google.golang.org/grpc/internal/buffer/unbounded.go +++ b/vendor/google.golang.org/grpc/internal/buffer/unbounded.go @@ -28,25 +28,25 @@ import "sync" // the underlying mutex used for synchronization. // // Unbounded supports values of any type to be stored in it by using a channel -// of `interface{}`. 
This means that a call to Put() incurs an extra memory -// allocation, and also that users need a type assertion while reading. For -// performance critical code paths, using Unbounded is strongly discouraged and -// defining a new type specific implementation of this buffer is preferred. See +// of `any`. This means that a call to Put() incurs an extra memory allocation, +// and also that users need a type assertion while reading. For performance +// critical code paths, using Unbounded is strongly discouraged and defining a +// new type specific implementation of this buffer is preferred. See // internal/transport/transport.go for an example of this. type Unbounded struct { - c chan interface{} + c chan any closed bool mu sync.Mutex - backlog []interface{} + backlog []any } // NewUnbounded returns a new instance of Unbounded. func NewUnbounded() *Unbounded { - return &Unbounded{c: make(chan interface{}, 1)} + return &Unbounded{c: make(chan any, 1)} } // Put adds t to the unbounded buffer. -func (b *Unbounded) Put(t interface{}) { +func (b *Unbounded) Put(t any) { b.mu.Lock() defer b.mu.Unlock() if b.closed { @@ -89,7 +89,7 @@ func (b *Unbounded) Load() { // // If the unbounded buffer is closed, the read channel returned by this method // is closed. -func (b *Unbounded) Get() <-chan interface{} { +func (b *Unbounded) Get() <-chan any { return b.c } diff --git a/vendor/google.golang.org/grpc/internal/channelz/funcs.go b/vendor/google.golang.org/grpc/internal/channelz/funcs.go index 777cbcd79..5395e7752 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/funcs.go +++ b/vendor/google.golang.org/grpc/internal/channelz/funcs.go @@ -24,9 +24,7 @@ package channelz import ( - "context" "errors" - "fmt" "sort" "sync" "sync/atomic" @@ -40,8 +38,11 @@ const ( ) var ( - db dbWrapper - idGen idGenerator + // IDGen is the global channelz entity ID generator. It should not be used + // outside this package except by tests. + IDGen IDGenerator + + db dbWrapper // EntryPerPage defines the number of channelz entries to be shown on a web page. EntryPerPage = int64(50) curState int32 @@ -52,14 +53,14 @@ var ( func TurnOn() { if !IsOn() { db.set(newChannelMap()) - idGen.reset() + IDGen.Reset() atomic.StoreInt32(&curState, 1) } } // IsOn returns whether channelz data collection is on. func IsOn() bool { - return atomic.CompareAndSwapInt32(&curState, 1, 1) + return atomic.LoadInt32(&curState) == 1 } // SetMaxTraceEntry sets maximum number of trace entry per entity (i.e. channel/subchannel). @@ -97,43 +98,6 @@ func (d *dbWrapper) get() *channelMap { return d.DB } -// NewChannelzStorageForTesting initializes channelz data storage and id -// generator for testing purposes. -// -// Returns a cleanup function to be invoked by the test, which waits for up to -// 10s for all channelz state to be reset by the grpc goroutines when those -// entities get closed. This cleanup function helps with ensuring that tests -// don't mess up each other. 
-func NewChannelzStorageForTesting() (cleanup func() error) { - db.set(newChannelMap()) - idGen.reset() - - return func() error { - cm := db.get() - if cm == nil { - return nil - } - - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - ticker := time.NewTicker(10 * time.Millisecond) - defer ticker.Stop() - for { - cm.mu.RLock() - topLevelChannels, servers, channels, subChannels, listenSockets, normalSockets := len(cm.topLevelChannels), len(cm.servers), len(cm.channels), len(cm.subChannels), len(cm.listenSockets), len(cm.normalSockets) - cm.mu.RUnlock() - - if err := ctx.Err(); err != nil { - return fmt.Errorf("after 10s the channelz map has not been cleaned up yet, topchannels: %d, servers: %d, channels: %d, subchannels: %d, listen sockets: %d, normal sockets: %d", topLevelChannels, servers, channels, subChannels, listenSockets, normalSockets) - } - if topLevelChannels == 0 && servers == 0 && channels == 0 && subChannels == 0 && listenSockets == 0 && normalSockets == 0 { - return nil - } - <-ticker.C - } - } -} - // GetTopChannels returns a slice of top channel's ChannelMetric, along with a // boolean indicating whether there's more top channels to be queried for. // @@ -193,7 +157,7 @@ func GetServer(id int64) *ServerMetric { // // If channelz is not turned ON, the channelz database is not mutated. func RegisterChannel(c Channel, pid *Identifier, ref string) *Identifier { - id := idGen.genID() + id := IDGen.genID() var parent int64 isTopChannel := true if pid != nil { @@ -229,7 +193,7 @@ func RegisterSubChannel(c Channel, pid *Identifier, ref string) (*Identifier, er if pid == nil { return nil, errors.New("a SubChannel's parent id cannot be nil") } - id := idGen.genID() + id := IDGen.genID() if !IsOn() { return newIdentifer(RefSubChannel, id, pid), nil } @@ -251,7 +215,7 @@ func RegisterSubChannel(c Channel, pid *Identifier, ref string) (*Identifier, er // // If channelz is not turned ON, the channelz database is not mutated. func RegisterServer(s Server, ref string) *Identifier { - id := idGen.genID() + id := IDGen.genID() if !IsOn() { return newIdentifer(RefServer, id, nil) } @@ -277,7 +241,7 @@ func RegisterListenSocket(s Socket, pid *Identifier, ref string) (*Identifier, e if pid == nil { return nil, errors.New("a ListenSocket's parent id cannot be 0") } - id := idGen.genID() + id := IDGen.genID() if !IsOn() { return newIdentifer(RefListenSocket, id, pid), nil } @@ -297,7 +261,7 @@ func RegisterNormalSocket(s Socket, pid *Identifier, ref string) (*Identifier, e if pid == nil { return nil, errors.New("a NormalSocket's parent id cannot be 0") } - id := idGen.genID() + id := IDGen.genID() if !IsOn() { return newIdentifer(RefNormalSocket, id, pid), nil } @@ -776,14 +740,17 @@ func (c *channelMap) GetServer(id int64) *ServerMetric { return sm } -type idGenerator struct { +// IDGenerator is an incrementing atomic that tracks IDs for channelz entities. +type IDGenerator struct { id int64 } -func (i *idGenerator) reset() { +// Reset resets the generated ID back to zero. Should only be used at +// initialization or by tests sensitive to the ID number. 
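The channelz changes above export the entity ID generator as `IDGenerator` with a public `Reset`. A tiny standalone sketch of that atomic-counter pattern, kept separate from the vendored type:

```go
// Sketch of an incrementing ID generator: IDs come from atomic.AddInt64 and
// Reset is intended only for initialization or ID-sensitive tests.
package main

import (
	"fmt"
	"sync/atomic"
)

type idGenerator struct{ id int64 }

func (g *idGenerator) reset()       { atomic.StoreInt64(&g.id, 0) }
func (g *idGenerator) genID() int64 { return atomic.AddInt64(&g.id, 1) }

func main() {
	var g idGenerator
	fmt.Println(g.genID(), g.genID()) // 1 2
	g.reset()
	fmt.Println(g.genID()) // 1
}
```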
+func (i *IDGenerator) Reset() { atomic.StoreInt64(&i.id, 0) } -func (i *idGenerator) genID() int64 { +func (i *IDGenerator) genID() int64 { return atomic.AddInt64(&i.id, 1) } diff --git a/vendor/google.golang.org/grpc/internal/channelz/logging.go b/vendor/google.golang.org/grpc/internal/channelz/logging.go index 8e13a3d2c..f89e6f77b 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/logging.go +++ b/vendor/google.golang.org/grpc/internal/channelz/logging.go @@ -31,7 +31,7 @@ func withParens(id *Identifier) string { } // Info logs and adds a trace event if channelz is on. -func Info(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { +func Info(l grpclog.DepthLoggerV2, id *Identifier, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprint(args...), Severity: CtInfo, @@ -39,7 +39,7 @@ func Info(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { } // Infof logs and adds a trace event if channelz is on. -func Infof(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...interface{}) { +func Infof(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprintf(format, args...), Severity: CtInfo, @@ -47,7 +47,7 @@ func Infof(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...inter } // Warning logs and adds a trace event if channelz is on. -func Warning(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { +func Warning(l grpclog.DepthLoggerV2, id *Identifier, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprint(args...), Severity: CtWarning, @@ -55,7 +55,7 @@ func Warning(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { } // Warningf logs and adds a trace event if channelz is on. -func Warningf(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...interface{}) { +func Warningf(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprintf(format, args...), Severity: CtWarning, @@ -63,7 +63,7 @@ func Warningf(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...in } // Error logs and adds a trace event if channelz is on. -func Error(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { +func Error(l grpclog.DepthLoggerV2, id *Identifier, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprint(args...), Severity: CtError, @@ -71,7 +71,7 @@ func Error(l grpclog.DepthLoggerV2, id *Identifier, args ...interface{}) { } // Errorf logs and adds a trace event if channelz is on. 
-func Errorf(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...interface{}) { +func Errorf(l grpclog.DepthLoggerV2, id *Identifier, format string, args ...any) { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprintf(format, args...), Severity: CtError, diff --git a/vendor/google.golang.org/grpc/internal/channelz/types.go b/vendor/google.golang.org/grpc/internal/channelz/types.go index 7b2f350e2..1d4020f53 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/types.go +++ b/vendor/google.golang.org/grpc/internal/channelz/types.go @@ -628,6 +628,7 @@ type tracedChannel interface { type channelTrace struct { cm *channelMap + clearCalled bool createdTime time.Time eventCount int64 mu sync.Mutex @@ -656,6 +657,10 @@ func (c *channelTrace) append(e *TraceEvent) { } func (c *channelTrace) clear() { + if c.clearCalled { + return + } + c.clearCalled = true c.mu.Lock() for _, e := range c.events { if e.RefID != 0 { diff --git a/vendor/google.golang.org/grpc/internal/channelz/util_linux.go b/vendor/google.golang.org/grpc/internal/channelz/util_linux.go index 8d194e44e..98288c3f8 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/util_linux.go +++ b/vendor/google.golang.org/grpc/internal/channelz/util_linux.go @@ -23,7 +23,7 @@ import ( ) // GetSocketOption gets the socket option info of the conn. -func GetSocketOption(socket interface{}) *SocketOptionData { +func GetSocketOption(socket any) *SocketOptionData { c, ok := socket.(syscall.Conn) if !ok { return nil diff --git a/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go b/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go index 837ddc402..b5568b22e 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go +++ b/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go @@ -22,6 +22,6 @@ package channelz // GetSocketOption gets the socket option info of the conn. -func GetSocketOption(c interface{}) *SocketOptionData { +func GetSocketOption(c any) *SocketOptionData { return nil } diff --git a/vendor/google.golang.org/grpc/internal/credentials/credentials.go b/vendor/google.golang.org/grpc/internal/credentials/credentials.go index 32c9b5903..9deee7f65 100644 --- a/vendor/google.golang.org/grpc/internal/credentials/credentials.go +++ b/vendor/google.golang.org/grpc/internal/credentials/credentials.go @@ -25,12 +25,12 @@ import ( type requestInfoKey struct{} // NewRequestInfoContext creates a context with ri. -func NewRequestInfoContext(ctx context.Context, ri interface{}) context.Context { +func NewRequestInfoContext(ctx context.Context, ri any) context.Context { return context.WithValue(ctx, requestInfoKey{}, ri) } // RequestInfoFromContext extracts the RequestInfo from ctx. -func RequestInfoFromContext(ctx context.Context) interface{} { +func RequestInfoFromContext(ctx context.Context) any { return ctx.Value(requestInfoKey{}) } @@ -39,11 +39,11 @@ func RequestInfoFromContext(ctx context.Context) interface{} { type clientHandshakeInfoKey struct{} // ClientHandshakeInfoFromContext extracts the ClientHandshakeInfo from ctx. -func ClientHandshakeInfoFromContext(ctx context.Context) interface{} { +func ClientHandshakeInfoFromContext(ctx context.Context) any { return ctx.Value(clientHandshakeInfoKey{}) } // NewClientHandshakeInfoContext creates a context with chi. 
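The credentials helpers around here store request and handshake info in a context under unexported struct keys, now typed as `any`. A small illustration of that context-key pattern follows; the key and payload names are made up for the example.

```go
// Sketch of the unexported-struct context-key pattern: the empty struct type
// cannot collide with keys from other packages, and values round-trip as any.
package main

import (
	"context"
	"fmt"
)

type requestInfoKey struct{}

func newRequestInfoContext(ctx context.Context, ri any) context.Context {
	return context.WithValue(ctx, requestInfoKey{}, ri)
}

func requestInfoFromContext(ctx context.Context) any {
	return ctx.Value(requestInfoKey{})
}

func main() {
	ctx := newRequestInfoContext(context.Background(), "method=/pkg.Svc/Call")
	if ri, ok := requestInfoFromContext(ctx).(string); ok {
		fmt.Println(ri) // callers assert back to the concrete type they stored
	}
}
```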
-func NewClientHandshakeInfoContext(ctx context.Context, chi interface{}) context.Context { +func NewClientHandshakeInfoContext(ctx context.Context, chi any) context.Context { return context.WithValue(ctx, clientHandshakeInfoKey{}, chi) } diff --git a/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go index 77c2c0b89..3cf10ddfb 100644 --- a/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go +++ b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go @@ -37,9 +37,12 @@ var ( // checking which NACKs configs specifying ring sizes > 8*1024*1024 (~8M). RingHashCap = uint64FromEnv("GRPC_RING_HASH_CAP", 4096, 1, 8*1024*1024) // PickFirstLBConfig is set if we should support configuration of the - // pick_first LB policy, which can be enabled by setting the environment - // variable "GRPC_EXPERIMENTAL_PICKFIRST_LB_CONFIG" to "true". - PickFirstLBConfig = boolFromEnv("GRPC_EXPERIMENTAL_PICKFIRST_LB_CONFIG", false) + // pick_first LB policy. + PickFirstLBConfig = boolFromEnv("GRPC_EXPERIMENTAL_PICKFIRST_LB_CONFIG", true) + // LeastRequestLB is set if we should support the least_request_experimental + // LB policy, which can be enabled by setting the environment variable + // "GRPC_EXPERIMENTAL_ENABLE_LEAST_REQUEST" to "true". + LeastRequestLB = boolFromEnv("GRPC_EXPERIMENTAL_ENABLE_LEAST_REQUEST", false) // ALTSMaxConcurrentHandshakes is the maximum number of concurrent ALTS // handshakes that can be performed. ALTSMaxConcurrentHandshakes = uint64FromEnv("GRPC_ALTS_MAX_CONCURRENT_HANDSHAKES", 100, 1, 100) diff --git a/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go b/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go index b68e26a36..bfc45102a 100644 --- a/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go +++ b/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go @@ -30,7 +30,7 @@ var Logger LoggerV2 var DepthLogger DepthLoggerV2 // InfoDepth logs to the INFO log at the specified depth. -func InfoDepth(depth int, args ...interface{}) { +func InfoDepth(depth int, args ...any) { if DepthLogger != nil { DepthLogger.InfoDepth(depth, args...) } else { @@ -39,7 +39,7 @@ func InfoDepth(depth int, args ...interface{}) { } // WarningDepth logs to the WARNING log at the specified depth. -func WarningDepth(depth int, args ...interface{}) { +func WarningDepth(depth int, args ...any) { if DepthLogger != nil { DepthLogger.WarningDepth(depth, args...) } else { @@ -48,7 +48,7 @@ func WarningDepth(depth int, args ...interface{}) { } // ErrorDepth logs to the ERROR log at the specified depth. -func ErrorDepth(depth int, args ...interface{}) { +func ErrorDepth(depth int, args ...any) { if DepthLogger != nil { DepthLogger.ErrorDepth(depth, args...) } else { @@ -57,7 +57,7 @@ func ErrorDepth(depth int, args ...interface{}) { } // FatalDepth logs to the FATAL log at the specified depth. -func FatalDepth(depth int, args ...interface{}) { +func FatalDepth(depth int, args ...any) { if DepthLogger != nil { DepthLogger.FatalDepth(depth, args...) } else { @@ -71,35 +71,35 @@ func FatalDepth(depth int, args ...interface{}) { // is defined here to avoid a circular dependency. type LoggerV2 interface { // Info logs to INFO log. Arguments are handled in the manner of fmt.Print. - Info(args ...interface{}) + Info(args ...any) // Infoln logs to INFO log. Arguments are handled in the manner of fmt.Println. - Infoln(args ...interface{}) + Infoln(args ...any) // Infof logs to INFO log. 
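The envconfig hunk above flips pick_first LB config support to on by default and gates the least_request policy behind GRPC_EXPERIMENTAL_ENABLE_LEAST_REQUEST. The body of boolFromEnv is not part of this diff, so the helper below is only an assumed sketch of how a default-aware boolean flag could be read, not the vendored implementation.

```go
// Assumed sketch: a true default is only overridden by an explicit "false",
// and a false default is only enabled by an explicit "true".
package main

import (
	"fmt"
	"os"
	"strings"
)

func boolFromEnvSketch(name string, def bool) bool {
	v := strings.ToLower(os.Getenv(name))
	if def {
		return v != "false"
	}
	return v == "true"
}

func main() {
	os.Setenv("GRPC_EXPERIMENTAL_ENABLE_LEAST_REQUEST", "true")
	fmt.Println(boolFromEnvSketch("GRPC_EXPERIMENTAL_ENABLE_LEAST_REQUEST", false)) // true
	fmt.Println(boolFromEnvSketch("GRPC_EXPERIMENTAL_PICKFIRST_LB_CONFIG", true))   // true (default)
}
```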
Arguments are handled in the manner of fmt.Printf. - Infof(format string, args ...interface{}) + Infof(format string, args ...any) // Warning logs to WARNING log. Arguments are handled in the manner of fmt.Print. - Warning(args ...interface{}) + Warning(args ...any) // Warningln logs to WARNING log. Arguments are handled in the manner of fmt.Println. - Warningln(args ...interface{}) + Warningln(args ...any) // Warningf logs to WARNING log. Arguments are handled in the manner of fmt.Printf. - Warningf(format string, args ...interface{}) + Warningf(format string, args ...any) // Error logs to ERROR log. Arguments are handled in the manner of fmt.Print. - Error(args ...interface{}) + Error(args ...any) // Errorln logs to ERROR log. Arguments are handled in the manner of fmt.Println. - Errorln(args ...interface{}) + Errorln(args ...any) // Errorf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. - Errorf(format string, args ...interface{}) + Errorf(format string, args ...any) // Fatal logs to ERROR log. Arguments are handled in the manner of fmt.Print. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatal(args ...interface{}) + Fatal(args ...any) // Fatalln logs to ERROR log. Arguments are handled in the manner of fmt.Println. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatalln(args ...interface{}) + Fatalln(args ...any) // Fatalf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. // gRPC ensures that all Fatal logs will exit with os.Exit(1). // Implementations may also call os.Exit() with a non-zero exit code. - Fatalf(format string, args ...interface{}) + Fatalf(format string, args ...any) // V reports whether verbosity level l is at least the requested verbose level. V(l int) bool } @@ -116,11 +116,11 @@ type LoggerV2 interface { // later release. type DepthLoggerV2 interface { // InfoDepth logs to INFO log at the specified depth. Arguments are handled in the manner of fmt.Println. - InfoDepth(depth int, args ...interface{}) + InfoDepth(depth int, args ...any) // WarningDepth logs to WARNING log at the specified depth. Arguments are handled in the manner of fmt.Println. - WarningDepth(depth int, args ...interface{}) + WarningDepth(depth int, args ...any) // ErrorDepth logs to ERROR log at the specified depth. Arguments are handled in the manner of fmt.Println. - ErrorDepth(depth int, args ...interface{}) + ErrorDepth(depth int, args ...any) // FatalDepth logs to FATAL log at the specified depth. Arguments are handled in the manner of fmt.Println. - FatalDepth(depth int, args ...interface{}) + FatalDepth(depth int, args ...any) } diff --git a/vendor/google.golang.org/grpc/internal/grpclog/prefixLogger.go b/vendor/google.golang.org/grpc/internal/grpclog/prefixLogger.go index 02224b42c..faa998de7 100644 --- a/vendor/google.golang.org/grpc/internal/grpclog/prefixLogger.go +++ b/vendor/google.golang.org/grpc/internal/grpclog/prefixLogger.go @@ -31,7 +31,7 @@ type PrefixLogger struct { } // Infof does info logging. -func (pl *PrefixLogger) Infof(format string, args ...interface{}) { +func (pl *PrefixLogger) Infof(format string, args ...any) { if pl != nil { // Handle nil, so the tests can pass in a nil logger. format = pl.prefix + format @@ -42,7 +42,7 @@ func (pl *PrefixLogger) Infof(format string, args ...interface{}) { } // Warningf does warning logging. 
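The PrefixLogger methods above now take `...any` and tolerate a nil receiver. A standalone sketch of that shape, using the standard library logger instead of gRPC's depth logger:

```go
// Sketch of a prefix logger: the component prefix is prepended when the
// receiver is non-nil; a nil receiver logs without a prefix so callers and
// tests never need a nil check.
package main

import "log"

type prefixLogger struct {
	prefix string
}

func (pl *prefixLogger) Infof(format string, args ...any) {
	if pl != nil {
		format = pl.prefix + format
	}
	log.Printf(format, args...)
}

func main() {
	pl := &prefixLogger{prefix: "[transport] "}
	pl.Infof("stream %d opened", 7)
	var nilLogger *prefixLogger
	nilLogger.Infof("no prefix for %s", "this one")
}
```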
-func (pl *PrefixLogger) Warningf(format string, args ...interface{}) { +func (pl *PrefixLogger) Warningf(format string, args ...any) { if pl != nil { format = pl.prefix + format pl.logger.WarningDepth(1, fmt.Sprintf(format, args...)) @@ -52,7 +52,7 @@ func (pl *PrefixLogger) Warningf(format string, args ...interface{}) { } // Errorf does error logging. -func (pl *PrefixLogger) Errorf(format string, args ...interface{}) { +func (pl *PrefixLogger) Errorf(format string, args ...any) { if pl != nil { format = pl.prefix + format pl.logger.ErrorDepth(1, fmt.Sprintf(format, args...)) @@ -62,7 +62,7 @@ func (pl *PrefixLogger) Errorf(format string, args ...interface{}) { } // Debugf does info logging at verbose level 2. -func (pl *PrefixLogger) Debugf(format string, args ...interface{}) { +func (pl *PrefixLogger) Debugf(format string, args ...any) { // TODO(6044): Refactor interfaces LoggerV2 and DepthLogger, and maybe // rewrite PrefixLogger a little to ensure that we don't use the global // `Logger` here, and instead use the `logger` field. diff --git a/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go b/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go index 37b8d4117..900917dbe 100644 --- a/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go +++ b/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go @@ -32,10 +32,10 @@ import ( // // This type is safe for concurrent access. type CallbackSerializer struct { - // Done is closed once the serializer is shut down completely, i.e all + // done is closed once the serializer is shut down completely, i.e all // scheduled callbacks are executed and the serializer has deallocated all // its resources. - Done chan struct{} + done chan struct{} callbacks *buffer.Unbounded closedMu sync.Mutex @@ -48,12 +48,12 @@ type CallbackSerializer struct { // callbacks will be added once this context is canceled, and any pending un-run // callbacks will be executed before the serializer is shut down. func NewCallbackSerializer(ctx context.Context) *CallbackSerializer { - t := &CallbackSerializer{ - Done: make(chan struct{}), + cs := &CallbackSerializer{ + done: make(chan struct{}), callbacks: buffer.NewUnbounded(), } - go t.run(ctx) - return t + go cs.run(ctx) + return cs } // Schedule adds a callback to be scheduled after existing callbacks are run. @@ -64,56 +64,62 @@ func NewCallbackSerializer(ctx context.Context) *CallbackSerializer { // Return value indicates if the callback was successfully added to the list of // callbacks to be executed by the serializer. It is not possible to add // callbacks once the context passed to NewCallbackSerializer is cancelled. -func (t *CallbackSerializer) Schedule(f func(ctx context.Context)) bool { - t.closedMu.Lock() - defer t.closedMu.Unlock() +func (cs *CallbackSerializer) Schedule(f func(ctx context.Context)) bool { + cs.closedMu.Lock() + defer cs.closedMu.Unlock() - if t.closed { + if cs.closed { return false } - t.callbacks.Put(f) + cs.callbacks.Put(f) return true } -func (t *CallbackSerializer) run(ctx context.Context) { +func (cs *CallbackSerializer) run(ctx context.Context) { var backlog []func(context.Context) - defer close(t.Done) + defer close(cs.done) for ctx.Err() == nil { select { case <-ctx.Done(): // Do nothing here. Next iteration of the for loop will not happen, // since ctx.Err() would be non-nil. 
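The CallbackSerializer changes above unexport the done channel and add a Done() accessor. Since internal packages cannot be imported from outside gRPC, here is a simplified standalone sketch of the same pattern: callbacks run one at a time on a single goroutine, scheduling stops once the context is canceled, and Done() reports when everything already queued has finished. The vendored type uses the unbounded buffer shown earlier; this sketch uses a bounded channel purely for brevity.

```go
package main

import (
	"context"
	"fmt"
	"time"
)

type callbackSerializer struct {
	ctx       context.Context
	callbacks chan func(context.Context)
	done      chan struct{}
}

func newCallbackSerializer(ctx context.Context) *callbackSerializer {
	cs := &callbackSerializer{
		ctx:       ctx,
		callbacks: make(chan func(context.Context), 128), // bounded only to keep the sketch short
		done:      make(chan struct{}),
	}
	go cs.run()
	return cs
}

// Schedule reports whether the callback was accepted; it fails after shutdown.
func (cs *callbackSerializer) Schedule(f func(context.Context)) bool {
	select {
	case <-cs.ctx.Done():
		return false
	case cs.callbacks <- f:
		return true
	}
}

func (cs *callbackSerializer) run() {
	defer close(cs.done)
	for {
		select {
		case <-cs.ctx.Done():
			for { // drain anything already scheduled, then exit
				select {
				case f := <-cs.callbacks:
					f(cs.ctx)
				default:
					return
				}
			}
		case f := <-cs.callbacks:
			f(cs.ctx)
		}
	}
}

// Done is closed once the context is canceled and queued callbacks have run.
func (cs *callbackSerializer) Done() <-chan struct{} { return cs.done }

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cs := newCallbackSerializer(ctx)
	for i := 0; i < 3; i++ {
		i := i
		cs.Schedule(func(context.Context) { fmt.Println("callback", i) })
	}
	time.Sleep(10 * time.Millisecond)
	cancel()
	<-cs.Done()
}
```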
- case callback, ok := <-t.callbacks.Get(): + case callback, ok := <-cs.callbacks.Get(): if !ok { return } - t.callbacks.Load() + cs.callbacks.Load() callback.(func(ctx context.Context))(ctx) } } // Fetch pending callbacks if any, and execute them before returning from - // this method and closing t.Done. - t.closedMu.Lock() - t.closed = true - backlog = t.fetchPendingCallbacks() - t.callbacks.Close() - t.closedMu.Unlock() + // this method and closing cs.done. + cs.closedMu.Lock() + cs.closed = true + backlog = cs.fetchPendingCallbacks() + cs.callbacks.Close() + cs.closedMu.Unlock() for _, b := range backlog { b(ctx) } } -func (t *CallbackSerializer) fetchPendingCallbacks() []func(context.Context) { +func (cs *CallbackSerializer) fetchPendingCallbacks() []func(context.Context) { var backlog []func(context.Context) for { select { - case b := <-t.callbacks.Get(): + case b := <-cs.callbacks.Get(): backlog = append(backlog, b.(func(context.Context))) - t.callbacks.Load() + cs.callbacks.Load() default: return backlog } } } + +// Done returns a channel that is closed after the context passed to +// NewCallbackSerializer is canceled and all callbacks have been executed. +func (cs *CallbackSerializer) Done() <-chan struct{} { + return cs.done +} diff --git a/vendor/google.golang.org/grpc/internal/grpcsync/pubsub.go b/vendor/google.golang.org/grpc/internal/grpcsync/pubsub.go index f58b5ffa6..aef8cec1a 100644 --- a/vendor/google.golang.org/grpc/internal/grpcsync/pubsub.go +++ b/vendor/google.golang.org/grpc/internal/grpcsync/pubsub.go @@ -29,7 +29,7 @@ import ( type Subscriber interface { // OnMessage is invoked when a new message is published. Implementations // must not block in this method. - OnMessage(msg interface{}) + OnMessage(msg any) } // PubSub is a simple one-to-many publish-subscribe system that supports @@ -40,25 +40,23 @@ type Subscriber interface { // subscribers interested in receiving these messages register a callback // via the Subscribe() method. // -// Once a PubSub is stopped, no more messages can be published, and -// it is guaranteed that no more subscriber callback will be invoked. +// Once a PubSub is stopped, no more messages can be published, but any pending +// published messages will be delivered to the subscribers. Done may be used +// to determine when all published messages have been delivered. type PubSub struct { - cs *CallbackSerializer - cancel context.CancelFunc + cs *CallbackSerializer // Access to the below fields are guarded by this mutex. mu sync.Mutex - msg interface{} + msg any subscribers map[Subscriber]bool - stopped bool } -// NewPubSub returns a new PubSub instance. -func NewPubSub() *PubSub { - ctx, cancel := context.WithCancel(context.Background()) +// NewPubSub returns a new PubSub instance. Users should cancel the +// provided context to shutdown the PubSub. +func NewPubSub(ctx context.Context) *PubSub { return &PubSub{ cs: NewCallbackSerializer(ctx), - cancel: cancel, subscribers: map[Subscriber]bool{}, } } @@ -75,10 +73,6 @@ func (ps *PubSub) Subscribe(sub Subscriber) (cancel func()) { ps.mu.Lock() defer ps.mu.Unlock() - if ps.stopped { - return func() {} - } - ps.subscribers[sub] = true if ps.msg != nil { @@ -102,14 +96,10 @@ func (ps *PubSub) Subscribe(sub Subscriber) (cancel func()) { // Publish publishes the provided message to the PubSub, and invokes // callbacks registered by subscribers asynchronously. 
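The PubSub rework above replaces Stop() with a constructor-supplied context and a Done() channel, while keeping last-message replay for new subscribers. A simplified standalone sketch of that publish-subscribe shape follows; unlike the vendored type, it invokes subscribers synchronously rather than through the callback serializer.

```go
package main

import (
	"fmt"
	"sync"
)

type subscriber interface {
	OnMessage(msg any)
}

type pubSub struct {
	mu          sync.Mutex
	msg         any // most recent message, replayed to late subscribers
	subscribers map[subscriber]bool
}

func newPubSub() *pubSub { return &pubSub{subscribers: map[subscriber]bool{}} }

// Subscribe registers sub, replays the latest message if any, and returns a
// cancel func that removes the subscription.
func (ps *pubSub) Subscribe(sub subscriber) (cancel func()) {
	ps.mu.Lock()
	defer ps.mu.Unlock()
	ps.subscribers[sub] = true
	if ps.msg != nil {
		sub.OnMessage(ps.msg)
	}
	return func() {
		ps.mu.Lock()
		defer ps.mu.Unlock()
		delete(ps.subscribers, sub)
	}
}

// Publish records msg as the latest message and fans it out to subscribers.
func (ps *pubSub) Publish(msg any) {
	ps.mu.Lock()
	defer ps.mu.Unlock()
	ps.msg = msg
	for sub := range ps.subscribers {
		sub.OnMessage(msg)
	}
}

type printer struct{ name string }

func (p *printer) OnMessage(msg any) { fmt.Println(p.name, "got", msg) }

func main() {
	ps := newPubSub()
	cancel := ps.Subscribe(&printer{name: "sub1"})
	ps.Publish("READY")
	ps.Subscribe(&printer{name: "sub2"}) // immediately sees the latest message
	cancel()
	ps.Publish("IDLE")
}
```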
-func (ps *PubSub) Publish(msg interface{}) { +func (ps *PubSub) Publish(msg any) { ps.mu.Lock() defer ps.mu.Unlock() - if ps.stopped { - return - } - ps.msg = msg for sub := range ps.subscribers { s := sub @@ -124,13 +114,8 @@ func (ps *PubSub) Publish(msg interface{}) { } } -// Stop shuts down the PubSub and releases any resources allocated by it. -// It is guaranteed that no subscriber callbacks would be invoked once this -// method returns. -func (ps *PubSub) Stop() { - ps.mu.Lock() - defer ps.mu.Unlock() - ps.stopped = true - - ps.cancel() +// Done returns a channel that is closed after the context passed to NewPubSub +// is canceled and all updates have been sent to subscribers. +func (ps *PubSub) Done() <-chan struct{} { + return ps.cs.Done() } diff --git a/vendor/google.golang.org/grpc/idle.go b/vendor/google.golang.org/grpc/internal/idle/idle.go similarity index 61% rename from vendor/google.golang.org/grpc/idle.go rename to vendor/google.golang.org/grpc/internal/idle/idle.go index dc3dc72f6..6c272476e 100644 --- a/vendor/google.golang.org/grpc/idle.go +++ b/vendor/google.golang.org/grpc/internal/idle/idle.go @@ -16,7 +16,9 @@ * */ -package grpc +// Package idle contains a component for managing idleness (entering and exiting) +// based on RPC activity. +package idle import ( "fmt" @@ -24,6 +26,8 @@ import ( "sync" "sync/atomic" "time" + + "google.golang.org/grpc/grpclog" ) // For overriding in unit tests. @@ -31,31 +35,31 @@ var timeAfterFunc = func(d time.Duration, f func()) *time.Timer { return time.AfterFunc(d, f) } -// idlenessEnforcer is the functionality provided by grpc.ClientConn to enter +// Enforcer is the functionality provided by grpc.ClientConn to enter // and exit from idle mode. -type idlenessEnforcer interface { - exitIdleMode() error - enterIdleMode() error +type Enforcer interface { + ExitIdleMode() error + EnterIdleMode() error } -// idlenessManager defines the functionality required to track RPC activity on a +// Manager defines the functionality required to track RPC activity on a // channel. -type idlenessManager interface { - onCallBegin() error - onCallEnd() - close() +type Manager interface { + OnCallBegin() error + OnCallEnd() + Close() } -type noopIdlenessManager struct{} +type noopManager struct{} -func (noopIdlenessManager) onCallBegin() error { return nil } -func (noopIdlenessManager) onCallEnd() {} -func (noopIdlenessManager) close() {} +func (noopManager) OnCallBegin() error { return nil } +func (noopManager) OnCallEnd() {} +func (noopManager) Close() {} -// idlenessManagerImpl implements the idlenessManager interface. It uses atomic -// operations to synchronize access to shared state and a mutex to guarantee -// mutual exclusion in a critical section. -type idlenessManagerImpl struct { +// manager implements the Manager interface. It uses atomic operations to +// synchronize access to shared state and a mutex to guarantee mutual exclusion +// in a critical section. +type manager struct { // State accessed atomically. lastCallEndTime int64 // Unix timestamp in nanos; time when the most recent RPC completed. activeCallsCount int32 // Count of active RPCs; -math.MaxInt32 means channel is idle or is trying to get there. @@ -64,14 +68,15 @@ type idlenessManagerImpl struct { // Can be accessed without atomics or mutex since these are set at creation // time and read-only after that. - enforcer idlenessEnforcer // Functionality provided by grpc.ClientConn. - timeout int64 // Idle timeout duration nanos stored as an int64. 
+ enforcer Enforcer // Functionality provided by grpc.ClientConn. + timeout int64 // Idle timeout duration nanos stored as an int64. + logger grpclog.LoggerV2 // idleMu is used to guarantee mutual exclusion in two scenarios: // - Opposing intentions: // - a: Idle timeout has fired and handleIdleTimeout() is trying to put // the channel in idle mode because the channel has been inactive. - // - b: At the same time an RPC is made on the channel, and onCallBegin() + // - b: At the same time an RPC is made on the channel, and OnCallBegin() // is trying to prevent the channel from going idle. // - Competing intentions: // - The channel is in idle mode and there are multiple RPCs starting at @@ -83,28 +88,37 @@ type idlenessManagerImpl struct { timer *time.Timer } -// newIdlenessManager creates a new idleness manager implementation for the +// ManagerOptions is a collection of options used by +// NewManager. +type ManagerOptions struct { + Enforcer Enforcer + Timeout time.Duration + Logger grpclog.LoggerV2 +} + +// NewManager creates a new idleness manager implementation for the // given idle timeout. -func newIdlenessManager(enforcer idlenessEnforcer, idleTimeout time.Duration) idlenessManager { - if idleTimeout == 0 { - return noopIdlenessManager{} +func NewManager(opts ManagerOptions) Manager { + if opts.Timeout == 0 { + return noopManager{} } - i := &idlenessManagerImpl{ - enforcer: enforcer, - timeout: int64(idleTimeout), + m := &manager{ + enforcer: opts.Enforcer, + timeout: int64(opts.Timeout), + logger: opts.Logger, } - i.timer = timeAfterFunc(idleTimeout, i.handleIdleTimeout) - return i + m.timer = timeAfterFunc(opts.Timeout, m.handleIdleTimeout) + return m } // resetIdleTimer resets the idle timer to the given duration. This method // should only be called from the timer callback. -func (i *idlenessManagerImpl) resetIdleTimer(d time.Duration) { - i.idleMu.Lock() - defer i.idleMu.Unlock() +func (m *manager) resetIdleTimer(d time.Duration) { + m.idleMu.Lock() + defer m.idleMu.Unlock() - if i.timer == nil { + if m.timer == nil { // Only close sets timer to nil. We are done. return } @@ -112,47 +126,47 @@ func (i *idlenessManagerImpl) resetIdleTimer(d time.Duration) { // It is safe to ignore the return value from Reset() because this method is // only ever called from the timer callback, which means the timer has // already fired. - i.timer.Reset(d) + m.timer.Reset(d) } // handleIdleTimeout is the timer callback that is invoked upon expiry of the // configured idle timeout. The channel is considered inactive if there are no // ongoing calls and no RPC activity since the last time the timer fired. -func (i *idlenessManagerImpl) handleIdleTimeout() { - if i.isClosed() { +func (m *manager) handleIdleTimeout() { + if m.isClosed() { return } - if atomic.LoadInt32(&i.activeCallsCount) > 0 { - i.resetIdleTimer(time.Duration(i.timeout)) + if atomic.LoadInt32(&m.activeCallsCount) > 0 { + m.resetIdleTimer(time.Duration(m.timeout)) return } // There has been activity on the channel since we last got here. Reset the // timer and return. - if atomic.LoadInt32(&i.activeSinceLastTimerCheck) == 1 { + if atomic.LoadInt32(&m.activeSinceLastTimerCheck) == 1 { // Set the timer to fire after a duration of idle timeout, calculated // from the time the most recent RPC completed. 
- atomic.StoreInt32(&i.activeSinceLastTimerCheck, 0) - i.resetIdleTimer(time.Duration(atomic.LoadInt64(&i.lastCallEndTime) + i.timeout - time.Now().UnixNano())) + atomic.StoreInt32(&m.activeSinceLastTimerCheck, 0) + m.resetIdleTimer(time.Duration(atomic.LoadInt64(&m.lastCallEndTime) + m.timeout - time.Now().UnixNano())) return } // This CAS operation is extremely likely to succeed given that there has // been no activity since the last time we were here. Setting the - // activeCallsCount to -math.MaxInt32 indicates to onCallBegin() that the + // activeCallsCount to -math.MaxInt32 indicates to OnCallBegin() that the // channel is either in idle mode or is trying to get there. - if !atomic.CompareAndSwapInt32(&i.activeCallsCount, 0, -math.MaxInt32) { + if !atomic.CompareAndSwapInt32(&m.activeCallsCount, 0, -math.MaxInt32) { // This CAS operation can fail if an RPC started after we checked for // activity at the top of this method, or one was ongoing from before // the last time we were here. In both case, reset the timer and return. - i.resetIdleTimer(time.Duration(i.timeout)) + m.resetIdleTimer(time.Duration(m.timeout)) return } // Now that we've set the active calls count to -math.MaxInt32, it's time to // actually move to idle mode. - if i.tryEnterIdleMode() { + if m.tryEnterIdleMode() { // Successfully entered idle mode. No timer needed until we exit idle. return } @@ -160,8 +174,8 @@ func (i *idlenessManagerImpl) handleIdleTimeout() { // Failed to enter idle mode due to a concurrent RPC that kept the channel // active, or because of an error from the channel. Undo the attempt to // enter idle, and reset the timer to try again later. - atomic.AddInt32(&i.activeCallsCount, math.MaxInt32) - i.resetIdleTimer(time.Duration(i.timeout)) + atomic.AddInt32(&m.activeCallsCount, math.MaxInt32) + m.resetIdleTimer(time.Duration(m.timeout)) } // tryEnterIdleMode instructs the channel to enter idle mode. But before @@ -171,15 +185,15 @@ func (i *idlenessManagerImpl) handleIdleTimeout() { // Return value indicates whether or not the channel moved to idle mode. // // Holds idleMu which ensures mutual exclusion with exitIdleMode. -func (i *idlenessManagerImpl) tryEnterIdleMode() bool { - i.idleMu.Lock() - defer i.idleMu.Unlock() +func (m *manager) tryEnterIdleMode() bool { + m.idleMu.Lock() + defer m.idleMu.Unlock() - if atomic.LoadInt32(&i.activeCallsCount) != -math.MaxInt32 { + if atomic.LoadInt32(&m.activeCallsCount) != -math.MaxInt32 { // We raced and lost to a new RPC. Very rare, but stop entering idle. return false } - if atomic.LoadInt32(&i.activeSinceLastTimerCheck) == 1 { + if atomic.LoadInt32(&m.activeSinceLastTimerCheck) == 1 { // An very short RPC could have come in (and also finished) after we // checked for calls count and activity in handleIdleTimeout(), but // before the CAS operation. So, we need to check for activity again. @@ -189,99 +203,99 @@ func (i *idlenessManagerImpl) tryEnterIdleMode() bool { // No new RPCs have come in since we last set the active calls count value // -math.MaxInt32 in the timer callback. And since we have the lock, it is // safe to enter idle mode now. - if err := i.enforcer.enterIdleMode(); err != nil { - logger.Errorf("Failed to enter idle mode: %v", err) + if err := m.enforcer.EnterIdleMode(); err != nil { + m.logger.Errorf("Failed to enter idle mode: %v", err) return false } // Successfully entered idle mode. - i.actuallyIdle = true + m.actuallyIdle = true return true } -// onCallBegin is invoked at the start of every RPC. 
-func (i *idlenessManagerImpl) onCallBegin() error { - if i.isClosed() { +// OnCallBegin is invoked at the start of every RPC. +func (m *manager) OnCallBegin() error { + if m.isClosed() { return nil } - if atomic.AddInt32(&i.activeCallsCount, 1) > 0 { + if atomic.AddInt32(&m.activeCallsCount, 1) > 0 { // Channel is not idle now. Set the activity bit and allow the call. - atomic.StoreInt32(&i.activeSinceLastTimerCheck, 1) + atomic.StoreInt32(&m.activeSinceLastTimerCheck, 1) return nil } // Channel is either in idle mode or is in the process of moving to idle // mode. Attempt to exit idle mode to allow this RPC. - if err := i.exitIdleMode(); err != nil { + if err := m.exitIdleMode(); err != nil { // Undo the increment to calls count, and return an error causing the // RPC to fail. - atomic.AddInt32(&i.activeCallsCount, -1) + atomic.AddInt32(&m.activeCallsCount, -1) return err } - atomic.StoreInt32(&i.activeSinceLastTimerCheck, 1) + atomic.StoreInt32(&m.activeSinceLastTimerCheck, 1) return nil } // exitIdleMode instructs the channel to exit idle mode. // // Holds idleMu which ensures mutual exclusion with tryEnterIdleMode. -func (i *idlenessManagerImpl) exitIdleMode() error { - i.idleMu.Lock() - defer i.idleMu.Unlock() +func (m *manager) exitIdleMode() error { + m.idleMu.Lock() + defer m.idleMu.Unlock() - if !i.actuallyIdle { + if !m.actuallyIdle { // This can happen in two scenarios: // - handleIdleTimeout() set the calls count to -math.MaxInt32 and called // tryEnterIdleMode(). But before the latter could grab the lock, an RPC - // came in and onCallBegin() noticed that the calls count is negative. + // came in and OnCallBegin() noticed that the calls count is negative. // - Channel is in idle mode, and multiple new RPCs come in at the same - // time, all of them notice a negative calls count in onCallBegin and get + // time, all of them notice a negative calls count in OnCallBegin and get // here. The first one to get the lock would got the channel to exit idle. // // Either way, nothing to do here. return nil } - if err := i.enforcer.exitIdleMode(); err != nil { + if err := m.enforcer.ExitIdleMode(); err != nil { return fmt.Errorf("channel failed to exit idle mode: %v", err) } // Undo the idle entry process. This also respects any new RPC attempts. - atomic.AddInt32(&i.activeCallsCount, math.MaxInt32) - i.actuallyIdle = false + atomic.AddInt32(&m.activeCallsCount, math.MaxInt32) + m.actuallyIdle = false // Start a new timer to fire after the configured idle timeout. - i.timer = timeAfterFunc(time.Duration(i.timeout), i.handleIdleTimeout) + m.timer = timeAfterFunc(time.Duration(m.timeout), m.handleIdleTimeout) return nil } -// onCallEnd is invoked at the end of every RPC. -func (i *idlenessManagerImpl) onCallEnd() { - if i.isClosed() { +// OnCallEnd is invoked at the end of every RPC. +func (m *manager) OnCallEnd() { + if m.isClosed() { return } // Record the time at which the most recent call finished. - atomic.StoreInt64(&i.lastCallEndTime, time.Now().UnixNano()) + atomic.StoreInt64(&m.lastCallEndTime, time.Now().UnixNano()) // Decrement the active calls count. This count can temporarily go negative // when the timer callback is in the process of moving the channel to idle // mode, but one or more RPCs come in and complete before the timer callback // can get done with the process of moving to idle mode. 
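The idleness manager moved above coordinates an idle timer with in-flight RPC counts using atomics and a -math.MaxInt32 sentinel. The sketch below keeps only the externally visible shape (OnCallBegin/OnCallEnd plus an idle callback after a quiet period) using a plain mutex, so it is a deliberate simplification of the vendored logic, not a copy of it.

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

type idleTracker struct {
	mu          sync.Mutex
	activeCalls int
	timeout     time.Duration
	timer       *time.Timer
	onIdle      func()
}

func newIdleTracker(timeout time.Duration, onIdle func()) *idleTracker {
	t := &idleTracker{timeout: timeout, onIdle: onIdle}
	t.timer = time.AfterFunc(timeout, t.maybeGoIdle)
	return t
}

// OnCallBegin marks an RPC as active; the channel never goes idle mid-call.
func (t *idleTracker) OnCallBegin() {
	t.mu.Lock()
	defer t.mu.Unlock()
	t.activeCalls++
	t.timer.Stop()
}

// OnCallEnd restarts the idle countdown once the last active call finishes.
func (t *idleTracker) OnCallEnd() {
	t.mu.Lock()
	defer t.mu.Unlock()
	t.activeCalls--
	if t.activeCalls == 0 {
		t.timer.Reset(t.timeout)
	}
}

func (t *idleTracker) maybeGoIdle() {
	t.mu.Lock()
	defer t.mu.Unlock()
	if t.activeCalls == 0 {
		t.onIdle()
	}
}

func main() {
	done := make(chan struct{})
	t := newIdleTracker(50*time.Millisecond, func() { fmt.Println("entering idle"); close(done) })
	t.OnCallBegin()
	time.Sleep(20 * time.Millisecond)
	t.OnCallEnd()
	<-done
}
```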
- atomic.AddInt32(&i.activeCallsCount, -1) + atomic.AddInt32(&m.activeCallsCount, -1) } -func (i *idlenessManagerImpl) isClosed() bool { - return atomic.LoadInt32(&i.closed) == 1 +func (m *manager) isClosed() bool { + return atomic.LoadInt32(&m.closed) == 1 } -func (i *idlenessManagerImpl) close() { - atomic.StoreInt32(&i.closed, 1) +func (m *manager) Close() { + atomic.StoreInt32(&m.closed, 1) - i.idleMu.Lock() - i.timer.Stop() - i.timer = nil - i.idleMu.Unlock() + m.idleMu.Lock() + m.timer.Stop() + m.timer = nil + m.idleMu.Unlock() } diff --git a/vendor/google.golang.org/grpc/internal/internal.go b/vendor/google.golang.org/grpc/internal/internal.go index 42ff39c84..c8a8c76d6 100644 --- a/vendor/google.golang.org/grpc/internal/internal.go +++ b/vendor/google.golang.org/grpc/internal/internal.go @@ -30,7 +30,7 @@ import ( var ( // WithHealthCheckFunc is set by dialoptions.go - WithHealthCheckFunc interface{} // func (HealthChecker) DialOption + WithHealthCheckFunc any // func (HealthChecker) DialOption // HealthCheckFunc is used to provide client-side LB channel health checking HealthCheckFunc HealthChecker // BalancerUnregister is exported by package balancer to unregister a balancer. @@ -38,8 +38,12 @@ var ( // KeepaliveMinPingTime is the minimum ping interval. This must be 10s by // default, but tests may wish to set it lower for convenience. KeepaliveMinPingTime = 10 * time.Second + // KeepaliveMinServerPingTime is the minimum ping interval for servers. + // This must be 1s by default, but tests may wish to set it lower for + // convenience. + KeepaliveMinServerPingTime = time.Second // ParseServiceConfig parses a JSON representation of the service config. - ParseServiceConfig interface{} // func(string) *serviceconfig.ParseResult + ParseServiceConfig any // func(string) *serviceconfig.ParseResult // EqualServiceConfigForTesting is for testing service config generation and // parsing. Both a and b should be returned by ParseServiceConfig. // This function compares the config without rawJSON stripped, in case the @@ -49,33 +53,33 @@ var ( // given name. This is set by package certprovider for use from xDS // bootstrap code while parsing certificate provider configs in the // bootstrap file. - GetCertificateProviderBuilder interface{} // func(string) certprovider.Builder + GetCertificateProviderBuilder any // func(string) certprovider.Builder // GetXDSHandshakeInfoForTesting returns a pointer to the xds.HandshakeInfo // stored in the passed in attributes. This is set by // credentials/xds/xds.go. - GetXDSHandshakeInfoForTesting interface{} // func (*attributes.Attributes) *xds.HandshakeInfo + GetXDSHandshakeInfoForTesting any // func (*attributes.Attributes) *xds.HandshakeInfo // GetServerCredentials returns the transport credentials configured on a // gRPC server. An xDS-enabled server needs to know what type of credentials // is configured on the underlying gRPC server. This is set by server.go. - GetServerCredentials interface{} // func (*grpc.Server) credentials.TransportCredentials + GetServerCredentials any // func (*grpc.Server) credentials.TransportCredentials // CanonicalString returns the canonical string of the code defined here: // https://github.com/grpc/grpc/blob/master/doc/statuscodes.md. // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. 
- CanonicalString interface{} // func (codes.Code) string + CanonicalString any // func (codes.Code) string // DrainServerTransports initiates a graceful close of existing connections // on a gRPC server accepted on the provided listener address. An // xDS-enabled server invokes this method on a grpc.Server when a particular // listener moves to "not-serving" mode. - DrainServerTransports interface{} // func(*grpc.Server, string) + DrainServerTransports any // func(*grpc.Server, string) // AddGlobalServerOptions adds an array of ServerOption that will be // effective globally for newly created servers. The priority will be: 1. // user-provided; 2. this method; 3. default values. // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. - AddGlobalServerOptions interface{} // func(opt ...ServerOption) + AddGlobalServerOptions any // func(opt ...ServerOption) // ClearGlobalServerOptions clears the array of extra ServerOption. This // method is useful in testing and benchmarking. // @@ -88,14 +92,14 @@ var ( // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. - AddGlobalDialOptions interface{} // func(opt ...DialOption) + AddGlobalDialOptions any // func(opt ...DialOption) // DisableGlobalDialOptions returns a DialOption that prevents the // ClientConn from applying the global DialOptions (set via // AddGlobalDialOptions). // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. - DisableGlobalDialOptions interface{} // func() grpc.DialOption + DisableGlobalDialOptions any // func() grpc.DialOption // ClearGlobalDialOptions clears the array of extra DialOption. This // method is useful in testing and benchmarking. // @@ -104,23 +108,26 @@ var ( ClearGlobalDialOptions func() // JoinDialOptions combines the dial options passed as arguments into a // single dial option. - JoinDialOptions interface{} // func(...grpc.DialOption) grpc.DialOption + JoinDialOptions any // func(...grpc.DialOption) grpc.DialOption // JoinServerOptions combines the server options passed as arguments into a // single server option. - JoinServerOptions interface{} // func(...grpc.ServerOption) grpc.ServerOption + JoinServerOptions any // func(...grpc.ServerOption) grpc.ServerOption // WithBinaryLogger returns a DialOption that specifies the binary logger // for a ClientConn. // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. - WithBinaryLogger interface{} // func(binarylog.Logger) grpc.DialOption + WithBinaryLogger any // func(binarylog.Logger) grpc.DialOption // BinaryLogger returns a ServerOption that can set the binary logger for a // server. // // This is used in the 1.0 release of gcp/observability, and thus must not be // deleted or changed. - BinaryLogger interface{} // func(binarylog.Logger) grpc.ServerOption + BinaryLogger any // func(binarylog.Logger) grpc.ServerOption + + // SubscribeToConnectivityStateChanges adds a grpcsync.Subscriber to a provided grpc.ClientConn + SubscribeToConnectivityStateChanges any // func(*grpc.ClientConn, grpcsync.Subscriber) // NewXDSResolverWithConfigForTesting creates a new xds resolver builder using // the provided xds bootstrap config instead of the global configuration from @@ -131,7 +138,7 @@ var ( // // This function should ONLY be used for testing and may not work with some // other features, including the CSDS service. 
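The internal package above exposes cross-package hooks as package-level variables of type `any` (formerly `interface{}`); another package registers a function, and call sites assert back to the documented signature. A tiny standalone illustration of that wiring, with a made-up hook name:

```go
package main

import "fmt"

// canonicalString plays the role of an any-typed hook such as the ones above;
// the commented signature is the contract the registering package must honor.
var canonicalString any // func(code int) string

func init() {
	// Another package would normally perform this registration.
	canonicalString = func(code int) string { return fmt.Sprintf("CODE_%d", code) }
}

func main() {
	f, ok := canonicalString.(func(int) string)
	if !ok {
		fmt.Println("hook not registered")
		return
	}
	fmt.Println(f(14)) // CODE_14
}
```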
- NewXDSResolverWithConfigForTesting interface{} // func([]byte) (resolver.Builder, error) + NewXDSResolverWithConfigForTesting any // func([]byte) (resolver.Builder, error) // RegisterRLSClusterSpecifierPluginForTesting registers the RLS Cluster // Specifier Plugin for testing purposes, regardless of the XDSRLS environment @@ -163,7 +170,11 @@ var ( UnregisterRBACHTTPFilterForTesting func() // ORCAAllowAnyMinReportingInterval is for examples/orca use ONLY. - ORCAAllowAnyMinReportingInterval interface{} // func(so *orca.ServiceOptions) + ORCAAllowAnyMinReportingInterval any // func(so *orca.ServiceOptions) + + // GRPCResolverSchemeExtraMetadata determines when gRPC will add extra + // metadata to RPCs. + GRPCResolverSchemeExtraMetadata string = "xds" ) // HealthChecker defines the signature of the client-side LB channel health checking function. @@ -174,7 +185,7 @@ var ( // // The health checking protocol is defined at: // https://github.com/grpc/grpc/blob/master/doc/health-checking.md -type HealthChecker func(ctx context.Context, newStream func(string) (interface{}, error), setConnectivityState func(connectivity.State, error), serviceName string) error +type HealthChecker func(ctx context.Context, newStream func(string) (any, error), setConnectivityState func(connectivity.State, error), serviceName string) error const ( // CredsBundleModeFallback switches GoogleDefaultCreds to fallback mode. diff --git a/vendor/google.golang.org/grpc/internal/metadata/metadata.go b/vendor/google.golang.org/grpc/internal/metadata/metadata.go index c82e608e0..900bfb716 100644 --- a/vendor/google.golang.org/grpc/internal/metadata/metadata.go +++ b/vendor/google.golang.org/grpc/internal/metadata/metadata.go @@ -35,7 +35,7 @@ const mdKey = mdKeyType("grpc.internal.address.metadata") type mdValue metadata.MD -func (m mdValue) Equal(o interface{}) bool { +func (m mdValue) Equal(o any) bool { om, ok := o.(mdValue) if !ok { return false diff --git a/vendor/google.golang.org/grpc/internal/pretty/pretty.go b/vendor/google.golang.org/grpc/internal/pretty/pretty.go index 0177af4b5..703319137 100644 --- a/vendor/google.golang.org/grpc/internal/pretty/pretty.go +++ b/vendor/google.golang.org/grpc/internal/pretty/pretty.go @@ -35,7 +35,7 @@ const jsonIndent = " " // ToJSON marshals the input into a json string. // // If marshal fails, it falls back to fmt.Sprintf("%+v"). -func ToJSON(e interface{}) string { +func ToJSON(e any) string { switch ee := e.(type) { case protov1.Message: mm := jsonpb.Marshaler{Indent: jsonIndent} diff --git a/vendor/google.golang.org/grpc/internal/resolver/config_selector.go b/vendor/google.golang.org/grpc/internal/resolver/config_selector.go index c7a18a948..f0603871c 100644 --- a/vendor/google.golang.org/grpc/internal/resolver/config_selector.go +++ b/vendor/google.golang.org/grpc/internal/resolver/config_selector.go @@ -92,7 +92,7 @@ type ClientStream interface { // calling RecvMsg on the same stream at the same time, but it is not safe // to call SendMsg on the same stream in different goroutines. It is also // not safe to call CloseSend concurrently with SendMsg. - SendMsg(m interface{}) error + SendMsg(m any) error // RecvMsg blocks until it receives a message into m or the stream is // done. It returns io.EOF when the stream completes successfully. 
On // any other error, the stream is aborted and the error contains the RPC @@ -101,7 +101,7 @@ type ClientStream interface { // It is safe to have a goroutine calling SendMsg and another goroutine // calling RecvMsg on the same stream at the same time, but it is not // safe to call RecvMsg on the same stream in different goroutines. - RecvMsg(m interface{}) error + RecvMsg(m any) error } // ClientInterceptor is an interceptor for gRPC client streams. diff --git a/vendor/google.golang.org/grpc/internal/status/status.go b/vendor/google.golang.org/grpc/internal/status/status.go index b0ead4f54..4cf85cad9 100644 --- a/vendor/google.golang.org/grpc/internal/status/status.go +++ b/vendor/google.golang.org/grpc/internal/status/status.go @@ -49,7 +49,7 @@ func New(c codes.Code, msg string) *Status { } // Newf returns New(c, fmt.Sprintf(format, a...)). -func Newf(c codes.Code, format string, a ...interface{}) *Status { +func Newf(c codes.Code, format string, a ...any) *Status { return New(c, fmt.Sprintf(format, a...)) } @@ -64,7 +64,7 @@ func Err(c codes.Code, msg string) error { } // Errorf returns Error(c, fmt.Sprintf(format, a...)). -func Errorf(c codes.Code, format string, a ...interface{}) error { +func Errorf(c codes.Code, format string, a ...any) error { return Err(c, fmt.Sprintf(format, a...)) } @@ -120,11 +120,11 @@ func (s *Status) WithDetails(details ...proto.Message) (*Status, error) { // Details returns a slice of details messages attached to the status. // If a detail cannot be decoded, the error is returned in place of the detail. -func (s *Status) Details() []interface{} { +func (s *Status) Details() []any { if s == nil || s.s == nil { return nil } - details := make([]interface{}, 0, len(s.s.Details)) + details := make([]any, 0, len(s.s.Details)) for _, any := range s.s.Details { detail := &ptypes.DynamicAny{} if err := ptypes.UnmarshalAny(any, detail); err != nil { diff --git a/vendor/google.golang.org/grpc/internal/transport/controlbuf.go b/vendor/google.golang.org/grpc/internal/transport/controlbuf.go index be5a9c81e..b330ccedc 100644 --- a/vendor/google.golang.org/grpc/internal/transport/controlbuf.go +++ b/vendor/google.golang.org/grpc/internal/transport/controlbuf.go @@ -40,7 +40,7 @@ var updateHeaderTblSize = func(e *hpack.Encoder, v uint32) { } type itemNode struct { - it interface{} + it any next *itemNode } @@ -49,7 +49,7 @@ type itemList struct { tail *itemNode } -func (il *itemList) enqueue(i interface{}) { +func (il *itemList) enqueue(i any) { n := &itemNode{it: i} if il.tail == nil { il.head, il.tail = n, n @@ -61,11 +61,11 @@ func (il *itemList) enqueue(i interface{}) { // peek returns the first item in the list without removing it from the // list. -func (il *itemList) peek() interface{} { +func (il *itemList) peek() any { return il.head.it } -func (il *itemList) dequeue() interface{} { +func (il *itemList) dequeue() any { if il.head == nil { return nil } @@ -336,7 +336,7 @@ func (c *controlBuffer) put(it cbItem) error { return err } -func (c *controlBuffer) executeAndPut(f func(it interface{}) bool, it cbItem) (bool, error) { +func (c *controlBuffer) executeAndPut(f func(it any) bool, it cbItem) (bool, error) { var wakeUp bool c.mu.Lock() if c.err != nil { @@ -373,7 +373,7 @@ func (c *controlBuffer) executeAndPut(f func(it interface{}) bool, it cbItem) (b } // Note argument f should never be nil. 
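The controlbuf hunk above re-spells its item queue in terms of `any`. For orientation, here is a standalone sketch of that singly linked FIFO (enqueue, dequeue) with a small demo; it mirrors the shape of the vendored list rather than its full API.

```go
package main

import "fmt"

type itemNode struct {
	it   any
	next *itemNode
}

type itemList struct {
	head, tail *itemNode
}

// enqueue appends i at the tail in O(1).
func (il *itemList) enqueue(i any) {
	n := &itemNode{it: i}
	if il.tail == nil {
		il.head, il.tail = n, n
		return
	}
	il.tail.next = n
	il.tail = n
}

// dequeue removes and returns the head item, or nil when the list is empty.
func (il *itemList) dequeue() any {
	if il.head == nil {
		return nil
	}
	i := il.head.it
	il.head = il.head.next
	if il.head == nil {
		il.tail = nil
	}
	return i
}

func main() {
	var il itemList
	il.enqueue("headers")
	il.enqueue("data")
	fmt.Println(il.dequeue(), il.dequeue(), il.dequeue()) // headers data <nil>
}
```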
-func (c *controlBuffer) execute(f func(it interface{}) bool, it interface{}) (bool, error) { +func (c *controlBuffer) execute(f func(it any) bool, it any) (bool, error) { c.mu.Lock() if c.err != nil { c.mu.Unlock() @@ -387,7 +387,7 @@ func (c *controlBuffer) execute(f func(it interface{}) bool, it interface{}) (bo return true, nil } -func (c *controlBuffer) get(block bool) (interface{}, error) { +func (c *controlBuffer) get(block bool) (any, error) { for { c.mu.Lock() if c.err != nil { @@ -830,7 +830,7 @@ func (l *loopyWriter) goAwayHandler(g *goAway) error { return nil } -func (l *loopyWriter) handle(i interface{}) error { +func (l *loopyWriter) handle(i any) error { switch i := i.(type) { case *incomingWindowUpdate: l.incomingWindowUpdateHandler(i) diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_client.go b/vendor/google.golang.org/grpc/internal/transport/http2_client.go index 326bf0848..badab8acf 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http2_client.go +++ b/vendor/google.golang.org/grpc/internal/transport/http2_client.go @@ -330,7 +330,7 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts readerDone: make(chan struct{}), writerDone: make(chan struct{}), goAway: make(chan struct{}), - framer: newFramer(conn, writeBufSize, readBufSize, maxHeaderListSize), + framer: newFramer(conn, writeBufSize, readBufSize, opts.SharedWriteBuffer, maxHeaderListSize), fc: &trInFlow{limit: uint32(icwz)}, scheme: scheme, activeStreams: make(map[uint32]*Stream), @@ -762,7 +762,7 @@ func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, firstTry := true var ch chan struct{} transportDrainRequired := false - checkForStreamQuota := func(it interface{}) bool { + checkForStreamQuota := func(it any) bool { if t.streamQuota <= 0 { // Can go negative if server decreases it. if firstTry { t.waitingStreams++ @@ -800,7 +800,7 @@ func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, return true } var hdrListSizeErr error - checkForHeaderListSize := func(it interface{}) bool { + checkForHeaderListSize := func(it any) bool { if t.maxSendHeaderListSize == nil { return true } @@ -815,7 +815,7 @@ func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, return true } for { - success, err := t.controlBuf.executeAndPut(func(it interface{}) bool { + success, err := t.controlBuf.executeAndPut(func(it any) bool { return checkForHeaderListSize(it) && checkForStreamQuota(it) }, hdr) if err != nil { @@ -927,7 +927,7 @@ func (t *http2Client) closeStream(s *Stream, err error, rst bool, rstCode http2. rst: rst, rstCode: rstCode, } - addBackStreamQuota := func(interface{}) bool { + addBackStreamQuota := func(any) bool { t.streamQuota++ if t.streamQuota > 0 && t.waitingStreams > 0 { select { @@ -1080,7 +1080,7 @@ func (t *http2Client) updateWindow(s *Stream, n uint32) { // for the transport and the stream based on the current bdp // estimation. 
func (t *http2Client) updateFlowControl(n uint32) { - updateIWS := func(interface{}) bool { + updateIWS := func(any) bool { t.initialWindowSize = int32(n) t.mu.Lock() for _, s := range t.activeStreams { @@ -1233,7 +1233,7 @@ func (t *http2Client) handleSettings(f *http2.SettingsFrame, isFirst bool) { } updateFuncs = append(updateFuncs, updateStreamQuota) } - t.controlBuf.executeAndPut(func(interface{}) bool { + t.controlBuf.executeAndPut(func(any) bool { for _, f := range updateFuncs { f() } @@ -1505,14 +1505,15 @@ func (t *http2Client) operateHeaders(frame *http2.MetaHeadersFrame) { return } - isHeader := false - - // If headerChan hasn't been closed yet - if atomic.CompareAndSwapUint32(&s.headerChanClosed, 0, 1) { - s.headerValid = true - if !endStream { - // HEADERS frame block carries a Response-Headers. - isHeader = true + // For headers, set them in s.header and close headerChan. For trailers or + // trailers-only, closeStream will set the trailers and close headerChan as + // needed. + if !endStream { + // If headerChan hasn't been closed yet (expected, given we checked it + // above, but something else could have potentially closed the whole + // stream). + if atomic.CompareAndSwapUint32(&s.headerChanClosed, 0, 1) { + s.headerValid = true // These values can be set without any synchronization because // stream goroutine will read it only after seeing a closed // headerChan which we'll close after setting this. @@ -1520,15 +1521,12 @@ func (t *http2Client) operateHeaders(frame *http2.MetaHeadersFrame) { if len(mdata) > 0 { s.header = mdata } - } else { - // HEADERS frame block carries a Trailers-Only. - s.noHeaders = true + close(s.headerChan) } - close(s.headerChan) } for _, sh := range t.statsHandlers { - if isHeader { + if !endStream { inHeader := &stats.InHeader{ Client: true, WireLength: int(frame.Header().Length), @@ -1554,9 +1552,10 @@ func (t *http2Client) operateHeaders(frame *http2.MetaHeadersFrame) { statusGen = status.New(rawStatusCode, grpcMessage) } - // if client received END_STREAM from server while stream was still active, send RST_STREAM - rst := s.getState() == streamActive - t.closeStream(s, io.EOF, rst, http2.ErrCodeNo, statusGen, mdata, true) + // If client received END_STREAM from server while stream was still active, + // send RST_STREAM. + rstStream := s.getState() == streamActive + t.closeStream(s, io.EOF, rstStream, http2.ErrCodeNo, statusGen, mdata, true) } // readServerPreface reads and handles the initial settings frame from the diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_server.go b/vendor/google.golang.org/grpc/internal/transport/http2_server.go index f96064012..c06db679d 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http2_server.go +++ b/vendor/google.golang.org/grpc/internal/transport/http2_server.go @@ -165,21 +165,16 @@ func NewServerTransport(conn net.Conn, config *ServerConfig) (_ ServerTransport, if config.MaxHeaderListSize != nil { maxHeaderListSize = *config.MaxHeaderListSize } - framer := newFramer(conn, writeBufSize, readBufSize, maxHeaderListSize) + framer := newFramer(conn, writeBufSize, readBufSize, config.SharedWriteBuffer, maxHeaderListSize) // Send initial settings as connection preface to client. isettings := []http2.Setting{{ ID: http2.SettingMaxFrameSize, Val: http2MaxFrameLen, }} - // TODO(zhaoq): Have a better way to signal "no limit" because 0 is - // permitted in the HTTP2 spec. 
- maxStreams := config.MaxStreams - if maxStreams == 0 { - maxStreams = math.MaxUint32 - } else { + if config.MaxStreams != math.MaxUint32 { isettings = append(isettings, http2.Setting{ ID: http2.SettingMaxConcurrentStreams, - Val: maxStreams, + Val: config.MaxStreams, }) } dynamicWindow := true @@ -258,7 +253,7 @@ func NewServerTransport(conn net.Conn, config *ServerConfig) (_ ServerTransport, framer: framer, readerDone: make(chan struct{}), writerDone: make(chan struct{}), - maxStreams: maxStreams, + maxStreams: config.MaxStreams, inTapHandle: config.InTapHandle, fc: &trInFlow{limit: uint32(icwz)}, state: reachable, @@ -855,7 +850,7 @@ func (t *http2Server) handleSettings(f *http2.SettingsFrame) { } return nil }) - t.controlBuf.executeAndPut(func(interface{}) bool { + t.controlBuf.executeAndPut(func(any) bool { for _, f := range updateFuncs { f() } @@ -939,7 +934,7 @@ func appendHeaderFieldsFromMD(headerFields []hpack.HeaderField, md metadata.MD) return headerFields } -func (t *http2Server) checkForHeaderListSize(it interface{}) bool { +func (t *http2Server) checkForHeaderListSize(it any) bool { if t.maxSendHeaderListSize == nil { return true } diff --git a/vendor/google.golang.org/grpc/internal/transport/http_util.go b/vendor/google.golang.org/grpc/internal/transport/http_util.go index 19cbb18f5..195814008 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http_util.go +++ b/vendor/google.golang.org/grpc/internal/transport/http_util.go @@ -30,6 +30,7 @@ import ( "net/url" "strconv" "strings" + "sync" "time" "unicode/utf8" @@ -309,6 +310,7 @@ func decodeGrpcMessageUnchecked(msg string) string { } type bufWriter struct { + pool *sync.Pool buf []byte offset int batchSize int @@ -316,12 +318,17 @@ type bufWriter struct { err error } -func newBufWriter(conn net.Conn, batchSize int) *bufWriter { - return &bufWriter{ - buf: make([]byte, batchSize*2), +func newBufWriter(conn net.Conn, batchSize int, pool *sync.Pool) *bufWriter { + w := &bufWriter{ batchSize: batchSize, conn: conn, + pool: pool, } + // this indicates that we should use non shared buf + if pool == nil { + w.buf = make([]byte, batchSize) + } + return w } func (w *bufWriter) Write(b []byte) (n int, err error) { @@ -332,19 +339,34 @@ func (w *bufWriter) Write(b []byte) (n int, err error) { n, err = w.conn.Write(b) return n, toIOError(err) } + if w.buf == nil { + b := w.pool.Get().(*[]byte) + w.buf = *b + } for len(b) > 0 { nn := copy(w.buf[w.offset:], b) b = b[nn:] w.offset += nn n += nn if w.offset >= w.batchSize { - err = w.Flush() + err = w.flushKeepBuffer() } } return n, err } func (w *bufWriter) Flush() error { + err := w.flushKeepBuffer() + // Only release the buffer if we are in a "shared" mode + if w.buf != nil && w.pool != nil { + b := w.buf + w.pool.Put(&b) + w.buf = nil + } + return err +} + +func (w *bufWriter) flushKeepBuffer() error { if w.err != nil { return w.err } @@ -381,7 +403,10 @@ type framer struct { fr *http2.Framer } -func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, maxHeaderListSize uint32) *framer { +var writeBufferPoolMap map[int]*sync.Pool = make(map[int]*sync.Pool) +var writeBufferMutex sync.Mutex + +func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, sharedWriteBuffer bool, maxHeaderListSize uint32) *framer { if writeBufferSize < 0 { writeBufferSize = 0 } @@ -389,7 +414,11 @@ func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, maxHeaderList if readBufferSize > 0 { r = bufio.NewReaderSize(r, readBufferSize) } - w := newBufWriter(conn, 
writeBufferSize) + var pool *sync.Pool + if sharedWriteBuffer { + pool = getWriteBufferPool(writeBufferSize) + } + w := newBufWriter(conn, writeBufferSize, pool) f := &framer{ writer: w, fr: http2.NewFramer(w, r), @@ -403,6 +432,24 @@ func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, maxHeaderList return f } +func getWriteBufferPool(writeBufferSize int) *sync.Pool { + writeBufferMutex.Lock() + defer writeBufferMutex.Unlock() + size := writeBufferSize * 2 + pool, ok := writeBufferPoolMap[size] + if ok { + return pool + } + pool = &sync.Pool{ + New: func() any { + b := make([]byte, size) + return &b + }, + } + writeBufferPoolMap[size] = pool + return pool +} + // parseDialTarget returns the network and address to pass to dialer. func parseDialTarget(target string) (string, string) { net := "tcp" diff --git a/vendor/google.golang.org/grpc/internal/transport/transport.go b/vendor/google.golang.org/grpc/internal/transport/transport.go index aa1c89659..74a811fc0 100644 --- a/vendor/google.golang.org/grpc/internal/transport/transport.go +++ b/vendor/google.golang.org/grpc/internal/transport/transport.go @@ -43,10 +43,6 @@ import ( "google.golang.org/grpc/tap" ) -// ErrNoHeaders is used as a signal that a trailers only response was received, -// and is not a real error. -var ErrNoHeaders = errors.New("stream has no headers") - const logLevel = 2 type bufferPool struct { @@ -56,7 +52,7 @@ type bufferPool struct { func newBufferPool() *bufferPool { return &bufferPool{ pool: sync.Pool{ - New: func() interface{} { + New: func() any { return new(bytes.Buffer) }, }, @@ -390,14 +386,10 @@ func (s *Stream) Header() (metadata.MD, error) { } s.waitOnHeader() - if !s.headerValid { + if !s.headerValid || s.noHeaders { return nil, s.status.Err() } - if s.noHeaders { - return nil, ErrNoHeaders - } - return s.header.Copy(), nil } @@ -559,6 +551,7 @@ type ServerConfig struct { InitialConnWindowSize int32 WriteBufferSize int ReadBufferSize int + SharedWriteBuffer bool ChannelzParentID *channelz.Identifier MaxHeaderListSize *uint32 HeaderTableSize *uint32 @@ -592,6 +585,8 @@ type ConnectOptions struct { WriteBufferSize int // ReadBufferSize sets the size of read buffer, which in turn determines how much data can be read at most for one read syscall. ReadBufferSize int + // SharedWriteBuffer indicates whether connections should reuse write buffer + SharedWriteBuffer bool // ChannelzParentID sets the addrConn id which initiate the creation of this client transport. ChannelzParentID *channelz.Identifier // MaxHeaderListSize sets the max (uncompressed) size of header list that is prepared to be received. @@ -736,7 +731,7 @@ type ServerTransport interface { } // connectionErrorf creates an ConnectionError with the specified error description. 
-func connectionErrorf(temp bool, e error, format string, a ...interface{}) ConnectionError { +func connectionErrorf(temp bool, e error, format string, a ...any) ConnectionError { return ConnectionError{ Desc: fmt.Sprintf(format, a...), temp: temp, diff --git a/vendor/google.golang.org/grpc/picker_wrapper.go b/vendor/google.golang.org/grpc/picker_wrapper.go index 02f975951..236837f41 100644 --- a/vendor/google.golang.org/grpc/picker_wrapper.go +++ b/vendor/google.golang.org/grpc/picker_wrapper.go @@ -28,21 +28,26 @@ import ( "google.golang.org/grpc/internal/channelz" istatus "google.golang.org/grpc/internal/status" "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/stats" "google.golang.org/grpc/status" ) // pickerWrapper is a wrapper of balancer.Picker. It blocks on certain pick // actions and unblock when there's a picker update. type pickerWrapper struct { - mu sync.Mutex - done bool - idle bool - blockingCh chan struct{} - picker balancer.Picker + mu sync.Mutex + done bool + idle bool + blockingCh chan struct{} + picker balancer.Picker + statsHandlers []stats.Handler // to record blocking picker calls } -func newPickerWrapper() *pickerWrapper { - return &pickerWrapper{blockingCh: make(chan struct{})} +func newPickerWrapper(statsHandlers []stats.Handler) *pickerWrapper { + return &pickerWrapper{ + blockingCh: make(chan struct{}), + statsHandlers: statsHandlers, + } } // updatePicker is called by UpdateBalancerState. It unblocks all blocked pick. @@ -95,6 +100,7 @@ func (pw *pickerWrapper) pick(ctx context.Context, failfast bool, info balancer. var ch chan struct{} var lastPickErr error + for { pw.mu.Lock() if pw.done { @@ -129,6 +135,20 @@ func (pw *pickerWrapper) pick(ctx context.Context, failfast bool, info balancer. continue } + // If the channel is set, it means that the pick call had to wait for a + // new picker at some point. Either it's the first iteration and this + // function received the first picker, or a picker errored with + // ErrNoSubConnAvailable or errored with failfast set to false, which + // will trigger a continue to the next iteration. In the first case this + // conditional will hit if this call had to block (the channel is set). + // In the second case, the only way it will get to this conditional is + // if there is a new picker. + if ch != nil { + for _, sh := range pw.statsHandlers { + sh.HandleRPC(ctx, &stats.PickerUpdated{}) + } + } + ch = pw.blockingCh p := pw.picker pw.mu.Unlock() diff --git a/vendor/google.golang.org/grpc/pickfirst.go b/vendor/google.golang.org/grpc/pickfirst.go index abe266b02..2e9cf66b4 100644 --- a/vendor/google.golang.org/grpc/pickfirst.go +++ b/vendor/google.golang.org/grpc/pickfirst.go @@ -26,12 +26,18 @@ import ( "google.golang.org/grpc/balancer" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/internal/envconfig" + internalgrpclog "google.golang.org/grpc/internal/grpclog" "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/internal/pretty" + "google.golang.org/grpc/resolver" "google.golang.org/grpc/serviceconfig" ) -// PickFirstBalancerName is the name of the pick_first balancer. -const PickFirstBalancerName = "pick_first" +const ( + // PickFirstBalancerName is the name of the pick_first balancer. 
+ PickFirstBalancerName = "pick_first" + logPrefix = "[pick-first-lb %p] " +) func newPickfirstBuilder() balancer.Builder { return &pickfirstBuilder{} @@ -40,7 +46,9 @@ func newPickfirstBuilder() balancer.Builder { type pickfirstBuilder struct{} func (*pickfirstBuilder) Build(cc balancer.ClientConn, opt balancer.BuildOptions) balancer.Balancer { - return &pickfirstBalancer{cc: cc} + b := &pickfirstBalancer{cc: cc} + b.logger = internalgrpclog.NewPrefixLogger(logger, fmt.Sprintf(logPrefix, b)) + return b } func (*pickfirstBuilder) Name() string { @@ -57,23 +65,36 @@ type pfConfig struct { } func (*pickfirstBuilder) ParseConfig(js json.RawMessage) (serviceconfig.LoadBalancingConfig, error) { - cfg := &pfConfig{} - if err := json.Unmarshal(js, cfg); err != nil { + if !envconfig.PickFirstLBConfig { + // Prior to supporting loadbalancing configuration, the pick_first LB + // policy did not implement the balancer.ConfigParser interface. This + // meant that if a non-empty configuration was passed to it, the service + // config unmarshaling code would throw a warning log, but would + // continue using the pick_first LB policy. The code below ensures the + // same behavior is retained if the env var is not set. + if string(js) != "{}" { + logger.Warningf("Ignoring non-empty balancer configuration %q for the pick_first LB policy", string(js)) + } + return nil, nil + } + + var cfg pfConfig + if err := json.Unmarshal(js, &cfg); err != nil { return nil, fmt.Errorf("pickfirst: unable to unmarshal LB policy config: %s, error: %v", string(js), err) } return cfg, nil } type pickfirstBalancer struct { + logger *internalgrpclog.PrefixLogger state connectivity.State cc balancer.ClientConn subConn balancer.SubConn - cfg *pfConfig } func (b *pickfirstBalancer) ResolverError(err error) { - if logger.V(2) { - logger.Infof("pickfirstBalancer: ResolverError called with error: %v", err) + if b.logger.V(2) { + b.logger.Infof("Received error from the name resolver: %v", err) } if b.subConn == nil { b.state = connectivity.TransientFailure @@ -96,35 +117,44 @@ func (b *pickfirstBalancer) UpdateClientConnState(state balancer.ClientConnState // The resolver reported an empty address list. Treat it like an error by // calling b.ResolverError. if b.subConn != nil { - // Remove the old subConn. All addresses were removed, so it is no longer - // valid. - b.cc.RemoveSubConn(b.subConn) + // Shut down the old subConn. All addresses were removed, so it is + // no longer valid. + b.subConn.Shutdown() b.subConn = nil } b.ResolverError(errors.New("produced zero addresses")) return balancer.ErrBadResolverState } - if state.BalancerConfig != nil { - cfg, ok := state.BalancerConfig.(*pfConfig) - if !ok { - return fmt.Errorf("pickfirstBalancer: received nil or illegal BalancerConfig (type %T): %v", state.BalancerConfig, state.BalancerConfig) - } - b.cfg = cfg + // We don't have to guard this block with the env var because ParseConfig + // already does so. + cfg, ok := state.BalancerConfig.(pfConfig) + if state.BalancerConfig != nil && !ok { + return fmt.Errorf("pickfirst: received illegal BalancerConfig (type %T): %v", state.BalancerConfig, state.BalancerConfig) } - - if envconfig.PickFirstLBConfig && b.cfg != nil && b.cfg.ShuffleAddressList { + if cfg.ShuffleAddressList { + addrs = append([]resolver.Address{}, addrs...) 
grpcrand.Shuffle(len(addrs), func(i, j int) { addrs[i], addrs[j] = addrs[j], addrs[i] }) } + + if b.logger.V(2) { + b.logger.Infof("Received new config %s, resolver state %s", pretty.ToJSON(cfg), pretty.ToJSON(state.ResolverState)) + } + if b.subConn != nil { b.cc.UpdateAddresses(b.subConn, addrs) return nil } - subConn, err := b.cc.NewSubConn(addrs, balancer.NewSubConnOptions{}) + var subConn balancer.SubConn + subConn, err := b.cc.NewSubConn(addrs, balancer.NewSubConnOptions{ + StateListener: func(state balancer.SubConnState) { + b.updateSubConnState(subConn, state) + }, + }) if err != nil { - if logger.V(2) { - logger.Errorf("pickfirstBalancer: failed to NewSubConn: %v", err) + if b.logger.V(2) { + b.logger.Infof("Failed to create new SubConn: %v", err) } b.state = connectivity.TransientFailure b.cc.UpdateState(balancer.State{ @@ -143,13 +173,19 @@ func (b *pickfirstBalancer) UpdateClientConnState(state balancer.ClientConnState return nil } +// UpdateSubConnState is unused as a StateListener is always registered when +// creating SubConns. func (b *pickfirstBalancer) UpdateSubConnState(subConn balancer.SubConn, state balancer.SubConnState) { - if logger.V(2) { - logger.Infof("pickfirstBalancer: UpdateSubConnState: %p, %v", subConn, state) + b.logger.Errorf("UpdateSubConnState(%v, %+v) called unexpectedly", subConn, state) +} + +func (b *pickfirstBalancer) updateSubConnState(subConn balancer.SubConn, state balancer.SubConnState) { + if b.logger.V(2) { + b.logger.Infof("Received SubConn state update: %p, %+v", subConn, state) } if b.subConn != subConn { - if logger.V(2) { - logger.Infof("pickfirstBalancer: ignored state change because subConn is not recognized") + if b.logger.V(2) { + b.logger.Infof("Ignored state change because subConn is not recognized") } return } diff --git a/vendor/google.golang.org/grpc/preloader.go b/vendor/google.golang.org/grpc/preloader.go index cd4554785..73bd63364 100644 --- a/vendor/google.golang.org/grpc/preloader.go +++ b/vendor/google.golang.org/grpc/preloader.go @@ -37,7 +37,7 @@ type PreparedMsg struct { } // Encode marshalls and compresses the message using the codec and compressor for the stream. -func (p *PreparedMsg) Encode(s Stream, msg interface{}) error { +func (p *PreparedMsg) Encode(s Stream, msg any) error { ctx := s.Context() rpcInfo, ok := rpcInfoFromContext(ctx) if !ok { diff --git a/vendor/google.golang.org/grpc/resolver/map.go b/vendor/google.golang.org/grpc/resolver/map.go index efcb7f3ef..804be887d 100644 --- a/vendor/google.golang.org/grpc/resolver/map.go +++ b/vendor/google.golang.org/grpc/resolver/map.go @@ -20,7 +20,7 @@ package resolver type addressMapEntry struct { addr Address - value interface{} + value any } // AddressMap is a map of addresses to arbitrary values taking into account @@ -69,7 +69,7 @@ func (l addressMapEntryList) find(addr Address) int { } // Get returns the value for the address in the map, if present. -func (a *AddressMap) Get(addr Address) (value interface{}, ok bool) { +func (a *AddressMap) Get(addr Address) (value any, ok bool) { addrKey := toMapKey(&addr) entryList := a.m[addrKey] if entry := entryList.find(addr); entry != -1 { @@ -79,7 +79,7 @@ func (a *AddressMap) Get(addr Address) (value interface{}, ok bool) { } // Set updates or adds the value to the address in the map. 
-func (a *AddressMap) Set(addr Address, value interface{}) { +func (a *AddressMap) Set(addr Address, value any) { addrKey := toMapKey(&addr) entryList := a.m[addrKey] if entry := entryList.find(addr); entry != -1 { @@ -127,8 +127,8 @@ func (a *AddressMap) Keys() []Address { } // Values returns a slice of all current map values. -func (a *AddressMap) Values() []interface{} { - ret := make([]interface{}, 0, a.Len()) +func (a *AddressMap) Values() []any { + ret := make([]any, 0, a.Len()) for _, entryList := range a.m { for _, entry := range entryList { ret = append(ret, entry.value) diff --git a/vendor/google.golang.org/grpc/resolver/resolver.go b/vendor/google.golang.org/grpc/resolver/resolver.go index d8db6f5d3..11384e228 100644 --- a/vendor/google.golang.org/grpc/resolver/resolver.go +++ b/vendor/google.golang.org/grpc/resolver/resolver.go @@ -77,25 +77,6 @@ func GetDefaultScheme() string { return defaultScheme } -// AddressType indicates the address type returned by name resolution. -// -// Deprecated: use Attributes in Address instead. -type AddressType uint8 - -const ( - // Backend indicates the address is for a backend server. - // - // Deprecated: use Attributes in Address instead. - Backend AddressType = iota - // GRPCLB indicates the address is for a grpclb load balancer. - // - // Deprecated: to select the GRPCLB load balancing policy, use a service - // config with a corresponding loadBalancingConfig. To supply balancer - // addresses to the GRPCLB load balancing policy, set State.Attributes - // using balancer/grpclb/state.Set. - GRPCLB -) - // Address represents a server the client connects to. // // # Experimental @@ -111,9 +92,6 @@ type Address struct { // the address, instead of the hostname from the Dial target string. In most cases, // this should not be set. // - // If Type is GRPCLB, ServerName should be the name of the remote load - // balancer, not the name of the backend. - // // WARNING: ServerName must only be populated with trusted values. It // is insecure to populate it with data from untrusted inputs since untrusted // values could be used to bypass the authority checks performed by TLS. @@ -126,18 +104,16 @@ type Address struct { // BalancerAttributes contains arbitrary data about this address intended // for consumption by the LB policy. These attributes do not affect SubConn // creation, connection establishment, handshaking, etc. - BalancerAttributes *attributes.Attributes - - // Type is the type of this address. // - // Deprecated: use Attributes instead. - Type AddressType + // Deprecated: when an Address is inside an Endpoint, this field should not + // be used, and it will eventually be removed entirely. + BalancerAttributes *attributes.Attributes // Metadata is the information associated with Addr, which may be used // to make load balancing decision. // // Deprecated: use Attributes instead. - Metadata interface{} + Metadata any } // Equal returns whether a and o are identical. Metadata is compared directly, @@ -150,7 +126,7 @@ func (a Address) Equal(o Address) bool { return a.Addr == o.Addr && a.ServerName == o.ServerName && a.Attributes.Equal(o.Attributes) && a.BalancerAttributes.Equal(o.BalancerAttributes) && - a.Type == o.Type && a.Metadata == o.Metadata + a.Metadata == o.Metadata } // String returns JSON formatted string representation of the address. 
@@ -194,11 +170,37 @@ type BuildOptions struct { Dialer func(context.Context, string) (net.Conn, error) } +// An Endpoint is one network endpoint, or server, which may have multiple +// addresses with which it can be accessed. +type Endpoint struct { + // Addresses contains a list of addresses used to access this endpoint. + Addresses []Address + + // Attributes contains arbitrary data about this endpoint intended for + // consumption by the LB policy. + Attributes *attributes.Attributes +} + // State contains the current Resolver state relevant to the ClientConn. type State struct { // Addresses is the latest set of resolved addresses for the target. + // + // If a resolver sets Addresses but does not set Endpoints, one Endpoint + // will be created for each Address before the State is passed to the LB + // policy. The BalancerAttributes of each entry in Addresses will be set + // in Endpoints.Attributes, and be cleared in the Endpoint's Address's + // BalancerAttributes. + // + // Soon, Addresses will be deprecated and replaced fully by Endpoints. Addresses []Address + // Endpoints is the latest set of resolved endpoints for the target. + // + // If a resolver produces a State containing Endpoints but not Addresses, + // it must take care to ensure the LB policies it selects will support + // Endpoints. + Endpoints []Endpoint + // ServiceConfig contains the result from parsing the latest service // config. If it is nil, it indicates no service config is present or the // resolver does not provide service configs. @@ -258,15 +260,6 @@ type ClientConn interface { // target does not contain a scheme or if the parsed scheme is not registered // (i.e. no corresponding resolver available to resolve the endpoint), we will // apply the default scheme, and will attempt to reparse it. -// -// Examples: -// -// - "dns://some_authority/foo.bar" -// Target{Scheme: "dns", Authority: "some_authority", Endpoint: "foo.bar"} -// - "foo.bar" -// Target{Scheme: resolver.GetDefaultScheme(), Endpoint: "foo.bar"} -// - "unknown_scheme://authority/endpoint" -// Target{Scheme: resolver.GetDefaultScheme(), Endpoint: "unknown_scheme://authority/endpoint"} type Target struct { // URL contains the parsed dial target with an optional default scheme added // to it if the original dial target contained no scheme or contained an @@ -321,10 +314,3 @@ type Resolver interface { // Close closes the resolver. Close() } - -// UnregisterForTesting removes the resolver builder with the given scheme from the -// resolver map. -// This function is for testing only. -func UnregisterForTesting(scheme string) { - delete(m, scheme) -} diff --git a/vendor/google.golang.org/grpc/resolver_conn_wrapper.go b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go index b408b3688..d68330560 100644 --- a/vendor/google.golang.org/grpc/resolver_conn_wrapper.go +++ b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go @@ -133,7 +133,7 @@ func (ccr *ccResolverWrapper) close() { ccr.mu.Unlock() // Give enqueued callbacks a chance to finish. - <-ccr.serializer.Done + <-ccr.serializer.Done() // Spawn a goroutine to close the resolver (since it may block trying to // cleanup all allocated resources) and return early. @@ -152,6 +152,14 @@ func (ccr *ccResolverWrapper) serializerScheduleLocked(f func(context.Context)) // which includes addresses and service config. 
func (ccr *ccResolverWrapper) UpdateState(s resolver.State) error { errCh := make(chan error, 1) + if s.Endpoints == nil { + s.Endpoints = make([]resolver.Endpoint, 0, len(s.Addresses)) + for _, a := range s.Addresses { + ep := resolver.Endpoint{Addresses: []resolver.Address{a}, Attributes: a.BalancerAttributes} + ep.Addresses[0].BalancerAttributes = nil + s.Endpoints = append(s.Endpoints, ep) + } + } ok := ccr.serializer.Schedule(func(context.Context) { ccr.addChannelzTraceEvent(s) ccr.curState = s diff --git a/vendor/google.golang.org/grpc/rpc_util.go b/vendor/google.golang.org/grpc/rpc_util.go index a844d28f4..b7723aa09 100644 --- a/vendor/google.golang.org/grpc/rpc_util.go +++ b/vendor/google.golang.org/grpc/rpc_util.go @@ -75,7 +75,7 @@ func NewGZIPCompressorWithLevel(level int) (Compressor, error) { } return &gzipCompressor{ pool: sync.Pool{ - New: func() interface{} { + New: func() any { w, err := gzip.NewWriterLevel(io.Discard, level) if err != nil { panic(err) @@ -626,7 +626,7 @@ func (p *parser) recvMsg(maxReceiveMessageSize int) (pf payloadFormat, msg []byt // encode serializes msg and returns a buffer containing the message, or an // error if it is too large to be transmitted by grpc. If msg is nil, it // generates an empty message. -func encode(c baseCodec, msg interface{}) ([]byte, error) { +func encode(c baseCodec, msg any) ([]byte, error) { if msg == nil { // NOTE: typed nils will not be caught by this check return nil, nil } @@ -693,7 +693,7 @@ func msgHeader(data, compData []byte) (hdr []byte, payload []byte) { return hdr, data } -func outPayload(client bool, msg interface{}, data, payload []byte, t time.Time) *stats.OutPayload { +func outPayload(client bool, msg any, data, payload []byte, t time.Time) *stats.OutPayload { return &stats.OutPayload{ Client: client, Payload: msg, @@ -792,7 +792,7 @@ func decompress(compressor encoding.Compressor, d []byte, maxReceiveMessageSize // For the two compressor parameters, both should not be set, but if they are, // dc takes precedence over compressor. // TODO(dfawley): wrap the old compressor/decompressor using the new API? -func recv(p *parser, c baseCodec, s *transport.Stream, dc Decompressor, m interface{}, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) error { +func recv(p *parser, c baseCodec, s *transport.Stream, dc Decompressor, m any, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) error { buf, err := recvAndDecompress(p, s, dc, maxReceiveMessageSize, payInfo, compressor) if err != nil { return err @@ -863,19 +863,22 @@ func ErrorDesc(err error) string { // Errorf returns nil if c is OK. // // Deprecated: use status.Errorf instead. -func Errorf(c codes.Code, format string, a ...interface{}) error { +func Errorf(c codes.Code, format string, a ...any) error { return status.Errorf(c, format, a...) } +var errContextCanceled = status.Error(codes.Canceled, context.Canceled.Error()) +var errContextDeadline = status.Error(codes.DeadlineExceeded, context.DeadlineExceeded.Error()) + // toRPCErr converts an error into an error from the status package. 
func toRPCErr(err error) error { switch err { case nil, io.EOF: return err case context.DeadlineExceeded: - return status.Error(codes.DeadlineExceeded, err.Error()) + return errContextDeadline case context.Canceled: - return status.Error(codes.Canceled, err.Error()) + return errContextCanceled case io.ErrUnexpectedEOF: return status.Error(codes.Internal, err.Error()) } diff --git a/vendor/google.golang.org/grpc/server.go b/vendor/google.golang.org/grpc/server.go index e076ec714..eeae92fbe 100644 --- a/vendor/google.golang.org/grpc/server.go +++ b/vendor/google.golang.org/grpc/server.go @@ -86,7 +86,7 @@ func init() { var statusOK = status.New(codes.OK, "") var logger = grpclog.Component("core") -type methodHandler func(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor UnaryServerInterceptor) (interface{}, error) +type methodHandler func(srv any, ctx context.Context, dec func(any) error, interceptor UnaryServerInterceptor) (any, error) // MethodDesc represents an RPC service's method specification. type MethodDesc struct { @@ -99,26 +99,20 @@ type ServiceDesc struct { ServiceName string // The pointer to the service interface. Used to check whether the user // provided implementation satisfies the interface requirements. - HandlerType interface{} + HandlerType any Methods []MethodDesc Streams []StreamDesc - Metadata interface{} + Metadata any } // serviceInfo wraps information about a service. It is very similar to // ServiceDesc and is constructed from it for internal purposes. type serviceInfo struct { // Contains the implementation for the methods in this service. - serviceImpl interface{} + serviceImpl any methods map[string]*MethodDesc streams map[string]*StreamDesc - mdata interface{} -} - -type serverWorkerData struct { - st transport.ServerTransport - wg *sync.WaitGroup - stream *transport.Stream + mdata any } // Server is a gRPC server to serve RPC requests. @@ -145,7 +139,7 @@ type Server struct { channelzID *channelz.Identifier czData *channelzData - serverWorkerChannel chan *serverWorkerData + serverWorkerChannel chan func() } type serverOptions struct { @@ -170,6 +164,7 @@ type serverOptions struct { initialConnWindowSize int32 writeBufferSize int readBufferSize int + sharedWriteBuffer bool connectionTimeout time.Duration maxHeaderListSize *uint32 headerTableSize *uint32 @@ -178,6 +173,7 @@ type serverOptions struct { } var defaultServerOptions = serverOptions{ + maxConcurrentStreams: math.MaxUint32, maxReceiveMessageSize: defaultServerMaxReceiveMessageSize, maxSendMessageSize: defaultServerMaxSendMessageSize, connectionTimeout: 120 * time.Second, @@ -235,6 +231,20 @@ func newJoinServerOption(opts ...ServerOption) ServerOption { return &joinServerOption{opts: opts} } +// SharedWriteBuffer allows reusing per-connection transport write buffer. +// If this option is set to true every connection will release the buffer after +// flushing the data on the wire. +// +// # Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. +func SharedWriteBuffer(val bool) ServerOption { + return newFuncServerOption(func(o *serverOptions) { + o.sharedWriteBuffer = val + }) +} + // WriteBufferSize determines how much data can be batched before doing a write // on the wire. The corresponding memory allocation for this buffer will be // twice the size to keep syscalls low. 
The default value for this buffer is @@ -275,9 +285,9 @@ func InitialConnWindowSize(s int32) ServerOption { // KeepaliveParams returns a ServerOption that sets keepalive and max-age parameters for the server. func KeepaliveParams(kp keepalive.ServerParameters) ServerOption { - if kp.Time > 0 && kp.Time < time.Second { + if kp.Time > 0 && kp.Time < internal.KeepaliveMinServerPingTime { logger.Warning("Adjusting keepalive ping interval to minimum period of 1s") - kp.Time = time.Second + kp.Time = internal.KeepaliveMinServerPingTime } return newFuncServerOption(func(o *serverOptions) { @@ -389,6 +399,9 @@ func MaxSendMsgSize(m int) ServerOption { // MaxConcurrentStreams returns a ServerOption that will apply a limit on the number // of concurrent streams to each ServerTransport. func MaxConcurrentStreams(n uint32) ServerOption { + if n == 0 { + n = math.MaxUint32 + } return newFuncServerOption(func(o *serverOptions) { o.maxConcurrentStreams = n }) @@ -590,24 +603,19 @@ const serverWorkerResetThreshold = 1 << 16 // [1] https://github.com/golang/go/issues/18138 func (s *Server) serverWorker() { for completed := 0; completed < serverWorkerResetThreshold; completed++ { - data, ok := <-s.serverWorkerChannel + f, ok := <-s.serverWorkerChannel if !ok { return } - s.handleSingleStream(data) + f() } go s.serverWorker() } -func (s *Server) handleSingleStream(data *serverWorkerData) { - defer data.wg.Done() - s.handleStream(data.st, data.stream, s.traceInfo(data.st, data.stream)) -} - // initServerWorkers creates worker goroutines and a channel to process incoming // connections to reduce the time spent overall on runtime.morestack. func (s *Server) initServerWorkers() { - s.serverWorkerChannel = make(chan *serverWorkerData) + s.serverWorkerChannel = make(chan func()) for i := uint32(0); i < s.opts.numServerWorkers; i++ { go s.serverWorker() } @@ -655,7 +663,7 @@ func NewServer(opt ...ServerOption) *Server { // printf records an event in s's event log, unless s has been stopped. // REQUIRES s.mu is held. -func (s *Server) printf(format string, a ...interface{}) { +func (s *Server) printf(format string, a ...any) { if s.events != nil { s.events.Printf(format, a...) } @@ -663,7 +671,7 @@ func (s *Server) printf(format string, a ...interface{}) { // errorf records an error in s's event log, unless s has been stopped. // REQUIRES s.mu is held. -func (s *Server) errorf(format string, a ...interface{}) { +func (s *Server) errorf(format string, a ...any) { if s.events != nil { s.events.Errorf(format, a...) } @@ -678,14 +686,14 @@ type ServiceRegistrar interface { // once the server has started serving. // desc describes the service and its methods and handlers. impl is the // service implementation which is passed to the method handlers. - RegisterService(desc *ServiceDesc, impl interface{}) + RegisterService(desc *ServiceDesc, impl any) } // RegisterService registers a service and its implementation to the gRPC // server. It is called from the IDL generated code. This must be called before // invoking Serve. If ss is non-nil (for legacy code), its type is checked to // ensure it implements sd.HandlerType. 
-func (s *Server) RegisterService(sd *ServiceDesc, ss interface{}) { +func (s *Server) RegisterService(sd *ServiceDesc, ss any) { if ss != nil { ht := reflect.TypeOf(sd.HandlerType).Elem() st := reflect.TypeOf(ss) @@ -696,7 +704,7 @@ func (s *Server) RegisterService(sd *ServiceDesc, ss interface{}) { s.register(sd, ss) } -func (s *Server) register(sd *ServiceDesc, ss interface{}) { +func (s *Server) register(sd *ServiceDesc, ss any) { s.mu.Lock() defer s.mu.Unlock() s.printf("RegisterService(%q)", sd.ServiceName) @@ -737,7 +745,7 @@ type MethodInfo struct { type ServiceInfo struct { Methods []MethodInfo // Metadata is the metadata specified in ServiceDesc when registering service. - Metadata interface{} + Metadata any } // GetServiceInfo returns a map from service names to ServiceInfo. @@ -938,6 +946,7 @@ func (s *Server) newHTTP2Transport(c net.Conn) transport.ServerTransport { InitialConnWindowSize: s.opts.initialConnWindowSize, WriteBufferSize: s.opts.writeBufferSize, ReadBufferSize: s.opts.readBufferSize, + SharedWriteBuffer: s.opts.sharedWriteBuffer, ChannelzParentID: s.channelzID, MaxHeaderListSize: s.opts.maxHeaderListSize, HeaderTableSize: s.opts.headerTableSize, @@ -966,21 +975,26 @@ func (s *Server) serveStreams(st transport.ServerTransport) { defer st.Close(errors.New("finished serving streams for the server transport")) var wg sync.WaitGroup + streamQuota := newHandlerQuota(s.opts.maxConcurrentStreams) st.HandleStreams(func(stream *transport.Stream) { wg.Add(1) + + streamQuota.acquire() + f := func() { + defer streamQuota.release() + defer wg.Done() + s.handleStream(st, stream, s.traceInfo(st, stream)) + } + if s.opts.numServerWorkers > 0 { - data := &serverWorkerData{st: st, wg: &wg, stream: stream} select { - case s.serverWorkerChannel <- data: + case s.serverWorkerChannel <- f: return default: // If all stream workers are busy, fallback to the default code path. 
} } - go func() { - defer wg.Done() - s.handleStream(st, stream, s.traceInfo(st, stream)) - }() + go f() }, func(ctx context.Context, method string) context.Context { if !EnableTracing { return ctx @@ -1119,7 +1133,7 @@ func (s *Server) incrCallsFailed() { atomic.AddInt64(&s.czData.callsFailed, 1) } -func (s *Server) sendResponse(t transport.ServerTransport, stream *transport.Stream, msg interface{}, cp Compressor, opts *transport.Options, comp encoding.Compressor) error { +func (s *Server) sendResponse(t transport.ServerTransport, stream *transport.Stream, msg any, cp Compressor, opts *transport.Options, comp encoding.Compressor) error { data, err := encode(s.getCodec(stream.ContentSubtype()), msg) if err != nil { channelz.Error(logger, s.channelzID, "grpc: server failed to encode response: ", err) @@ -1166,7 +1180,7 @@ func chainUnaryServerInterceptors(s *Server) { } func chainUnaryInterceptors(interceptors []UnaryServerInterceptor) UnaryServerInterceptor { - return func(ctx context.Context, req interface{}, info *UnaryServerInfo, handler UnaryHandler) (interface{}, error) { + return func(ctx context.Context, req any, info *UnaryServerInfo, handler UnaryHandler) (any, error) { return interceptors[0](ctx, req, info, getChainUnaryHandler(interceptors, 0, info, handler)) } } @@ -1175,7 +1189,7 @@ func getChainUnaryHandler(interceptors []UnaryServerInterceptor, curr int, info if curr == len(interceptors)-1 { return finalHandler } - return func(ctx context.Context, req interface{}) (interface{}, error) { + return func(ctx context.Context, req any) (any, error) { return interceptors[curr+1](ctx, req, info, getChainUnaryHandler(interceptors, curr+1, info, finalHandler)) } } @@ -1212,7 +1226,7 @@ func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport. defer func() { if trInfo != nil { if err != nil && err != io.EOF { - trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) trInfo.tr.SetError() } trInfo.tr.Finish() @@ -1329,7 +1343,7 @@ func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport. 
if channelz.IsOn() { t.IncrMsgRecv() } - df := func(v interface{}) error { + df := func(v any) error { if err := s.getCodec(stream.ContentSubtype()).Unmarshal(d, v); err != nil { return status.Errorf(codes.Internal, "grpc: error unmarshalling request: %v", err) } @@ -1493,7 +1507,7 @@ func chainStreamServerInterceptors(s *Server) { } func chainStreamInterceptors(interceptors []StreamServerInterceptor) StreamServerInterceptor { - return func(srv interface{}, ss ServerStream, info *StreamServerInfo, handler StreamHandler) error { + return func(srv any, ss ServerStream, info *StreamServerInfo, handler StreamHandler) error { return interceptors[0](srv, ss, info, getChainStreamHandler(interceptors, 0, info, handler)) } } @@ -1502,7 +1516,7 @@ func getChainStreamHandler(interceptors []StreamServerInterceptor, curr int, inf if curr == len(interceptors)-1 { return finalHandler } - return func(srv interface{}, stream ServerStream) error { + return func(srv any, stream ServerStream) error { return interceptors[curr+1](srv, stream, info, getChainStreamHandler(interceptors, curr+1, info, finalHandler)) } } @@ -1543,7 +1557,7 @@ func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transp if trInfo != nil { ss.mu.Lock() if err != nil && err != io.EOF { - ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) ss.trInfo.tr.SetError() } ss.trInfo.tr.Finish() @@ -1646,7 +1660,7 @@ func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transp trInfo.tr.LazyLog(&trInfo.firstLine, false) } var appErr error - var server interface{} + var server any if info != nil { server = info.serviceImpl } @@ -1712,13 +1726,13 @@ func (s *Server) handleStream(t transport.ServerTransport, stream *transport.Str pos := strings.LastIndex(sm, "/") if pos == -1 { if trInfo != nil { - trInfo.tr.LazyLog(&fmtStringer{"Malformed method name %q", []interface{}{sm}}, true) + trInfo.tr.LazyLog(&fmtStringer{"Malformed method name %q", []any{sm}}, true) trInfo.tr.SetError() } errDesc := fmt.Sprintf("malformed method name: %q", stream.Method()) if err := t.WriteStatus(stream, status.New(codes.Unimplemented, errDesc)); err != nil { if trInfo != nil { - trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) trInfo.tr.SetError() } channelz.Warningf(logger, s.channelzID, "grpc: Server.handleStream failed to write status: %v", err) @@ -1759,7 +1773,7 @@ func (s *Server) handleStream(t transport.ServerTransport, stream *transport.Str } if err := t.WriteStatus(stream, status.New(codes.Unimplemented, errDesc)); err != nil { if trInfo != nil { - trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) trInfo.tr.SetError() } channelz.Warningf(logger, s.channelzID, "grpc: Server.handleStream failed to write status: %v", err) @@ -2075,3 +2089,34 @@ func validateSendCompressor(name, clientCompressors string) error { } return fmt.Errorf("client does not support compressor %q", name) } + +// atomicSemaphore implements a blocking, counting semaphore. acquire should be +// called synchronously; release may be called asynchronously. +type atomicSemaphore struct { + n atomic.Int64 + wait chan struct{} +} + +func (q *atomicSemaphore) acquire() { + if q.n.Add(-1) < 0 { + // We ran out of quota. Block until a release happens. + <-q.wait + } +} + +func (q *atomicSemaphore) release() { + // N.B. 
the "<= 0" check below should allow for this to work with multiple + // concurrent calls to acquire, but also note that with synchronous calls to + // acquire, as our system does, n will never be less than -1. There are + // fairness issues (queuing) to consider if this was to be generalized. + if q.n.Add(1) <= 0 { + // An acquire was waiting on us. Unblock it. + q.wait <- struct{}{} + } +} + +func newHandlerQuota(n uint32) *atomicSemaphore { + a := &atomicSemaphore{wait: make(chan struct{}, 1)} + a.n.Store(int64(n)) + return a +} diff --git a/vendor/google.golang.org/grpc/shared_buffer_pool.go b/vendor/google.golang.org/grpc/shared_buffer_pool.go index c3a5a9ac1..48a64cfe8 100644 --- a/vendor/google.golang.org/grpc/shared_buffer_pool.go +++ b/vendor/google.golang.org/grpc/shared_buffer_pool.go @@ -109,7 +109,7 @@ const ( type simpleSharedBufferChildPool interface { Get(size int) []byte - Put(interface{}) + Put(any) } type bufferPool struct { @@ -133,7 +133,7 @@ func (p *bufferPool) Get(size int) []byte { func newBytesPool(size int) simpleSharedBufferChildPool { return &bufferPool{ Pool: sync.Pool{ - New: func() interface{} { + New: func() any { bs := make([]byte, size) return &bs }, diff --git a/vendor/google.golang.org/grpc/stats/stats.go b/vendor/google.golang.org/grpc/stats/stats.go index 7a552a9b7..4ab70e2d4 100644 --- a/vendor/google.golang.org/grpc/stats/stats.go +++ b/vendor/google.golang.org/grpc/stats/stats.go @@ -59,12 +59,22 @@ func (s *Begin) IsClient() bool { return s.Client } func (s *Begin) isRPCStats() {} +// PickerUpdated indicates that the LB policy provided a new picker while the +// RPC was waiting for one. +type PickerUpdated struct{} + +// IsClient indicates if the stats information is from client side. Only Client +// Side interfaces with a Picker, thus always returns true. +func (*PickerUpdated) IsClient() bool { return true } + +func (*PickerUpdated) isRPCStats() {} + // InPayload contains the information for an incoming payload. type InPayload struct { // Client is true if this InPayload is from client side. Client bool // Payload is the payload with original type. - Payload interface{} + Payload any // Data is the serialized message payload. Data []byte @@ -134,7 +144,7 @@ type OutPayload struct { // Client is true if this OutPayload is from client side. Client bool // Payload is the payload with original type. - Payload interface{} + Payload any // Data is the serialized message payload. Data []byte // Length is the size of the uncompressed payload data. Does not include any diff --git a/vendor/google.golang.org/grpc/status/status.go b/vendor/google.golang.org/grpc/status/status.go index bcf2e4d81..a93360efb 100644 --- a/vendor/google.golang.org/grpc/status/status.go +++ b/vendor/google.golang.org/grpc/status/status.go @@ -50,7 +50,7 @@ func New(c codes.Code, msg string) *Status { } // Newf returns New(c, fmt.Sprintf(format, a...)). -func Newf(c codes.Code, format string, a ...interface{}) *Status { +func Newf(c codes.Code, format string, a ...any) *Status { return New(c, fmt.Sprintf(format, a...)) } @@ -60,7 +60,7 @@ func Error(c codes.Code, msg string) error { } // Errorf returns Error(c, fmt.Sprintf(format, a...)). 
-func Errorf(c codes.Code, format string, a ...interface{}) error { +func Errorf(c codes.Code, format string, a ...any) error { return Error(c, fmt.Sprintf(format, a...)) } @@ -99,25 +99,27 @@ func FromError(err error) (s *Status, ok bool) { } type grpcstatus interface{ GRPCStatus() *Status } if gs, ok := err.(grpcstatus); ok { - if gs.GRPCStatus() == nil { + grpcStatus := gs.GRPCStatus() + if grpcStatus == nil { // Error has status nil, which maps to codes.OK. There // is no sensible behavior for this, so we turn it into // an error with codes.Unknown and discard the existing // status. return New(codes.Unknown, err.Error()), false } - return gs.GRPCStatus(), true + return grpcStatus, true } var gs grpcstatus if errors.As(err, &gs) { - if gs.GRPCStatus() == nil { + grpcStatus := gs.GRPCStatus() + if grpcStatus == nil { // Error wraps an error that has status nil, which maps // to codes.OK. There is no sensible behavior for this, // so we turn it into an error with codes.Unknown and // discard the existing status. return New(codes.Unknown, err.Error()), false } - p := gs.GRPCStatus().Proto() + p := grpcStatus.Proto() p.Message = err.Error() return status.FromProto(p), true } diff --git a/vendor/google.golang.org/grpc/stream.go b/vendor/google.golang.org/grpc/stream.go index de32a7597..b14b2fbea 100644 --- a/vendor/google.golang.org/grpc/stream.go +++ b/vendor/google.golang.org/grpc/stream.go @@ -31,6 +31,7 @@ import ( "google.golang.org/grpc/balancer" "google.golang.org/grpc/codes" "google.golang.org/grpc/encoding" + "google.golang.org/grpc/internal" "google.golang.org/grpc/internal/balancerload" "google.golang.org/grpc/internal/binarylog" "google.golang.org/grpc/internal/channelz" @@ -54,7 +55,7 @@ import ( // status package, or be one of the context errors. Otherwise, gRPC will use // codes.Unknown as the status code and err.Error() as the status message of the // RPC. -type StreamHandler func(srv interface{}, stream ServerStream) error +type StreamHandler func(srv any, stream ServerStream) error // StreamDesc represents a streaming RPC service's method specification. Used // on the server when registering services and on the client when initiating @@ -79,9 +80,9 @@ type Stream interface { // Deprecated: See ClientStream and ServerStream documentation instead. Context() context.Context // Deprecated: See ClientStream and ServerStream documentation instead. - SendMsg(m interface{}) error + SendMsg(m any) error // Deprecated: See ClientStream and ServerStream documentation instead. - RecvMsg(m interface{}) error + RecvMsg(m any) error } // ClientStream defines the client-side behavior of a streaming RPC. @@ -90,7 +91,9 @@ type Stream interface { // status package. type ClientStream interface { // Header returns the header metadata received from the server if there - // is any. It blocks if the metadata is not ready to read. + // is any. It blocks if the metadata is not ready to read. If the metadata + // is nil and the error is also nil, then the stream was terminated without + // headers, and the status can be discovered by calling RecvMsg. Header() (metadata.MD, error) // Trailer returns the trailer metadata from the server, if there is any. // It must only be called after stream.CloseAndRecv has returned, or @@ -126,7 +129,7 @@ type ClientStream interface { // // It is not safe to modify the message after calling SendMsg. Tracing // libraries and stats handlers may use the message lazily. 
- SendMsg(m interface{}) error + SendMsg(m any) error // RecvMsg blocks until it receives a message into m or the stream is // done. It returns io.EOF when the stream completes successfully. On // any other error, the stream is aborted and the error contains the RPC @@ -135,7 +138,7 @@ type ClientStream interface { // It is safe to have a goroutine calling SendMsg and another goroutine // calling RecvMsg on the same stream at the same time, but it is not // safe to call RecvMsg on the same stream in different goroutines. - RecvMsg(m interface{}) error + RecvMsg(m any) error } // NewStream creates a new Stream for the client side. This is typically @@ -155,11 +158,6 @@ type ClientStream interface { // If none of the above happen, a goroutine and a context will be leaked, and grpc // will not call the optionally-configured stats handler with a stats.End message. func (cc *ClientConn) NewStream(ctx context.Context, desc *StreamDesc, method string, opts ...CallOption) (ClientStream, error) { - if err := cc.idlenessMgr.onCallBegin(); err != nil { - return nil, err - } - defer cc.idlenessMgr.onCallEnd() - // allow interceptor to see all applicable call options, which means those // configured as defaults from dial option as well as per-call options opts = combine(cc.dopts.callOptions, opts) @@ -176,6 +174,16 @@ func NewClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, meth } func newClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (_ ClientStream, err error) { + // Start tracking the RPC for idleness purposes. This is where a stream is + // created for both streaming and unary RPCs, and hence is a good place to + // track active RPC count. + if err := cc.idlenessMgr.OnCallBegin(); err != nil { + return nil, err + } + // Add a calloption, to decrement the active call count, that gets executed + // when the RPC completes. + opts = append([]CallOption{OnFinish(func(error) { cc.idlenessMgr.OnCallEnd() })}, opts...) + if md, added, ok := metadata.FromOutgoingContextRaw(ctx); ok { // validate md if err := imetadata.Validate(md); err != nil { @@ -433,7 +441,7 @@ func (cs *clientStream) newAttemptLocked(isTransparent bool) (*csAttempt, error) ctx = trace.NewContext(ctx, trInfo.tr) } - if cs.cc.parsedTarget.URL.Scheme == "xds" { + if cs.cc.parsedTarget.URL.Scheme == internal.GRPCResolverSchemeExtraMetadata { // Add extra metadata (metadata that will be added by transport) to context // so the balancer can see them. ctx = grpcutil.WithExtraMetadata(ctx, metadata.Pairs( @@ -788,23 +796,24 @@ func (cs *clientStream) withRetry(op func(a *csAttempt) error, onSuccess func()) func (cs *clientStream) Header() (metadata.MD, error) { var m metadata.MD - noHeader := false err := cs.withRetry(func(a *csAttempt) error { var err error m, err = a.s.Header() - if err == transport.ErrNoHeaders { - noHeader = true - return nil - } return toRPCErr(err) }, cs.commitAttemptLocked) + if m == nil && err == nil { + // The stream ended with success. Finish the clientStream. + err = io.EOF + } + if err != nil { cs.finish(err) - return nil, err + // Do not return the error. The user should get it by calling Recv(). + return nil, nil } - if len(cs.binlogs) != 0 && !cs.serverHeaderBinlogged && !noHeader { + if len(cs.binlogs) != 0 && !cs.serverHeaderBinlogged && m != nil { // Only log if binary log is on and header has not been logged, and // there is actually headers to log. 
logEntry := &binarylog.ServerHeader{ @@ -820,6 +829,7 @@ func (cs *clientStream) Header() (metadata.MD, error) { binlog.Log(cs.ctx, logEntry) } } + return m, nil } @@ -860,7 +870,7 @@ func (cs *clientStream) bufferForRetryLocked(sz int, op func(a *csAttempt) error cs.buffer = append(cs.buffer, op) } -func (cs *clientStream) SendMsg(m interface{}) (err error) { +func (cs *clientStream) SendMsg(m any) (err error) { defer func() { if err != nil && err != io.EOF { // Call finish on the client stream for errors generated by this SendMsg @@ -904,7 +914,7 @@ func (cs *clientStream) SendMsg(m interface{}) (err error) { return err } -func (cs *clientStream) RecvMsg(m interface{}) error { +func (cs *clientStream) RecvMsg(m any) error { if len(cs.binlogs) != 0 && !cs.serverHeaderBinlogged { // Call Header() to binary log header if it's not already logged. cs.Header() @@ -928,24 +938,6 @@ func (cs *clientStream) RecvMsg(m interface{}) error { if err != nil || !cs.desc.ServerStreams { // err != nil or non-server-streaming indicates end of stream. cs.finish(err) - - if len(cs.binlogs) != 0 { - // finish will not log Trailer. Log Trailer here. - logEntry := &binarylog.ServerTrailer{ - OnClientSide: true, - Trailer: cs.Trailer(), - Err: err, - } - if logEntry.Err == io.EOF { - logEntry.Err = nil - } - if peer, ok := peer.FromContext(cs.Context()); ok { - logEntry.PeerAddr = peer.Addr - } - for _, binlog := range cs.binlogs { - binlog.Log(cs.ctx, logEntry) - } - } } return err } @@ -1001,18 +993,30 @@ func (cs *clientStream) finish(err error) { } } } + cs.mu.Unlock() - // For binary logging. only log cancel in finish (could be caused by RPC ctx - // canceled or ClientConn closed). Trailer will be logged in RecvMsg. - // - // Only one of cancel or trailer needs to be logged. In the cases where - // users don't call RecvMsg, users must have already canceled the RPC. - if len(cs.binlogs) != 0 && status.Code(err) == codes.Canceled { - c := &binarylog.Cancel{ - OnClientSide: true, - } - for _, binlog := range cs.binlogs { - binlog.Log(cs.ctx, c) + // Only one of cancel or trailer needs to be logged. 
+ if len(cs.binlogs) != 0 { + switch err { + case errContextCanceled, errContextDeadline, ErrClientConnClosing: + c := &binarylog.Cancel{ + OnClientSide: true, + } + for _, binlog := range cs.binlogs { + binlog.Log(cs.ctx, c) + } + default: + logEntry := &binarylog.ServerTrailer{ + OnClientSide: true, + Trailer: cs.Trailer(), + Err: err, + } + if peer, ok := peer.FromContext(cs.Context()); ok { + logEntry.PeerAddr = peer.Addr + } + for _, binlog := range cs.binlogs { + binlog.Log(cs.ctx, logEntry) + } } } if err == nil { @@ -1028,7 +1032,7 @@ func (cs *clientStream) finish(err error) { cs.cancel() } -func (a *csAttempt) sendMsg(m interface{}, hdr, payld, data []byte) error { +func (a *csAttempt) sendMsg(m any, hdr, payld, data []byte) error { cs := a.cs if a.trInfo != nil { a.mu.Lock() @@ -1055,7 +1059,7 @@ func (a *csAttempt) sendMsg(m interface{}, hdr, payld, data []byte) error { return nil } -func (a *csAttempt) recvMsg(m interface{}, payInfo *payloadInfo) (err error) { +func (a *csAttempt) recvMsg(m any, payInfo *payloadInfo) (err error) { cs := a.cs if len(a.statsHandlers) != 0 && payInfo == nil { payInfo = &payloadInfo{} @@ -1348,7 +1352,7 @@ func (as *addrConnStream) Context() context.Context { return as.s.Context() } -func (as *addrConnStream) SendMsg(m interface{}) (err error) { +func (as *addrConnStream) SendMsg(m any) (err error) { defer func() { if err != nil && err != io.EOF { // Call finish on the client stream for errors generated by this SendMsg @@ -1393,7 +1397,7 @@ func (as *addrConnStream) SendMsg(m interface{}) (err error) { return nil } -func (as *addrConnStream) RecvMsg(m interface{}) (err error) { +func (as *addrConnStream) RecvMsg(m any) (err error) { defer func() { if err != nil || !as.desc.ServerStreams { // err != nil or non-server-streaming indicates end of stream. @@ -1512,7 +1516,7 @@ type ServerStream interface { // // It is not safe to modify the message after calling SendMsg. Tracing // libraries and stats handlers may use the message lazily. - SendMsg(m interface{}) error + SendMsg(m any) error // RecvMsg blocks until it receives a message into m or the stream is // done. It returns io.EOF when the client has performed a CloseSend. On // any non-EOF error, the stream is aborted and the error contains the @@ -1521,7 +1525,7 @@ type ServerStream interface { // It is safe to have a goroutine calling SendMsg and another goroutine // calling RecvMsg on the same stream at the same time, but it is not // safe to call RecvMsg on the same stream in different goroutines. - RecvMsg(m interface{}) error + RecvMsg(m any) error } // serverStream implements a server side Stream. 
@@ -1602,7 +1606,7 @@ func (ss *serverStream) SetTrailer(md metadata.MD) { ss.s.SetTrailer(md) } -func (ss *serverStream) SendMsg(m interface{}) (err error) { +func (ss *serverStream) SendMsg(m any) (err error) { defer func() { if ss.trInfo != nil { ss.mu.Lock() @@ -1610,7 +1614,7 @@ func (ss *serverStream) SendMsg(m interface{}) (err error) { if err == nil { ss.trInfo.tr.LazyLog(&payload{sent: true, msg: m}, true) } else { - ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) ss.trInfo.tr.SetError() } } @@ -1677,7 +1681,7 @@ func (ss *serverStream) SendMsg(m interface{}) (err error) { return nil } -func (ss *serverStream) RecvMsg(m interface{}) (err error) { +func (ss *serverStream) RecvMsg(m any) (err error) { defer func() { if ss.trInfo != nil { ss.mu.Lock() @@ -1685,7 +1689,7 @@ func (ss *serverStream) RecvMsg(m interface{}) (err error) { if err == nil { ss.trInfo.tr.LazyLog(&payload{sent: false, msg: m}, true) } else if err != io.EOF { - ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []any{err}}, true) ss.trInfo.tr.SetError() } } @@ -1757,7 +1761,7 @@ func MethodFromServerStream(stream ServerStream) (string, bool) { // prepareMsg returns the hdr, payload and data // using the compressors passed or using the // passed preparedmsg -func prepareMsg(m interface{}, codec baseCodec, cp Compressor, comp encoding.Compressor) (hdr, payload, data []byte, err error) { +func prepareMsg(m any, codec baseCodec, cp Compressor, comp encoding.Compressor) (hdr, payload, data []byte, err error) { if preparedMsg, ok := m.(*PreparedMsg); ok { return preparedMsg.hdr, preparedMsg.payload, preparedMsg.encodedData, nil } diff --git a/vendor/google.golang.org/grpc/trace.go b/vendor/google.golang.org/grpc/trace.go index 07a2d26b3..9ded79321 100644 --- a/vendor/google.golang.org/grpc/trace.go +++ b/vendor/google.golang.org/grpc/trace.go @@ -97,8 +97,8 @@ func truncate(x string, l int) string { // payload represents an RPC request or response payload. type payload struct { - sent bool // whether this is an outgoing payload - msg interface{} // e.g. a proto.Message + sent bool // whether this is an outgoing payload + msg any // e.g. a proto.Message // TODO(dsymonds): add stringifying info to codec, and limit how much we hold here? } @@ -111,7 +111,7 @@ func (p payload) String() string { type fmtStringer struct { format string - a []interface{} + a []any } func (f *fmtStringer) String() string { diff --git a/vendor/google.golang.org/grpc/version.go b/vendor/google.golang.org/grpc/version.go index 353cfd528..724ad2102 100644 --- a/vendor/google.golang.org/grpc/version.go +++ b/vendor/google.golang.org/grpc/version.go @@ -19,4 +19,4 @@ package grpc // Version is the current grpc version. -const Version = "1.57.0" +const Version = "1.58.3" diff --git a/vendor/google.golang.org/grpc/vet.sh b/vendor/google.golang.org/grpc/vet.sh index a8e4732b3..bbc9e2e3c 100644 --- a/vendor/google.golang.org/grpc/vet.sh +++ b/vendor/google.golang.org/grpc/vet.sh @@ -84,6 +84,9 @@ not git grep -l 'x/net/context' -- "*.go" # thread safety. git grep -l '"math/rand"' -- "*.go" 2>&1 | not grep -v '^examples\|^stress\|grpcrand\|^benchmark\|wrr_test' +# - Do not use "interface{}"; use "any" instead. +git grep -l 'interface{}' -- "*.go" 2>&1 | not grep -v '\.pb\.go\|protoc-gen-go-grpc' + # - Do not call grpclog directly. Use grpclog.Component instead. 
git grep -l -e 'grpclog.I' --or -e 'grpclog.W' --or -e 'grpclog.E' --or -e 'grpclog.F' --or -e 'grpclog.V' -- "*.go" | not grep -v '^grpclog/component.go\|^internal/grpctest/tlogger_test.go' @@ -106,7 +109,7 @@ for MOD_FILE in $(find . -name 'go.mod'); do goimports -l . 2>&1 | not grep -vE "\.pb\.go" golint ./... 2>&1 | not grep -vE "/grpc_testing_not_regenerate/.*\.pb\.go:" - go mod tidy -compat=1.17 + go mod tidy -compat=1.19 git status --porcelain 2>&1 | fail_on_output || \ (git status; git --no-pager diff; exit 1) popd @@ -168,8 +171,6 @@ proto.RegisteredExtension is deprecated proto.RegisteredExtensions is deprecated proto.RegisterMapType is deprecated proto.Unmarshaler is deprecated -resolver.Backend -resolver.GRPCLB Target is deprecated: Use the Target field in the BuildOptions instead. xxx_messageInfo_ ' "${SC_OUT}" diff --git a/vendor/gotest.tools/v3/fs/manifest.go b/vendor/gotest.tools/v3/fs/manifest.go index 44348c59e..d3ddcaca8 100644 --- a/vendor/gotest.tools/v3/fs/manifest.go +++ b/vendor/gotest.tools/v3/fs/manifest.go @@ -55,9 +55,9 @@ type dirEntry interface { Type() string } -// ManifestFromDir creates a Manifest by reading the directory at path. The +// ManifestFromDir creates a [Manifest] by reading the directory at path. The // manifest stores the structure and properties of files in the directory. -// ManifestFromDir can be used with Equal to compare two directories. +// ManifestFromDir can be used with [Equal] to compare two directories. func ManifestFromDir(t assert.TestingT, path string) Manifest { if ht, ok := t.(helperT); ok { ht.Helper() diff --git a/vendor/gotest.tools/v3/fs/ops.go b/vendor/gotest.tools/v3/fs/ops.go index 1bb72e362..9d86a696e 100644 --- a/vendor/gotest.tools/v3/fs/ops.go +++ b/vendor/gotest.tools/v3/fs/ops.go @@ -14,9 +14,9 @@ import ( const defaultFileMode = 0644 -// PathOp is a function which accepts a Path and performs an operation on that -// path. When called with real filesystem objects (File or Dir) a PathOp modifies -// the filesystem at the path. When used with a Manifest object a PathOp updates +// PathOp is a function which accepts a [Path] and performs an operation on that +// path. When called with real filesystem objects ([File] or [Dir]) a PathOp modifies +// the filesystem at the path. When used with a [Manifest] object a PathOp updates // the manifest to expect a value. 
type PathOp func(path Path) error @@ -38,7 +38,7 @@ type manifestDirectory interface { AddDirectory(path string, ops ...PathOp) error } -// WithContent writes content to a file at Path +// WithContent writes content to a file at [Path] func WithContent(content string) PathOp { return func(path Path) error { if m, ok := path.(manifestFile); ok { @@ -49,7 +49,7 @@ func WithContent(content string) PathOp { } } -// WithBytes write bytes to a file at Path +// WithBytes write bytes to a file at [Path] func WithBytes(raw []byte) PathOp { return func(path Path) error { if m, ok := path.(manifestFile); ok { @@ -60,7 +60,7 @@ func WithBytes(raw []byte) PathOp { } } -// WithReaderContent copies the reader contents to the file at Path +// WithReaderContent copies the reader contents to the file at [Path] func WithReaderContent(r io.Reader) PathOp { return func(path Path) error { if m, ok := path.(manifestFile); ok { @@ -77,7 +77,7 @@ func WithReaderContent(r io.Reader) PathOp { } } -// AsUser changes ownership of the file system object at Path +// AsUser changes ownership of the file system object at [Path] func AsUser(uid, gid int) PathOp { return func(path Path) error { if m, ok := path.(manifestResource); ok { @@ -132,7 +132,7 @@ func WithFiles(files map[string]string) PathOp { } } -// FromDir copies the directory tree from the source path into the new Dir +// FromDir copies the directory tree from the source path into the new [Dir] func FromDir(source string) PathOp { return func(path Path) error { if _, ok := path.(manifestDirectory); ok { @@ -142,7 +142,7 @@ func FromDir(source string) PathOp { } } -// WithDir creates a subdirectory in the directory at path. Additional PathOp +// WithDir creates a subdirectory in the directory at path. Additional [PathOp] // can be used to modify the subdirectory func WithDir(name string, ops ...PathOp) PathOp { const defaultMode = 0755 @@ -161,7 +161,7 @@ func WithDir(name string, ops ...PathOp) PathOp { } } -// Apply the PathOps to the File +// Apply the PathOps to the [File] func Apply(t assert.TestingT, path Path, ops ...PathOp) { if ht, ok := t.(helperT); ok { ht.Helper() @@ -178,7 +178,7 @@ func applyPathOps(path Path, ops []PathOp) error { return nil } -// WithMode sets the file mode on the directory or file at path +// WithMode sets the file mode on the directory or file at [Path] func WithMode(mode os.FileMode) PathOp { return func(path Path) error { if m, ok := path.(manifestResource); ok { @@ -241,7 +241,7 @@ func copyFile(source, dest string) error { // WithSymlink creates a symlink in the directory which links to target. // Target must be a path relative to the directory. // -// Note: the argument order is the inverse of os.Symlink to be consistent with +// Note: the argument order is the inverse of [os.Symlink] to be consistent with // the other functions in this package. func WithSymlink(path, target string) PathOp { return func(root Path) error { @@ -255,7 +255,7 @@ func WithSymlink(path, target string) PathOp { // WithHardlink creates a link in the directory which links to target. // Target must be a path relative to the directory. // -// Note: the argument order is the inverse of os.Link to be consistent with +// Note: the argument order is the inverse of [os.Link] to be consistent with // the other functions in this package. 
func WithHardlink(path, target string) PathOp { return func(root Path) error { diff --git a/vendor/gotest.tools/v3/fs/path.go b/vendor/gotest.tools/v3/fs/path.go index 550044a05..8f3bc922a 100644 --- a/vendor/gotest.tools/v3/fs/path.go +++ b/vendor/gotest.tools/v3/fs/path.go @@ -77,8 +77,8 @@ func (p *directoryPath) AddDirectory(path string, ops ...PathOp) error { return applyPathOps(exp, ops) } -// Expected returns a Manifest with a directory structured created by ops. The -// PathOp operations are applied to the manifest as expectations of the +// Expected returns a [Manifest] with a directory structured created by ops. The +// [PathOp] operations are applied to the manifest as expectations of the // filesystem structure and properties. func Expected(t assert.TestingT, ops ...PathOp) Manifest { if ht, ok := t.(helperT); ok { @@ -125,7 +125,7 @@ func normalizeID(id int) uint32 { var anyFileContent = io.NopCloser(bytes.NewReader(nil)) -// MatchAnyFileContent is a PathOp that updates a Manifest so that the file +// MatchAnyFileContent is a [PathOp] that updates a [Manifest] so that the file // at path may contain any content. func MatchAnyFileContent(path Path) error { if m, ok := path.(*filePath); ok { @@ -134,7 +134,7 @@ func MatchAnyFileContent(path Path) error { return nil } -// MatchContentIgnoreCarriageReturn is a PathOp that ignores cariage return +// MatchContentIgnoreCarriageReturn is a [PathOp] that ignores cariage return // discrepancies. func MatchContentIgnoreCarriageReturn(path Path) error { if m, ok := path.(*filePath); ok { @@ -145,7 +145,7 @@ func MatchContentIgnoreCarriageReturn(path Path) error { const anyFile = "*" -// MatchExtraFiles is a PathOp that updates a Manifest to allow a directory +// MatchExtraFiles is a [PathOp] that updates a [Manifest] to allow a directory // to contain unspecified files. func MatchExtraFiles(path Path) error { if m, ok := path.(*directoryPath); ok { @@ -156,14 +156,14 @@ func MatchExtraFiles(path Path) error { // CompareResult is the result of comparison. // -// See gotest.tools/assert/cmp.StringResult for a convenient implementation of +// See [gotest.tools/v3/assert/cmp.StringResult] for a convenient implementation of // this interface. type CompareResult interface { Success() bool FailureMessage() string } -// MatchFileContent is a PathOp that updates a Manifest to use the provided +// MatchFileContent is a [PathOp] that updates a [Manifest] to use the provided // function to determine if a file's content matches the expectation. func MatchFileContent(f func([]byte) CompareResult) PathOp { return func(path Path) error { @@ -174,7 +174,7 @@ func MatchFileContent(f func([]byte) CompareResult) PathOp { } } -// MatchFilesWithGlob is a PathOp that updates a Manifest to match files using +// MatchFilesWithGlob is a [PathOp] that updates a [Manifest] to match files using // glob pattern, and check them using the ops. func MatchFilesWithGlob(glob string, ops ...PathOp) PathOp { return func(path Path) error { @@ -188,7 +188,7 @@ func MatchFilesWithGlob(glob string, ops ...PathOp) PathOp { // anyFileMode is represented by uint32_max const anyFileMode os.FileMode = 4294967295 -// MatchAnyFileMode is a PathOp that updates a Manifest so that the resource at path +// MatchAnyFileMode is a [PathOp] that updates a [Manifest] so that the resource at path // will match any file mode. 
func MatchAnyFileMode(path Path) error { if m, ok := path.(manifestResource); ok { diff --git a/vendor/gotest.tools/v3/fs/report.go b/vendor/gotest.tools/v3/fs/report.go index 5e79e49b5..952aa26ad 100644 --- a/vendor/gotest.tools/v3/fs/report.go +++ b/vendor/gotest.tools/v3/fs/report.go @@ -17,9 +17,9 @@ import ( // Equal compares a directory to the expected structured described by a manifest // and returns success if they match. If they do not match the failure message // will contain all the differences between the directory structure and the -// expected structure defined by the Manifest. +// expected structure defined by the [Manifest]. // -// Equal is a cmp.Comparison which can be used with assert.Assert(). +// Equal is a [cmp.Comparison] which can be used with [gotest.tools/v3/assert.Assert]. func Equal(path string, expected Manifest) cmp.Comparison { return func() cmp.Result { actual, err := manifestFromDir(path) diff --git a/vendor/gotest.tools/v3/golden/golden.go b/vendor/gotest.tools/v3/golden/golden.go index ce96ba173..1ba1c1c93 100644 --- a/vendor/gotest.tools/v3/golden/golden.go +++ b/vendor/gotest.tools/v3/golden/golden.go @@ -6,7 +6,7 @@ Golden files can be automatically updated to match new values by running `go test pkgname -update`. To ensure the update is correct compare the diff of the old expected value to the new expected value. */ -package golden // import "gotest.tools/v3/golden" +package golden import ( "bytes" @@ -40,11 +40,19 @@ type helperT interface { // in the environment before running tests. // // The default value may change in a future major release. +// +// This does not affect the contents of the golden files themselves. And depending on the +// git settings on your system (or in github action platform default like windows), the +// golden files may contain CRLF line endings. You can avoid this by setting the +// .gitattributes file in your repo to use LF line endings for all files, or just the golden +// files, by adding the following line to your .gitattributes file: +// +// * text=auto eol=lf var NormalizeCRLFToLF = os.Getenv("GOTESTTOOLS_GOLDEN_NormalizeCRLFToLF") != "false" // FlagUpdate returns true when the -update flag has been set. func FlagUpdate() bool { - return source.Update + return source.IsUpdate() } // Open opens the file in ./testdata @@ -178,7 +186,7 @@ func compare(actual []byte, filename string) (cmp.Result, []byte) { } func update(filename string, actual []byte) error { - if !source.Update { + if !source.IsUpdate() { return nil } if dir := filepath.Dir(Path(filename)); dir != "." { diff --git a/vendor/gotest.tools/v3/internal/assert/result.go b/vendor/gotest.tools/v3/internal/assert/result.go index 360320614..bb8741eb4 100644 --- a/vendor/gotest.tools/v3/internal/assert/result.go +++ b/vendor/gotest.tools/v3/internal/assert/result.go @@ -26,7 +26,7 @@ func RunComparison( return true } - if source.Update { + if source.IsUpdate() { if updater, ok := result.(updateExpected); ok { const stackIndex = 3 // Assert/Check, assert, RunComparison err := updater.UpdatedExpected(stackIndex) diff --git a/vendor/gotest.tools/v3/internal/source/update.go b/vendor/gotest.tools/v3/internal/source/update.go index f2006aacf..5591bffd1 100644 --- a/vendor/gotest.tools/v3/internal/source/update.go +++ b/vendor/gotest.tools/v3/internal/source/update.go @@ -14,12 +14,32 @@ import ( "strings" ) -// Update is set by the -update flag. It indicates the user running the tests -// would like to update any golden values. 
+// IsUpdate is returns true if the -update flag is set. It indicates the user +// running the tests would like to update any golden values. +func IsUpdate() bool { + if Update { + return true + } + return flag.Lookup("update").Value.(flag.Getter).Get().(bool) +} + +// Update is a shim for testing, and for compatibility with the old -update-golden +// flag. var Update bool func init() { - flag.BoolVar(&Update, "update", false, "update golden values") + if f := flag.Lookup("update"); f != nil { + getter, ok := f.Value.(flag.Getter) + msg := "some other package defined an incompatible -update flag, expected a flag.Bool" + if !ok { + panic(msg) + } + if _, ok := getter.Get().(bool); !ok { + panic(msg) + } + return + } + flag.Bool("update", false, "update golden values") } // ErrNotFound indicates that UpdateExpectedValue failed to find the diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/deepcopy.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/deepcopy.go index 2bd5d5293..52c65ccbe 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/deepcopy.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/deepcopy.go @@ -291,9 +291,11 @@ func (in *JSONSchemaProps) DeepCopy() *JSONSchemaProps { } if in.XValidations != nil { - in, out := &in.XValidations, &out.XValidations - *out = make([]ValidationRule, len(*in)) - copy(*out, *in) + inValidations, outValidations := &in.XValidations, &out.XValidations + *outValidations = make([]ValidationRule, len(*inValidations)) + for i := range *inValidations { + in.XValidations[i].DeepCopyInto(&out.XValidations[i]) + } } return out diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/types_jsonschema.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/types_jsonschema.go index 04ce206bb..cc1c7437f 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/types_jsonschema.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/types_jsonschema.go @@ -16,6 +16,26 @@ limitations under the License. package apiextensions +// FieldValueErrorReason is a machine-readable value providing more detail about why a field failed the validation. +// +enum +type FieldValueErrorReason string + +const ( + // FieldValueRequired is used to report required values that are not + // provided (e.g. empty strings, null values, or empty arrays). + FieldValueRequired FieldValueErrorReason = "FieldValueRequired" + // FieldValueDuplicate is used to report collisions of values that must be + // unique (e.g. unique IDs). + FieldValueDuplicate FieldValueErrorReason = "FieldValueDuplicate" + // FieldValueInvalid is used to report malformed values (e.g. failed regex + // match, too long, out of bounds). + FieldValueInvalid FieldValueErrorReason = "FieldValueInvalid" + // FieldValueForbidden is used to report valid (as per formatting rules) + // values which would be accepted under some conditions, but which are not + // permitted by the current conditions (such as security policy). + FieldValueForbidden FieldValueErrorReason = "FieldValueForbidden" +) + // JSONSchemaProps is a JSON-Schema following Specification Draft 4 (http://json-schema.org/). 
type JSONSchemaProps struct { ID string @@ -208,6 +228,24 @@ type ValidationRule struct { // "x must be less than max ("+string(self.max)+")" // +optional MessageExpression string + // reason provides a machine-readable validation failure reason that is returned to the caller when a request fails this validation rule. + // The HTTP status code returned to the caller will match the reason of the reason of the first failed validation rule. + // The currently supported reasons are: "FieldValueInvalid", "FieldValueForbidden", "FieldValueRequired", "FieldValueDuplicate". + // If not set, default to use "FieldValueInvalid". + // All future added reasons must be accepted by clients when reading this value and unknown reasons should be treated as FieldValueInvalid. + // +optional + Reason *FieldValueErrorReason + // fieldPath represents the field path returned when the validation fails. + // It must be a relative JSON path (i.e. with array notation) scoped to the location of this x-kubernetes-validations extension in the schema and refer to an existing field. + // e.g. when validation checks if a specific attribute `foo` under a map `testMap`, the fieldPath could be set to `.testMap.foo` + // If the validation checks two lists must have unique attributes, the fieldPath could be set to either of the list: e.g. `.testList` + // It does not support list numeric index. + // It supports child operation to refer to an existing field currently. Refer to [JSONPath support in Kubernetes](https://kubernetes.io/docs/reference/kubectl/jsonpath/) for more info. + // Numeric index of array is not supported. + // For field name which contains special characters, use `['specialName']` to refer the field name. + // e.g. for attribute `foo.34$` appears in a list `testList`, the fieldPath could be set to `.testList['foo.34$']` + // +optional + FieldPath string } // JSON represents any valid JSON value. 
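The hunk above adds two fields to the internal ValidationRule type, Reason (a *FieldValueErrorReason) and FieldPath (a string), and the pointer field is what motivates the apiextensions deepcopy.go change shown earlier: the flat copy(*out, *in) over XValidations is replaced by a per-element DeepCopyInto loop because a shallow element copy would leave the original and the copy sharing the same Reason pointer. The same loop appears again in the v1 deepcopy.go hunk below. A minimal sketch of the aliasing hazard, using simplified stand-in types rather than the vendored ones:

package main

import "fmt"

// rule is an illustrative stand-in for ValidationRule after this change:
// it carries a pointer field, analogous to Reason *FieldValueErrorReason.
type rule struct {
	Rule   string
	Reason *string
}

// deepCopyInto mirrors what the generated DeepCopyInto achieves for such a
// type: the pointer target is cloned instead of shared.
func (in *rule) deepCopyInto(out *rule) {
	*out = *in
	if in.Reason != nil {
		r := *in.Reason
		out.Reason = &r
	}
}

func main() {
	reason := "FieldValueInvalid"
	src := []rule{{Rule: "self.x > 0", Reason: &reason}}

	// Old approach: copy() duplicates the structs but shares the Reason pointer.
	shallow := make([]rule, len(src))
	copy(shallow, src)

	// New approach: a per-element deep copy clones the pointer field too.
	deep := make([]rule, len(src))
	for i := range src {
		src[i].deepCopyInto(&deep[i])
	}

	*src[0].Reason = "FieldValueForbidden" // mutate the original in place
	fmt.Println(*shallow[0].Reason)        // FieldValueForbidden: still aliased
	fmt.Println(*deep[0].Reason)           // FieldValueInvalid: independent copy
}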
diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/deepcopy.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/deepcopy.go index 28dfb99f1..c548e642d 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/deepcopy.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/deepcopy.go @@ -251,9 +251,11 @@ func (in *JSONSchemaProps) DeepCopy() *JSONSchemaProps { } if in.XValidations != nil { - in, out := &in.XValidations, &out.XValidations - *out = make([]ValidationRule, len(*in)) - copy(*out, *in) + inValidations, outValidations := &in.XValidations, &out.XValidations + *outValidations = make([]ValidationRule, len(*inValidations)) + for i := range *inValidations { + in.XValidations[i].DeepCopyInto(&out.XValidations[i]) + } } return out diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.pb.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.pb.go index 5dbb38c8b..75a573a2d 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.pb.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.pb.go @@ -814,199 +814,202 @@ func init() { } var fileDescriptor_f5a35c9667703937 = []byte{ - // 3072 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0xdf, 0x6f, 0x24, 0x47, - 0xf1, 0xbf, 0x59, 0xff, 0x5a, 0xb7, 0xed, 0xb3, 0xdd, 0x77, 0xf6, 0x77, 0xce, 0xb9, 0xf3, 0xfa, - 0x36, 0xdf, 0x1c, 0x4e, 0x72, 0x59, 0x27, 0x26, 0x21, 0x47, 0x84, 0x40, 0x5e, 0xdb, 0x97, 0x38, - 0x67, 0x9f, 0xad, 0xde, 0xbb, 0x8b, 0x93, 0x20, 0x92, 0xf1, 0x4e, 0x7b, 0x3d, 0xf1, 0xfc, 0xba, - 0xee, 0x99, 0xb5, 0x2d, 0x81, 0x14, 0x81, 0x22, 0x20, 0x12, 0x84, 0x07, 0x14, 0x9e, 0x10, 0x42, - 0x28, 0x48, 0xf0, 0x00, 0x6f, 0xf0, 0x2f, 0xe4, 0x05, 0x29, 0x4f, 0x28, 0x12, 0xd2, 0x8a, 0x2c, - 0xff, 0x00, 0x12, 0x20, 0x84, 0x1f, 0x10, 0xea, 0x1f, 0xd3, 0xd3, 0x3b, 0xbb, 0x7b, 0x77, 0xb2, - 0xd7, 0xc9, 0xdb, 0x6e, 0x55, 0x75, 0x7d, 0xaa, 0xab, 0xab, 0xab, 0xab, 0xab, 0x07, 0x58, 0xfb, - 0x37, 0x68, 0xc9, 0x09, 0x16, 0xf6, 0xe3, 0x1d, 0x4c, 0x7c, 0x1c, 0x61, 0xba, 0x50, 0xc7, 0xbe, - 0x1d, 0x90, 0x05, 0xc9, 0xb0, 0x42, 0x07, 0x1f, 0x46, 0xd8, 0xa7, 0x4e, 0xe0, 0xd3, 0x67, 0xac, - 0xd0, 0xa1, 0x98, 0xd4, 0x31, 0x59, 0x08, 0xf7, 0x6b, 0x8c, 0x47, 0x5b, 0x05, 0x16, 0xea, 0xcf, - 0x2d, 0xd4, 0xb0, 0x8f, 0x89, 0x15, 0x61, 0xbb, 0x14, 0x92, 0x20, 0x0a, 0xe0, 0x0d, 0xa1, 0xa9, - 0xd4, 0x22, 0xf8, 0x96, 0xd2, 0x54, 0x0a, 0xf7, 0x6b, 0x8c, 0x47, 0x5b, 0x05, 0x4a, 0xf5, 0xe7, - 0x66, 0x9e, 0xa9, 0x39, 0xd1, 0x5e, 0xbc, 0x53, 0xaa, 0x06, 0xde, 0x42, 0x2d, 0xa8, 0x05, 0x0b, - 0x5c, 0xe1, 0x4e, 0xbc, 0xcb, 0xff, 0xf1, 0x3f, 0xfc, 0x97, 0x00, 0x9a, 0x79, 0x3e, 0x35, 0xd9, - 0xb3, 0xaa, 0x7b, 0x8e, 0x8f, 0xc9, 0x51, 0x6a, 0xa7, 0x87, 0x23, 0xab, 0x83, 0x79, 0x33, 0x0b, - 0xdd, 0x46, 0x91, 0xd8, 0x8f, 0x1c, 0x0f, 0xb7, 0x0d, 0xf8, 0xca, 0xc3, 0x06, 0xd0, 0xea, 0x1e, - 0xf6, 0xac, 0xec, 0xb8, 0xe2, 0xb1, 0x01, 0x26, 0x97, 0x03, 0xbf, 0x8e, 0x09, 0x9b, 0x20, 0xc2, - 0xf7, 0x63, 0x4c, 0x23, 0x58, 0x06, 0x7d, 0xb1, 0x63, 0x9b, 0xc6, 0x9c, 0x31, 0x3f, 0x5c, 0x7e, - 0xf6, 0xe3, 0x46, 0xe1, 0x5c, 0xb3, 0x51, 0xe8, 0xbb, 0xbb, 0xb6, 0x72, 0xdc, 0x28, 0x5c, 0xed, - 0x86, 0x14, 0x1d, 0x85, 0x98, 0x96, 0xee, 0xae, 0xad, 0x20, 0x36, 0x18, 0xbe, 0x0c, 0x26, 0x6d, - 0x4c, 0x1d, 0x82, 0xed, 0xa5, 0xad, 0xb5, 0x7b, 0x42, 0xbf, 0x99, 0xe3, 0x1a, 0x2f, 0x49, 0x8d, - 0x93, 0x2b, 0x59, 0x01, 0xd4, 0x3e, 0x06, 0x6e, 0x83, 
0xa1, 0x60, 0xe7, 0x1d, 0x5c, 0x8d, 0xa8, - 0xd9, 0x37, 0xd7, 0x37, 0x3f, 0xb2, 0xf8, 0x4c, 0x29, 0x5d, 0x3c, 0x65, 0x02, 0x5f, 0x31, 0x39, - 0xd9, 0x12, 0xb2, 0x0e, 0x56, 0x93, 0x45, 0x2b, 0x8f, 0x4b, 0xb4, 0xa1, 0x4d, 0xa1, 0x05, 0x25, - 0xea, 0x8a, 0xbf, 0xca, 0x01, 0xa8, 0x4f, 0x9e, 0x86, 0x81, 0x4f, 0x71, 0x4f, 0x66, 0x4f, 0xc1, - 0x44, 0x95, 0x6b, 0x8e, 0xb0, 0x2d, 0x71, 0xcd, 0xdc, 0x49, 0xac, 0x37, 0x25, 0xfe, 0xc4, 0x72, - 0x46, 0x1d, 0x6a, 0x03, 0x80, 0x77, 0xc0, 0x20, 0xc1, 0x34, 0x76, 0x23, 0xb3, 0x6f, 0xce, 0x98, - 0x1f, 0x59, 0xbc, 0xde, 0x15, 0x8a, 0x87, 0x36, 0x0b, 0xbe, 0x52, 0xfd, 0xb9, 0x52, 0x25, 0xb2, - 0xa2, 0x98, 0x96, 0xcf, 0x4b, 0xa4, 0x41, 0xc4, 0x75, 0x20, 0xa9, 0xab, 0xf8, 0x5f, 0x03, 0x4c, - 0xe8, 0x5e, 0xaa, 0x3b, 0xf8, 0x00, 0x12, 0x30, 0x44, 0x44, 0xb0, 0x70, 0x3f, 0x8d, 0x2c, 0xde, - 0x2a, 0x9d, 0x74, 0x47, 0x95, 0xda, 0xe2, 0xaf, 0x3c, 0xc2, 0x96, 0x4b, 0xfe, 0x41, 0x09, 0x10, - 0xac, 0x83, 0x3c, 0x91, 0x6b, 0xc4, 0x03, 0x69, 0x64, 0x71, 0xbd, 0x37, 0xa0, 0x42, 0x67, 0x79, - 0xb4, 0xd9, 0x28, 0xe4, 0x93, 0x7f, 0x48, 0x61, 0x15, 0x7f, 0x91, 0x03, 0xb3, 0xcb, 0x31, 0x8d, - 0x02, 0x0f, 0x61, 0x1a, 0xc4, 0xa4, 0x8a, 0x97, 0x03, 0x37, 0xf6, 0xfc, 0x15, 0xbc, 0xeb, 0xf8, - 0x4e, 0xc4, 0x62, 0x74, 0x0e, 0xf4, 0xfb, 0x96, 0x87, 0x65, 0xcc, 0x8c, 0x4a, 0x4f, 0xf6, 0xdf, - 0xb6, 0x3c, 0x8c, 0x38, 0x87, 0x49, 0xb0, 0x10, 0x91, 0x3b, 0x40, 0x49, 0xdc, 0x39, 0x0a, 0x31, - 0xe2, 0x1c, 0x78, 0x0d, 0x0c, 0xee, 0x06, 0xc4, 0xb3, 0xc4, 0xea, 0x0d, 0xa7, 0xeb, 0x71, 0x93, - 0x53, 0x91, 0xe4, 0xc2, 0x17, 0xc0, 0x88, 0x8d, 0x69, 0x95, 0x38, 0x21, 0x83, 0x36, 0xfb, 0xb9, - 0xf0, 0x05, 0x29, 0x3c, 0xb2, 0x92, 0xb2, 0x90, 0x2e, 0x07, 0xaf, 0x83, 0x7c, 0x48, 0x9c, 0x80, - 0x38, 0xd1, 0x91, 0x39, 0x30, 0x67, 0xcc, 0x0f, 0x94, 0x27, 0xe4, 0x98, 0xfc, 0x96, 0xa4, 0x23, - 0x25, 0xc1, 0xa4, 0xdf, 0xa1, 0x81, 0xbf, 0x65, 0x45, 0x7b, 0xe6, 0x20, 0x47, 0x50, 0xd2, 0xaf, - 0x56, 0x36, 0x6f, 0x33, 0x3a, 0x52, 0x12, 0xc5, 0x3f, 0x1b, 0xc0, 0xcc, 0x7a, 0x28, 0x71, 0x2f, - 0xbc, 0x09, 0xf2, 0x34, 0x62, 0x39, 0xa7, 0x76, 0x24, 0xfd, 0xf3, 0x54, 0xa2, 0xaa, 0x22, 0xe9, - 0xc7, 0x8d, 0xc2, 0x74, 0x3a, 0x22, 0xa1, 0x72, 0xdf, 0xa8, 0xb1, 0x2c, 0xe4, 0x0e, 0xf0, 0xce, - 0x5e, 0x10, 0xec, 0xcb, 0xd5, 0x3f, 0x45, 0xc8, 0xbd, 0x26, 0x14, 0xa5, 0x98, 0x22, 0xe4, 0x24, - 0x19, 0x25, 0x40, 0xc5, 0xff, 0xe4, 0xb2, 0x13, 0xd3, 0x16, 0xfd, 0x6d, 0x90, 0x67, 0x5b, 0xc8, - 0xb6, 0x22, 0x4b, 0x6e, 0x82, 0x67, 0x1f, 0x6d, 0xc3, 0x89, 0xfd, 0xba, 0x81, 0x23, 0xab, 0x0c, - 0xa5, 0x2b, 0x40, 0x4a, 0x43, 0x4a, 0x2b, 0x3c, 0x04, 0xfd, 0x34, 0xc4, 0x55, 0x39, 0xdf, 0x7b, - 0xa7, 0x88, 0xf6, 0x2e, 0x73, 0xa8, 0x84, 0xb8, 0x9a, 0x06, 0x23, 0xfb, 0x87, 0x38, 0x22, 0x7c, - 0xd7, 0x00, 0x83, 0x94, 0xe7, 0x05, 0x99, 0x4b, 0xb6, 0xcf, 0x00, 0x3c, 0x93, 0x77, 0xc4, 0x7f, - 0x24, 0x71, 0x8b, 0xff, 0xcc, 0x81, 0xab, 0xdd, 0x86, 0x2e, 0x07, 0xbe, 0x2d, 0x16, 0x61, 0x4d, - 0xee, 0x2b, 0x11, 0x59, 0x2f, 0xe8, 0xfb, 0xea, 0xb8, 0x51, 0x78, 0xe2, 0xa1, 0x0a, 0xb4, 0x0d, - 0xf8, 0x55, 0x35, 0x65, 0xb1, 0x49, 0xaf, 0xb6, 0x1a, 0x76, 0xdc, 0x28, 0x8c, 0xab, 0x61, 0xad, - 0xb6, 0xc2, 0x3a, 0x80, 0xae, 0x45, 0xa3, 0x3b, 0xc4, 0xf2, 0xa9, 0x50, 0xeb, 0x78, 0x58, 0x7a, - 0xee, 0xa9, 0x47, 0x0b, 0x0a, 0x36, 0xa2, 0x3c, 0x23, 0x21, 0xe1, 0x7a, 0x9b, 0x36, 0xd4, 0x01, - 0x81, 0xe5, 0x0c, 0x82, 0x2d, 0xaa, 0xd2, 0x80, 0x96, 0xc3, 0x19, 0x15, 0x49, 0x2e, 0x7c, 0x12, - 0x0c, 0x79, 0x98, 0x52, 0xab, 0x86, 0xf9, 0xde, 0x1f, 0x4e, 0x0f, 0xc5, 0x0d, 0x41, 0x46, 0x09, - 0xbf, 0xf8, 0x2f, 0x03, 0x5c, 0xee, 0xe6, 0xb5, 0x75, 0x87, 0x46, 0xf0, 0x9b, 
0x6d, 0x61, 0x5f, - 0x7a, 0xb4, 0x19, 0xb2, 0xd1, 0x3c, 0xe8, 0x55, 0x2a, 0x49, 0x28, 0x5a, 0xc8, 0x1f, 0x80, 0x01, - 0x27, 0xc2, 0x5e, 0x72, 0x5a, 0xa2, 0xde, 0x87, 0x5d, 0x79, 0x4c, 0xc2, 0x0f, 0xac, 0x31, 0x20, - 0x24, 0xf0, 0x8a, 0x1f, 0xe5, 0xc0, 0x95, 0x6e, 0x43, 0x58, 0x1e, 0xa7, 0xcc, 0xd9, 0xa1, 0x1b, - 0x13, 0xcb, 0x95, 0xc1, 0xa6, 0x9c, 0xbd, 0xc5, 0xa9, 0x48, 0x72, 0x59, 0xee, 0xa4, 0x8e, 0x5f, - 0x8b, 0x5d, 0x8b, 0xc8, 0x48, 0x52, 0x13, 0xae, 0x48, 0x3a, 0x52, 0x12, 0xb0, 0x04, 0x00, 0xdd, - 0x0b, 0x48, 0xc4, 0x31, 0x78, 0x85, 0x33, 0x5c, 0x3e, 0xcf, 0x32, 0x42, 0x45, 0x51, 0x91, 0x26, - 0xc1, 0x0e, 0x92, 0x7d, 0xc7, 0xb7, 0xe5, 0x82, 0xab, 0xbd, 0x7b, 0xcb, 0xf1, 0x6d, 0xc4, 0x39, - 0x0c, 0xdf, 0x75, 0x68, 0xc4, 0x28, 0x72, 0xb5, 0x5b, 0x1c, 0xce, 0x25, 0x95, 0x04, 0xc3, 0xaf, - 0xb2, 0x04, 0x1b, 0x10, 0x07, 0x53, 0x73, 0x30, 0xc5, 0x5f, 0x56, 0x54, 0xa4, 0x49, 0x14, 0xff, - 0xd2, 0xdf, 0x3d, 0x3e, 0x58, 0x02, 0x81, 0x8f, 0x83, 0x81, 0x1a, 0x09, 0xe2, 0x50, 0x7a, 0x49, - 0x79, 0xfb, 0x65, 0x46, 0x44, 0x82, 0x07, 0xbf, 0x0d, 0x06, 0x7c, 0x39, 0x61, 0x16, 0x41, 0xaf, - 0xf5, 0x7e, 0x99, 0xb9, 0xb7, 0x52, 0x74, 0xe1, 0x48, 0x01, 0x0a, 0x9f, 0x07, 0x03, 0xb4, 0x1a, - 0x84, 0x58, 0x3a, 0x71, 0x36, 0x11, 0xaa, 0x30, 0xe2, 0x71, 0xa3, 0x30, 0x96, 0xa8, 0xe3, 0x04, - 0x24, 0x84, 0xe1, 0xf7, 0x0d, 0x90, 0x97, 0xc7, 0x05, 0x35, 0x87, 0x78, 0x78, 0xbe, 0xde, 0x7b, - 0xbb, 0x65, 0xd9, 0x9b, 0xae, 0x99, 0x24, 0x50, 0xa4, 0xc0, 0xe1, 0x77, 0x0d, 0x00, 0xaa, 0xea, - 0xec, 0x32, 0x87, 0xb9, 0x0f, 0x7b, 0xb6, 0x55, 0xb4, 0x53, 0x51, 0x04, 0x42, 0x5a, 0x2a, 0x69, - 0xa8, 0xb0, 0x02, 0xa6, 0x42, 0x82, 0xb9, 0xee, 0xbb, 0xfe, 0xbe, 0x1f, 0x1c, 0xf8, 0x37, 0x1d, - 0xec, 0xda, 0xd4, 0x04, 0x73, 0xc6, 0x7c, 0xbe, 0x7c, 0x45, 0xda, 0x3f, 0xb5, 0xd5, 0x49, 0x08, - 0x75, 0x1e, 0x5b, 0x7c, 0xaf, 0x2f, 0x5b, 0x6b, 0x65, 0xcf, 0x0b, 0xf8, 0x81, 0x98, 0xbc, 0xc8, - 0xc3, 0xd4, 0x34, 0xf8, 0x42, 0xbc, 0xd9, 0xfb, 0x85, 0x50, 0xb9, 0x3e, 0x3d, 0xa4, 0x15, 0x89, - 0x22, 0xcd, 0x04, 0xf8, 0x53, 0x03, 0x8c, 0x59, 0xd5, 0x2a, 0x0e, 0x23, 0x6c, 0x8b, 0x6d, 0x9c, - 0x3b, 0xdb, 0xa8, 0x9e, 0x92, 0x06, 0x8d, 0x2d, 0xe9, 0xa8, 0xa8, 0xd5, 0x08, 0xf8, 0x12, 0x38, - 0x4f, 0xa3, 0x80, 0x60, 0x3b, 0x89, 0x20, 0x99, 0x5d, 0x60, 0xb3, 0x51, 0x38, 0x5f, 0x69, 0xe1, - 0xa0, 0x8c, 0x64, 0xf1, 0x93, 0x01, 0x50, 0x78, 0x48, 0x84, 0x3e, 0x42, 0xd1, 0x7b, 0x0d, 0x0c, - 0xf2, 0x99, 0xda, 0xdc, 0x21, 0x79, 0xed, 0xa8, 0xe7, 0x54, 0x24, 0xb9, 0xec, 0x78, 0x62, 0xf8, - 0xec, 0x78, 0xea, 0xe3, 0x82, 0xea, 0x78, 0xaa, 0x08, 0x32, 0x4a, 0xf8, 0x70, 0x11, 0x00, 0x1b, - 0x87, 0x04, 0xb3, 0x8c, 0x64, 0x9b, 0x43, 0x5c, 0x5a, 0xad, 0xcf, 0x8a, 0xe2, 0x20, 0x4d, 0x0a, - 0xde, 0x04, 0x30, 0xf9, 0xe7, 0x04, 0xfe, 0x6b, 0x16, 0xf1, 0x1d, 0xbf, 0x66, 0xe6, 0xb9, 0xd9, - 0xd3, 0xec, 0xb4, 0x5d, 0x69, 0xe3, 0xa2, 0x0e, 0x23, 0x60, 0x1d, 0x0c, 0x8a, 0x6b, 0x34, 0xcf, - 0x1b, 0x3d, 0xdc, 0x71, 0xf7, 0x2c, 0xd7, 0xb1, 0x39, 0x54, 0x19, 0x70, 0xf7, 0x70, 0x14, 0x24, - 0xd1, 0xe0, 0xfb, 0x06, 0x18, 0xa5, 0xf1, 0x0e, 0x91, 0xd2, 0x94, 0x67, 0xf5, 0x91, 0xc5, 0x3b, - 0xbd, 0x82, 0xaf, 0x68, 0xba, 0xcb, 0x13, 0xcd, 0x46, 0x61, 0x54, 0xa7, 0xa0, 0x16, 0x6c, 0xf8, - 0x07, 0x03, 0x98, 0x96, 0x2d, 0x42, 0xdf, 0x72, 0xb7, 0x88, 0xe3, 0x47, 0x98, 0x88, 0x0b, 0x91, - 0x38, 0x3e, 0x7a, 0x58, 0x2b, 0x66, 0xef, 0x59, 0xe5, 0x39, 0xb9, 0xd2, 0xe6, 0x52, 0x17, 0x0b, - 0x50, 0x57, 0xdb, 0x8a, 0xff, 0x36, 0xb2, 0xa9, 0x45, 0x9b, 0x65, 0xa5, 0x6a, 0xb9, 0x18, 0xae, - 0x80, 0x09, 0x56, 0xfd, 0x22, 0x1c, 0xba, 0x4e, 0xd5, 0xa2, 0xfc, 0xf6, 0x23, 0xa2, 0x5b, 0x5d, - 0xc3, 
0x2b, 0x19, 0x3e, 0x6a, 0x1b, 0x01, 0x5f, 0x05, 0x50, 0x94, 0x85, 0x2d, 0x7a, 0x44, 0x25, - 0xa0, 0x0a, 0xbc, 0x4a, 0x9b, 0x04, 0xea, 0x30, 0x0a, 0x2e, 0x83, 0x49, 0xd7, 0xda, 0xc1, 0x6e, - 0x05, 0xbb, 0xb8, 0x1a, 0x05, 0x84, 0xab, 0x12, 0xf7, 0xc3, 0xa9, 0x66, 0xa3, 0x30, 0xb9, 0x9e, - 0x65, 0xa2, 0x76, 0xf9, 0xe2, 0xd5, 0xec, 0x5e, 0xd6, 0x27, 0x2e, 0x8a, 0xed, 0x0f, 0x73, 0x60, - 0xa6, 0x7b, 0x50, 0xc0, 0xef, 0xa8, 0xd2, 0x58, 0x54, 0x7c, 0xaf, 0x9f, 0x41, 0xe8, 0xc9, 0xeb, - 0x00, 0x68, 0xbf, 0x0a, 0xc0, 0x23, 0x76, 0x5e, 0x5b, 0x6e, 0x72, 0xed, 0xdf, 0x3e, 0x0b, 0x74, - 0xa6, 0xbf, 0x3c, 0x2c, 0xaa, 0x00, 0xcb, 0xe5, 0x87, 0xbe, 0xe5, 0xe2, 0xe2, 0x47, 0x6d, 0x57, - 0xdb, 0x74, 0xb3, 0xc2, 0x1f, 0x18, 0x60, 0x3c, 0x08, 0xb1, 0xbf, 0xb4, 0xb5, 0x76, 0xef, 0xcb, - 0x62, 0xd3, 0x4a, 0x07, 0xad, 0x9d, 0xdc, 0x44, 0x76, 0xbf, 0x16, 0xba, 0xb6, 0x48, 0x10, 0xd2, - 0xf2, 0x85, 0x66, 0xa3, 0x30, 0xbe, 0xd9, 0x8a, 0x82, 0xb2, 0xb0, 0x45, 0x0f, 0x4c, 0xad, 0x1e, - 0x46, 0x98, 0xf8, 0x96, 0xbb, 0x12, 0x54, 0x63, 0x0f, 0xfb, 0x91, 0xb0, 0x31, 0xd3, 0x2e, 0x30, - 0x1e, 0xb1, 0x5d, 0x70, 0x05, 0xf4, 0xc5, 0xc4, 0x95, 0x51, 0x3b, 0xa2, 0x9a, 0x60, 0x68, 0x1d, - 0x31, 0x7a, 0xf1, 0x2a, 0xe8, 0x67, 0x76, 0xc2, 0x4b, 0xa0, 0x8f, 0x58, 0x07, 0x5c, 0xeb, 0x68, - 0x79, 0x88, 0x89, 0x20, 0xeb, 0x00, 0x31, 0x5a, 0xf1, 0xef, 0x73, 0x60, 0x3c, 0x33, 0x17, 0x38, - 0x03, 0x72, 0xaa, 0xb3, 0x06, 0xa4, 0xd2, 0xdc, 0xda, 0x0a, 0xca, 0x39, 0x36, 0x7c, 0x51, 0x65, - 0x57, 0x01, 0x5a, 0x50, 0x87, 0x05, 0xa7, 0xb2, 0xb2, 0x2c, 0x55, 0xc7, 0x0c, 0x49, 0xd2, 0x23, - 0xb3, 0x01, 0xef, 0xca, 0x5d, 0x21, 0x6c, 0xc0, 0xbb, 0x88, 0xd1, 0x4e, 0xda, 0x2b, 0x49, 0x9a, - 0x35, 0x03, 0x8f, 0xd0, 0xac, 0x19, 0x7c, 0x60, 0xb3, 0xe6, 0x71, 0x30, 0x10, 0x39, 0x91, 0x8b, - 0xf9, 0x49, 0xa5, 0x15, 0xc3, 0x77, 0x18, 0x11, 0x09, 0x1e, 0xc4, 0x60, 0xc8, 0xc6, 0xbb, 0x56, - 0xec, 0x46, 0xfc, 0x50, 0x1a, 0x59, 0xfc, 0xfa, 0xe9, 0xa2, 0x47, 0x34, 0x33, 0x56, 0x84, 0x4a, - 0x94, 0xe8, 0x86, 0x4f, 0x80, 0x21, 0xcf, 0x3a, 0x74, 0xbc, 0xd8, 0xe3, 0x15, 0xa3, 0x21, 0xc4, - 0x36, 0x04, 0x09, 0x25, 0x3c, 0x96, 0x04, 0xf1, 0x61, 0xd5, 0x8d, 0xa9, 0x53, 0xc7, 0x92, 0x29, - 0x4b, 0x3a, 0x95, 0x04, 0x57, 0x33, 0x7c, 0xd4, 0x36, 0x82, 0x83, 0x39, 0x3e, 0x1f, 0x3c, 0xa2, - 0x81, 0x09, 0x12, 0x4a, 0x78, 0xad, 0x60, 0x52, 0x7e, 0xb4, 0x1b, 0x98, 0x1c, 0xdc, 0x36, 0x02, - 0x3e, 0x0d, 0x86, 0x3d, 0xeb, 0x70, 0x1d, 0xfb, 0xb5, 0x68, 0xcf, 0x1c, 0x9b, 0x33, 0xe6, 0xfb, - 0xca, 0x63, 0xcd, 0x46, 0x61, 0x78, 0x23, 0x21, 0xa2, 0x94, 0xcf, 0x85, 0x1d, 0x5f, 0x0a, 0x9f, - 0xd7, 0x84, 0x13, 0x22, 0x4a, 0xf9, 0xac, 0x32, 0x09, 0xad, 0x88, 0xed, 0x2b, 0x73, 0xbc, 0xf5, - 0xe2, 0xbc, 0x25, 0xc8, 0x28, 0xe1, 0xc3, 0x79, 0x90, 0xf7, 0xac, 0x43, 0x7e, 0xa7, 0x34, 0x27, - 0xb8, 0x5a, 0xde, 0x50, 0xdc, 0x90, 0x34, 0xa4, 0xb8, 0x5c, 0xd2, 0xf1, 0x85, 0xe4, 0xa4, 0x26, - 0x29, 0x69, 0x48, 0x71, 0x59, 0xfc, 0xc6, 0xbe, 0x73, 0x3f, 0xc6, 0x42, 0x18, 0x72, 0xcf, 0xa8, - 0xf8, 0xbd, 0x9b, 0xb2, 0x90, 0x2e, 0xc7, 0xee, 0x74, 0x5e, 0xec, 0x46, 0x4e, 0xe8, 0xe2, 0xcd, - 0x5d, 0xf3, 0x02, 0xf7, 0x3f, 0x2f, 0xe5, 0x37, 0x14, 0x15, 0x69, 0x12, 0xf0, 0x6d, 0xd0, 0x8f, - 0xfd, 0xd8, 0x33, 0x2f, 0xf2, 0xe3, 0xfb, 0xb4, 0xd1, 0xa7, 0xf6, 0xcb, 0xaa, 0x1f, 0x7b, 0x88, - 0x6b, 0x86, 0x2f, 0x82, 0x31, 0xcf, 0x3a, 0x64, 0x49, 0x00, 0x93, 0x88, 0x5d, 0x34, 0xa7, 0xf8, - 0xbc, 0x27, 0x59, 0x11, 0xbb, 0xa1, 0x33, 0x50, 0xab, 0x1c, 0x1f, 0xe8, 0xf8, 0xda, 0xc0, 0x69, - 0x6d, 0xa0, 0xce, 0x40, 0xad, 0x72, 0xcc, 0xc9, 0x04, 0xdf, 0x8f, 0x1d, 0x82, 0x6d, 0xf3, 0xff, - 0x78, 0xdd, 0x2b, 0xfb, 0xbb, 
0x82, 0x86, 0x14, 0x17, 0xde, 0x4f, 0x5a, 0x0e, 0x26, 0xdf, 0x7c, - 0x5b, 0x3d, 0x4b, 0xdd, 0x9b, 0x64, 0x89, 0x10, 0xeb, 0x48, 0x9c, 0x2a, 0x7a, 0xb3, 0x01, 0xfa, - 0x60, 0xc0, 0x72, 0xdd, 0xcd, 0x5d, 0xf3, 0x12, 0xf7, 0x78, 0x0f, 0x4f, 0x0b, 0x95, 0x61, 0x96, - 0x98, 0x7e, 0x24, 0x60, 0x18, 0x5e, 0xe0, 0xb3, 0x58, 0x98, 0x39, 0x33, 0xbc, 0x4d, 0xa6, 0x1f, - 0x09, 0x18, 0x3e, 0x3f, 0xff, 0x68, 0x73, 0xd7, 0x7c, 0xec, 0xec, 0xe6, 0xc7, 0xf4, 0x23, 0x01, - 0x03, 0x6d, 0xd0, 0xe7, 0x07, 0x91, 0x79, 0xb9, 0xd7, 0x67, 0x2f, 0x3f, 0x4d, 0x6e, 0x07, 0x11, - 0x62, 0xea, 0xe1, 0x8f, 0x0c, 0x00, 0xc2, 0x34, 0x12, 0xaf, 0x9c, 0xb6, 0x05, 0x90, 0x41, 0x2b, - 0xa5, 0xd1, 0xbb, 0xea, 0x47, 0xe4, 0x28, 0xbd, 0xd7, 0x68, 0x51, 0xae, 0x19, 0x00, 0x7f, 0x6e, - 0x80, 0x8b, 0x7a, 0xb9, 0xab, 0x2c, 0x9b, 0xe5, 0x7e, 0xd8, 0xec, 0x61, 0x20, 0x97, 0x83, 0xc0, - 0x2d, 0x9b, 0xcd, 0x46, 0xe1, 0xe2, 0x52, 0x07, 0x40, 0xd4, 0xd1, 0x0c, 0xf8, 0x1b, 0x03, 0x4c, - 0xca, 0xec, 0xa8, 0x19, 0x57, 0xe0, 0x6e, 0x7b, 0xbb, 0x87, 0x6e, 0xcb, 0x42, 0x08, 0xef, 0xa9, - 0x57, 0xc6, 0x36, 0x3e, 0x6a, 0xb7, 0x0a, 0xfe, 0xde, 0x00, 0xa3, 0x36, 0x0e, 0xb1, 0x6f, 0x63, - 0xbf, 0xca, 0xcc, 0x9c, 0x3b, 0x6d, 0x5f, 0x21, 0x6b, 0xe6, 0x8a, 0xa6, 0x5d, 0x58, 0x58, 0x92, - 0x16, 0x8e, 0xea, 0xac, 0xe3, 0x46, 0x61, 0x3a, 0x1d, 0xaa, 0x73, 0x50, 0x8b, 0x81, 0xf0, 0xc7, - 0x06, 0x18, 0x4f, 0xdd, 0x2e, 0x0e, 0x88, 0xab, 0x67, 0xb3, 0xf0, 0xbc, 0x04, 0x5d, 0x6a, 0xc5, - 0x42, 0x59, 0x70, 0xf8, 0x5b, 0x83, 0x55, 0x5b, 0xc9, 0x5d, 0x8d, 0x9a, 0x45, 0xee, 0xc1, 0x37, - 0x7a, 0xe9, 0x41, 0xa5, 0x5c, 0x38, 0xf0, 0x7a, 0x5a, 0xc9, 0x29, 0xce, 0x71, 0xa3, 0x30, 0xa5, - 0xfb, 0x4f, 0x31, 0x90, 0x6e, 0x1c, 0x7c, 0xcf, 0x00, 0xa3, 0x38, 0x2d, 0x98, 0xa9, 0xf9, 0xf8, - 0x69, 0x5d, 0xd7, 0xb1, 0xfc, 0x16, 0xd7, 0x69, 0x8d, 0x45, 0x51, 0x0b, 0x2c, 0xab, 0xfd, 0xf0, - 0xa1, 0xe5, 0x85, 0x2e, 0x36, 0xff, 0xbf, 0x77, 0xb5, 0xdf, 0xaa, 0x50, 0x89, 0x12, 0xdd, 0xf0, - 0x3a, 0xc8, 0xfb, 0xb1, 0xeb, 0x5a, 0x3b, 0x2e, 0x36, 0x9f, 0xe0, 0x55, 0x84, 0xea, 0x2f, 0xde, - 0x96, 0x74, 0xa4, 0x24, 0xe0, 0x2e, 0x98, 0x3b, 0xbc, 0xa5, 0x3e, 0xbe, 0xe8, 0xd8, 0xc0, 0x33, - 0xaf, 0x71, 0x2d, 0x33, 0xcd, 0x46, 0x61, 0x7a, 0xbb, 0x73, 0x8b, 0xef, 0xa1, 0x3a, 0xe0, 0x9b, - 0xe0, 0x31, 0x4d, 0x66, 0xd5, 0xdb, 0xc1, 0xb6, 0x8d, 0xed, 0xe4, 0xa2, 0x65, 0x7e, 0x89, 0x43, - 0xa8, 0x7d, 0xbc, 0x9d, 0x15, 0x40, 0x0f, 0x1a, 0x0d, 0xd7, 0xc1, 0xb4, 0xc6, 0x5e, 0xf3, 0xa3, - 0x4d, 0x52, 0x89, 0x88, 0xe3, 0xd7, 0xcc, 0x79, 0xae, 0xf7, 0x62, 0xb2, 0xfb, 0xb6, 0x35, 0x1e, - 0xea, 0x32, 0x06, 0xbe, 0xd2, 0xa2, 0x8d, 0x3f, 0x5c, 0x58, 0xe1, 0x2d, 0x7c, 0x44, 0xcd, 0x27, - 0x79, 0x71, 0xc1, 0xd7, 0x79, 0x5b, 0xa3, 0xa3, 0x2e, 0xf2, 0xf0, 0x1b, 0xe0, 0x42, 0x86, 0xc3, - 0xee, 0x15, 0xe6, 0x53, 0xe2, 0x82, 0xc0, 0x2a, 0xd1, 0xed, 0x84, 0x88, 0x3a, 0x49, 0xc2, 0xaf, - 0x01, 0xa8, 0x91, 0x37, 0xac, 0x90, 0x8f, 0x7f, 0x5a, 0xdc, 0x55, 0xd8, 0x8a, 0x6e, 0x4b, 0x1a, - 0xea, 0x20, 0x07, 0x3f, 0x34, 0x5a, 0x66, 0x92, 0xde, 0x66, 0xa9, 0x79, 0x9d, 0x6f, 0xd8, 0x57, - 0x4e, 0x1e, 0x80, 0xa9, 0x32, 0x14, 0xbb, 0x58, 0xf3, 0xb0, 0x86, 0x82, 0xba, 0xa0, 0xcf, 0xb0, - 0xcb, 0x74, 0x26, 0x87, 0xc3, 0x09, 0xd0, 0xb7, 0x8f, 0xe5, 0xb3, 0x31, 0x62, 0x3f, 0xe1, 0x5b, - 0x60, 0xa0, 0x6e, 0xb9, 0x71, 0xd2, 0x0a, 0xe8, 0xdd, 0x59, 0x8f, 0x84, 0xde, 0x97, 0x72, 0x37, - 0x8c, 0x99, 0x0f, 0x0c, 0x30, 0xdd, 0xf9, 0x54, 0xf9, 0xa2, 0x2c, 0xfa, 0x99, 0x01, 0x26, 0xdb, - 0x0e, 0x90, 0x0e, 0xc6, 0xb8, 0xad, 0xc6, 0xdc, 0xeb, 0xe1, 0x49, 0x20, 0x36, 0x02, 0xaf, 0x68, - 0x75, 0xcb, 0x7e, 0x68, 0x80, 0x89, 0x6c, 0x62, 0xfe, 
0x82, 0xbc, 0x54, 0x7c, 0x3f, 0x07, 0xa6, - 0x3b, 0xd7, 0xe0, 0xd0, 0x53, 0xdd, 0x85, 0x9e, 0x37, 0x68, 0x3a, 0xb5, 0x6c, 0xdf, 0x35, 0xc0, - 0xc8, 0x3b, 0x4a, 0x2e, 0x79, 0xcd, 0xec, 0x65, 0x57, 0x28, 0x39, 0xfa, 0x52, 0x06, 0x45, 0x3a, - 0x64, 0xf1, 0x77, 0x06, 0x98, 0xea, 0x78, 0x9c, 0xc3, 0x6b, 0x60, 0xd0, 0x72, 0xdd, 0xe0, 0x40, - 0x74, 0xf3, 0xb4, 0xb6, 0xfc, 0x12, 0xa7, 0x22, 0xc9, 0xd5, 0x7c, 0x96, 0xfb, 0x1c, 0x7c, 0x56, - 0xfc, 0xa3, 0x01, 0x2e, 0x3f, 0x28, 0xea, 0x3e, 0xef, 0x35, 0x9c, 0x07, 0x79, 0x59, 0x6c, 0x1f, - 0xf1, 0xf5, 0x93, 0xd9, 0x55, 0x66, 0x04, 0xfe, 0xb5, 0x8c, 0xf8, 0x55, 0xfc, 0xa5, 0x01, 0x26, - 0x2a, 0x98, 0xd4, 0x9d, 0x2a, 0x46, 0x78, 0x17, 0x13, 0xec, 0x57, 0x31, 0x5c, 0x00, 0xc3, 0xfc, - 0xb5, 0x31, 0xb4, 0xaa, 0xc9, 0x1b, 0xc9, 0xa4, 0x74, 0xf4, 0xf0, 0xed, 0x84, 0x81, 0x52, 0x19, - 0xf5, 0x9e, 0x92, 0xeb, 0xfa, 0x9e, 0x72, 0x19, 0xf4, 0x87, 0x69, 0x03, 0x38, 0xcf, 0xb8, 0xbc, - 0xe7, 0xcb, 0xa9, 0x9c, 0x1b, 0x90, 0x88, 0x77, 0xb9, 0x06, 0x24, 0x37, 0x20, 0x11, 0xe2, 0xd4, - 0xe2, 0xaf, 0x0d, 0x70, 0xbe, 0x35, 0x3f, 0x33, 0x40, 0x12, 0xbb, 0x6d, 0x0f, 0x38, 0x8c, 0x87, - 0x38, 0x47, 0xff, 0x6e, 0x20, 0xf7, 0xe0, 0xef, 0x06, 0xe0, 0xcb, 0x60, 0x52, 0xfe, 0x5c, 0x3d, - 0x0c, 0x09, 0xa6, 0xfc, 0x65, 0xb2, 0xaf, 0xf5, 0x7b, 0xbf, 0x8d, 0xac, 0x00, 0x6a, 0x1f, 0x53, - 0xfc, 0x93, 0x01, 0x2e, 0x24, 0xdf, 0xe7, 0xb8, 0x0e, 0xf6, 0xa3, 0xe5, 0xc0, 0xdf, 0x75, 0x6a, - 0xf0, 0x92, 0xe8, 0x48, 0x6a, 0x6d, 0xbe, 0xa4, 0x1b, 0x09, 0xef, 0x83, 0x21, 0x2a, 0xdc, 0x2f, - 0x23, 0xe3, 0xd5, 0x93, 0x47, 0x46, 0x76, 0x1d, 0x45, 0x41, 0x95, 0x50, 0x13, 0x1c, 0x16, 0x1c, - 0x55, 0xab, 0x1c, 0xfb, 0xb6, 0xec, 0x4a, 0x8f, 0x8a, 0xe0, 0x58, 0x5e, 0x12, 0x34, 0xa4, 0xb8, - 0xc5, 0x7f, 0x18, 0x60, 0xb2, 0xed, 0x7b, 0x23, 0xf8, 0x3d, 0x03, 0x8c, 0x56, 0xb5, 0xe9, 0xc9, - 0x2d, 0xb6, 0x71, 0xfa, 0x6f, 0x9a, 0x34, 0xa5, 0xa2, 0x2a, 0xd1, 0x29, 0xa8, 0x05, 0x14, 0x6e, - 0x03, 0xb3, 0x9a, 0xf9, 0xb4, 0x2f, 0xf3, 0x58, 0x78, 0xb9, 0xd9, 0x28, 0x98, 0xcb, 0x5d, 0x64, - 0x50, 0xd7, 0xd1, 0xe5, 0x6f, 0x7d, 0xfc, 0xd9, 0xec, 0xb9, 0x4f, 0x3e, 0x9b, 0x3d, 0xf7, 0xe9, - 0x67, 0xb3, 0xe7, 0xde, 0x6d, 0xce, 0x1a, 0x1f, 0x37, 0x67, 0x8d, 0x4f, 0x9a, 0xb3, 0xc6, 0xa7, - 0xcd, 0x59, 0xe3, 0xaf, 0xcd, 0x59, 0xe3, 0x27, 0x7f, 0x9b, 0x3d, 0xf7, 0xc6, 0x8d, 0x93, 0x7e, - 0xd0, 0xfb, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x41, 0x66, 0xfd, 0x82, 0x24, 0x2c, 0x00, 0x00, + // 3111 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0xdd, 0x6f, 0x5c, 0x47, + 0x15, 0xcf, 0x5d, 0x7b, 0xed, 0xf5, 0xd8, 0x89, 0xed, 0x49, 0x6c, 0x6e, 0xdc, 0xc4, 0xeb, 0x6c, + 0x68, 0x70, 0xdb, 0x74, 0xdd, 0x9a, 0x96, 0x86, 0x82, 0x40, 0x5e, 0xdb, 0x69, 0xdd, 0xd8, 0xb1, + 0x35, 0x9b, 0xa4, 0x6e, 0x8b, 0x68, 0xaf, 0xf7, 0x8e, 0xd7, 0xb7, 0xbe, 0x5f, 0x99, 0xb9, 0xd7, + 0x1f, 0x12, 0x48, 0x15, 0xa8, 0x02, 0x2a, 0x41, 0x79, 0xa8, 0xca, 0x13, 0x42, 0x08, 0xf5, 0x01, + 0x1e, 0xe0, 0x0d, 0xfe, 0x85, 0xbe, 0x20, 0xf5, 0x09, 0x55, 0x42, 0x5a, 0xd1, 0xe5, 0x1f, 0x40, + 0x02, 0x84, 0xf0, 0x03, 0x42, 0xf3, 0x71, 0xe7, 0xce, 0xde, 0xdd, 0x4d, 0x22, 0x7b, 0xdd, 0xbe, + 0xed, 0x9e, 0x73, 0xe6, 0xfc, 0xce, 0x9c, 0x39, 0x73, 0xe6, 0xcc, 0xb9, 0x03, 0xac, 0xdd, 0x1b, + 0xb4, 0xec, 0x04, 0x73, 0xbb, 0xf1, 0x16, 0x26, 0x3e, 0x8e, 0x30, 0x9d, 0xdb, 0xc3, 0xbe, 0x1d, + 0x90, 0x39, 0xc9, 0xb0, 0x42, 0x07, 0x1f, 0x44, 0xd8, 0xa7, 0x4e, 0xe0, 0xd3, 0xa7, 0xad, 0xd0, + 0xa1, 0x98, 0xec, 0x61, 0x32, 0x17, 0xee, 0xd6, 0x19, 0x8f, 0xb6, 0x0a, 0xcc, 0xed, 0x3d, 0x3b, + 0x57, 0xc7, 0x3e, 0x26, 0x56, 
0x84, 0xed, 0x72, 0x48, 0x82, 0x28, 0x80, 0x37, 0x84, 0xa6, 0x72, + 0x8b, 0xe0, 0x9b, 0x4a, 0x53, 0x39, 0xdc, 0xad, 0x33, 0x1e, 0x6d, 0x15, 0x28, 0xef, 0x3d, 0x3b, + 0xf5, 0x74, 0xdd, 0x89, 0x76, 0xe2, 0xad, 0x72, 0x2d, 0xf0, 0xe6, 0xea, 0x41, 0x3d, 0x98, 0xe3, + 0x0a, 0xb7, 0xe2, 0x6d, 0xfe, 0x8f, 0xff, 0xe1, 0xbf, 0x04, 0xd0, 0xd4, 0x73, 0xa9, 0xc9, 0x9e, + 0x55, 0xdb, 0x71, 0x7c, 0x4c, 0x0e, 0x53, 0x3b, 0x3d, 0x1c, 0x59, 0x1d, 0xcc, 0x9b, 0x9a, 0xeb, + 0x36, 0x8a, 0xc4, 0x7e, 0xe4, 0x78, 0xb8, 0x6d, 0xc0, 0xd7, 0x1e, 0x36, 0x80, 0xd6, 0x76, 0xb0, + 0x67, 0x65, 0xc7, 0x95, 0x8e, 0x0c, 0x30, 0xbe, 0x18, 0xf8, 0x7b, 0x98, 0xb0, 0x09, 0x22, 0x7c, + 0x3f, 0xc6, 0x34, 0x82, 0x15, 0xd0, 0x17, 0x3b, 0xb6, 0x69, 0xcc, 0x18, 0xb3, 0x43, 0x95, 0x67, + 0x3e, 0x6e, 0x14, 0xcf, 0x34, 0x1b, 0xc5, 0xbe, 0xbb, 0x2b, 0x4b, 0x47, 0x8d, 0xe2, 0x95, 0x6e, + 0x48, 0xd1, 0x61, 0x88, 0x69, 0xf9, 0xee, 0xca, 0x12, 0x62, 0x83, 0xe1, 0x4b, 0x60, 0xdc, 0xc6, + 0xd4, 0x21, 0xd8, 0x5e, 0xd8, 0x58, 0xb9, 0x27, 0xf4, 0x9b, 0x39, 0xae, 0xf1, 0xa2, 0xd4, 0x38, + 0xbe, 0x94, 0x15, 0x40, 0xed, 0x63, 0xe0, 0x26, 0x18, 0x0c, 0xb6, 0xde, 0xc6, 0xb5, 0x88, 0x9a, + 0x7d, 0x33, 0x7d, 0xb3, 0xc3, 0xf3, 0x4f, 0x97, 0xd3, 0xc5, 0x53, 0x26, 0xf0, 0x15, 0x93, 0x93, + 0x2d, 0x23, 0x6b, 0x7f, 0x39, 0x59, 0xb4, 0xca, 0xa8, 0x44, 0x1b, 0x5c, 0x17, 0x5a, 0x50, 0xa2, + 0xae, 0xf4, 0x9b, 0x1c, 0x80, 0xfa, 0xe4, 0x69, 0x18, 0xf8, 0x14, 0xf7, 0x64, 0xf6, 0x14, 0x8c, + 0xd5, 0xb8, 0xe6, 0x08, 0xdb, 0x12, 0xd7, 0xcc, 0x1d, 0xc7, 0x7a, 0x53, 0xe2, 0x8f, 0x2d, 0x66, + 0xd4, 0xa1, 0x36, 0x00, 0x78, 0x07, 0x0c, 0x10, 0x4c, 0x63, 0x37, 0x32, 0xfb, 0x66, 0x8c, 0xd9, + 0xe1, 0xf9, 0xeb, 0x5d, 0xa1, 0x78, 0x68, 0xb3, 0xe0, 0x2b, 0xef, 0x3d, 0x5b, 0xae, 0x46, 0x56, + 0x14, 0xd3, 0xca, 0x39, 0x89, 0x34, 0x80, 0xb8, 0x0e, 0x24, 0x75, 0x95, 0xfe, 0x67, 0x80, 0x31, + 0xdd, 0x4b, 0x7b, 0x0e, 0xde, 0x87, 0x04, 0x0c, 0x12, 0x11, 0x2c, 0xdc, 0x4f, 0xc3, 0xf3, 0xb7, + 0xca, 0xc7, 0xdd, 0x51, 0xe5, 0xb6, 0xf8, 0xab, 0x0c, 0xb3, 0xe5, 0x92, 0x7f, 0x50, 0x02, 0x04, + 0xf7, 0x40, 0x81, 0xc8, 0x35, 0xe2, 0x81, 0x34, 0x3c, 0xbf, 0xda, 0x1b, 0x50, 0xa1, 0xb3, 0x32, + 0xd2, 0x6c, 0x14, 0x0b, 0xc9, 0x3f, 0xa4, 0xb0, 0x4a, 0xbf, 0xca, 0x81, 0xe9, 0xc5, 0x98, 0x46, + 0x81, 0x87, 0x30, 0x0d, 0x62, 0x52, 0xc3, 0x8b, 0x81, 0x1b, 0x7b, 0xfe, 0x12, 0xde, 0x76, 0x7c, + 0x27, 0x62, 0x31, 0x3a, 0x03, 0xfa, 0x7d, 0xcb, 0xc3, 0x32, 0x66, 0x46, 0xa4, 0x27, 0xfb, 0x6f, + 0x5b, 0x1e, 0x46, 0x9c, 0xc3, 0x24, 0x58, 0x88, 0xc8, 0x1d, 0xa0, 0x24, 0xee, 0x1c, 0x86, 0x18, + 0x71, 0x0e, 0xbc, 0x06, 0x06, 0xb6, 0x03, 0xe2, 0x59, 0x62, 0xf5, 0x86, 0xd2, 0xf5, 0xb8, 0xc9, + 0xa9, 0x48, 0x72, 0xe1, 0xf3, 0x60, 0xd8, 0xc6, 0xb4, 0x46, 0x9c, 0x90, 0x41, 0x9b, 0xfd, 0x5c, + 0xf8, 0xbc, 0x14, 0x1e, 0x5e, 0x4a, 0x59, 0x48, 0x97, 0x83, 0xd7, 0x41, 0x21, 0x24, 0x4e, 0x40, + 0x9c, 0xe8, 0xd0, 0xcc, 0xcf, 0x18, 0xb3, 0xf9, 0xca, 0x98, 0x1c, 0x53, 0xd8, 0x90, 0x74, 0xa4, + 0x24, 0x98, 0xf4, 0xdb, 0x34, 0xf0, 0x37, 0xac, 0x68, 0xc7, 0x1c, 0xe0, 0x08, 0x4a, 0xfa, 0x95, + 0xea, 0xfa, 0x6d, 0x46, 0x47, 0x4a, 0xa2, 0xf4, 0x17, 0x03, 0x98, 0x59, 0x0f, 0x25, 0xee, 0x85, + 0x37, 0x41, 0x81, 0x46, 0x2c, 0xe7, 0xd4, 0x0f, 0xa5, 0x7f, 0x9e, 0x4c, 0x54, 0x55, 0x25, 0xfd, + 0xa8, 0x51, 0x9c, 0x4c, 0x47, 0x24, 0x54, 0xee, 0x1b, 0x35, 0x96, 0x85, 0xdc, 0x3e, 0xde, 0xda, + 0x09, 0x82, 0x5d, 0xb9, 0xfa, 0x27, 0x08, 0xb9, 0x57, 0x85, 0xa2, 0x14, 0x53, 0x84, 0x9c, 0x24, + 0xa3, 0x04, 0xa8, 0xf4, 0xdf, 0x5c, 0x76, 0x62, 0xda, 0xa2, 0xbf, 0x05, 0x0a, 0x6c, 0x0b, 0xd9, + 0x56, 0x64, 0xc9, 0x4d, 0xf0, 0xcc, 0xa3, 0x6d, 0x38, 
0xb1, 0x5f, 0xd7, 0x70, 0x64, 0x55, 0xa0, + 0x74, 0x05, 0x48, 0x69, 0x48, 0x69, 0x85, 0x07, 0xa0, 0x9f, 0x86, 0xb8, 0x26, 0xe7, 0x7b, 0xef, + 0x04, 0xd1, 0xde, 0x65, 0x0e, 0xd5, 0x10, 0xd7, 0xd2, 0x60, 0x64, 0xff, 0x10, 0x47, 0x84, 0xef, + 0x18, 0x60, 0x80, 0xf2, 0xbc, 0x20, 0x73, 0xc9, 0xe6, 0x29, 0x80, 0x67, 0xf2, 0x8e, 0xf8, 0x8f, + 0x24, 0x6e, 0xe9, 0x5f, 0x39, 0x70, 0xa5, 0xdb, 0xd0, 0xc5, 0xc0, 0xb7, 0xc5, 0x22, 0xac, 0xc8, + 0x7d, 0x25, 0x22, 0xeb, 0x79, 0x7d, 0x5f, 0x1d, 0x35, 0x8a, 0x8f, 0x3f, 0x54, 0x81, 0xb6, 0x01, + 0xbf, 0xae, 0xa6, 0x2c, 0x36, 0xe9, 0x95, 0x56, 0xc3, 0x8e, 0x1a, 0xc5, 0x51, 0x35, 0xac, 0xd5, + 0x56, 0xb8, 0x07, 0xa0, 0x6b, 0xd1, 0xe8, 0x0e, 0xb1, 0x7c, 0x2a, 0xd4, 0x3a, 0x1e, 0x96, 0x9e, + 0x7b, 0xf2, 0xd1, 0x82, 0x82, 0x8d, 0xa8, 0x4c, 0x49, 0x48, 0xb8, 0xda, 0xa6, 0x0d, 0x75, 0x40, + 0x60, 0x39, 0x83, 0x60, 0x8b, 0xaa, 0x34, 0xa0, 0xe5, 0x70, 0x46, 0x45, 0x92, 0x0b, 0x9f, 0x00, + 0x83, 0x1e, 0xa6, 0xd4, 0xaa, 0x63, 0xbe, 0xf7, 0x87, 0xd2, 0x43, 0x71, 0x4d, 0x90, 0x51, 0xc2, + 0x2f, 0xfd, 0xdb, 0x00, 0x97, 0xba, 0x79, 0x6d, 0xd5, 0xa1, 0x11, 0xfc, 0x4e, 0x5b, 0xd8, 0x97, + 0x1f, 0x6d, 0x86, 0x6c, 0x34, 0x0f, 0x7a, 0x95, 0x4a, 0x12, 0x8a, 0x16, 0xf2, 0xfb, 0x20, 0xef, + 0x44, 0xd8, 0x4b, 0x4e, 0x4b, 0xd4, 0xfb, 0xb0, 0xab, 0x9c, 0x95, 0xf0, 0xf9, 0x15, 0x06, 0x84, + 0x04, 0x5e, 0xe9, 0xa3, 0x1c, 0xb8, 0xdc, 0x6d, 0x08, 0xcb, 0xe3, 0x94, 0x39, 0x3b, 0x74, 0x63, + 0x62, 0xb9, 0x32, 0xd8, 0x94, 0xb3, 0x37, 0x38, 0x15, 0x49, 0x2e, 0xcb, 0x9d, 0xd4, 0xf1, 0xeb, + 0xb1, 0x6b, 0x11, 0x19, 0x49, 0x6a, 0xc2, 0x55, 0x49, 0x47, 0x4a, 0x02, 0x96, 0x01, 0xa0, 0x3b, + 0x01, 0x89, 0x38, 0x06, 0xaf, 0x70, 0x86, 0x2a, 0xe7, 0x58, 0x46, 0xa8, 0x2a, 0x2a, 0xd2, 0x24, + 0xd8, 0x41, 0xb2, 0xeb, 0xf8, 0xb6, 0x5c, 0x70, 0xb5, 0x77, 0x6f, 0x39, 0xbe, 0x8d, 0x38, 0x87, + 0xe1, 0xbb, 0x0e, 0x8d, 0x18, 0x45, 0xae, 0x76, 0x8b, 0xc3, 0xb9, 0xa4, 0x92, 0x60, 0xf8, 0x35, + 0x96, 0x60, 0x03, 0xe2, 0x60, 0x6a, 0x0e, 0xa4, 0xf8, 0x8b, 0x8a, 0x8a, 0x34, 0x89, 0xd2, 0x5f, + 0xfb, 0xbb, 0xc7, 0x07, 0x4b, 0x20, 0xf0, 0x2a, 0xc8, 0xd7, 0x49, 0x10, 0x87, 0xd2, 0x4b, 0xca, + 0xdb, 0x2f, 0x31, 0x22, 0x12, 0x3c, 0xf8, 0x3d, 0x90, 0xf7, 0xe5, 0x84, 0x59, 0x04, 0xbd, 0xda, + 0xfb, 0x65, 0xe6, 0xde, 0x4a, 0xd1, 0x85, 0x23, 0x05, 0x28, 0x7c, 0x0e, 0xe4, 0x69, 0x2d, 0x08, + 0xb1, 0x74, 0xe2, 0x74, 0x22, 0x54, 0x65, 0xc4, 0xa3, 0x46, 0xf1, 0x6c, 0xa2, 0x8e, 0x13, 0x90, + 0x10, 0x86, 0x3f, 0x32, 0x40, 0x41, 0x1e, 0x17, 0xd4, 0x1c, 0xe4, 0xe1, 0xf9, 0x5a, 0xef, 0xed, + 0x96, 0x65, 0x6f, 0xba, 0x66, 0x92, 0x40, 0x91, 0x02, 0x87, 0x3f, 0x30, 0x00, 0xa8, 0xa9, 0xb3, + 0xcb, 0x1c, 0xe2, 0x3e, 0xec, 0xd9, 0x56, 0xd1, 0x4e, 0x45, 0x11, 0x08, 0x69, 0xa9, 0xa4, 0xa1, + 0xc2, 0x2a, 0x98, 0x08, 0x09, 0xe6, 0xba, 0xef, 0xfa, 0xbb, 0x7e, 0xb0, 0xef, 0xdf, 0x74, 0xb0, + 0x6b, 0x53, 0x13, 0xcc, 0x18, 0xb3, 0x85, 0xca, 0x65, 0x69, 0xff, 0xc4, 0x46, 0x27, 0x21, 0xd4, + 0x79, 0x6c, 0xe9, 0xdd, 0xbe, 0x6c, 0xad, 0x95, 0x3d, 0x2f, 0xe0, 0xfb, 0x62, 0xf2, 0x22, 0x0f, + 0x53, 0xd3, 0xe0, 0x0b, 0xf1, 0x46, 0xef, 0x17, 0x42, 0xe5, 0xfa, 0xf4, 0x90, 0x56, 0x24, 0x8a, + 0x34, 0x13, 0xe0, 0x07, 0x06, 0x38, 0x6b, 0xd5, 0x6a, 0x38, 0x8c, 0xb0, 0x2d, 0xb6, 0x71, 0xee, + 0x74, 0xa3, 0x7a, 0x42, 0x1a, 0x74, 0x76, 0x41, 0x47, 0x45, 0xad, 0x46, 0xc0, 0x17, 0xc1, 0x39, + 0x1a, 0x05, 0x04, 0xdb, 0x49, 0x04, 0xc9, 0xec, 0x02, 0x9b, 0x8d, 0xe2, 0xb9, 0x6a, 0x0b, 0x07, + 0x65, 0x24, 0x4b, 0x9f, 0xe4, 0x41, 0xf1, 0x21, 0x11, 0xfa, 0x08, 0x45, 0xef, 0x35, 0x30, 0xc0, + 0x67, 0x6a, 0x73, 0x87, 0x14, 0xb4, 0xa3, 0x9e, 0x53, 0x91, 0xe4, 0xb2, 0xe3, 
0x89, 0xe1, 0xb3, + 0xe3, 0xa9, 0x8f, 0x0b, 0xaa, 0xe3, 0xa9, 0x2a, 0xc8, 0x28, 0xe1, 0xc3, 0x79, 0x00, 0x6c, 0x1c, + 0x12, 0xcc, 0x32, 0x92, 0x6d, 0x0e, 0x72, 0x69, 0xb5, 0x3e, 0x4b, 0x8a, 0x83, 0x34, 0x29, 0x78, + 0x13, 0xc0, 0xe4, 0x9f, 0x13, 0xf8, 0xaf, 0x5a, 0xc4, 0x77, 0xfc, 0xba, 0x59, 0xe0, 0x66, 0x4f, + 0xb2, 0xd3, 0x76, 0xa9, 0x8d, 0x8b, 0x3a, 0x8c, 0x80, 0x7b, 0x60, 0x40, 0x5c, 0xa3, 0x79, 0xde, + 0xe8, 0xe1, 0x8e, 0xbb, 0x67, 0xb9, 0x8e, 0xcd, 0xa1, 0x2a, 0x80, 0xbb, 0x87, 0xa3, 0x20, 0x89, + 0x06, 0xdf, 0x33, 0xc0, 0x08, 0x8d, 0xb7, 0x88, 0x94, 0xa6, 0x3c, 0xab, 0x0f, 0xcf, 0xdf, 0xe9, + 0x15, 0x7c, 0x55, 0xd3, 0x5d, 0x19, 0x6b, 0x36, 0x8a, 0x23, 0x3a, 0x05, 0xb5, 0x60, 0xc3, 0x3f, + 0x1a, 0xc0, 0xb4, 0x6c, 0x11, 0xfa, 0x96, 0xbb, 0x41, 0x1c, 0x3f, 0xc2, 0x44, 0x5c, 0x88, 0xc4, + 0xf1, 0xd1, 0xc3, 0x5a, 0x31, 0x7b, 0xcf, 0xaa, 0xcc, 0xc8, 0x95, 0x36, 0x17, 0xba, 0x58, 0x80, + 0xba, 0xda, 0x56, 0xfa, 0x8f, 0x91, 0x4d, 0x2d, 0xda, 0x2c, 0xab, 0x35, 0xcb, 0xc5, 0x70, 0x09, + 0x8c, 0xb1, 0xea, 0x17, 0xe1, 0xd0, 0x75, 0x6a, 0x16, 0xe5, 0xb7, 0x1f, 0x11, 0xdd, 0xea, 0x1a, + 0x5e, 0xcd, 0xf0, 0x51, 0xdb, 0x08, 0xf8, 0x0a, 0x80, 0xa2, 0x2c, 0x6c, 0xd1, 0x23, 0x2a, 0x01, + 0x55, 0xe0, 0x55, 0xdb, 0x24, 0x50, 0x87, 0x51, 0x70, 0x11, 0x8c, 0xbb, 0xd6, 0x16, 0x76, 0xab, + 0xd8, 0xc5, 0xb5, 0x28, 0x20, 0x5c, 0x95, 0xb8, 0x1f, 0x4e, 0x34, 0x1b, 0xc5, 0xf1, 0xd5, 0x2c, + 0x13, 0xb5, 0xcb, 0x97, 0xae, 0x64, 0xf7, 0xb2, 0x3e, 0x71, 0x51, 0x6c, 0x7f, 0x98, 0x03, 0x53, + 0xdd, 0x83, 0x02, 0x7e, 0x5f, 0x95, 0xc6, 0xa2, 0xe2, 0x7b, 0xed, 0x14, 0x42, 0x4f, 0x5e, 0x07, + 0x40, 0xfb, 0x55, 0x00, 0x1e, 0xb2, 0xf3, 0xda, 0x72, 0x93, 0x6b, 0xff, 0xe6, 0x69, 0xa0, 0x33, + 0xfd, 0x95, 0x21, 0x51, 0x05, 0x58, 0x2e, 0x3f, 0xf4, 0x2d, 0x17, 0x97, 0x3e, 0x6a, 0xbb, 0xda, + 0xa6, 0x9b, 0x15, 0xfe, 0xd8, 0x00, 0xa3, 0x41, 0x88, 0xfd, 0x85, 0x8d, 0x95, 0x7b, 0x5f, 0x15, + 0x9b, 0x56, 0x3a, 0x68, 0xe5, 0xf8, 0x26, 0xb2, 0xfb, 0xb5, 0xd0, 0xb5, 0x41, 0x82, 0x90, 0x56, + 0xce, 0x37, 0x1b, 0xc5, 0xd1, 0xf5, 0x56, 0x14, 0x94, 0x85, 0x2d, 0x79, 0x60, 0x62, 0xf9, 0x20, + 0xc2, 0xc4, 0xb7, 0xdc, 0xa5, 0xa0, 0x16, 0x7b, 0xd8, 0x8f, 0x84, 0x8d, 0x99, 0x76, 0x81, 0xf1, + 0x88, 0xed, 0x82, 0xcb, 0xa0, 0x2f, 0x26, 0xae, 0x8c, 0xda, 0x61, 0xd5, 0x04, 0x43, 0xab, 0x88, + 0xd1, 0x4b, 0x57, 0x40, 0x3f, 0xb3, 0x13, 0x5e, 0x04, 0x7d, 0xc4, 0xda, 0xe7, 0x5a, 0x47, 0x2a, + 0x83, 0x4c, 0x04, 0x59, 0xfb, 0x88, 0xd1, 0x4a, 0xff, 0x98, 0x01, 0xa3, 0x99, 0xb9, 0xc0, 0x29, + 0x90, 0x53, 0x9d, 0x35, 0x20, 0x95, 0xe6, 0x56, 0x96, 0x50, 0xce, 0xb1, 0xe1, 0x0b, 0x2a, 0xbb, + 0x0a, 0xd0, 0xa2, 0x3a, 0x2c, 0x38, 0x95, 0x95, 0x65, 0xa9, 0x3a, 0x66, 0x48, 0x92, 0x1e, 0x99, + 0x0d, 0x78, 0x5b, 0xee, 0x0a, 0x61, 0x03, 0xde, 0x46, 0x8c, 0x76, 0xdc, 0x5e, 0x49, 0xd2, 0xac, + 0xc9, 0x3f, 0x42, 0xb3, 0x66, 0xe0, 0x81, 0xcd, 0x9a, 0xab, 0x20, 0x1f, 0x39, 0x91, 0x8b, 0xf9, + 0x49, 0xa5, 0x15, 0xc3, 0x77, 0x18, 0x11, 0x09, 0x1e, 0xc4, 0x60, 0xd0, 0xc6, 0xdb, 0x56, 0xec, + 0x46, 0xfc, 0x50, 0x1a, 0x9e, 0xff, 0xd6, 0xc9, 0xa2, 0x47, 0x34, 0x33, 0x96, 0x84, 0x4a, 0x94, + 0xe8, 0x86, 0x8f, 0x83, 0x41, 0xcf, 0x3a, 0x70, 0xbc, 0xd8, 0xe3, 0x15, 0xa3, 0x21, 0xc4, 0xd6, + 0x04, 0x09, 0x25, 0x3c, 0x96, 0x04, 0xf1, 0x41, 0xcd, 0x8d, 0xa9, 0xb3, 0x87, 0x25, 0x53, 0x96, + 0x74, 0x2a, 0x09, 0x2e, 0x67, 0xf8, 0xa8, 0x6d, 0x04, 0x07, 0x73, 0x7c, 0x3e, 0x78, 0x58, 0x03, + 0x13, 0x24, 0x94, 0xf0, 0x5a, 0xc1, 0xa4, 0xfc, 0x48, 0x37, 0x30, 0x39, 0xb8, 0x6d, 0x04, 0x7c, + 0x0a, 0x0c, 0x79, 0xd6, 0xc1, 0x2a, 0xf6, 0xeb, 0xd1, 0x8e, 0x79, 0x76, 0xc6, 0x98, 0xed, 0xab, + 0x9c, 
0x6d, 0x36, 0x8a, 0x43, 0x6b, 0x09, 0x11, 0xa5, 0x7c, 0x2e, 0xec, 0xf8, 0x52, 0xf8, 0x9c, + 0x26, 0x9c, 0x10, 0x51, 0xca, 0x67, 0x95, 0x49, 0x68, 0x45, 0x6c, 0x5f, 0x99, 0xa3, 0xad, 0x17, + 0xe7, 0x0d, 0x41, 0x46, 0x09, 0x1f, 0xce, 0x82, 0x82, 0x67, 0x1d, 0xf0, 0x3b, 0xa5, 0x39, 0xc6, + 0xd5, 0xf2, 0x86, 0xe2, 0x9a, 0xa4, 0x21, 0xc5, 0xe5, 0x92, 0x8e, 0x2f, 0x24, 0xc7, 0x35, 0x49, + 0x49, 0x43, 0x8a, 0xcb, 0xe2, 0x37, 0xf6, 0x9d, 0xfb, 0x31, 0x16, 0xc2, 0x90, 0x7b, 0x46, 0xc5, + 0xef, 0xdd, 0x94, 0x85, 0x74, 0x39, 0x76, 0xa7, 0xf3, 0x62, 0x37, 0x72, 0x42, 0x17, 0xaf, 0x6f, + 0x9b, 0xe7, 0xb9, 0xff, 0x79, 0x29, 0xbf, 0xa6, 0xa8, 0x48, 0x93, 0x80, 0x6f, 0x81, 0x7e, 0xec, + 0xc7, 0x9e, 0x79, 0x81, 0x1f, 0xdf, 0x27, 0x8d, 0x3e, 0xb5, 0x5f, 0x96, 0xfd, 0xd8, 0x43, 0x5c, + 0x33, 0x7c, 0x01, 0x9c, 0xf5, 0xac, 0x03, 0x96, 0x04, 0x30, 0x89, 0xd8, 0x45, 0x73, 0x82, 0xcf, + 0x7b, 0x9c, 0x15, 0xb1, 0x6b, 0x3a, 0x03, 0xb5, 0xca, 0xf1, 0x81, 0x8e, 0xaf, 0x0d, 0x9c, 0xd4, + 0x06, 0xea, 0x0c, 0xd4, 0x2a, 0xc7, 0x9c, 0x4c, 0xf0, 0xfd, 0xd8, 0x21, 0xd8, 0x36, 0xbf, 0xc4, + 0xeb, 0x5e, 0xd9, 0xdf, 0x15, 0x34, 0xa4, 0xb8, 0xf0, 0x7e, 0xd2, 0x72, 0x30, 0xf9, 0xe6, 0xdb, + 0xe8, 0x59, 0xea, 0x5e, 0x27, 0x0b, 0x84, 0x58, 0x87, 0xe2, 0x54, 0xd1, 0x9b, 0x0d, 0xd0, 0x07, + 0x79, 0xcb, 0x75, 0xd7, 0xb7, 0xcd, 0x8b, 0xdc, 0xe3, 0x3d, 0x3c, 0x2d, 0x54, 0x86, 0x59, 0x60, + 0xfa, 0x91, 0x80, 0x61, 0x78, 0x81, 0xcf, 0x62, 0x61, 0xea, 0xd4, 0xf0, 0xd6, 0x99, 0x7e, 0x24, + 0x60, 0xf8, 0xfc, 0xfc, 0xc3, 0xf5, 0x6d, 0xf3, 0xb1, 0xd3, 0x9b, 0x1f, 0xd3, 0x8f, 0x04, 0x0c, + 0xb4, 0x41, 0x9f, 0x1f, 0x44, 0xe6, 0xa5, 0x5e, 0x9f, 0xbd, 0xfc, 0x34, 0xb9, 0x1d, 0x44, 0x88, + 0xa9, 0x87, 0x3f, 0x35, 0x00, 0x08, 0xd3, 0x48, 0xbc, 0x7c, 0xd2, 0x16, 0x40, 0x06, 0xad, 0x9c, + 0x46, 0xef, 0xb2, 0x1f, 0x91, 0xc3, 0xf4, 0x5e, 0xa3, 0x45, 0xb9, 0x66, 0x00, 0xfc, 0xa5, 0x01, + 0x2e, 0xe8, 0xe5, 0xae, 0xb2, 0x6c, 0x9a, 0xfb, 0x61, 0xbd, 0x87, 0x81, 0x5c, 0x09, 0x02, 0xb7, + 0x62, 0x36, 0x1b, 0xc5, 0x0b, 0x0b, 0x1d, 0x00, 0x51, 0x47, 0x33, 0xe0, 0x6f, 0x0d, 0x30, 0x2e, + 0xb3, 0xa3, 0x66, 0x5c, 0x91, 0xbb, 0xed, 0xad, 0x1e, 0xba, 0x2d, 0x0b, 0x21, 0xbc, 0xa7, 0xbe, + 0x32, 0xb6, 0xf1, 0x51, 0xbb, 0x55, 0xf0, 0x0f, 0x06, 0x18, 0xb1, 0x71, 0x88, 0x7d, 0x1b, 0xfb, + 0x35, 0x66, 0xe6, 0xcc, 0x49, 0xfb, 0x0a, 0x59, 0x33, 0x97, 0x34, 0xed, 0xc2, 0xc2, 0xb2, 0xb4, + 0x70, 0x44, 0x67, 0x1d, 0x35, 0x8a, 0x93, 0xe9, 0x50, 0x9d, 0x83, 0x5a, 0x0c, 0x84, 0x3f, 0x33, + 0xc0, 0x68, 0xea, 0x76, 0x71, 0x40, 0x5c, 0x39, 0x9d, 0x85, 0xe7, 0x25, 0xe8, 0x42, 0x2b, 0x16, + 0xca, 0x82, 0xc3, 0xdf, 0x19, 0xac, 0xda, 0x4a, 0xee, 0x6a, 0xd4, 0x2c, 0x71, 0x0f, 0xbe, 0xde, + 0x4b, 0x0f, 0x2a, 0xe5, 0xc2, 0x81, 0xd7, 0xd3, 0x4a, 0x4e, 0x71, 0x8e, 0x1a, 0xc5, 0x09, 0xdd, + 0x7f, 0x8a, 0x81, 0x74, 0xe3, 0xe0, 0xbb, 0x06, 0x18, 0xc1, 0x69, 0xc1, 0x4c, 0xcd, 0xab, 0x27, + 0x75, 0x5d, 0xc7, 0xf2, 0x5b, 0x5c, 0xa7, 0x35, 0x16, 0x45, 0x2d, 0xb0, 0xac, 0xf6, 0xc3, 0x07, + 0x96, 0x17, 0xba, 0xd8, 0xfc, 0x72, 0xef, 0x6a, 0xbf, 0x65, 0xa1, 0x12, 0x25, 0xba, 0xe1, 0x75, + 0x50, 0xf0, 0x63, 0xd7, 0xb5, 0xb6, 0x5c, 0x6c, 0x3e, 0xce, 0xab, 0x08, 0xd5, 0x5f, 0xbc, 0x2d, + 0xe9, 0x48, 0x49, 0xc0, 0x6d, 0x30, 0x73, 0x70, 0x4b, 0x3d, 0xbe, 0xe8, 0xd8, 0xc0, 0x33, 0xaf, + 0x71, 0x2d, 0x53, 0xcd, 0x46, 0x71, 0x72, 0xb3, 0x73, 0x8b, 0xef, 0xa1, 0x3a, 0xe0, 0x1b, 0xe0, + 0x31, 0x4d, 0x66, 0xd9, 0xdb, 0xc2, 0xb6, 0x8d, 0xed, 0xe4, 0xa2, 0x65, 0x7e, 0x85, 0x43, 0xa8, + 0x7d, 0xbc, 0x99, 0x15, 0x40, 0x0f, 0x1a, 0x0d, 0x57, 0xc1, 0xa4, 0xc6, 0x5e, 0xf1, 0xa3, 0x75, + 0x52, 0x8d, 0x88, 0xe3, 0xd7, 
0xcd, 0x59, 0xae, 0xf7, 0x42, 0xb2, 0xfb, 0x36, 0x35, 0x1e, 0xea, + 0x32, 0x06, 0xbe, 0xdc, 0xa2, 0x8d, 0x7f, 0xb8, 0xb0, 0xc2, 0x5b, 0xf8, 0x90, 0x9a, 0x4f, 0xf0, + 0xe2, 0x82, 0xaf, 0xf3, 0xa6, 0x46, 0x47, 0x5d, 0xe4, 0xe1, 0xb7, 0xc1, 0xf9, 0x0c, 0x87, 0xdd, + 0x2b, 0xcc, 0x27, 0xc5, 0x05, 0x81, 0x55, 0xa2, 0x9b, 0x09, 0x11, 0x75, 0x92, 0x84, 0xdf, 0x04, + 0x50, 0x23, 0xaf, 0x59, 0x21, 0x1f, 0xff, 0x94, 0xb8, 0xab, 0xb0, 0x15, 0xdd, 0x94, 0x34, 0xd4, + 0x41, 0x0e, 0x7e, 0x68, 0xb4, 0xcc, 0x24, 0xbd, 0xcd, 0x52, 0xf3, 0x3a, 0xdf, 0xb0, 0x2f, 0x1f, + 0x3f, 0x00, 0x53, 0x65, 0x28, 0x76, 0xb1, 0xe6, 0x61, 0x0d, 0x05, 0x75, 0x41, 0x9f, 0x62, 0x97, + 0xe9, 0x4c, 0x0e, 0x87, 0x63, 0xa0, 0x6f, 0x17, 0xcb, 0xcf, 0xc6, 0x88, 0xfd, 0x84, 0x6f, 0x82, + 0xfc, 0x9e, 0xe5, 0xc6, 0x49, 0x2b, 0xa0, 0x77, 0x67, 0x3d, 0x12, 0x7a, 0x5f, 0xcc, 0xdd, 0x30, + 0xa6, 0xde, 0x37, 0xc0, 0x64, 0xe7, 0x53, 0xe5, 0x8b, 0xb2, 0xe8, 0x17, 0x06, 0x18, 0x6f, 0x3b, + 0x40, 0x3a, 0x18, 0xe3, 0xb6, 0x1a, 0x73, 0xaf, 0x87, 0x27, 0x81, 0xd8, 0x08, 0xbc, 0xa2, 0xd5, + 0x2d, 0xfb, 0x89, 0x01, 0xc6, 0xb2, 0x89, 0xf9, 0x0b, 0xf2, 0x52, 0xe9, 0xbd, 0x1c, 0x98, 0xec, + 0x5c, 0x83, 0x43, 0x4f, 0x75, 0x17, 0x7a, 0xde, 0xa0, 0xe9, 0xd4, 0xb2, 0x7d, 0xc7, 0x00, 0xc3, + 0x6f, 0x2b, 0xb9, 0xe4, 0x6b, 0x66, 0x2f, 0xbb, 0x42, 0xc9, 0xd1, 0x97, 0x32, 0x28, 0xd2, 0x21, + 0x4b, 0xbf, 0x37, 0xc0, 0x44, 0xc7, 0xe3, 0x1c, 0x5e, 0x03, 0x03, 0x96, 0xeb, 0x06, 0xfb, 0xa2, + 0x9b, 0xa7, 0xb5, 0xe5, 0x17, 0x38, 0x15, 0x49, 0xae, 0xe6, 0xb3, 0xdc, 0xe7, 0xe0, 0xb3, 0xd2, + 0x9f, 0x0c, 0x70, 0xe9, 0x41, 0x51, 0xf7, 0x79, 0xaf, 0xe1, 0x2c, 0x28, 0xc8, 0x62, 0xfb, 0x90, + 0xaf, 0x9f, 0xcc, 0xae, 0x32, 0x23, 0xf0, 0xd7, 0x32, 0xe2, 0x57, 0xe9, 0xd7, 0x06, 0x18, 0xab, + 0x62, 0xb2, 0xe7, 0xd4, 0x30, 0xc2, 0xdb, 0x98, 0x60, 0xbf, 0x86, 0xe1, 0x1c, 0x18, 0xe2, 0x5f, + 0x1b, 0x43, 0xab, 0x96, 0x7c, 0x23, 0x19, 0x97, 0x8e, 0x1e, 0xba, 0x9d, 0x30, 0x50, 0x2a, 0xa3, + 0xbe, 0xa7, 0xe4, 0xba, 0x7e, 0x4f, 0xb9, 0x04, 0xfa, 0xc3, 0xb4, 0x01, 0x5c, 0x60, 0x5c, 0xde, + 0xf3, 0xe5, 0x54, 0xce, 0x0d, 0x48, 0xc4, 0xbb, 0x5c, 0x79, 0xc9, 0x0d, 0x48, 0x84, 0x38, 0xb5, + 0xf4, 0x41, 0x0e, 0x9c, 0x6b, 0xcd, 0xcf, 0x0c, 0x90, 0xc4, 0x6e, 0xdb, 0x07, 0x1c, 0xc6, 0x43, + 0x9c, 0xa3, 0xbf, 0x1b, 0xc8, 0x3d, 0xf8, 0xdd, 0x00, 0x7c, 0x09, 0x8c, 0xcb, 0x9f, 0xcb, 0x07, + 0x21, 0xc1, 0x94, 0x7f, 0x99, 0xec, 0x6b, 0x7d, 0xef, 0xb7, 0x96, 0x15, 0x40, 0xed, 0x63, 0xe0, + 0x37, 0x32, 0x6f, 0x1a, 0xae, 0xa6, 0xef, 0x19, 0x58, 0x6d, 0xc7, 0x4b, 0x87, 0x7b, 0x6c, 0xcb, + 0x2f, 0x13, 0x12, 0x90, 0xcc, 0x43, 0x87, 0x39, 0x30, 0xb4, 0xcd, 0x04, 0x78, 0x9f, 0x3c, 0xdf, + 0xea, 0xf4, 0x9b, 0x09, 0x03, 0xa5, 0x32, 0xa5, 0x3f, 0x1b, 0xe0, 0x7c, 0xf2, 0x1a, 0xc8, 0x75, + 0xb0, 0x1f, 0x2d, 0x06, 0xfe, 0xb6, 0x53, 0x87, 0x17, 0x45, 0xff, 0x53, 0x6b, 0x2a, 0x26, 0xbd, + 0x4f, 0x78, 0x1f, 0x0c, 0x52, 0xb1, 0xd8, 0x32, 0x0e, 0x5f, 0x39, 0x7e, 0x1c, 0x66, 0xa3, 0x46, + 0x94, 0x6f, 0x09, 0x35, 0xc1, 0x61, 0xa1, 0x58, 0xb3, 0x2a, 0xb1, 0x6f, 0xcb, 0x1e, 0xf8, 0x88, + 0x08, 0xc5, 0xc5, 0x05, 0x41, 0x43, 0x8a, 0x5b, 0xfa, 0xa7, 0x01, 0xc6, 0xdb, 0x5e, 0x37, 0xc1, + 0x1f, 0x1a, 0x60, 0xa4, 0xa6, 0x4d, 0x4f, 0x6e, 0xe8, 0xb5, 0x93, 0xbf, 0xa0, 0xd2, 0x94, 0x8a, + 0x1a, 0x48, 0xa7, 0xa0, 0x16, 0x50, 0xb8, 0x09, 0xcc, 0x5a, 0xe6, 0x21, 0x61, 0xe6, 0xd3, 0xe4, + 0xa5, 0x66, 0xa3, 0x68, 0x2e, 0x76, 0x91, 0x41, 0x5d, 0x47, 0x57, 0xbe, 0xfb, 0xf1, 0x67, 0xd3, + 0x67, 0x3e, 0xf9, 0x6c, 0xfa, 0xcc, 0xa7, 0x9f, 0x4d, 0x9f, 0x79, 0xa7, 0x39, 0x6d, 0x7c, 0xdc, + 0x9c, 0x36, 0x3e, 0x69, 0x4e, 0x1b, 0x9f, 0x36, 0xa7, 
0x8d, 0xbf, 0x35, 0xa7, 0x8d, 0x9f, 0xff, + 0x7d, 0xfa, 0xcc, 0xeb, 0x37, 0x8e, 0xfb, 0x7c, 0xf8, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x1d, + 0x01, 0xc1, 0x04, 0x92, 0x2c, 0x00, 0x00, } func (m *ConversionRequest) Marshal() (dAtA []byte, err error) { @@ -2630,6 +2633,18 @@ func (m *ValidationRule) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + i -= len(m.FieldPath) + copy(dAtA[i:], m.FieldPath) + i = encodeVarintGenerated(dAtA, i, uint64(len(m.FieldPath))) + i-- + dAtA[i] = 0x2a + if m.Reason != nil { + i -= len(*m.Reason) + copy(dAtA[i:], *m.Reason) + i = encodeVarintGenerated(dAtA, i, uint64(len(*m.Reason))) + i-- + dAtA[i] = 0x22 + } i -= len(m.MessageExpression) copy(dAtA[i:], m.MessageExpression) i = encodeVarintGenerated(dAtA, i, uint64(len(m.MessageExpression))) @@ -3346,6 +3361,12 @@ func (m *ValidationRule) Size() (n int) { n += 1 + l + sovGenerated(uint64(l)) l = len(m.MessageExpression) n += 1 + l + sovGenerated(uint64(l)) + if m.Reason != nil { + l = len(*m.Reason) + n += 1 + l + sovGenerated(uint64(l)) + } + l = len(m.FieldPath) + n += 1 + l + sovGenerated(uint64(l)) return n } @@ -3822,6 +3843,8 @@ func (this *ValidationRule) String() string { `Rule:` + fmt.Sprintf("%v", this.Rule) + `,`, `Message:` + fmt.Sprintf("%v", this.Message) + `,`, `MessageExpression:` + fmt.Sprintf("%v", this.MessageExpression) + `,`, + `Reason:` + valueToStringGenerated(this.Reason) + `,`, + `FieldPath:` + fmt.Sprintf("%v", this.FieldPath) + `,`, `}`, }, "") return s @@ -8920,6 +8943,71 @@ func (m *ValidationRule) Unmarshal(dAtA []byte) error { } m.MessageExpression = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Reason", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + s := FieldValueErrorReason(dAtA[iNdEx:postIndex]) + m.Reason = &s + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field FieldPath", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenerated + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenerated + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenerated + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.FieldPath = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenerated(dAtA[iNdEx:]) diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.proto b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.proto index 4632a83e5..578d018a7 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.proto +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/generated.proto @@ -678,6 +678,26 @@ message 
ValidationRule { // "x must be less than max ("+string(self.max)+")" // +optional optional string messageExpression = 3; + + // reason provides a machine-readable validation failure reason that is returned to the caller when a request fails this validation rule. + // The HTTP status code returned to the caller will match the reason of the reason of the first failed validation rule. + // The currently supported reasons are: "FieldValueInvalid", "FieldValueForbidden", "FieldValueRequired", "FieldValueDuplicate". + // If not set, default to use "FieldValueInvalid". + // All future added reasons must be accepted by clients when reading this value and unknown reasons should be treated as FieldValueInvalid. + // +optional + optional string reason = 4; + + // fieldPath represents the field path returned when the validation fails. + // It must be a relative JSON path (i.e. with array notation) scoped to the location of this x-kubernetes-validations extension in the schema and refer to an existing field. + // e.g. when validation checks if a specific attribute `foo` under a map `testMap`, the fieldPath could be set to `.testMap.foo` + // If the validation checks two lists must have unique attributes, the fieldPath could be set to either of the list: e.g. `.testList` + // It does not support list numeric index. + // It supports child operation to refer to an existing field currently. Refer to [JSONPath support in Kubernetes](https://kubernetes.io/docs/reference/kubectl/jsonpath/) for more info. + // Numeric index of array is not supported. + // For field name which contains special characters, use `['specialName']` to refer the field name. + // e.g. for attribute `foo.34$` appears in a list `testList`, the fieldPath could be set to `.testList['foo.34$']` + // +optional + optional string fieldPath = 5; } // WebhookClientConfig contains the information to make a TLS connection with the webhook. diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/types_jsonschema.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/types_jsonschema.go index b348d0d19..1c90d464a 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/types_jsonschema.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/types_jsonschema.go @@ -16,6 +16,26 @@ limitations under the License. package v1 +// FieldValueErrorReason is a machine-readable value providing more detail about why a field failed the validation. +// +enum +type FieldValueErrorReason string + +const ( + // FieldValueRequired is used to report required values that are not + // provided (e.g. empty strings, null values, or empty arrays). + FieldValueRequired FieldValueErrorReason = "FieldValueRequired" + // FieldValueDuplicate is used to report collisions of values that must be + // unique (e.g. unique IDs). + FieldValueDuplicate FieldValueErrorReason = "FieldValueDuplicate" + // FieldValueInvalid is used to report malformed values (e.g. failed regex + // match, too long, out of bounds). + FieldValueInvalid FieldValueErrorReason = "FieldValueInvalid" + // FieldValueForbidden is used to report valid (as per formatting rules) + // values which would be accepted under some conditions, but which are not + // permitted by the current conditions (such as security policy). + FieldValueForbidden FieldValueErrorReason = "FieldValueForbidden" +) + // JSONSchemaProps is a JSON-Schema following Specification Draft 4 (http://json-schema.org/). 
type JSONSchemaProps struct { ID string `json:"id,omitempty" protobuf:"bytes,1,opt,name=id"` @@ -247,6 +267,24 @@ type ValidationRule struct { // "x must be less than max ("+string(self.max)+")" // +optional MessageExpression string `json:"messageExpression,omitempty" protobuf:"bytes,3,opt,name=messageExpression"` + // reason provides a machine-readable validation failure reason that is returned to the caller when a request fails this validation rule. + // The HTTP status code returned to the caller will match the reason of the reason of the first failed validation rule. + // The currently supported reasons are: "FieldValueInvalid", "FieldValueForbidden", "FieldValueRequired", "FieldValueDuplicate". + // If not set, default to use "FieldValueInvalid". + // All future added reasons must be accepted by clients when reading this value and unknown reasons should be treated as FieldValueInvalid. + // +optional + Reason *FieldValueErrorReason `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"` + // fieldPath represents the field path returned when the validation fails. + // It must be a relative JSON path (i.e. with array notation) scoped to the location of this x-kubernetes-validations extension in the schema and refer to an existing field. + // e.g. when validation checks if a specific attribute `foo` under a map `testMap`, the fieldPath could be set to `.testMap.foo` + // If the validation checks two lists must have unique attributes, the fieldPath could be set to either of the list: e.g. `.testList` + // It does not support list numeric index. + // It supports child operation to refer to an existing field currently. Refer to [JSONPath support in Kubernetes](https://kubernetes.io/docs/reference/kubectl/jsonpath/) for more info. + // Numeric index of array is not supported. + // For field name which contains special characters, use `['specialName']` to refer the field name. + // e.g. for attribute `foo.34$` appears in a list `testList`, the fieldPath could be set to `.testList['foo.34$']` + // +optional + FieldPath string `json:"fieldPath,omitempty" protobuf:"bytes,5,opt,name=fieldPath"` } // JSON represents any valid JSON value. 
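Editor's note: the comments above document the two fields (reason, fieldPath) added to ValidationRule in this vendored apiextensions update. Below is a minimal, hypothetical sketch (not part of this diff) of how a schema built with these vendored Go types could set them; the CEL rule, message, and field names (replicas, maxReplicas) are invented for illustration, while Reason, FieldPath, and FieldValueForbidden come from the code above.

package main

import (
	"fmt"

	apiextensionsv1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1"
)

func main() {
	// Reason is a *FieldValueErrorReason; when left nil the API server
	// defaults the failure reason to FieldValueInvalid.
	reason := apiextensionsv1.FieldValueForbidden

	schema := apiextensionsv1.JSONSchemaProps{
		Type: "object",
		XValidations: apiextensionsv1.ValidationRules{
			{
				// Hypothetical CEL rule; only Reason and FieldPath are new in this update.
				Rule:      "self.replicas <= self.maxReplicas",
				Message:   "replicas may not exceed maxReplicas",
				Reason:    &reason,
				FieldPath: ".replicas", // relative JSON path reported when the rule fails
			},
		},
	}

	fmt.Printf("%s -> reason=%s fieldPath=%s\n",
		schema.XValidations[0].Rule,
		*schema.XValidations[0].Reason,
		schema.XValidations[0].FieldPath)
}

Because Reason is a pointer, the regenerated deepcopy code further down replaces the flat copy() over ValidationRules with per-element DeepCopyInto calls, so deep copies do not share the same *FieldValueErrorReason.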
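Editor's note: the MarshalToSizedBuffer and Unmarshal hunks in generated.pb.go earlier in this diff hard-code the tag bytes 0x2a and 0x22 for the new fields. Those constants follow from the protobuf wire format (tag = field_number<<3 | wire_type, with wire type 2 for length-delimited strings); the throwaway check below is only an illustration, not part of the change.

package main

import "fmt"

// wireTag reproduces the tag-byte arithmetic behind the literals in the
// generated marshal code: the field number shifted left three bits, OR-ed
// with the wire type (2 = length-delimited, used for strings).
func wireTag(fieldNumber, wireType int) byte {
	return byte(fieldNumber<<3 | wireType)
}

func main() {
	fmt.Printf("reason    (field 4): 0x%x\n", wireTag(4, 2)) // 0x22, matches dAtA[i] = 0x22
	fmt.Printf("fieldPath (field 5): 0x%x\n", wireTag(5, 2)) // 0x2a, matches dAtA[i] = 0x2a
}

MarshalToSizedBuffer fills its buffer back-to-front, which is why the added hunk emits FieldPath (field 5, 0x2a) before Reason (field 4, 0x22).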
diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.conversion.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.conversion.go index cde5275ce..0a82e4d8c 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.conversion.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.conversion.go @@ -1259,6 +1259,8 @@ func autoConvert_v1_ValidationRule_To_apiextensions_ValidationRule(in *Validatio out.Rule = in.Rule out.Message = in.Message out.MessageExpression = in.MessageExpression + out.Reason = (*apiextensions.FieldValueErrorReason)(unsafe.Pointer(in.Reason)) + out.FieldPath = in.FieldPath return nil } @@ -1271,6 +1273,8 @@ func autoConvert_apiextensions_ValidationRule_To_v1_ValidationRule(in *apiextens out.Rule = in.Rule out.Message = in.Message out.MessageExpression = in.MessageExpression + out.Reason = (*FieldValueErrorReason)(unsafe.Pointer(in.Reason)) + out.FieldPath = in.FieldPath return nil } diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.deepcopy.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.deepcopy.go index e27daa9a3..b4347b8db 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.deepcopy.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.deepcopy.go @@ -614,6 +614,11 @@ func (in *ServiceReference) DeepCopy() *ServiceReference { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ValidationRule) DeepCopyInto(out *ValidationRule) { *out = *in + if in.Reason != nil { + in, out := &in.Reason, &out.Reason + *out = new(FieldValueErrorReason) + **out = **in + } return } @@ -632,7 +637,9 @@ func (in ValidationRules) DeepCopyInto(out *ValidationRules) { { in := &in *out = make(ValidationRules, len(*in)) - copy(*out, *in) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } return } } diff --git a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/zz_generated.deepcopy.go b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/zz_generated.deepcopy.go index 998c9dbe2..f8a5ffbfb 100644 --- a/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/zz_generated.deepcopy.go +++ b/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/zz_generated.deepcopy.go @@ -531,6 +531,11 @@ func (in *ServiceReference) DeepCopy() *ServiceReference { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *ValidationRule) DeepCopyInto(out *ValidationRule) { *out = *in + if in.Reason != nil { + in, out := &in.Reason, &out.Reason + *out = new(FieldValueErrorReason) + **out = **in + } return } @@ -549,7 +554,9 @@ func (in ValidationRules) DeepCopyInto(out *ValidationRules) { { in := &in *out = make(ValidationRules, len(*in)) - copy(*out, *in) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } return } } diff --git a/vendor/modules.txt b/vendor/modules.txt index 139f4022d..51b846171 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,4 +1,4 @@ -# code.gitea.io/sdk/gitea v0.15.1 => code.gitea.io/sdk/gitea v0.15.1-0.20230509035020-970776d1c1e9 +# code.gitea.io/sdk/gitea v0.16.0 => code.gitea.io/sdk/gitea v0.15.1-0.20230509035020-970776d1c1e9 ## explicit; go 1.13 code.gitea.io/sdk/gitea # contrib.go.opencensus.io/exporter/ocagent v0.7.1-0.20200907061046-05415f1de66d @@ -15,7 +15,7 @@ contrib.go.opencensus.io/exporter/zipkin github.com/AlecAivazis/survey/v2 github.com/AlecAivazis/survey/v2/core github.com/AlecAivazis/survey/v2/terminal -# github.com/ProtonMail/go-crypto v0.0.0-20230717121422-5aa5874ade95 +# github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c ## explicit; go 1.13 github.com/ProtonMail/go-crypto/bitcurves github.com/ProtonMail/go-crypto/brainpool @@ -44,9 +44,6 @@ github.com/andybalholm/cascadia # github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230321174746-8dcc6526cfb1 ## explicit; go 1.18 github.com/antlr/antlr4/runtime/Go/antlr/v4 -# github.com/benbjohnson/clock v1.3.0 -## explicit; go 1.15 -github.com/benbjohnson/clock # github.com/beorn7/perks v1.0.1 ## explicit; go 1.11 github.com/beorn7/perks/quantile @@ -56,7 +53,7 @@ github.com/blang/semver/v4 # github.com/blendle/zapdriver v1.3.1 ## explicit; go 1.12 github.com/blendle/zapdriver -# github.com/bradleyfalzon/ghinstallation/v2 v2.6.0 +# github.com/bradleyfalzon/ghinstallation/v2 v2.7.0 ## explicit; go 1.13 github.com/bradleyfalzon/ghinstallation/v2 # github.com/census-instrumentation/opencensus-proto v0.4.1 @@ -111,20 +108,20 @@ github.com/davecgh/go-spew/spew # github.com/davidmz/go-pageant v1.0.2 ## explicit; go 1.16 github.com/davidmz/go-pageant -# github.com/emicklei/go-restful/v3 v3.10.2 +# github.com/emicklei/go-restful/v3 v3.11.0 ## explicit; go 1.13 github.com/emicklei/go-restful/v3 github.com/emicklei/go-restful/v3/log -# github.com/evanphx/json-patch v5.6.0+incompatible +# github.com/evanphx/json-patch v5.7.0+incompatible ## explicit github.com/evanphx/json-patch -# github.com/evanphx/json-patch/v5 v5.6.0 -## explicit; go 1.12 +# github.com/evanphx/json-patch/v5 v5.7.0 +## explicit; go 1.18 github.com/evanphx/json-patch/v5 # github.com/fvbommel/sortorder v1.1.0 ## explicit; go 1.13 github.com/fvbommel/sortorder -# github.com/gfleury/go-bitbucket-v1 v0.0.0-20230626192437-8d7be5866751 +# github.com/gfleury/go-bitbucket-v1 v0.0.0-20230830121038-6e30c5760c87 ## explicit; go 1.14 github.com/gfleury/go-bitbucket-v1 # github.com/go-fed/httpsig v1.1.0 @@ -178,7 +175,7 @@ github.com/golang/protobuf/ptypes github.com/golang/protobuf/ptypes/any github.com/golang/protobuf/ptypes/duration github.com/golang/protobuf/ptypes/timestamp -# github.com/google/cel-go v0.17.1 +# github.com/google/cel-go v0.18.1 ## explicit; go 1.18 github.com/google/cel-go/cel github.com/google/cel-go/checker @@ -200,14 +197,19 @@ github.com/google/cel-go/common/types/traits github.com/google/cel-go/interpreter github.com/google/cel-go/parser github.com/google/cel-go/parser/gen -# 
github.com/google/gnostic v0.6.9 +# github.com/google/gnostic v0.7.0 ## explicit; go 1.12 github.com/google/gnostic/compiler -github.com/google/gnostic/extensions -github.com/google/gnostic/jsonschema github.com/google/gnostic/openapiv2 github.com/google/gnostic/openapiv3 -# github.com/google/go-cmp v0.5.9 +# github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 +## explicit; go 1.18 +github.com/google/gnostic-models/compiler +github.com/google/gnostic-models/extensions +github.com/google/gnostic-models/jsonschema +github.com/google/gnostic-models/openapiv2 +github.com/google/gnostic-models/openapiv3 +# github.com/google/go-cmp v0.6.0 ## explicit; go 1.13 github.com/google/go-cmp/cmp github.com/google/go-cmp/cmp/cmpopts @@ -218,12 +220,15 @@ github.com/google/go-cmp/cmp/internal/value # github.com/google/go-containerregistry v0.16.1 ## explicit; go 1.18 github.com/google/go-containerregistry/pkg/name -# github.com/google/go-github/scrape v0.0.0-20230803055657-a8da03b06966 +# github.com/google/go-github/scrape v0.0.0-20231014221314-ee3f27345645 ## explicit; go 1.13 github.com/google/go-github/scrape -# github.com/google/go-github/v53 v53.2.0 +# github.com/google/go-github/v55 v55.0.0 ## explicit; go 1.17 -github.com/google/go-github/v53/github +github.com/google/go-github/v55/github +# github.com/google/go-github/v56 v56.0.0 +## explicit; go 1.17 +github.com/google/go-github/v56/github # github.com/google/go-querystring v1.1.0 ## explicit; go 1.10 github.com/google/go-querystring/query @@ -233,10 +238,10 @@ github.com/google/gofuzz github.com/google/gofuzz/bytesource # github.com/google/pprof v0.0.0-20230111200839-76d1ae5aea2b ## explicit; go 1.18 -# github.com/google/uuid v1.3.0 +# github.com/google/uuid v1.3.1 ## explicit github.com/google/uuid -# github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.2 +# github.com/grpc-ecosystem/grpc-gateway/v2 v2.18.0 ## explicit; go 1.17 github.com/grpc-ecosystem/grpc-gateway/v2/internal/httprule github.com/grpc-ecosystem/grpc-gateway/v2/runtime @@ -259,7 +264,7 @@ github.com/hashicorp/go-retryablehttp # github.com/hashicorp/go-version v1.6.0 ## explicit github.com/hashicorp/go-version -# github.com/hashicorp/golang-lru v0.6.0 +# github.com/hashicorp/golang-lru v1.0.2 ## explicit; go 1.12 github.com/hashicorp/golang-lru github.com/hashicorp/golang-lru/simplelru @@ -288,7 +293,7 @@ github.com/kballard/go-shellquote # github.com/kelseyhightower/envconfig v1.4.0 ## explicit github.com/kelseyhightower/envconfig -# github.com/ktrysmt/go-bitbucket v0.9.65 +# github.com/ktrysmt/go-bitbucket v0.9.70 ## explicit; go 1.14 github.com/ktrysmt/go-bitbucket # github.com/lunixbochs/vtclean v1.0.0 @@ -323,14 +328,10 @@ github.com/modern-go/reflect2 # github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 ## explicit github.com/munnerz/goautoneg -# github.com/onsi/ginkgo/v2 v2.9.7 -## explicit; go 1.18 -# github.com/onsi/gomega v1.27.8 -## explicit; go 1.18 # github.com/opencontainers/go-digest v1.0.0 ## explicit; go 1.13 github.com/opencontainers/go-digest -# github.com/openzipkin/zipkin-go v0.4.1 +# github.com/openzipkin/zipkin-go v0.4.2 ## explicit; go 1.18 github.com/openzipkin/zipkin-go github.com/openzipkin/zipkin-go/idgenerator @@ -344,20 +345,20 @@ github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/prometheus/client_golang v1.16.0 -## explicit; go 1.17 +# github.com/prometheus/client_golang v1.17.0 +## explicit; go 1.19 github.com/prometheus/client_golang/prometheus 
github.com/prometheus/client_golang/prometheus/internal github.com/prometheus/client_golang/prometheus/promhttp -# github.com/prometheus/client_model v0.4.0 -## explicit; go 1.18 +# github.com/prometheus/client_model v0.5.0 +## explicit; go 1.19 github.com/prometheus/client_model/go # github.com/prometheus/common v0.44.0 ## explicit; go 1.18 github.com/prometheus/common/expfmt github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg github.com/prometheus/common/model -# github.com/prometheus/procfs v0.11.1 +# github.com/prometheus/procfs v0.12.0 ## explicit; go 1.19 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs @@ -381,10 +382,11 @@ github.com/stoewer/go-strcase # github.com/stretchr/testify v1.8.4 ## explicit; go 1.20 github.com/stretchr/testify/assert -# github.com/tektoncd/pipeline v0.50.0 +# github.com/tektoncd/pipeline v0.52.1 ## explicit; go 1.19 github.com/tektoncd/pipeline/pkg/apis/config github.com/tektoncd/pipeline/pkg/apis/pipeline +github.com/tektoncd/pipeline/pkg/apis/pipeline/internal/checksum github.com/tektoncd/pipeline/pkg/apis/pipeline/pod github.com/tektoncd/pipeline/pkg/apis/pipeline/v1 github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1 @@ -425,7 +427,7 @@ github.com/tektoncd/pipeline/pkg/reconciler/pipeline/dag github.com/tektoncd/pipeline/pkg/result github.com/tektoncd/pipeline/pkg/spire/config github.com/tektoncd/pipeline/pkg/substitution -# github.com/xanzy/go-gitlab v0.90.0 +# github.com/xanzy/go-gitlab v0.93.1 ## explicit; go 1.18 github.com/xanzy/go-gitlab # github.com/xlzd/gotp v0.1.0 @@ -452,7 +454,7 @@ go.opencensus.io/trace go.opencensus.io/trace/internal go.opencensus.io/trace/propagation go.opencensus.io/trace/tracestate -# go.uber.org/atomic v1.10.0 +# go.uber.org/atomic v1.11.0 ## explicit; go 1.18 go.uber.org/atomic # go.uber.org/automaxprocs v1.5.3 @@ -463,7 +465,7 @@ go.uber.org/automaxprocs/maxprocs # go.uber.org/multierr v1.11.0 ## explicit; go 1.19 go.uber.org/multierr -# go.uber.org/zap v1.25.0 +# go.uber.org/zap v1.26.0 ## explicit; go 1.19 go.uber.org/zap go.uber.org/zap/buffer @@ -472,6 +474,7 @@ go.uber.org/zap/internal/bufferpool go.uber.org/zap/internal/color go.uber.org/zap/internal/exit go.uber.org/zap/internal/pool +go.uber.org/zap/internal/stacktrace go.uber.org/zap/internal/ztest go.uber.org/zap/zapcore go.uber.org/zap/zaptest @@ -495,7 +498,7 @@ golang.org/x/crypto/sha3 golang.org/x/crypto/ssh golang.org/x/crypto/ssh/agent golang.org/x/crypto/ssh/internal/bcrypt_pbkdf -# golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b +# golang.org/x/exp v0.0.0-20231006140011-7918f672742d ## explicit; go 1.20 golang.org/x/exp/constraints golang.org/x/exp/maps @@ -513,13 +516,13 @@ golang.org/x/net/idna golang.org/x/net/internal/timeseries golang.org/x/net/publicsuffix golang.org/x/net/trace -# golang.org/x/oauth2 v0.11.0 +# golang.org/x/oauth2 v0.13.0 ## explicit; go 1.18 golang.org/x/oauth2 golang.org/x/oauth2/bitbucket golang.org/x/oauth2/clientcredentials golang.org/x/oauth2/internal -# golang.org/x/sync v0.3.0 +# golang.org/x/sync v0.4.0 ## explicit; go 1.17 golang.org/x/sync/errgroup golang.org/x/sync/semaphore @@ -548,13 +551,13 @@ golang.org/x/text/width # golang.org/x/time v0.3.0 ## explicit golang.org/x/time/rate -# gomodules.xyz/jsonpatch/v2 v2.3.0 +# gomodules.xyz/jsonpatch/v2 v2.4.0 ## explicit; go 1.20 gomodules.xyz/jsonpatch/v2 -# google.golang.org/api v0.134.0 +# google.golang.org/api v0.147.0 ## explicit; go 1.19 google.golang.org/api/support/bundler -# google.golang.org/appengine v1.6.7 +# 
google.golang.org/appengine v1.6.8 ## explicit; go 1.11 google.golang.org/appengine/internal google.golang.org/appengine/internal/base @@ -563,17 +566,15 @@ google.golang.org/appengine/internal/log google.golang.org/appengine/internal/remote_api google.golang.org/appengine/internal/urlfetch google.golang.org/appengine/urlfetch -# google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 -## explicit; go 1.19 -# google.golang.org/genproto/googleapis/api v0.0.0-20230803162519-f966b187b2e5 +# google.golang.org/genproto/googleapis/api v0.0.0-20231012201019-e917dd12ba7a ## explicit; go 1.19 google.golang.org/genproto/googleapis/api/expr/v1alpha1 google.golang.org/genproto/googleapis/api/httpbody -# google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 +# google.golang.org/genproto/googleapis/rpc v0.0.0-20231012201019-e917dd12ba7a ## explicit; go 1.19 google.golang.org/genproto/googleapis/rpc/status -# google.golang.org/grpc v1.57.0 -## explicit; go 1.17 +# google.golang.org/grpc v1.58.3 +## explicit; go 1.19 google.golang.org/grpc google.golang.org/grpc/attributes google.golang.org/grpc/backoff @@ -604,6 +605,7 @@ google.golang.org/grpc/internal/grpclog google.golang.org/grpc/internal/grpcrand google.golang.org/grpc/internal/grpcsync google.golang.org/grpc/internal/grpcutil +google.golang.org/grpc/internal/idle google.golang.org/grpc/internal/metadata google.golang.org/grpc/internal/pretty google.golang.org/grpc/internal/resolver @@ -671,7 +673,7 @@ gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# gotest.tools/v3 v3.5.0 +# gotest.tools/v3 v3.5.1 ## explicit; go 1.17 gotest.tools/v3/assert gotest.tools/v3/assert/cmp @@ -683,7 +685,7 @@ gotest.tools/v3/internal/cleanup gotest.tools/v3/internal/difflib gotest.tools/v3/internal/format gotest.tools/v3/internal/source -# k8s.io/api v0.27.4 => k8s.io/api v0.25.9 +# k8s.io/api v0.28.2 => k8s.io/api v0.25.9 ## explicit; go 1.19 k8s.io/api/admission/v1 k8s.io/api/admissionregistration/v1 @@ -732,11 +734,11 @@ k8s.io/api/scheduling/v1beta1 k8s.io/api/storage/v1 k8s.io/api/storage/v1alpha1 k8s.io/api/storage/v1beta1 -# k8s.io/apiextensions-apiserver v0.27.4 +# k8s.io/apiextensions-apiserver v0.28.2 ## explicit; go 1.20 k8s.io/apiextensions-apiserver/pkg/apis/apiextensions k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1 -# k8s.io/apimachinery v0.27.4 => k8s.io/apimachinery v0.26.5 +# k8s.io/apimachinery v0.28.2 => k8s.io/apimachinery v0.26.5 ## explicit; go 1.19 k8s.io/apimachinery/pkg/api/equality k8s.io/apimachinery/pkg/api/errors @@ -1076,7 +1078,7 @@ k8s.io/klog/v2/internal/clock k8s.io/klog/v2/internal/dbg k8s.io/klog/v2/internal/serialize k8s.io/klog/v2/internal/severity -# k8s.io/kube-openapi v0.0.0-20230718181711-3c0fae5ee9fd => k8s.io/kube-openapi v0.0.0-20230515203736-54b630e78af5 +# k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 => k8s.io/kube-openapi v0.0.0-20230515203736-54b630e78af5 ## explicit; go 1.19 k8s.io/kube-openapi/pkg/builder3/util k8s.io/kube-openapi/pkg/cached @@ -1102,7 +1104,7 @@ k8s.io/utils/pointer k8s.io/utils/ptr k8s.io/utils/strings/slices k8s.io/utils/trace -# knative.dev/eventing v0.38.0 +# knative.dev/eventing v0.38.4 ## explicit; go 1.19 knative.dev/eventing/pkg/adapter/v2 knative.dev/eventing/pkg/adapter/v2/util/crstatusevent @@ -1113,7 +1115,7 @@ knative.dev/eventing/pkg/metrics knative.dev/eventing/pkg/metrics/source knative.dev/eventing/pkg/observability knative.dev/eventing/pkg/observability/client -# knative.dev/pkg 
v0.0.0-20230718152110-aef227e72ead => knative.dev/pkg v0.0.0-20230418073056-dfad48eaa5d0 +# knative.dev/pkg v0.0.0-20231016142534-0d0cd4e7dbef => knative.dev/pkg v0.0.0-20230418073056-dfad48eaa5d0 ## explicit; go 1.18 knative.dev/pkg/apis knative.dev/pkg/apis/duck