From b319459967ac66c8136d362e4bbf2513d435043f Mon Sep 17 00:00:00 2001 From: Ben Drucker Date: Mon, 13 Nov 2023 13:51:42 -0800 Subject: [PATCH] deps: vendor `genqlient` CLI --- client/meta/genqlient.go | 2 +- go.mod | 3 + go.sum | 5 + tools/tools.go | 1 + .../github.com/Khan/genqlient/.gitattributes | 1 + vendor/github.com/Khan/genqlient/.gitignore | 54 + .../github.com/Khan/genqlient/.golangci.yml | 112 + vendor/github.com/Khan/genqlient/Makefile | 13 + vendor/github.com/Khan/genqlient/README.md | 52 + .../Khan/genqlient/generate/config.go | 237 + .../Khan/genqlient/generate/convert.go | 879 ++ .../genqlient/generate/default_genqlient.yaml | 6 + .../Khan/genqlient/generate/description.go | 81 + .../Khan/genqlient/generate/errors.go | 181 + .../Khan/genqlient/generate/generate.go | 429 + .../genqlient/generate/genqlient_directive.go | 531 + .../Khan/genqlient/generate/header.go.tmpl | 10 + .../Khan/genqlient/generate/imports.go | 131 + .../Khan/genqlient/generate/main.go | 92 + .../Khan/genqlient/generate/marshal.go.tmpl | 95 + .../genqlient/generate/marshal_helper.go.tmpl | 44 + .../Khan/genqlient/generate/names.go | 180 + .../Khan/genqlient/generate/operation.go.tmpl | 49 + .../Khan/genqlient/generate/parse.go | 220 + .../Khan/genqlient/generate/stringlist.go | 25 + .../Khan/genqlient/generate/template.go | 53 + .../Khan/genqlient/generate/types.go | 558 + .../Khan/genqlient/generate/unmarshal.go.tmpl | 154 + .../generate/unmarshal_helper.go.tmpl | 36 + .../Khan/genqlient/generate/util.go | 77 + .../Khan/genqlient/generate/validation.go | 112 + vendor/github.com/Khan/genqlient/main.go | 19 + .../agnivade/levenshtein/.gitignore | 5 + .../agnivade/levenshtein/.travis.yml | 23 + .../agnivade/levenshtein/License.txt | 21 + .../github.com/agnivade/levenshtein/Makefile | 15 + .../github.com/agnivade/levenshtein/README.md | 80 + .../agnivade/levenshtein/levenshtein.go | 89 + vendor/github.com/alexflint/go-arg/.gitignore | 24 + vendor/github.com/alexflint/go-arg/LICENSE | 24 + vendor/github.com/alexflint/go-arg/README.md | 525 + vendor/github.com/alexflint/go-arg/doc.go | 39 + vendor/github.com/alexflint/go-arg/parse.go | 725 + vendor/github.com/alexflint/go-arg/reflect.go | 107 + .../github.com/alexflint/go-arg/sequence.go | 123 + .../github.com/alexflint/go-arg/subcommand.go | 37 + vendor/github.com/alexflint/go-arg/usage.go | 260 + .../github.com/alexflint/go-scalar/.gitignore | 24 + .../alexflint/go-scalar/.travis.yml | 9 + vendor/github.com/alexflint/go-scalar/LICENSE | 24 + .../github.com/alexflint/go-scalar/README.md | 28 + .../github.com/alexflint/go-scalar/scalar.go | 153 + .../gqlparser/v2/formatter/formatter.go | 636 + .../vektah/gqlparser/v2/lexer/blockstring.go | 58 + .../vektah/gqlparser/v2/lexer/lexer.go | 515 + .../vektah/gqlparser/v2/lexer/lexer_test.yml | 692 + .../vektah/gqlparser/v2/lexer/token.go | 148 + .../vektah/gqlparser/v2/parser/parser.go | 136 + .../vektah/gqlparser/v2/parser/query.go | 348 + .../vektah/gqlparser/v2/parser/query_test.yml | 544 + .../vektah/gqlparser/v2/parser/schema.go | 534 + .../gqlparser/v2/parser/schema_test.yml | 646 + .../vektah/gqlparser/v2/validator/error.go | 55 + .../gqlparser/v2/validator/messaging.go | 39 + .../vektah/gqlparser/v2/validator/prelude.go | 15 + .../gqlparser/v2/validator/prelude.graphql | 121 + .../validator/rules/fields_on_correct_type.go | 94 + .../rules/fragments_on_composite_types.go | 39 + .../validator/rules/known_argument_names.go | 57 + .../v2/validator/rules/known_directives.go | 47 + 
.../validator/rules/known_fragment_names.go | 19 + .../v2/validator/rules/known_root_type.go | 35 + .../v2/validator/rules/known_type_names.go | 59 + .../rules/lone_anonymous_operation.go | 19 + .../v2/validator/rules/no_fragment_cycles.go | 93 + .../validator/rules/no_undefined_variables.go | 28 + .../v2/validator/rules/no_unused_fragments.go | 30 + .../v2/validator/rules/no_unused_variables.go | 30 + .../rules/overlapping_fields_can_be_merged.go | 560 + .../rules/possible_fragment_spreads.go | 68 + .../rules/provided_required_arguments.go | 62 + .../v2/validator/rules/scalar_leafs.go | 36 + .../rules/single_field_subscriptions.go | 86 + .../validator/rules/unique_argument_names.go | 33 + .../rules/unique_directives_per_location.go | 24 + .../validator/rules/unique_fragment_names.go | 22 + .../rules/unique_input_field_names.go | 27 + .../validator/rules/unique_operation_names.go | 22 + .../validator/rules/unique_variable_names.go | 24 + .../validator/rules/values_of_correct_type.go | 168 + .../rules/variables_are_input_types.go | 28 + .../rules/variables_in_allowed_position.go | 38 + .../vektah/gqlparser/v2/validator/schema.go | 512 + .../gqlparser/v2/validator/schema_test.yml | 678 + .../gqlparser/v2/validator/suggestionList.go | 69 + .../gqlparser/v2/validator/validator.go | 44 + .../vektah/gqlparser/v2/validator/vars.go | 258 + .../vektah/gqlparser/v2/validator/walk.go | 292 + .../x/tools/go/ast/astutil/enclosing.go | 636 + .../x/tools/go/ast/astutil/imports.go | 485 + .../x/tools/go/ast/astutil/rewrite.go | 488 + .../golang.org/x/tools/go/ast/astutil/util.go | 18 + vendor/golang.org/x/tools/imports/forward.go | 77 + .../x/tools/internal/fastwalk/fastwalk.go | 196 + .../internal/fastwalk/fastwalk_darwin.go | 119 + .../fastwalk/fastwalk_dirent_fileno.go | 14 + .../internal/fastwalk/fastwalk_dirent_ino.go | 15 + .../fastwalk/fastwalk_dirent_namlen_bsd.go | 14 + .../fastwalk/fastwalk_dirent_namlen_linux.go | 29 + .../internal/fastwalk/fastwalk_portable.go | 38 + .../tools/internal/fastwalk/fastwalk_unix.go | 153 + .../x/tools/internal/gopathwalk/walk.go | 254 + .../x/tools/internal/imports/fix.go | 1738 +++ .../x/tools/internal/imports/imports.go | 351 + .../x/tools/internal/imports/mod.go | 716 + .../x/tools/internal/imports/mod_cache.go | 236 + .../x/tools/internal/imports/sortimports.go | 297 + .../x/tools/internal/imports/zstdlib.go | 11115 ++++++++++++++++ vendor/modules.txt | 21 + 119 files changed, 31874 insertions(+), 1 deletion(-) create mode 100644 vendor/github.com/Khan/genqlient/.gitattributes create mode 100644 vendor/github.com/Khan/genqlient/.gitignore create mode 100644 vendor/github.com/Khan/genqlient/.golangci.yml create mode 100644 vendor/github.com/Khan/genqlient/Makefile create mode 100644 vendor/github.com/Khan/genqlient/README.md create mode 100644 vendor/github.com/Khan/genqlient/generate/config.go create mode 100644 vendor/github.com/Khan/genqlient/generate/convert.go create mode 100644 vendor/github.com/Khan/genqlient/generate/default_genqlient.yaml create mode 100644 vendor/github.com/Khan/genqlient/generate/description.go create mode 100644 vendor/github.com/Khan/genqlient/generate/errors.go create mode 100644 vendor/github.com/Khan/genqlient/generate/generate.go create mode 100644 vendor/github.com/Khan/genqlient/generate/genqlient_directive.go create mode 100644 vendor/github.com/Khan/genqlient/generate/header.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/imports.go create mode 100644 vendor/github.com/Khan/genqlient/generate/main.go 
create mode 100644 vendor/github.com/Khan/genqlient/generate/marshal.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/marshal_helper.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/names.go create mode 100644 vendor/github.com/Khan/genqlient/generate/operation.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/parse.go create mode 100644 vendor/github.com/Khan/genqlient/generate/stringlist.go create mode 100644 vendor/github.com/Khan/genqlient/generate/template.go create mode 100644 vendor/github.com/Khan/genqlient/generate/types.go create mode 100644 vendor/github.com/Khan/genqlient/generate/unmarshal.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/unmarshal_helper.go.tmpl create mode 100644 vendor/github.com/Khan/genqlient/generate/util.go create mode 100644 vendor/github.com/Khan/genqlient/generate/validation.go create mode 100644 vendor/github.com/Khan/genqlient/main.go create mode 100644 vendor/github.com/agnivade/levenshtein/.gitignore create mode 100644 vendor/github.com/agnivade/levenshtein/.travis.yml create mode 100644 vendor/github.com/agnivade/levenshtein/License.txt create mode 100644 vendor/github.com/agnivade/levenshtein/Makefile create mode 100644 vendor/github.com/agnivade/levenshtein/README.md create mode 100644 vendor/github.com/agnivade/levenshtein/levenshtein.go create mode 100644 vendor/github.com/alexflint/go-arg/.gitignore create mode 100644 vendor/github.com/alexflint/go-arg/LICENSE create mode 100644 vendor/github.com/alexflint/go-arg/README.md create mode 100644 vendor/github.com/alexflint/go-arg/doc.go create mode 100644 vendor/github.com/alexflint/go-arg/parse.go create mode 100644 vendor/github.com/alexflint/go-arg/reflect.go create mode 100644 vendor/github.com/alexflint/go-arg/sequence.go create mode 100644 vendor/github.com/alexflint/go-arg/subcommand.go create mode 100644 vendor/github.com/alexflint/go-arg/usage.go create mode 100644 vendor/github.com/alexflint/go-scalar/.gitignore create mode 100644 vendor/github.com/alexflint/go-scalar/.travis.yml create mode 100644 vendor/github.com/alexflint/go-scalar/LICENSE create mode 100644 vendor/github.com/alexflint/go-scalar/README.md create mode 100644 vendor/github.com/alexflint/go-scalar/scalar.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/formatter/formatter.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml create mode 100644 vendor/github.com/vektah/gqlparser/v2/lexer/token.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/parser/parser.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/parser/query.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml create mode 100644 vendor/github.com/vektah/gqlparser/v2/parser/schema.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/error.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/messaging.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/prelude.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go create mode 100644 
vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/schema.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/validator.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/vars.go create mode 100644 vendor/github.com/vektah/gqlparser/v2/validator/walk.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/enclosing.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/imports.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/rewrite.go create mode 100644 vendor/golang.org/x/tools/go/ast/astutil/util.go create mode 100644 vendor/golang.org/x/tools/imports/forward.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_darwin.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go create mode 100644 
vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go create mode 100644 vendor/golang.org/x/tools/internal/gopathwalk/walk.go create mode 100644 vendor/golang.org/x/tools/internal/imports/fix.go create mode 100644 vendor/golang.org/x/tools/internal/imports/imports.go create mode 100644 vendor/golang.org/x/tools/internal/imports/mod.go create mode 100644 vendor/golang.org/x/tools/internal/imports/mod_cache.go create mode 100644 vendor/golang.org/x/tools/internal/imports/sortimports.go create mode 100644 vendor/golang.org/x/tools/internal/imports/zstdlib.go diff --git a/client/meta/genqlient.go b/client/meta/genqlient.go index bb139749..67c03c55 100644 --- a/client/meta/genqlient.go +++ b/client/meta/genqlient.go @@ -1,3 +1,3 @@ package meta -//go:generate go run -mod=mod github.com/Khan/genqlient +//go:generate go run github.com/Khan/genqlient diff --git a/go.mod b/go.mod index c374c6ac..185ea801 100644 --- a/go.mod +++ b/go.mod @@ -21,6 +21,9 @@ require ( github.com/Masterminds/sprig/v3 v3.2.2 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 // indirect github.com/agext/levenshtein v1.2.3 // indirect + github.com/agnivade/levenshtein v1.1.1 // indirect + github.com/alexflint/go-arg v1.4.2 // indirect + github.com/alexflint/go-scalar v1.0.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/armon/go-radix v1.0.0 // indirect github.com/bgentry/speakeasy v0.1.0 // indirect diff --git a/go.sum b/go.sum index 1756f0a1..06e3cabf 100644 --- a/go.sum +++ b/go.sum @@ -25,11 +25,14 @@ github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bradleyjkemp/cupaloy/v2 v2.6.0 h1:knToPYa2xtfg42U3I6punFEjaGFKWQRXJwj0JTv4mTs= github.com/bwesterb/go-ristretto v1.2.0/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/cloudflare/circl v1.1.0/go.mod h1:prBCrKB9DV4poKZY1l9zBXg2QJY7mvgRvtMxxK7fi4I= github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs= @@ -37,6 +40,8 @@ github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUK github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+UbP35JkH8yB7MYb4q/qhBarqZE6g= +github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= diff --git a/tools/tools.go b/tools/tools.go index 9ecd08bf..ea7dc48a 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -3,6 +3,7 @@ package tools import ( + _ "github.com/Khan/genqlient" _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs" _ "gotest.tools/gotestsum" ) diff --git a/vendor/github.com/Khan/genqlient/.gitattributes b/vendor/github.com/Khan/genqlient/.gitattributes new file mode 100644 index 00000000..32f1001b --- /dev/null +++ b/vendor/github.com/Khan/genqlient/.gitattributes @@ -0,0 +1 @@ +go.sum linguist-generated diff --git a/vendor/github.com/Khan/genqlient/.gitignore b/vendor/github.com/Khan/genqlient/.gitignore new file mode 100644 index 00000000..45f5ad69 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/.gitignore @@ -0,0 +1,54 @@ +generate/testdata/tmp +internal/lint/golangci-lint + +## Go standard gitignore +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +## VSCode standard gitignore +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +## JetBrains standard gitignore +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +.idea + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties diff --git a/vendor/github.com/Khan/genqlient/.golangci.yml b/vendor/github.com/Khan/genqlient/.golangci.yml new file mode 100644 index 00000000..2076d4b1 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/.golangci.yml @@ -0,0 +1,112 @@ +# For the full list of configuration options, see +# https://github.com/golangci/golangci-lint#config-file + +# See more about these linters at https://golangci-lint.run/usage/linters/ +linters: + fast: false + disable-all: true + enable: + # golangci enables these by default. + - errcheck + - gofumpt # (replaces gofmt) + - gosimple + - govet + - ineffassign + - staticcheck + - typecheck + - unused + # golangci disables these by default, but we use them. 
+ - bodyclose + - depguard + - durationcheck + - errorlint + - exportloopref + - gocritic + - nakedret + - stylecheck + - unconvert + - unparam + - whitespace + - forbidigo + +linters-settings: + errcheck: + check-type-assertions: true # check for a := b.(T) + + errorlint: + errorf: false # it's valid to use %v instead of %w + + govet: + check-shadowing: true + enable-all: true + + # We have a ton of test-only packages; but make sure we keep prod deps small. + depguard: + list-type: whitelist + packages: + - github.com/Khan/genqlient + - github.com/vektah/gqlparser/v2 + - golang.org/x/tools + - gopkg.in/yaml.v2 + - github.com/alexflint/go-arg + + forbidigo: + forbid: + - '^print(|f|ln)$' + - '^fmt\.Print(|f|ln)$' + + + gocritic: + # Which checks should be enabled: + # See https://go-critic.github.io/overview#checks-overview + # and https://github.com/go-critic/go-critic#usage -> section "Tags". + # To check which checks are enabled: `GL_DEBUG=gocritic golangci-lint run` + enabled-tags: + - diagnostic + - performance + - style + + disabled-checks: + - builtinShadow + - commentedOutCode + - importShadow + - paramTypeCombine + - unnamedResult + - ifElseChain + - sloppyReassign + - typeDefFirst + + settings: # settings passed to gocritic + captLocal: # must be valid enabled check name + paramsOnly: true + + +issues: + exclude-rules: + # Test-only deps are not restricted. + - linters: + - depguard + path: _test\.go$|^internal/testutil/|^internal/integration/ + + # Ok to use fmt.Print in the examples, and in the CLI entrypoint. + - linters: + - forbidigo + path: ^example/|^generate/main\.go$ + + - linters: + - errcheck + path: _test\.go$ + # Unchecked type-asserts are ok in tests -- a panic will be plenty clear. + # An error message with no function name means an unchecked type-assert. + text: "^Error return value is not checked$" + + # Don't error if a test setup function always takes the same arguments. + - linters: + - unparam + path: _test\.go$ + + - linters: + - govet + # Only a big deal for runtime code. + path: ^generate/|^example/ + text: "^fieldalignment: struct with \\d+ pointer bytes could be \\d+$" diff --git a/vendor/github.com/Khan/genqlient/Makefile b/vendor/github.com/Khan/genqlient/Makefile new file mode 100644 index 00000000..c1117278 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/Makefile @@ -0,0 +1,13 @@ +example: + go generate ./... + go run ./example + +lint: + ( cd internal/lint && go build -o golangci-lint github.com/golangci/golangci-lint/cmd/golangci-lint ) + internal/lint/golangci-lint run ./... --fix + +check: lint + go test -cover ./... + go mod tidy + +.PHONY: example diff --git a/vendor/github.com/Khan/genqlient/README.md b/vendor/github.com/Khan/genqlient/README.md new file mode 100644 index 00000000..e1e9fa34 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/README.md @@ -0,0 +1,52 @@ +generated graphql client ⇒ genqlient + +[![Go Reference](https://pkg.go.dev/badge/github.com/Khan/genqlient.svg)](https://pkg.go.dev/github.com/Khan/genqlient) +[![Test Status](https://github.com/Khan/genqlient/actions/workflows/go.yml/badge.svg)](https://github.com/Khan/genqlient/actions) +[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](docs/CODE_OF_CONDUCT.md) +[![GoReportcard](https://goreportcard.com/badge/github.com/Khan/genqlient?status.svg)](https://goreportcard.com/report/github.com/Khan/genqlient) + +# genqlient: a truly type-safe Go GraphQL client + +## What is genqlient? 
+ +genqlient is a Go library to easily generate type-safe code to query a GraphQL API. It takes advantage of the fact that both GraphQL and Go are typed languages to ensure at compile-time that your code is making a valid GraphQL query and using the result correctly, all with a minimum of boilerplate. + +genqlient provides: + +- Compile-time validation of GraphQL queries: never ship an invalid GraphQL query again! +- Type-safe response objects: genqlient generates the right type for each query, so you know the response will unmarshal correctly and never need to use `interface{}`. +- Production-readiness: genqlient is used in production at Khan Academy, where it supports millions of learners and teachers around the world. + +## How do I use genqlient? + +You can download and run genqlient the usual way: `go run github.com/Khan/genqlient`. To set your project up to use genqlient, see the [getting started guide](docs/INTRODUCTION.md), or the [example](example). For more complete documentation, see the [docs](docs). + +## How can I help? + +genqlient welcomes contributions! Check out the [Contribution Guidelines](docs/CONTRIBUTING.md), or file an issue [on GitHub](issues). + +## Why another GraphQL client? + +Most common Go GraphQL clients have you write code something like this: +```go +query := `query GetUser($id: ID!) { user(id: $id) { name } }` +variables := map[string]interface{}{"id": "123"} +var resp struct { + Me struct { + Name graphql.String + } +} +client.Query(ctx, query, &resp, variables) +fmt.Println(resp.Me.Name) +// Output: Luke Skywalker +``` + +This code works, but it has a few problems: + +- While the response struct is type-safe at the Go level, there's nothing to check that the schema looks like you expect. Maybe the field is called `fullName`, not `name`; or maybe you capitalized it wrong (since Go and GraphQL have different conventions); you won't know until runtime. +- The GraphQL variables aren't type-safe at all; you could have passed `{"id": true}` and again you won't know until runtime! +- You have to write everything twice, or hide the query in complicated struct tags, or give up what type safety you do have and resort to `interface{}`. + +These problems aren't a big deal in a small application, but for serious production-grade tools they're not ideal. And they should be entirely avoidable: GraphQL and Go are both typed languages; and GraphQL servers expose their schema in a standard, machine-readable format. We should be able to simply write a query and have that automatically validated against the schema and turned into a Go struct which we can use in our code. In fact, there's already good prior art to do this sort of thing: [99designs/gqlgen](https://github.com/99designs/gqlgen) is a popular server library that generates types, and Apollo has a [codegen tool](https://www.apollographql.com/docs/devtools/cli/#supported-commands) to generate similar client-types for several other languages. (See [docs/DESIGN.md](docs/DESIGN.md) for more prior art.) + +genqlient fills that gap: you just specify the query, and it generates type-safe helpers, validated against the schema, that make the query.
diff --git a/vendor/github.com/Khan/genqlient/generate/config.go b/vendor/github.com/Khan/genqlient/generate/config.go new file mode 100644 index 00000000..37cbe7e8 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/config.go @@ -0,0 +1,237 @@ +package generate + +import ( + _ "embed" + "fmt" + "go/token" + "os" + "path/filepath" + "strings" + + "golang.org/x/tools/go/packages" + "gopkg.in/yaml.v2" +) + +var cfgFilenames = []string{".genqlient.yml", ".genqlient.yaml", "genqlient.yml", "genqlient.yaml"} + +// Config represents genqlient's configuration, generally read from +// genqlient.yaml. +// +// Callers must call [Config.ValidateAndFillDefaults] before using the config. +type Config struct { + // The following fields are documented in the [genqlient.yaml docs]. + // + // [genqlient.yaml docs]: https://github.com/Khan/genqlient/blob/main/docs/genqlient.yaml + Schema StringList `yaml:"schema"` + Operations StringList `yaml:"operations"` + Generated string `yaml:"generated"` + Package string `yaml:"package"` + ExportOperations string `yaml:"export_operations"` + ContextType string `yaml:"context_type"` + ClientGetter string `yaml:"client_getter"` + Bindings map[string]*TypeBinding `yaml:"bindings"` + PackageBindings []*PackageBinding `yaml:"package_bindings"` + Optional string `yaml:"optional"` + StructReferences bool `yaml:"use_struct_references"` + Extensions bool `yaml:"use_extensions"` + + // Set to true to use features that aren't fully ready to use. + // + // This is primarily intended for genqlient's own tests. These features + // are likely BROKEN and come with NO EXPECTATION OF COMPATIBILITY. Use + // them at your own risk! + AllowBrokenFeatures bool `yaml:"allow_broken_features"` + + // The directory of the config-file (relative to which all the other paths + // are resolved). Set by ValidateAndFillDefaults. + baseDir string +} + +// A TypeBinding represents a Go type to which genqlient will bind a particular +// GraphQL type, and is documented further in the [genqlient.yaml docs]. +// +// [genqlient.yaml docs]: https://github.com/Khan/genqlient/blob/main/docs/genqlient.yaml +type TypeBinding struct { + Type string `yaml:"type"` + ExpectExactFields string `yaml:"expect_exact_fields"` + Marshaler string `yaml:"marshaler"` + Unmarshaler string `yaml:"unmarshaler"` +} + +// A PackageBinding represents a Go package for which genqlient will +// automatically generate [TypeBinding] values, and is documented further in +// the [genqlient.yaml docs]. +// +// [genqlient.yaml docs]: https://github.com/Khan/genqlient/blob/main/docs/genqlient.yaml +type PackageBinding struct { + Package string `yaml:"package"` +} + +// pathJoin is like filepath.Join but 1) it only takes two arguments, +// and 2) if the second argument is an absolute path the first argument +// is ignored (similar to how python's os.path.join() works). +func pathJoin(a, b string) string { + if filepath.IsAbs(b) { + return b + } + return filepath.Join(a, b) +} + +// ValidateAndFillDefaults ensures that the configuration is valid, and fills +// in any options that were unspecified. +// +// The argument is the directory relative to which paths will be interpreted, +// typically the directory of the config file.
+func (c *Config) ValidateAndFillDefaults(baseDir string) error { + c.baseDir = baseDir + for i := range c.Schema { + c.Schema[i] = pathJoin(baseDir, c.Schema[i]) + } + for i := range c.Operations { + c.Operations[i] = pathJoin(baseDir, c.Operations[i]) + } + c.Generated = pathJoin(baseDir, c.Generated) + if c.ExportOperations != "" { + c.ExportOperations = pathJoin(baseDir, c.ExportOperations) + } + + if c.ContextType == "" { + c.ContextType = "context.Context" + } + + if c.Package == "" { + abs, err := filepath.Abs(c.Generated) + if err != nil { + return errorf(nil, "unable to guess package-name: %v is not a valid identifier"+ + "\nSet package name in genqlient.yaml"+ + "\nExample: https://github.com/Khan/genqlient/blob/main/example/genqlient.yaml#L6", err) + } + + base := filepath.Base(filepath.Dir(abs)) + if !token.IsIdentifier(base) { + return errorf(nil, "unable to guess package-name: %v is not a valid identifier"+ + "\nSet package name in genqlient.yaml"+ + "\nExample: https://github.com/Khan/genqlient/blob/main/example/genqlient.yaml#L6", base) + } + + c.Package = base + } + + if len(c.PackageBindings) > 0 { + for _, binding := range c.PackageBindings { + if strings.HasSuffix(binding.Package, ".go") { + // total heuristic -- but this is an easy mistake to make and + // results in rather bizarre behavior from go/packages. + return errorf(nil, + "package %v looks like a file, but should be a package-name", + binding.Package) + } + + mode := packages.NeedDeps | packages.NeedTypes + pkgs, err := packages.Load(&packages.Config{ + Mode: mode, + }, binding.Package) + if err != nil { + return err + } + + if c.Bindings == nil { + c.Bindings = map[string]*TypeBinding{} + } + + for _, pkg := range pkgs { + p := pkg.Types + if p == nil || p.Scope() == nil || p.Scope().Len() == 0 { + return errorf(nil, "unable to bind package %s: no types found", binding.Package) + } + + for _, typ := range p.Scope().Names() { + if token.IsExported(typ) { + // Check if type is manual bindings + _, exist := c.Bindings[typ] + if !exist { + pathType := fmt.Sprintf("%s.%s", p.Path(), typ) + c.Bindings[typ] = &TypeBinding{ + Type: pathType, + } + } + } + } + } + } + } + + return nil +} + +// ReadAndValidateConfig reads the configuration from the given file, validates +// it, and returns it. +func ReadAndValidateConfig(filename string) (*Config, error) { + text, err := os.ReadFile(filename) + if err != nil { + return nil, errorf(nil, "unreadable config file %v: %v", filename, err) + } + + var config Config + err = yaml.UnmarshalStrict(text, &config) + if err != nil { + return nil, errorf(nil, "invalid config file %v: %v", filename, err) + } + + err = config.ValidateAndFillDefaults(filepath.Dir(filename)) + if err != nil { + return nil, errorf(nil, "invalid config file %v: %v", filename, err) + } + + return &config, nil +} + +// ReadAndValidateConfigFromDefaultLocations looks for a config file in the +// current directory, and all parent directories walking up the tree. The +// closest config file will be returned. 
+func ReadAndValidateConfigFromDefaultLocations() (*Config, error) { + cfgFile, err := findCfg() + if err != nil { + return nil, err + } + return ReadAndValidateConfig(cfgFile) +} + +//go:embed default_genqlient.yaml +var defaultConfig []byte + +func initConfig(filename string) error { + return os.WriteFile(filename, defaultConfig, 0o644) +} + +// findCfg searches for the config file in this directory and all parents up the tree +// looking for the closest match +func findCfg() (string, error) { + dir, err := os.Getwd() + if err != nil { + return "", errorf(nil, "unable to get working dir to findCfg: %v", err) + } + + cfg := findCfgInDir(dir) + + for cfg == "" && dir != filepath.Dir(dir) { + dir = filepath.Dir(dir) + cfg = findCfgInDir(dir) + } + + if cfg == "" { + return "", os.ErrNotExist + } + + return cfg, nil +} + +func findCfgInDir(dir string) string { + for _, cfgName := range cfgFilenames { + path := pathJoin(dir, cfgName) + if _, err := os.Stat(path); err == nil { + return path + } + } + return "" +} diff --git a/vendor/github.com/Khan/genqlient/generate/convert.go b/vendor/github.com/Khan/genqlient/generate/convert.go new file mode 100644 index 00000000..caf3754f --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/convert.go @@ -0,0 +1,879 @@ +package generate + +// This file implements the core type-generation logic of genqlient, whereby we +// traverse an operation-definition (and the schema against which it will be +// executed), and convert that into Go types. It returns data structures +// representing the types to be generated; these are defined, and converted +// into code, in types.go. +// +// The entrypoints are convertOperation, which builds the response-type for a +// query, and convertArguments, which builds the argument-types. + +import ( + "fmt" + "sort" + + "github.com/vektah/gqlparser/v2/ast" +) + +// getType returns the existing type in g.typeMap with the given name, if any, +// and an error if such type is incompatible with this one. +// +// This is useful as an early-out and a safety-check when generating types; if +// the type has already been generated we can skip generating it again. (This +// is necessary to handle recursive input types, and an optimization in other +// cases.) +func (g *generator) getType( + goName, graphQLName string, + selectionSet ast.SelectionSet, + pos *ast.Position, +) (goType, error) { + typ, ok := g.typeMap[goName] + if !ok { + return nil, nil + } + + if typ.GraphQLTypeName() != graphQLName { + return typ, errorf( + pos, "conflicting definition for %s; this can indicate either "+ + "a genqlient internal error, a conflict between user-specified "+ + "type-names, or some very tricksy GraphQL field/type names: "+ + "expected GraphQL type %s, got %s", + goName, typ.GraphQLTypeName(), graphQLName) + } + + expectedSelectionSet := typ.SelectionSet() + if err := selectionsMatch(pos, selectionSet, expectedSelectionSet); err != nil { + return typ, errorf( + pos, "conflicting definition for %s; this can indicate either "+ + "a genqlient internal error, a conflict between user-specified "+ + "type-names, or some very tricksy GraphQL field/type names: %v", + goName, err) + } + + return typ, nil +} + +// addType inserts the type into g.typeMap, checking for conflicts. +// +// The conflict-checking is as described in getType. Note we have to do it +// here again, even if the caller has already called getType, because the +// caller in between may have generated new types, which potentially creates +// new conflicts. 
+// +// Returns an already-existing type if found, and otherwise the given type. +func (g *generator) addType(typ goType, goName string, pos *ast.Position) (goType, error) { + otherTyp, err := g.getType(goName, typ.GraphQLTypeName(), typ.SelectionSet(), pos) + if otherTyp != nil || err != nil { + return otherTyp, err + } + g.typeMap[goName] = typ + return typ, nil +} + +// baseTypeForOperation returns the definition of the GraphQL type to which the +// root of the operation corresponds, e.g. the "Query" or "Mutation" type. +func (g *generator) baseTypeForOperation(operation ast.Operation) (*ast.Definition, error) { + switch operation { + case ast.Query: + return g.schema.Query, nil + case ast.Mutation: + return g.schema.Mutation, nil + case ast.Subscription: + if !g.Config.AllowBrokenFeatures { + return nil, errorf(nil, "genqlient does not yet support subscriptions") + } + return g.schema.Subscription, nil + default: + return nil, errorf(nil, "unexpected operation: %v", operation) + } +} + +// convertOperation builds the response-type into which the given operation's +// result will be unmarshaled. +func (g *generator) convertOperation( + operation *ast.OperationDefinition, + queryOptions *genqlientDirective, +) (goType, error) { + name := operation.Name + "Response" + namePrefix := newPrefixList(operation.Name) + if queryOptions.TypeName != "" { + name = queryOptions.TypeName + namePrefix = newPrefixList(queryOptions.TypeName) + } + + baseType, err := g.baseTypeForOperation(operation.Operation) + if err != nil { + return nil, errorf(operation.Position, "%v", err) + } + + // Instead of calling out to convertType/convertDefinition, we do our own + // thing, because we want to do a few things differently, and because we + // know we have an object type, so we can include only that case. + fields, err := g.convertSelectionSet( + namePrefix, operation.SelectionSet, baseType, queryOptions) + if err != nil { + return nil, err + } + + // It's not common to use a fragment-spread for the whole query, but you + // can if you want two queries to return the same type! + if queryOptions.GetFlatten() { + i, err := validateFlattenOption(baseType, operation.SelectionSet, operation.Position) + if err == nil { + return fields[i].GoType, nil + } + } + + goType := &goStructType{ + GoName: name, + descriptionInfo: descriptionInfo{ + CommentOverride: fmt.Sprintf( + "%v is returned by %v on success.", name, operation.Name), + GraphQLName: baseType.Name, + // omit the GraphQL description for baseType; it's uninteresting. + }, + Fields: fields, + Selection: operation.SelectionSet, + Generator: g, + } + + return g.addType(goType, goType.GoName, operation.Position) +} + +var builtinTypes = map[string]string{ + // GraphQL guarantees int32 is enough, but using int seems more idiomatic + "Int": "int", + "Float": "float64", + "String": "string", + "Boolean": "bool", + "ID": "string", +} + +// convertArguments builds the type of the GraphQL arguments to the given +// operation. 
+// +// This type is not exposed to the user; it's just used internally in the +// unmarshaler; and it's used as a container +func (g *generator) convertArguments( + operation *ast.OperationDefinition, + queryOptions *genqlientDirective, +) (*goStructType, error) { + if len(operation.VariableDefinitions) == 0 { + return nil, nil + } + name := "__" + operation.Name + "Input" + fields := make([]*goStructField, len(operation.VariableDefinitions)) + for i, arg := range operation.VariableDefinitions { + if goKeywords[arg.Variable] { + return nil, errorf(arg.Position, "variable name must not be a go keyword") + } + + _, options, err := g.parsePrecedingComment(arg, nil, arg.Position, queryOptions) + if err != nil { + return nil, err + } + + goName := upperFirst(arg.Variable) + // Some of the arguments don't apply here, namely the name-prefix (see + // names.go) and the selection-set (we use all the input type's fields, + // and so on recursively). See also the `case ast.InputObject` in + // convertDefinition, below. + goTyp, err := g.convertType(nil, arg.Type, nil, options, queryOptions) + if err != nil { + return nil, err + } + + fields[i] = &goStructField{ + GoName: goName, + GoType: goTyp, + JSONName: arg.Variable, + GraphQLName: arg.Variable, + Omitempty: options.GetOmitempty(), + } + } + goTyp := &goStructType{ + GoName: name, + Fields: fields, + Selection: nil, + IsInput: true, + descriptionInfo: descriptionInfo{ + CommentOverride: fmt.Sprintf("%s is used internally by genqlient", name), + // fake name, used by addType + GraphQLName: name, + }, + Generator: g, + } + goTypAgain, err := g.addType(goTyp, goTyp.GoName, operation.Position) + if err != nil { + return nil, err + } + goTyp, ok := goTypAgain.(*goStructType) + if !ok { + return nil, errorf( + operation.Position, "internal error: input type was %T", goTypAgain) + } + return goTyp, nil +} + +// convertType decides the Go type we will generate corresponding to a +// particular GraphQL type. In this context, "type" represents the type of a +// field, and may be a list or a reference to a named type, with or without the +// "non-null" annotation. +func (g *generator) convertType( + namePrefix *prefixList, + typ *ast.Type, + selectionSet ast.SelectionSet, + options, queryOptions *genqlientDirective, +) (goType, error) { + // We check for local bindings here, so that you can bind, say, a + // `[String!]` to a struct instead of a slice. Global bindings can only + // bind GraphQL named types, at least for now. + localBinding := options.Bind + if localBinding != "" && localBinding != "-" { + goRef, err := g.ref(localBinding) + // TODO(benkraft): Add syntax to specify a custom (un)marshaler, if + // it proves useful. + return &goOpaqueType{GoRef: goRef, GraphQLName: typ.Name()}, err + } + + if typ.Elem != nil { + // Type is a list. + elem, err := g.convertType( + namePrefix, typ.Elem, selectionSet, options, queryOptions) + return &goSliceType{elem}, err + } + + // If this is a builtin type or custom scalar, just refer to it. + def := g.schema.Types[typ.Name()] + goTyp, err := g.convertDefinition( + namePrefix, def, typ.Position, selectionSet, options, queryOptions) + + if g.getStructReference(def) { + if options.Pointer == nil || *options.Pointer { + goTyp = &goPointerType{goTyp} + } + if options.Omitempty == nil || *options.Omitempty { + oe := true + options.Omitempty = &oe + } + } else if options.GetPointer() || (!typ.NonNull && g.Config.Optional == "pointer") { + // Whatever we get, wrap it in a pointer. 
(Because of the way the + // options work, recursing here isn't as convenient.) + // Note this does []*T or [][]*T, not e.g. *[][]T. See #16. + goTyp = &goPointerType{goTyp} + } + return goTyp, err +} + +// getStructReference decides if a field should be of pointer type and have the omitempty flag set. +func (g *generator) getStructReference( + def *ast.Definition, +) bool { + return g.Config.StructReferences && + (def.Kind == ast.Object || def.Kind == ast.InputObject) +} + +// convertDefinition decides the Go type we will generate corresponding to a +// particular GraphQL named type. +// +// In this context, "definition" (and "named type") refer to an +// *ast.Definition, which represents the definition of a type in the GraphQL +// schema, which may be referenced by a field-type (see convertType). +func (g *generator) convertDefinition( + namePrefix *prefixList, + def *ast.Definition, + pos *ast.Position, + selectionSet ast.SelectionSet, + options, queryOptions *genqlientDirective, +) (goType, error) { + // Check if we should use an existing type. (This is usually true for + // GraphQL scalars, but we allow you to bind non-scalar types too, if you + // want, subject to the caveats described in Config.Bindings.) Local + // bindings are checked in the caller (convertType) and never get here, + // unless the binding is "-" which means "ignore the global binding". + globalBinding, ok := g.Config.Bindings[def.Name] + if ok && options.Bind != "-" { + if options.TypeName != "" { + // The option position (in the query) is more useful here. + return nil, errorf(options.pos, + "typename option conflicts with global binding for %s; "+ + "use `bind: \"-\"` to override it", def.Name) + } + if def.Kind == ast.Object || def.Kind == ast.Interface || def.Kind == ast.Union { + err := g.validateBindingSelection( + def.Name, globalBinding, pos, selectionSet) + if err != nil { + return nil, err + } + } + goRef, err := g.ref(globalBinding.Type) + return &goOpaqueType{ + GoRef: goRef, + GraphQLName: def.Name, + Marshaler: globalBinding.Marshaler, + Unmarshaler: globalBinding.Unmarshaler, + }, err + } + goBuiltinName, ok := builtinTypes[def.Name] + if ok && options.TypeName == "" { + return &goOpaqueType{GoRef: goBuiltinName, GraphQLName: def.Name}, nil + } + + // Determine the name to use for this type. + var name string + if options.TypeName != "" { + if goKeywords[options.TypeName] { + return nil, errorf(pos, "typename option must not be a go keyword") + } + // If the user specified a name, use it! + name = options.TypeName + if namePrefix != nil && namePrefix.head == name && namePrefix.tail == nil { + // Special case: if this name is also the only component of the + // name-prefix, append the type-name anyway. This happens when you + // assign a type name to an interface type, and we are generating + // one of its implementations. + name = makeLongTypeName(namePrefix, def.Name) + } + // (But the prefix is shared.) + namePrefix = newPrefixList(options.TypeName) + } else if def.Kind == ast.InputObject || def.Kind == ast.Enum { + // If we're an input-object or enum, there is only one type we will + // ever possibly generate for this type, so we don't need any of the + // qualifiers. This is especially helpful because the caller is very + // likely to need to reference these types in their code. + name = upperFirst(def.Name) + // (namePrefix is ignored in this case.) + } else { + // Else, construct a name using the usual algorithm (see names.go).
+ name = makeTypeName(namePrefix, def.Name) + } + + // If we already generated the type, we can skip it as long as it matches + // (and must fail if it doesn't). (This can happen for input/enum types, + // types of fields of interfaces, when options.TypeName is set, or, of + // course, on invalid configuration or internal error.) + existing, err := g.getType(name, def.Name, selectionSet, pos) + if existing != nil || err != nil { + return existing, err + } + + desc := descriptionInfo{ + // TODO(benkraft): Copy any comment above this selection-set? + GraphQLDescription: def.Description, + GraphQLName: def.Name, + } + + // The struct option basically means "treat this as if it were an object". + // (It only applies if valid; this is important if you said the whole + // query should have `struct: true`.) + kind := def.Kind + if options.GetStruct() && validateStructOption(def, selectionSet, pos) == nil { + kind = ast.Object + } + switch kind { + case ast.Object: + fields, err := g.convertSelectionSet( + namePrefix, selectionSet, def, queryOptions) + if err != nil { + return nil, err + } + if options.GetFlatten() { + // As with struct, flatten only applies if valid, important if you + // applied it to the whole query. + // TODO(benkraft): This is a slightly fragile way to do this; + // figure out a good way to do it before/while constructing the + // fields, rather than after. + i, err := validateFlattenOption(def, selectionSet, pos) + if err == nil { + return fields[i].GoType, nil + } + } + + goType := &goStructType{ + GoName: name, + Fields: fields, + Selection: selectionSet, + descriptionInfo: desc, + Generator: g, + } + return g.addType(goType, goType.GoName, pos) + + case ast.InputObject: + goType := &goStructType{ + GoName: name, + Fields: make([]*goStructField, len(def.Fields)), + descriptionInfo: desc, + IsInput: true, + Generator: g, + } + // To handle recursive types, we need to add the type to the type-map + // *before* converting its fields. + _, err := g.addType(goType, goType.GoName, pos) + if err != nil { + return nil, err + } + + for i, field := range def.Fields { + _, fieldOptions, err := g.parsePrecedingComment( + field, def, field.Position, queryOptions) + if err != nil { + return nil, err + } + + goName := upperFirst(field.Name) + // Several of the arguments don't really make sense here: + // (note field.Type is necessarily a scalar, input, or enum) + // - namePrefix is ignored for input types and enums (see + // names.go) and for scalars (they use client-specified + // names) + // - selectionSet is ignored for input types, because we + // just use all fields of the type; and it's nonexistent + // for scalars and enums, our only other possible types + // TODO(benkraft): Can we refactor to avoid passing the values that + // will be ignored? We know field.Type is a scalar, enum, or input + // type. But plumbing that is a bit tricky in practice. + fieldGoType, err := g.convertType( + namePrefix, field.Type, nil, fieldOptions, queryOptions) + if err != nil { + return nil, err + } + + goType.Fields[i] = &goStructField{ + GoName: goName, + GoType: fieldGoType, + JSONName: field.Name, + GraphQLName: field.Name, + Description: field.Description, + Omitempty: fieldOptions.GetOmitempty(), + } + } + return goType, nil + + case ast.Interface, ast.Union: + sharedFields, err := g.convertSelectionSet( + namePrefix, selectionSet, def, queryOptions) + if err != nil { + return nil, err + } + // Flatten can only flatten if there is only one field (plus perhaps + // __typename), and it's shared. 
+ if options.GetFlatten() { + i, err := validateFlattenOption(def, selectionSet, pos) + if err == nil { + return sharedFields[i].GoType, nil + } + } + + implementationTypes := g.schema.GetPossibleTypes(def) + // Make sure we generate stable output by sorting the types by name when we get them + sort.Slice(implementationTypes, func(i, j int) bool { return implementationTypes[i].Name < implementationTypes[j].Name }) + goType := &goInterfaceType{ + GoName: name, + SharedFields: sharedFields, + Implementations: make([]*goStructType, len(implementationTypes)), + Selection: selectionSet, + descriptionInfo: desc, + } + + for i, implDef := range implementationTypes { + // TODO(benkraft): In principle we should skip generating a Go + // field for __typename each of these impl-defs if you didn't + // request it (and it was automatically added by + // preprocessQueryDocument). But in practice it doesn't really + // hurt, and would be extra work to avoid, so we just leave it. + implTyp, err := g.convertDefinition( + namePrefix, implDef, pos, selectionSet, options, queryOptions) + if err != nil { + return nil, err + } + + implStructTyp, ok := implTyp.(*goStructType) + if !ok { // (should never happen on a valid schema) + return nil, errorf( + pos, "interface %s had non-object implementation %s", + def.Name, implDef.Name) + } + goType.Implementations[i] = implStructTyp + } + return g.addType(goType, goType.GoName, pos) + + case ast.Enum: + goType := &goEnumType{ + GoName: name, + GraphQLName: def.Name, + Description: def.Description, + Values: make([]goEnumValue, len(def.EnumValues)), + } + for i, val := range def.EnumValues { + goType.Values[i] = goEnumValue{Name: val.Name, Description: val.Description} + } + return g.addType(goType, goType.GoName, pos) + + case ast.Scalar: + if builtinTypes[def.Name] != "" { + // In this case, the user asked for a custom Go type-name + // for a built-in type, e.g. `type MyString string`. + goType := &goTypenameForBuiltinType{ + GoTypeName: name, + GoBuiltinName: builtinTypes[def.Name], + GraphQLName: def.Name, + } + return g.addType(goType, goType.GoTypeName, pos) + } + + // (If you had an entry in bindings, we would have returned it above.) + return nil, errorf( + pos, "unknown scalar %v: please add it to \"bindings\" in genqlient.yaml"+ + "\nExample: https://github.com/Khan/genqlient/blob/main/example/genqlient.yaml#L12", def.Name) + default: + return nil, errorf(pos, "unexpected kind: %v", def.Kind) + } +} + +// convertSelectionSet converts a GraphQL selection-set into a list of +// corresponding Go struct-fields (and their Go types) +// +// A selection-set is a list of fields within braces like `{ myField }`, as +// appears at the toplevel of a query, in a field's sub-selections, or within +// an inline or named fragment. +// +// containingTypedef is the type-def whose fields we are selecting, and may be +// an object type or an interface type. In the case of interfaces, we'll call +// convertSelectionSet once for the interface, and once for each +// implementation. 
+func (g *generator) convertSelectionSet( + namePrefix *prefixList, + selectionSet ast.SelectionSet, + containingTypedef *ast.Definition, + queryOptions *genqlientDirective, +) ([]*goStructField, error) { + fields := make([]*goStructField, 0, len(selectionSet)) + for _, selection := range selectionSet { + _, selectionOptions, err := g.parsePrecedingComment( + selection, nil, selection.GetPosition(), queryOptions) + if err != nil { + return nil, err + } + + switch selection := selection.(type) { + case *ast.Field: + field, err := g.convertField( + namePrefix, selection, selectionOptions, queryOptions) + if err != nil { + return nil, err + } + fields = append(fields, field) + case *ast.FragmentSpread: + maybeField, err := g.convertFragmentSpread(selection, containingTypedef) + if err != nil { + return nil, err + } else if maybeField != nil { + fields = append(fields, maybeField) + } + case *ast.InlineFragment: + // (Note this will return nil, nil if the fragment doesn't apply to + // this type.) + fragmentFields, err := g.convertInlineFragment( + namePrefix, selection, containingTypedef, queryOptions) + if err != nil { + return nil, err + } + fields = append(fields, fragmentFields...) + default: + return nil, errorf(nil, "invalid selection type: %T", selection) + } + } + + // We need to deduplicate: if you asked for + // { id, id, id, ... on SubType { id } } + // (which, yes, is legal) we'll treat that as just { id }. + uniqFields := make([]*goStructField, 0, len(selectionSet)) + fragmentNames := make(map[string]bool, len(selectionSet)) + fieldNames := make(map[string]bool, len(selectionSet)) + for _, field := range fields { + // If you embed a field twice via a named fragment, we keep both, even + // if there are complicated overlaps, since they are separate types to + // us. (See also the special handling for IsEmbedded in + // unmarshal.go.tmpl.) + // + // But if you spread the same named fragment twice, e.g. + // { ...MyFragment, ... on SubType { ...MyFragment } } + // we'll still deduplicate that. + if field.JSONName == "" { + name := field.GoType.Reference() + if fragmentNames[name] { + continue + } + uniqFields = append(uniqFields, field) + fragmentNames[name] = true + continue + } + + // GraphQL (and, effectively, JSON) requires that all fields with the + // same alias (JSON-name) must be the same (i.e. refer to the same + // field), so that's how we deduplicate. + if fieldNames[field.JSONName] { + // GraphQL (and, effectively, JSON) forbids you from having two + // fields with the same alias (JSON-name) that refer to different + // GraphQL fields. But it does allow you to have the same field + // with different selections (subject to some additional rules). + // We say: that's too complicated! and allow duplicate fields + // only if they're "leaf" types (enum or scalar). + switch field.GoType.Unwrap().(type) { + case *goOpaqueType, *goEnumType: + // Leaf field; we can just deduplicate. + // Note GraphQL already guarantees that the conflicting field + // has scalar/enum type iff this field does: + // https://spec.graphql.org/draft/#SameResponseShape() + continue + case *goStructType, *goInterfaceType: + // TODO(benkraft): Keep track of the position of each + // selection, so we can put this error on the right line.
+ return nil, errorf(nil, + "genqlient doesn't allow duplicate fields with different selections "+ + "(see https://github.com/Khan/genqlient/issues/64); "+ + "duplicate field: %s.%s", containingTypedef.Name, field.JSONName) + default: + return nil, errorf(nil, "unexpected field-type: %T", field.GoType.Unwrap()) + } + } + uniqFields = append(uniqFields, field) + fieldNames[field.JSONName] = true + } + return uniqFields, nil +} + +// fragmentMatches returns true if the given fragment is "active" when applied +// to the given type. +// +// "Active" here means "the fragment's fields will be returned on all objects +// of the given type", which is true when the given type is or implements +// the fragment's type. This is distinct from the rules for when a fragment +// spread is legal, which is true when the fragment would be active for *any* +// of the concrete types the spread-context could have (see the [GraphQL spec] +// or docs/DESIGN.md). +// +// containingTypedef is as described in convertInlineFragment, below. +// fragmentTypedef is the definition of the fragment's type-condition, i.e. the +// definition of MyType in a fragment `on MyType`. +// +// [GraphQL spec]: https://spec.graphql.org/draft/#sec-Fragment-Spreads +func fragmentMatches(containingTypedef, fragmentTypedef *ast.Definition) bool { + if containingTypedef.Name == fragmentTypedef.Name { + return true + } + for _, iface := range containingTypedef.Interfaces { + // Note we don't need to recurse into the interfaces here, because in + // GraphQL types must list all the interfaces they implement, including + // all the interfaces those interfaces implement ([spec]). Actually, at + // present gqlparser doesn't even support interfaces implementing other + // interfaces, but our code would handle that too. + // + // [spec]: https://spec.graphql.org/draft/#sec-Interfaces.Interfaces-Implementing-Interfaces + if iface == fragmentTypedef.Name { + return true + } + } + return false +} + +// convertInlineFragment converts a single GraphQL inline fragment +// (`... on MyType { myField }`) into Go struct-fields. +// +// containingTypedef is the type-def corresponding to the type into which we +// are spreading; it may be either an interface type (when spreading into one) +// or an object type (when writing the implementations of such an interface, or +// when using an inline fragment in an object type, which is rare). If the +// given fragment does not apply to that type, this function returns nil, nil. +// +// In general, we treat such fragments' fields as if they were fields of the +// parent selection-set (except of course they are only included in types the +// fragment matches); see docs/DESIGN.md for more. +func (g *generator) convertInlineFragment( + namePrefix *prefixList, + fragment *ast.InlineFragment, + containingTypedef *ast.Definition, + queryOptions *genqlientDirective, +) ([]*goStructField, error) { + // You might think fragmentTypedef is just fragment.ObjectDefinition, but + // actually that's the type into which the fragment is spread. + fragmentTypedef := g.schema.Types[fragment.TypeCondition] + if !fragmentMatches(containingTypedef, fragmentTypedef) { + return nil, nil + } + return g.convertSelectionSet(namePrefix, fragment.SelectionSet, + containingTypedef, queryOptions) +} + +// convertFragmentSpread converts a single GraphQL fragment-spread +// (`...MyFragment`) into a Go struct-field. If the fragment does not apply to +// this type, returns nil. +// +// containingTypedef is as described in convertInlineFragment, above.
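+// For example (a rough sketch), spreading ...MyFragment into a matching +// selection produces an embedded field: the returned goStructField has an +// empty GoName, so the generated struct simply embeds the fragment's type, +// as in struct { MyFragment }.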
+func (g *generator) convertFragmentSpread( + fragmentSpread *ast.FragmentSpread, + containingTypedef *ast.Definition, +) (*goStructField, error) { + if !fragmentMatches(containingTypedef, fragmentSpread.Definition.Definition) { + return nil, nil + } + + typ, ok := g.typeMap[fragmentSpread.Name] + if !ok { + // If we haven't yet, convert the fragment itself. Note that fragments + // aren't allowed to have cycles, so this won't recurse forever. + var err error + typ, err = g.convertNamedFragment(fragmentSpread.Definition) + if err != nil { + return nil, err + } + } + + iface, ok := typ.(*goInterfaceType) + if ok && containingTypedef.Kind == ast.Object { + // If the containing type is concrete, and the fragment spread is + // abstract, refer directly to the appropriate implementation, to save + // the caller having to do type-assertions that will always succeed. + // + // That is, if you do + // fragment F on I { ... } + // query Q { a { ...F } } + // for the fragment we generate + // type F interface { ... } + // type FA struct { ... } + // // (other implementations) + // when you spread F into a context of type A, we embed FA, not F. + for _, impl := range iface.Implementations { + if impl.GraphQLName == containingTypedef.Name { + typ = impl + } + } + } + + // TODO(benkraft): Set directive here if we ever allow @genqlient + // directives on fragment-spreads. + return &goStructField{GoName: "" /* i.e. embedded */, GoType: typ}, nil +} + +// convertNamedFragment converts a single GraphQL named fragment-definition +// (`fragment MyFragment on MyType { ... }`) into a Go struct. +func (g *generator) convertNamedFragment(fragment *ast.FragmentDefinition) (goType, error) { + typ := g.schema.Types[fragment.TypeCondition] + + comment, directive, err := g.parsePrecedingComment(fragment, nil, fragment.Position, nil) + if err != nil { + return nil, err + } + + desc := descriptionInfo{ + CommentOverride: comment, + GraphQLName: typ.Name, + GraphQLDescription: typ.Description, + FragmentName: fragment.Name, + } + + // The rest basically follows how we convert a definition, except that + // things like type-names are a bit different. + + fields, err := g.convertSelectionSet( + newPrefixList(fragment.Name), fragment.SelectionSet, typ, directive) + if err != nil { + return nil, err + } + if directive.GetFlatten() { + // Flatten on a fragment-definition is a bit weird -- it makes one + // fragment effectively an alias for another -- but no reason we can't + // allow it. 
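+ // (For example, flattening fragment A on T { ...B } makes A's Go type + // simply B's generated type, rather than a struct embedding B.)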
+ i, err := validateFlattenOption(typ, fragment.SelectionSet, fragment.Position) + if err == nil { + return fields[i].GoType, nil + } + } + + switch typ.Kind { + case ast.Object: + goType := &goStructType{ + GoName: fragment.Name, + Fields: fields, + Selection: fragment.SelectionSet, + descriptionInfo: desc, + Generator: g, + } + g.typeMap[fragment.Name] = goType + return goType, nil + case ast.Interface, ast.Union: + implementationTypes := g.schema.GetPossibleTypes(typ) + goType := &goInterfaceType{ + GoName: fragment.Name, + SharedFields: fields, + Implementations: make([]*goStructType, len(implementationTypes)), + Selection: fragment.SelectionSet, + descriptionInfo: desc, + } + g.typeMap[fragment.Name] = goType + + for i, implDef := range implementationTypes { + implFields, err := g.convertSelectionSet( + newPrefixList(fragment.Name), fragment.SelectionSet, implDef, directive) + if err != nil { + return nil, err + } + + implDesc := desc + implDesc.GraphQLName = implDef.Name + + implTyp := &goStructType{ + GoName: fragment.Name + upperFirst(implDef.Name), + Fields: implFields, + Selection: fragment.SelectionSet, + descriptionInfo: implDesc, + Generator: g, + } + goType.Implementations[i] = implTyp + g.typeMap[implTyp.GoName] = implTyp + } + + return goType, nil + default: + return nil, errorf(fragment.Position, "invalid type for fragment: %v is a %v", + fragment.TypeCondition, typ.Kind) + } +} + +// convertField converts a single GraphQL operation-field into a Go +// struct-field (and its type). +// +// Note that input-type fields are handled separately (inline in +// convertDefinition), because they come from the type-definition, not the +// operation. +func (g *generator) convertField( + namePrefix *prefixList, + field *ast.Field, + fieldOptions, queryOptions *genqlientDirective, +) (*goStructField, error) { + if field.Definition == nil { + // Unclear why gqlparser hasn't already rejected this, + // but empirically it might not. + return nil, errorf( + field.Position, "undefined field %v", field.Alias) + } + + goName := upperFirst(field.Alias) + namePrefix = nextPrefix(namePrefix, field) + + fieldGoType, err := g.convertType( + namePrefix, field.Definition.Type, field.SelectionSet, + fieldOptions, queryOptions) + if err != nil { + return nil, err + } + + return &goStructField{ + GoName: goName, + GoType: fieldGoType, + JSONName: field.Alias, + GraphQLName: field.Name, + Description: field.Definition.Description, + }, nil +} diff --git a/vendor/github.com/Khan/genqlient/generate/default_genqlient.yaml b/vendor/github.com/Khan/genqlient/generate/default_genqlient.yaml new file mode 100644 index 00000000..bfc5f265 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/default_genqlient.yaml @@ -0,0 +1,6 @@ +# Default genqlient config; for full documentation see: +# https://github.com/Khan/genqlient/blob/main/docs/genqlient.yaml +schema: schema.graphql +operations: +- genqlient.graphql +generated: generated.go diff --git a/vendor/github.com/Khan/genqlient/generate/description.go b/vendor/github.com/Khan/genqlient/generate/description.go new file mode 100644 index 00000000..13581771 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/description.go @@ -0,0 +1,81 @@ +package generate + +// Code relating to generating GoDoc from GraphQL descriptions. +// +// For fields, and types where we just copy the "whole" type (enum and +// input-object), this is easy: we just use the GraphQL description. But for +// struct types, there are often more useful things we can say. 
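+// For example, structDescription below yields doc-comments roughly like +// "MyQueryUser includes the requested fields of the GraphQL type User.", +// optionally followed by the type's own GraphQL description (the type-names +// here are illustrative).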
+ +import ( + "fmt" + "strings" +) + +// descriptionInfo is embedded in types whose descriptions may be more complex +// than just a copy of the GraphQL doc. +type descriptionInfo struct { + // user-specified comment for this type + CommentOverride string + // name of the corresponding GraphQL type + GraphQLName string + // GraphQL schema's description of the type .GraphQLName, if any + GraphQLDescription string + // name of the corresponding GraphQL fragment (on .GraphQLName), if any + FragmentName string +} + +func maybeAddTypeDescription(info descriptionInfo, description string) string { + if info.GraphQLDescription == "" { + return description + } + return fmt.Sprintf( + "%v\nThe GraphQL type's documentation follows.\n\n%v", + description, info.GraphQLDescription) +} + +func fragmentDescription(info descriptionInfo) string { + return maybeAddTypeDescription(info, fmt.Sprintf( + "%v includes the GraphQL fields of %v requested by the fragment %v.", + info.FragmentName, info.GraphQLName, info.FragmentName)) +} + +func structDescription(typ *goStructType) string { + switch { + case typ.CommentOverride != "": + return typ.CommentOverride + case typ.IsInput: + // Input types have all their fields, just use the GraphQL description. + return typ.GraphQLDescription + case typ.FragmentName != "": + return fragmentDescription(typ.descriptionInfo) + default: + // For types where we only have some fields, note that, along with + // the GraphQL documentation (if any). We don't want to just use + // the GraphQL documentation, since it may refer to fields we + // haven't selected, say. + return maybeAddTypeDescription(typ.descriptionInfo, fmt.Sprintf( + "%v includes the requested fields of the GraphQL type %v.", + typ.GoName, typ.GraphQLName)) + } +} + +func interfaceDescription(typ *goInterfaceType) string { + goImplNames := make([]string, len(typ.Implementations)) + for i, impl := range typ.Implementations { + goImplNames[i] = impl.Reference() + } + implementationList := fmt.Sprintf( + "\n\n%v is implemented by the following types:\n\t%v", + typ.GoName, strings.Join(goImplNames, "\n\t")) + + switch { + case typ.CommentOverride != "": + return typ.CommentOverride + implementationList + case typ.FragmentName != "": + return fragmentDescription(typ.descriptionInfo) + implementationList + default: + return maybeAddTypeDescription(typ.descriptionInfo, fmt.Sprintf( + "%v includes the requested fields of the GraphQL interface %v.%v", + typ.GoName, typ.GraphQLName, implementationList)) + } +} diff --git a/vendor/github.com/Khan/genqlient/generate/errors.go b/vendor/github.com/Khan/genqlient/generate/errors.go new file mode 100644 index 00000000..d732fe44 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/errors.go @@ -0,0 +1,181 @@ +package generate + +import ( + "bytes" + "errors" + "fmt" + "go/scanner" + "math" + "strconv" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +type errorPos struct { + filename string + line, col int +} + +func (pos *errorPos) String() string { + filename, lineOffset := splitFilename(pos.filename) + line := lineOffset + pos.line + if line != 0 { + return fmt.Sprintf("%v:%v", filename, line) + } else { + return filename + } +} + +type genqlientError struct { + pos *errorPos + msg string + wrapped error +} + +func splitFilename(filename string) (name string, lineOffset int) { + split := strings.Split(filename, ":") + if len(split) != 2 { + return filename, 0 + } + + offset, err := strconv.Atoi(split[1]) + if err != 
nil { + return split[0], 0 + } + return split[0], offset - 1 +} + +func (err *genqlientError) Error() string { + if err.pos != nil { + return err.pos.String() + ": " + err.msg + } else { + return err.msg + } +} + +func (err *genqlientError) Unwrap() error { + return err.wrapped +} + +func errorf(pos *ast.Position, msg string, args ...interface{}) error { + // TODO: alternately accept a filename only, or maybe even a go-parser pos + + // We do all our own wrapping, because if the wrapped error already has a + // pos, we want to extract it out and put it at the front, not in the + // middle. + + var wrapped error + var wrapIndex int + for i, arg := range args { + if wrapped == nil { + var ok bool + wrapped, ok = arg.(error) + if ok { + wrapIndex = i + } + } + } + + var wrappedGenqlient *genqlientError + isGenqlient := errors.As(wrapped, &wrappedGenqlient) + var wrappedGraphQL *gqlerror.Error + isGraphQL := errors.As(wrapped, &wrappedGraphQL) + if !isGraphQL { + var wrappedGraphQLList gqlerror.List + isGraphQLList := errors.As(wrapped, &wrappedGraphQLList) + if isGraphQLList && len(wrappedGraphQLList) > 0 { + isGraphQL = true + wrappedGraphQL = wrappedGraphQLList[0] + } + } + + var errPos *errorPos + if pos != nil { + errPos = &errorPos{ + filename: pos.Src.Name, + line: pos.Line, + col: pos.Column, + } + } else if isGenqlient { + errPos = wrappedGenqlient.pos + } else if isGraphQL { + filename, _ := wrappedGraphQL.Extensions["file"].(string) + if filename != "" { + var loc gqlerror.Location + if len(wrappedGraphQL.Locations) > 0 { + loc = wrappedGraphQL.Locations[0] + } + errPos = &errorPos{ + filename: filename, + line: loc.Line, + col: loc.Column, + } + } + } + + if wrapped != nil { + errText := wrapped.Error() + if isGenqlient { + errText = wrappedGenqlient.msg + } else if isGraphQL { + errText = wrappedGraphQL.Message + } + args[wrapIndex] = errText + } + + msg = fmt.Sprintf(msg, args...) + + return &genqlientError{ + msg: msg, + pos: errPos, + wrapped: wrapped, + } +} + +// goSourceError processes the error(s) returned by go tooling (gofmt, etc.) +// into a nice error message. +// +// In practice, such errors are genqlient internal errors, but it's still +// useful to format them nicely for debugging. +func goSourceError( + failedOperation string, // e.g.
"gofmt", for the error message + source []byte, + err error, +) error { + var errTexts []string + var scanErrs scanner.ErrorList + var scanErr *scanner.Error + var badLines map[int]bool + + if errors.As(err, &scanErrs) { + errTexts = make([]string, len(scanErrs)) + badLines = make(map[int]bool, len(scanErrs)) + for i, scanErr := range scanErrs { + errTexts[i] = scanErr.Error() + badLines[scanErr.Pos.Line] = true + } + } else if errors.As(err, &scanErr) { + errTexts = []string{scanErr.Error()} + badLines = map[int]bool{scanErr.Pos.Line: true} + } else { + errTexts = []string{err.Error()} + } + + lines := bytes.SplitAfter(source, []byte("\n")) + lineNoWidth := int(math.Ceil(math.Log10(float64(len(lines) + 1)))) + for i, line := range lines { + prefix := " " + if badLines[i] { + prefix = "> " + } + lineNo := strconv.Itoa(i + 1) + padding := strings.Repeat(" ", lineNoWidth-len(lineNo)) + lines[i] = []byte(fmt.Sprintf("%s%s%s | %s", prefix, padding, lineNo, line)) + } + + return errorf(nil, + "genqlient internal error: failed to %s code:\n\t%s---source code---\n%s", + failedOperation, strings.Join(errTexts, "\n\t"), bytes.Join(lines, nil)) +} diff --git a/vendor/github.com/Khan/genqlient/generate/generate.go b/vendor/github.com/Khan/genqlient/generate/generate.go new file mode 100644 index 00000000..22ee618d --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/generate.go @@ -0,0 +1,429 @@ +package generate + +// This file implements the main entrypoint and framework for the genqlient +// code-generation process. See comments in Generate for the high-level +// overview. + +import ( + "bytes" + "encoding/json" + "go/format" + "io" + "sort" + "strings" + "text/template" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/formatter" + "github.com/vektah/gqlparser/v2/validator" + "golang.org/x/tools/imports" +) + +// generator is the context for the codegen process (and ends up getting passed +// to the template). +type generator struct { + // The config for which we are generating code. + Config *Config + // The list of operations for which to generate code. + Operations []*operation + // The types needed for these operations. + typeMap map[string]goType + // Imports needed for these operations, path -> alias and alias -> true + imports map[string]string + usedAliases map[string]bool + // True if we've already written out the imports (in which case they can't + // be modified). + importsLocked bool + // Cache of loaded templates. + templateCache map[string]*template.Template + // Schema we are generating code against + schema *ast.Schema + // Named fragments (map by name), so we can look them up from spreads. + // TODO(benkraft): In theory we shouldn't need this, we can just use + // ast.FragmentSpread.Definition, but for some reason it doesn't seem to be + // set consistently, even post-validation. + fragments map[string]*ast.FragmentDefinition +} + +// JSON tags in operation are for ExportOperations (see Config for details). +type operation struct { + // The type of the operation (query, mutation, or subscription). + Type ast.Operation `json:"-"` + // The name of the operation, from GraphQL. + Name string `json:"operationName"` + // The documentation for the operation, from GraphQL. + Doc string `json:"-"` + // The body of the operation to send. + Body string `json:"query"` + // The type of the argument to the operation, which we use both internally + // and to construct the arguments.
We do it this way so we can use the + // machinery we have for handling (and, specifically, json-marshaling) + // types. + Input *goStructType `json:"-"` + // The type-name for the operation's response type. + ResponseName string `json:"-"` + // The original filename from which we got this query. + SourceFilename string `json:"sourceLocation"` + // The config within which we are generating code. + Config *Config `json:"-"` +} + +type exportedOperations struct { + Operations []*operation `json:"operations"` +} + +func newGenerator( + config *Config, + schema *ast.Schema, + fragments ast.FragmentDefinitionList, +) *generator { + g := generator{ + Config: config, + typeMap: map[string]goType{}, + imports: map[string]string{}, + usedAliases: map[string]bool{}, + templateCache: map[string]*template.Template{}, + schema: schema, + fragments: make(map[string]*ast.FragmentDefinition, len(fragments)), + } + + for _, fragment := range fragments { + g.fragments[fragment.Name] = fragment + } + + return &g +} + +func (g *generator) WriteTypes(w io.Writer) error { + names := make([]string, 0, len(g.typeMap)) + for name := range g.typeMap { + names = append(names, name) + } + // Sort alphabetically by type-name. Sorting somehow deterministically is + // important to ensure generated code is deterministic. Alphabetical is + // nice because it's easy, and in the current naming scheme, it's even + // vaguely aligned to the structure of the queries. + sort.Strings(names) + + for _, name := range names { + err := g.typeMap[name].WriteDefinition(w, g) + if err != nil { + return err + } + // Make sure we have blank lines between types (and between the last + // type and the first operation) + _, err = io.WriteString(w, "\n\n") + if err != nil { + return err + } + } + return nil +} + +// usedFragmentNames returns the named-fragments used by (i.e. spread into) +// this operation. +func (g *generator) usedFragments(op *ast.OperationDefinition) ast.FragmentDefinitionList { + var retval, queue ast.FragmentDefinitionList + seen := map[string]bool{} + + var observers validator.Events + // Fragment-spreads are easy to find; just ask for them! + observers.OnFragmentSpread(func(_ *validator.Walker, fragmentSpread *ast.FragmentSpread) { + if seen[fragmentSpread.Name] { + return + } + def := g.fragments[fragmentSpread.Name] + seen[fragmentSpread.Name] = true + retval = append(retval, def) + queue = append(queue, def) + }) + + doc := ast.QueryDocument{Operations: ast.OperationList{op}} + validator.Walk(g.schema, &doc, &observers) + // Well, easy-ish: we also have to look recursively. + // Note GraphQL guarantees there are no cycles among fragments: + // https://spec.graphql.org/draft/#sec-Fragment-spreads-must-not-form-cycles + for len(queue) > 0 { + doc = ast.QueryDocument{Fragments: ast.FragmentDefinitionList{queue[0]}} + validator.Walk(g.schema, &doc, &observers) // traversal is the same + queue = queue[1:] + } + + return retval +} + +// Preprocess each query to make any changes that genqlient needs. +// +// At present, the only change is that we add __typename, if not already +// requested, to each field of interface type, so we can use the right types +// when unmarshaling. +func (g *generator) preprocessQueryDocument(doc *ast.QueryDocument) { + var observers validator.Events + // We want to ensure that everywhere you ask for some list of fields (a + // selection-set) from an interface (or union) type, you ask for its + // __typename field. 
There are four places we might find a selection-set: + // at the toplevel of a query, on a field, or in an inline or named + // fragment. The toplevel of a query must be an object type, so we don't + // need to consider that. And fragments must (if used at all) be spread + // into some parent selection-set, so we'll add __typename there (if + // needed). Note this does mean abstract-typed fragments spread into + // object-typed scope will *not* have access to `__typename`, but they + // indeed don't need it, since we do know the type in that context. + // TODO(benkraft): We should omit __typename if you asked for + // `# @genqlient(struct: true)`. + observers.OnField(func(_ *validator.Walker, field *ast.Field) { + // We are interested in a field from the query like + // field { subField ... } + // where the schema looks like + // type ... { # or interface/union + // field: FieldType # or [FieldType!]! etc. + // } + // interface FieldType { # or union + // subField: ... + // } + // If FieldType is an interface/union, and none of the subFields is + // __typename, we want to change the query to + // field { __typename subField ... } + + fieldType := g.schema.Types[field.Definition.Type.Name()] + if fieldType.Kind != ast.Interface && fieldType.Kind != ast.Union { + return // a concrete type + } + + hasTypename := false + for _, selection := range field.SelectionSet { + // Check if we already selected __typename. We ignore fragments, + // because we want __typename as a toplevel field. + subField, ok := selection.(*ast.Field) + if ok && subField.Name == "__typename" { + hasTypename = true + } + } + if !hasTypename { + // Ok, we need to add the field! + field.SelectionSet = append(ast.SelectionSet{ + &ast.Field{ + Alias: "__typename", Name: "__typename", + // Fake definition for the magic field __typename cribbed + // from gqlparser's validator/walk.go, equivalent to + // __typename: String + // TODO(benkraft): This should in principle be + // __typename: String! + // But genqlient doesn't care, so we just match gqlparser. + Definition: &ast.FieldDefinition{ + Name: "__typename", + Type: ast.NamedType("String", nil /* pos */), + }, + // Definition of the object that contains this field, i.e. + // FieldType. + ObjectDefinition: fieldType, + }, + }, field.SelectionSet...) + } + }) + validator.Walk(g.schema, doc, &observers) +} + +// validateOperation checks for a few classes of operations that gqlparser +// considers valid but we don't allow, and returns an error if this operation +// is invalid for genqlient's purposes. +func (g *generator) validateOperation(op *ast.OperationDefinition) error { + _, err := g.baseTypeForOperation(op.Operation) + if err != nil { + // (e.g. operation has subscriptions, which we don't support) + return err + } + + if op.Name == "" { + return errorf(op.Position, "operations must have operation-names") + } else if goKeywords[op.Name] { + return errorf(op.Position, "operation name must not be a go keyword") + } + + return nil +} + +// addOperation adds to g.Operations the information needed to generate a +// genqlient entrypoint function for the given operation. It also adds to +// g.typeMap any types referenced by the operation, except for types belonging +// to named fragments, which are added separately by Generate via +// convertFragment. 
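+// For example (an illustrative sketch; the exact signature depends on +// configuration such as context_type), an operation +// query GetUser($id: ID!) { user(id: $id) { name } } +// yields an entrypoint roughly like +// func GetUser(ctx context.Context, client graphql.Client, id string) (*GetUserResponse, error)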
+func (g *generator) addOperation(op *ast.OperationDefinition) error { + if err := g.validateOperation(op); err != nil { + return err + } + + queryDoc := &ast.QueryDocument{ + Operations: ast.OperationList{op}, + Fragments: g.usedFragments(op), + } + g.preprocessQueryDocument(queryDoc) + + var builder strings.Builder + f := formatter.NewFormatter(&builder) + f.FormatQueryDocument(queryDoc) + + commentLines, directive, err := g.parsePrecedingComment(op, nil, op.Position, nil) + if err != nil { + return err + } + + inputType, err := g.convertArguments(op, directive) + if err != nil { + return err + } + + responseType, err := g.convertOperation(op, directive) + if err != nil { + return err + } + + var docComment string + if len(commentLines) > 0 { + docComment = "// " + strings.ReplaceAll(commentLines, "\n", "\n// ") + } + + // If the filename is a pseudo-filename filename.go:startline, just + // put the filename in the export; we don't figure out the line offset + // anyway, and if you want to check those exports in they will change a + // lot if they have line numbers. + // TODO: refactor to use the errorPos machinery for this + sourceFilename := op.Position.Src.Name + if i := strings.LastIndex(sourceFilename, ":"); i != -1 { + sourceFilename = sourceFilename[:i] + } + + g.Operations = append(g.Operations, &operation{ + Type: op.Operation, + Name: op.Name, + Doc: docComment, + // The newline just makes it format a little nicer. We add it here + // rather than in the template so exported operations will match + // *exactly* what we send to the server. + Body: "\n" + builder.String(), + Input: inputType, + ResponseName: responseType.Reference(), + SourceFilename: sourceFilename, + Config: g.Config, // for the convenience of the template + }) + + return nil +} + +// Generate is the main programmatic entrypoint to genqlient, and generates and +// returns Go source code based on the given configuration. +// +// See [Config] for more on creating a configuration. The return value is a +// map from filename to the generated file-content (e.g. Go source). Callers +// who don't want to manage reading and writing the files should call [Main]. +func Generate(config *Config) (map[string][]byte, error) { + // Step 1: Read in the schema and operations from the files defined by the + // config (and validate the operations against the schema). This is all + // defined in parse.go. + schema, err := getSchema(config.Schema) + if err != nil { + return nil, err + } + + document, err := getAndValidateQueries(config.baseDir, config.Operations, schema) + if err != nil { + return nil, err + } + + // TODO(benkraft): we could also allow this, and generate an empty file + // with just the package-name, if it turns out to be more convenient that + // way. (As-is, we generate a broken file, with just (unused) imports.) + if len(document.Operations) == 0 { + // Hard to have a position when there are no operations :( + return nil, errorf(nil, + "no queries found, looked in: %v (configure this in genqlient.yaml)", + strings.Join(config.Operations, ", ")) + } + + // Step 2: For each operation and fragment, convert it into data structures + // representing Go types (defined in types.go). The bulk of this logic is + // in convert.go, and it additionally updates g.typeMap to include all the + // types it needs. + g := newGenerator(config, schema, document.Fragments) + for _, op := range document.Operations { + if err = g.addOperation(op); err != nil { + return nil, err + } + } + + // Step 3: Glue it all together! 
+ // + // First, write the types (from g.typeMap) and operations to a temporary + // buffer, since they affect what imports we'll put in the header. + var bodyBuf bytes.Buffer + err = g.WriteTypes(&bodyBuf) + if err != nil { + return nil, err + } + + // Sort operations to guarantee a stable order + sort.Slice(g.Operations, func(i, j int) bool { + return g.Operations[i].Name < g.Operations[j].Name + }) + + for _, operation := range g.Operations { + err = g.render("operation.go.tmpl", &bodyBuf, operation) + if err != nil { + return nil, err + } + } + + // The header also needs to reference some context types, which it does + // after it writes the imports, so we need to preregister those imports. + if g.Config.ContextType != "-" { + _, err = g.ref("context.Context") + if err != nil { + return nil, err + } + if g.Config.ContextType != "context.Context" { + _, err = g.ref(g.Config.ContextType) + if err != nil { + return nil, err + } + } + } + + // Now really glue it all together, and format. + var buf bytes.Buffer + err = g.render("header.go.tmpl", &buf, g) + if err != nil { + return nil, err + } + _, err = io.Copy(&buf, &bodyBuf) + if err != nil { + return nil, err + } + + unformatted := buf.Bytes() + formatted, err := format.Source(unformatted) + if err != nil { + return nil, goSourceError("gofmt", unformatted, err) + } + importsed, err := imports.Process(config.Generated, formatted, nil) + if err != nil { + return nil, goSourceError("goimports", formatted, err) + } + + retval := map[string][]byte{ + config.Generated: importsed, + } + + if config.ExportOperations != "" { + // We use MarshalIndent so that the file is human-readable and + // slightly more likely to be git-mergeable (if you check it in). In + // general it's never going to be used anywhere where space is an + // issue -- it doesn't go in your binary or anything. + retval[config.ExportOperations], err = json.MarshalIndent( + exportedOperations{Operations: g.Operations}, "", " ") + if err != nil { + return nil, errorf(nil, "unable to export queries: %v", err) + } + } + + return retval, nil +} diff --git a/vendor/github.com/Khan/genqlient/generate/genqlient_directive.go b/vendor/github.com/Khan/genqlient/generate/genqlient_directive.go new file mode 100644 index 00000000..af0110b8 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/genqlient_directive.go @@ -0,0 +1,531 @@ +package generate + +import ( + "fmt" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/parser" +) + +// Represents the genqlient directive, described in detail in +// docs/genqlient_directive.graphql. +type genqlientDirective struct { + pos *ast.Position + Omitempty *bool + Pointer *bool + Struct *bool + Flatten *bool + Bind string + TypeName string + // FieldDirectives contains the directives to be + // applied to specific fields via the "for" option. + // Map from type-name -> field-name -> directive. + FieldDirectives map[string]map[string]*genqlientDirective +} + +func newGenqlientDirective(pos *ast.Position) *genqlientDirective { + return &genqlientDirective{ + pos: pos, + FieldDirectives: make(map[string]map[string]*genqlientDirective), + } +} + +// Helper for String, returns the directive but without the @genqlient(). 
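+// For example, for a directive parsed from # @genqlient(pointer: true, bind: "time.Time") +// this returns "pointer: true, bind: time.Time".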
+func (dir *genqlientDirective) argsString() string { + var parts []string + if dir.Omitempty != nil { + parts = append(parts, fmt.Sprintf("omitempty: %v", *dir.Omitempty)) + } + if dir.Pointer != nil { + parts = append(parts, fmt.Sprintf("pointer: %v", *dir.Pointer)) + } + if dir.Struct != nil { + parts = append(parts, fmt.Sprintf("struct: %v", *dir.Struct)) + } + if dir.Flatten != nil { + parts = append(parts, fmt.Sprintf("flatten: %v", *dir.Flatten)) + } + if dir.Bind != "" { + parts = append(parts, fmt.Sprintf("bind: %v", dir.Bind)) + } + if dir.TypeName != "" { + parts = append(parts, fmt.Sprintf("typename: %v", dir.TypeName)) + } + return strings.Join(parts, ", ") +} + +// String is useful for debugging. +func (dir *genqlientDirective) String() string { + lines := []string{fmt.Sprintf("@genqlient(%s)", dir.argsString())} + for typeName, dirs := range dir.FieldDirectives { + for fieldName, fieldDir := range dirs { + lines = append(lines, fmt.Sprintf("@genqlient(for: %s.%s, %s)", + typeName, fieldName, fieldDir.argsString())) + } + } + return strings.Join(lines, "\n") +} + +func (dir *genqlientDirective) GetOmitempty() bool { return dir.Omitempty != nil && *dir.Omitempty } +func (dir *genqlientDirective) GetPointer() bool { return dir.Pointer != nil && *dir.Pointer } +func (dir *genqlientDirective) GetStruct() bool { return dir.Struct != nil && *dir.Struct } +func (dir *genqlientDirective) GetFlatten() bool { return dir.Flatten != nil && *dir.Flatten } + +func setBool(optionName string, dst **bool, v *ast.Value, pos *ast.Position) error { + if *dst != nil { + return errorf(pos, "conflicting values for %v", optionName) + } + ei, err := v.Value(nil) // no vars allowed + if err != nil { + return errorf(pos, "invalid boolean value %v: %v", v, err) + } + if b, ok := ei.(bool); ok { + *dst = &b + return nil + } + return errorf(pos, "expected boolean, got non-boolean value %T(%v)", ei, ei) +} + +func setString(optionName string, dst *string, v *ast.Value, pos *ast.Position) error { + if *dst != "" { + return errorf(pos, "conflicting values for %v", optionName) + } + ei, err := v.Value(nil) // no vars allowed + if err != nil { + return errorf(pos, "invalid string value %v: %v", v, err) + } + if b, ok := ei.(string); ok { + *dst = b + return nil + } + return errorf(pos, "expected string, got non-string value %T(%v)", ei, ei) +} + +// add adds to this genqlientDirective struct the settings from the given +// GraphQL directive. +// +// If multiple genqlient directives are applied to the same node, +// e.g. +// +// # @genqlient(...) +// # @genqlient(...) +// +// add will be called several times. In this case, conflicts between the +// options are an error. +func (dir *genqlientDirective) add(graphQLDirective *ast.Directive, pos *ast.Position) error { + if graphQLDirective.Name != "genqlient" { + // Actually we just won't get here; we only get here if the line starts + // with "# @genqlient", unless there's some sort of bug. + return errorf(pos, "the only valid comment-directive is @genqlient, got %v", graphQLDirective.Name) + } + + // First, see if this directive has a "for" option; + // if it does, the rest of our work will operate on the + // appropriate place in FieldDirectives.
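+ // (For example, # @genqlient(for: "MyInput.myField", omitempty: true) + // stores an omitempty-directive under FieldDirectives["MyInput"]["myField"].)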
+ var err error + forField := "" + for _, arg := range graphQLDirective.Arguments { + if arg.Name == "for" { + if forField != "" { + return errorf(pos, `@genqlient directive had "for:" twice`) + } + err = setString("for", &forField, arg.Value, pos) + if err != nil { + return err + } + } + } + if forField != "" { + forParts := strings.Split(forField, ".") + if len(forParts) != 2 { + return errorf(pos, `for must be of the form "MyType.myField"`) + } + typeName, fieldName := forParts[0], forParts[1] + + fieldDir := newGenqlientDirective(pos) + if dir.FieldDirectives[typeName] == nil { + dir.FieldDirectives[typeName] = make(map[string]*genqlientDirective) + } + dir.FieldDirectives[typeName][fieldName] = fieldDir + + // Now, the rest of the function will operate on fieldDir. + dir = fieldDir + } + + // Now parse the rest of the arguments. + for _, arg := range graphQLDirective.Arguments { + switch arg.Name { + // TODO(benkraft): Use reflect and struct tags? + case "omitempty": + err = setBool("omitempty", &dir.Omitempty, arg.Value, pos) + case "pointer": + err = setBool("pointer", &dir.Pointer, arg.Value, pos) + case "struct": + err = setBool("struct", &dir.Struct, arg.Value, pos) + case "flatten": + err = setBool("flatten", &dir.Flatten, arg.Value, pos) + case "bind": + err = setString("bind", &dir.Bind, arg.Value, pos) + case "typename": + err = setString("typename", &dir.TypeName, arg.Value, pos) + case "for": + // handled above + default: + return errorf(pos, "unknown argument %v for @genqlient", arg.Name) + } + if err != nil { + return err + } + } + + return nil +} + +func (dir *genqlientDirective) validate(node interface{}, schema *ast.Schema) error { + // TODO(benkraft): This function has a lot of duplicated checks, figure out + // how to organize them better to avoid the duplication. + for typeName, byField := range dir.FieldDirectives { + typ, ok := schema.Types[typeName] + if !ok { + return errorf(dir.pos, `for got invalid type-name "%s"`, typeName) + } + for fieldName, fieldDir := range byField { + var field *ast.FieldDefinition + for _, typeField := range typ.Fields { + if typeField.Name == fieldName { + field = typeField + break + } + } + if field == nil { + return errorf(fieldDir.pos, + `for got invalid field-name "%s" for type "%s"`, + fieldName, typeName) + } + + // All options except struct and flatten potentially apply. (I + // mean in theory you could apply them here, but since they require + // per-use validation, it would be a bit tricky, and the use case + // is not clear.) + if fieldDir.Struct != nil || fieldDir.Flatten != nil { + return errorf(fieldDir.pos, "struct and flatten can't be used via for") + } + + if fieldDir.Omitempty != nil && field.Type.NonNull { + return errorf(fieldDir.pos, "omitempty may only be used on optional arguments") + } + + if fieldDir.TypeName != "" && fieldDir.Bind != "" && fieldDir.Bind != "-" { + return errorf(fieldDir.pos, "typename and bind may not be used together") + } + } + } + + switch node := node.(type) { + case *ast.OperationDefinition: + if dir.Bind != "" { + return errorf(dir.pos, "bind may not be applied to the entire operation") + } + + // Anything else is valid on the entire operation; it will just apply + // to whatever it is relevant to. + return nil + case *ast.FragmentDefinition: + if dir.Bind != "" { + // TODO(benkraft): Implement this if people find it useful. 
+ return errorf(dir.pos, "bind is not implemented for named fragments") + } + + if dir.Struct != nil { + return errorf(dir.pos, "struct is only applicable to fields, not fragment-definitions") + } + + // Like operations, anything else will just apply to the entire + // fragment. + return nil + case *ast.VariableDefinition: + if dir.Omitempty != nil && node.Type.NonNull { + return errorf(dir.pos, "omitempty may only be used on optional arguments") + } + + if dir.Struct != nil { + return errorf(dir.pos, "struct is only applicable to fields, not variable-definitions") + } + + if dir.Flatten != nil { + return errorf(dir.pos, "flatten is only applicable to fields, not variable-definitions") + } + + if len(dir.FieldDirectives) > 0 { + return errorf(dir.pos, "for is only applicable to operations and arguments") + } + + if dir.TypeName != "" && dir.Bind != "" && dir.Bind != "-" { + return errorf(dir.pos, "typename and bind may not be used together") + } + + return nil + case *ast.Field: + if dir.Omitempty != nil { + return errorf(dir.pos, "omitempty is only applicable to variables, not fields") + } + + typ := schema.Types[node.Definition.Type.Name()] + if dir.Struct != nil { + if err := validateStructOption(typ, node.SelectionSet, dir.pos); err != nil { + return err + } + } + + if dir.Flatten != nil { + if _, err := validateFlattenOption(typ, node.SelectionSet, dir.pos); err != nil { + return err + } + } + + if len(dir.FieldDirectives) > 0 { + return errorf(dir.pos, "for is only applicable to operations and arguments") + } + + if dir.TypeName != "" && dir.Bind != "" && dir.Bind != "-" { + return errorf(dir.pos, "typename and bind may not be used together") + } + + return nil + default: + return errorf(dir.pos, "invalid @genqlient directive location: %T", node) + } +} + +func validateStructOption( + typ *ast.Definition, + selectionSet ast.SelectionSet, + pos *ast.Position, +) error { + if typ.Kind != ast.Interface && typ.Kind != ast.Union { + return errorf(pos, "struct is only applicable to interface-typed fields") + } + + // Make sure that all the requested fields apply to the interface itself + // (not just certain implementations). + for _, selection := range selectionSet { + switch selection.(type) { + case *ast.Field: + // fields are fine. + case *ast.InlineFragment, *ast.FragmentSpread: + // Fragments aren't allowed. In principle we could allow them under + // the condition that the fragment applies to the whole interface + // (not just one implementation; and so on recursively), and for + // fragment spreads additionally that the fragment has the same + // option applied to it, but it seems more trouble than it's worth + // right now. + return errorf(pos, "struct is not allowed for types with fragments") + } + } + return nil +} + +func validateFlattenOption( + typ *ast.Definition, + selectionSet ast.SelectionSet, + pos *ast.Position, +) (index int, err error) { + index = -1 + if len(selectionSet) == 0 { + return -1, errorf(pos, "flatten is not allowed for leaf fields") + } + + for i, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + // If the field is auto-added __typename, ignore it for flattening + // purposes. + if selection.Name == "__typename" && selection.Position == nil { + continue + } + // Type-wise, it's no harder to implement flatten for fields, but + // it requires new logic in UnmarshalJSON. We can add that if it + // proves useful relative to its complexity.
+ return -1, errorf(pos, "flatten is not yet supported for fields (only fragment spreads)") + + case *ast.InlineFragment: + // Inline fragments aren't allowed. In principle there's nothing + // stopping us from allowing them (under the same type-match + // conditions as fragment spreads), but there's little value to it. + return -1, errorf(pos, "flatten is not allowed for selections with inline fragments") + + case *ast.FragmentSpread: + if index != -1 { + return -1, errorf(pos, "flatten is not allowed for fields with multiple selections") + } else if !fragmentMatches(typ, selection.Definition.Definition) { + // We don't let you flatten + // field { # type: FieldType + // ...Fragment # type: FragmentType + // } + // unless FragmentType implements FieldType, because otherwise + // what do we do if we get back a type that doesn't implement + // FragmentType? + return -1, errorf(pos, + "flatten is not allowed for fields with fragment-spreads "+ + "unless the field-type implements the fragment-type; "+ + "field-type %s does not implement fragment-type %s", + typ.Name, selection.Definition.Definition.Name) + } + index = i + } + } + return index, nil +} + +func fillDefaultBool(target **bool, defaults ...*bool) { + if *target != nil { + return + } + + for _, val := range defaults { + if val != nil { + *target = val + return + } + } +} + +func fillDefaultString(target *string, defaults ...string) { + if *target != "" { + return + } + + for _, val := range defaults { + if val != "" { + *target = val + return + } + } +} + +// merge updates the receiver, which is a directive applied to some node, with +// the information from the directive applied to the fragment or operation +// containing that node. (The update is in-place.) +// +// Note this has slightly different semantics than .add(), see inline for +// details. +// +// parent is as described in parsePrecedingComment. operationDirective is the +// directive applied to this operation or fragment. +func (dir *genqlientDirective) mergeOperationDirective( + node interface{}, + parentIfInputField *ast.Definition, + operationDirective *genqlientDirective, +) { + // We'll set forField to the `@genqlient(for: "MyType.myField", ...)` + // directive from our operation/fragment, if any. + var forField *genqlientDirective + switch field := node.(type) { + case *ast.Field: // query field + typeName := field.ObjectDefinition.Name + forField = operationDirective.FieldDirectives[typeName][field.Name] + case *ast.FieldDefinition: // input-type field + forField = operationDirective.FieldDirectives[parentIfInputField.Name][field.Name] + } + // Just to simplify nil-checking in the code below: + if forField == nil { + forField = newGenqlientDirective(nil) + } + + // Now fill defaults; in general the local directive wins over the "for" + // field directive, which wins over the operation directive. + fillDefaultBool(&dir.Omitempty, forField.Omitempty, operationDirective.Omitempty) + fillDefaultBool(&dir.Pointer, forField.Pointer, operationDirective.Pointer) + // struct and flatten aren't settable via "for". + fillDefaultBool(&dir.Struct, operationDirective.Struct) + fillDefaultBool(&dir.Flatten, operationDirective.Flatten) + fillDefaultString(&dir.Bind, forField.Bind, operationDirective.Bind) + // typename isn't settable on the operation (when set there it applies to + // the response-type).
+ fillDefaultString(&dir.TypeName, forField.TypeName) +} + +// parsePrecedingComment looks at the comment right before this node, and +// returns the genqlient directive applied to it (or an empty one if there is +// none), the remaining human-readable comment (or "" if there is none), and an +// error if the directive is invalid. +// +// queryOptions are the options to be applied to this entire query (or +// fragment); the local options will be merged into those. It should be nil if +// we are parsing the directive on the entire query. +// +// parentIfInputField need only be set if node is an input-type field; it +// should be the type containing this field. (We can get this from gqlparser +// in other cases, but not input-type fields.) +func (g *generator) parsePrecedingComment( + node interface{}, + parentIfInputField *ast.Definition, + pos *ast.Position, + queryOptions *genqlientDirective, +) (comment string, directive *genqlientDirective, err error) { + directive = newGenqlientDirective(pos) + hasDirective := false + + // For directives on genqlient-generated nodes, we don't actually need to + // parse anything. (But we do need to merge below.) + var commentLines []string + if pos != nil && pos.Src != nil { + sourceLines := strings.Split(pos.Src.Input, "\n") + for i := pos.Line - 1; i > 0; i-- { + line := strings.TrimSpace(sourceLines[i-1]) + trimmed := strings.TrimSpace(strings.TrimPrefix(line, "#")) + if strings.HasPrefix(line, "# @genqlient") { + hasDirective = true + var graphQLDirective *ast.Directive + graphQLDirective, err = parseDirective(trimmed, pos) + if err != nil { + return "", nil, err + } + err = directive.add(graphQLDirective, pos) + if err != nil { + return "", nil, err + } + } else if strings.HasPrefix(line, "#") { + commentLines = append(commentLines, trimmed) + } else { + break + } + } + } + + if hasDirective { // (else directive is empty) + err = directive.validate(node, g.schema) + if err != nil { + return "", nil, err + } + } + + if queryOptions != nil { + // If we are part of an operation/fragment, merge its options in. + directive.mergeOperationDirective(node, parentIfInputField, queryOptions) + + // TODO(benkraft): Really we should do all the validation after + // merging, probably? But this is the only check that can fail only + // after merging, and it's a bit tricky because the "does not apply" + // checks may need to happen before merging so we know where the + // directive "is". + if directive.TypeName != "" && directive.Bind != "" && directive.Bind != "-" { + return "", nil, errorf(directive.pos, "typename and bind may not be used together") + } + } + + reverse(commentLines) + + return strings.TrimSpace(strings.Join(commentLines, "\n")), directive, nil +} + +func parseDirective(line string, pos *ast.Position) (*ast.Directive, error) { + // HACK: parse the "directive" by making a fake query containing it. + fakeQuery := fmt.Sprintf("query %v { field }", line) + doc, err := parser.ParseQuery(&ast.Source{Input: fakeQuery}) + if err != nil { + return nil, errorf(pos, "invalid genqlient directive: %v", err) + } + return doc.Operations[0].Directives[0], nil +} diff --git a/vendor/github.com/Khan/genqlient/generate/header.go.tmpl b/vendor/github.com/Khan/genqlient/generate/header.go.tmpl new file mode 100644 index 00000000..aa453635 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/header.go.tmpl @@ -0,0 +1,10 @@ +// Code generated by github.com/Khan/genqlient, DO NOT EDIT. 
+ +package {{.Config.Package}} + +{{.Imports}} + +{{if and (ne .Config.ContextType "-") (ne .Config.ContextType "context.Context") }} +// Check that context_type from genqlient.yaml implements context.Context. +var _ {{ref "context.Context"}} = ({{ref .Config.ContextType}})(nil) +{{end}} diff --git a/vendor/github.com/Khan/genqlient/generate/imports.go b/vendor/github.com/Khan/genqlient/generate/imports.go new file mode 100644 index 00000000..5d6f0a29 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/imports.go @@ -0,0 +1,131 @@ +package generate + +import ( + "fmt" + "go/token" + "go/types" + "regexp" + "strconv" + "strings" + "unicode" +) + +// makeIdentifier takes a string and returns a valid go identifier like it. +// +// If the string is an identifier, return the input. Otherwise, munge it to +// make a valid identifier, which at worst (if the input is entirely emoji, +// say) means coming up with one out of whole cloth. This identifier need not +// be particularly unique; the caller may add a suffix. +func makeIdentifier(candidateIdentifier string) string { + if token.IsIdentifier(candidateIdentifier) { + return candidateIdentifier + } + + var goodChars strings.Builder + for _, c := range candidateIdentifier { + // modified from token.IsIdentifier + if unicode.IsLetter(c) || c == '_' || + // digits only valid after first char + goodChars.Len() > 0 && unicode.IsDigit(c) { + goodChars.WriteRune(c) + } + } + if goodChars.Len() > 0 { + return goodChars.String() + } + + return "alias" +} + +func (g *generator) addImportFor(pkgPath string) (alias string) { + pkgName := makeIdentifier(pkgPath[strings.LastIndex(pkgPath, "/")+1:]) + alias = pkgName + suffix := 2 + for g.usedAliases[alias] { + alias = pkgName + strconv.Itoa(suffix) + suffix++ + } + + g.imports[pkgPath] = alias + g.usedAliases[alias] = true + return alias +} + +var _sliceOrMapPrefixRegexp = regexp.MustCompile(`^(\*|\[\d*\]|map\[string\])*`) + +// ref takes a Go fully-qualified name, ensures that any necessary symbols are +// imported, and returns an appropriate reference. +// +// Ideally, we want to allow a reference to basically an arbitrary symbol. +// But that's very hard, because it might be quite complicated, like +// +// struct{ F []map[mypkg.K]otherpkg.V } +// +// Now in practice, using an unnamed struct is not a great idea, but we do +// want to allow as much as we can that encoding/json knows how to work +// with, since you would reasonably expect us to accept, say, +// map[string][]interface{}. So we allow: +// - any named type (mypkg.T) +// - any predeclared basic type (string, int, etc.) 
+// - interface{} +// - for any allowed type T, *T, []T, [N]T, and map[string]T +// +// which effectively excludes: +// - unnamed struct types +// - map[K]V where K is a named type wrapping string +// - any nonstandard spelling of those (interface {/* hi */}, +// map[ string ]T) +// +// (This is documented in docs/genqlient.yaml) +func (g *generator) ref(fullyQualifiedName string) (qualifiedName string, err error) { + errorMsg := `invalid type-name "%v" (%v); expected a builtin, ` + + `path/to/package.Name, interface{}, or a slice, map, or pointer of those` + + if strings.Contains(fullyQualifiedName, " ") { + return "", errorf(nil, errorMsg, fullyQualifiedName, "contains spaces") + } + + prefix := _sliceOrMapPrefixRegexp.FindString(fullyQualifiedName) + nameToImport := fullyQualifiedName[len(prefix):] + + i := strings.LastIndex(nameToImport, ".") + if i == -1 { + if nameToImport != "interface{}" && types.Universe.Lookup(nameToImport) == nil { + return "", errorf(nil, errorMsg, fullyQualifiedName, + fmt.Sprintf(`unknown type-name "%v"`, nameToImport)) + } + return fullyQualifiedName, nil + } + + pkgPath := nameToImport[:i] + localName := nameToImport[i+1:] + alias, ok := g.imports[pkgPath] + if !ok { + if g.importsLocked { + return "", errorf(nil, + `genqlient internal error: imports locked but package "%v" has not been imported`, pkgPath) + } + alias = g.addImportFor(pkgPath) + } + return prefix + alias + "." + localName, nil +} + +// Returns the import-clause to use in the generated code. +func (g *generator) Imports() string { + g.importsLocked = true + if len(g.imports) == 0 { + return "" + } + + var builder strings.Builder + builder.WriteString("import (\n") + for path, alias := range g.imports { + if path == alias || strings.HasSuffix(path, "/"+alias) { + builder.WriteString("\t" + strconv.Quote(path) + "\n") + } else { + builder.WriteString("\t" + alias + " " + strconv.Quote(path) + "\n") + } + } + builder.WriteString(")\n\n") + return builder.String() +} diff --git a/vendor/github.com/Khan/genqlient/generate/main.go b/vendor/github.com/Khan/genqlient/generate/main.go new file mode 100644 index 00000000..4c2d43ec --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/main.go @@ -0,0 +1,92 @@ +// Package generate provides programmatic access to genqlient's functionality, +// and documentation of its configuration options. For general usage +// documentation, see the project [GitHub]. 
+// +// [GitHub]: https://github.com/Khan/genqlient +package generate + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/alexflint/go-arg" +) + +func readConfigGenerateAndWrite(configFilename string) error { + var config *Config + var err error + if configFilename != "" { + config, err = ReadAndValidateConfig(configFilename) + if err != nil { + return err + } + } else { + config, err = ReadAndValidateConfigFromDefaultLocations() + if err != nil { + return err + } + } + + generated, err := Generate(config) + if err != nil { + return err + } + + for filename, content := range generated { + err = os.MkdirAll(filepath.Dir(filename), 0o755) + if err != nil { + return errorf(nil, + "could not create parent directory for generated file %v: %v", + filename, err) + } + + err = os.WriteFile(filename, content, 0o644) + if err != nil { + return errorf(nil, "could not write generated file %v: %v", + filename, err) + } + } + return nil +} + +type cliArgs struct { + ConfigFilename string `arg:"positional" placeholder:"CONFIG" default:"" help:"path to genqlient configuration (default: genqlient.yaml in current or any parent directory)"` + Init bool `arg:"--init" help:"write out and use a default config file"` +} + +func (cliArgs) Description() string { + return strings.TrimSpace(` +Generates GraphQL client code for a given schema and queries. +See https://github.com/Khan/genqlient for full documentation. +`) +} + +// Main is the command-line entrypoint to genqlient; it's equivalent to calling +// +// go run github.com/Khan/genqlient +// +// For lower-level control over genqlient's operation, see [Generate]. +func Main() { + exitIfError := func(err error) { + if err != nil { + fmt.Println(err) + os.Exit(1) + } + } + + var args cliArgs + arg.MustParse(&args) + if args.Init { + filename := args.ConfigFilename + if filename == "" { + filename = "genqlient.yaml" + } + + err := initConfig(filename) + exitIfError(err) + } + err := readConfigGenerateAndWrite(args.ConfigFilename) + exitIfError(err) +} diff --git a/vendor/github.com/Khan/genqlient/generate/marshal.go.tmpl b/vendor/github.com/Khan/genqlient/generate/marshal.go.tmpl new file mode 100644 index 00000000..87718578 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/marshal.go.tmpl @@ -0,0 +1,95 @@ +{{/* We generate MarshalJSON for much the same reasons as UnmarshalJSON -- see + unmarshal.go.tmpl for details. (Note we generate both even if genqlient + itself needs only UnmarshalJSON, for the benefit of callers who want to, + for example, put genqlient responses in a cache.) But our implementation + for marshaling is quite different. + + Specifically, the treatment of field-visibility with embedded fields must + differ from both ordinary encoding/json and unmarshaling: we need to + choose exactly one conflicting field in all cases (whereas Go chooses at + most one and when unmarshaling we choose them all). See + goStructType.FlattenedFields in types.go for more discussion of embedding + and visibility. + + To accomplish that, we essentially flatten out all the embedded fields + when marshaling, following those precedence rules. Then we basically + follow what we do in unmarshaling, but in reverse order: first we marshal + the special fields, then we glue everything together with the ordinary + fields. + + We do one other thing differently, for the benefit of the marshal-helper + in marshal_helper.go.tmpl. 
While when unmarshaling it's easy to unmarshal
+   out the __typename field, then unmarshal out everything else, with
+   marshaling we can't do the same (at least not without some careful
+   JSON-stitching; the considerations are basically the same as those
+   discussed in FlattenedFields). So we write out a helper method
+   __premarshalJSON() which basically does all but the final JSON-marshal.
+   (Then the real MarshalJSON() just calls that, and then marshals.)
+   Thus a marshal-helper for this type, if any, can call __premarshalJSON()
+   directly, and embed its result. */}}
+
+type __premarshal{{.GoName}} struct{
+    {{range .FlattenedFields -}}
+    {{if .NeedsMarshaling -}}
+    {{.GoName}} {{repeat .GoType.SliceDepth "[]"}}{{ref "encoding/json.RawMessage"}} `json:"{{.JSONName}}{{if .Omitempty -}},omitempty{{end}}"`
+    {{else}}
+    {{.GoName}} {{.GoType.Reference}} `json:"{{.JSONName}}{{if .Omitempty -}},omitempty{{end}}"`
+    {{end}}
+    {{end}}
+}
+
+func (v *{{.GoName}}) MarshalJSON() ([]byte, error) {
+    premarshaled, err := v.__premarshalJSON()
+    if err != nil {
+        return nil, err
+    }
+    return json.Marshal(premarshaled)
+}
+
+func (v *{{.GoName}}) __premarshalJSON() (*__premarshal{{.GoName}}, error) {
+    var retval __premarshal{{.GoName}}
+
+    {{range $field := .FlattenedFields -}}
+    {{if $field.NeedsMarshaling -}}
+    {
+        {{/* Here dst is the json.RawMessage, and src is the Go type. */}}
+        dst := &retval.{{$field.GoName}}
+        src := v.{{$field.Selector}}
+        {{range $i := intRange $field.GoType.SliceDepth -}}
+        *dst = make(
+            {{repeat (sub $field.GoType.SliceDepth $i) "[]"}}{{ref "encoding/json.RawMessage"}},
+            len(src))
+        for i, src := range src {
+            dst := &(*dst)[i]
+        {{end -}}
+        {{/* src now has type <elem>; dst is json.RawMessage */ -}}
+        {{if $field.GoType.IsPointer -}}
+        {{/* If you passed a pointer, and it's nil, don't call the
+             marshaler. This matches json.Marshal's behavior. */ -}}
+        if src != nil {
+        {{end -}}
+        var err error
+        *dst, err = {{$field.Marshaler $.Generator}}(
+            {{/* src is the struct-field (or field-element, etc.).
+                 We want to pass a pointer to the type you specified, so if
+                 there's a pointer on the field that's exactly what we want,
+                 and if not we need to take the address. */ -}}
+            {{if not $field.GoType.IsPointer}}&{{end}}src)
+        if err != nil {
+            return nil, fmt.Errorf(
+                "unable to marshal {{$.GoName}}.{{$field.Selector}}: %w", err)
+        }
+        {{if $field.GoType.IsPointer -}}
+        }{{/* end if src != nil */}}
+        {{end -}}
+        {{range $i := intRange $field.GoType.SliceDepth -}}
+        }
+        {{end -}}
+    }
+    {{else -}}
+    retval.{{$field.GoName}} = v.{{$field.Selector}}
+    {{end -}}
+    {{end -}}
+
+    return &retval, nil
+}
diff --git a/vendor/github.com/Khan/genqlient/generate/marshal_helper.go.tmpl b/vendor/github.com/Khan/genqlient/generate/marshal_helper.go.tmpl
new file mode 100644
index 00000000..ec4e7239
--- /dev/null
+++ b/vendor/github.com/Khan/genqlient/generate/marshal_helper.go.tmpl
@@ -0,0 +1,44 @@
+{{/* This is somewhat parallel to unmarshal_helper.go.tmpl, but, as usual, in
+   reverse. Note the helper accepts a pointer-to-interface, for
+   consistency with unmarshaling and with the API we expect of custom
+   marshalers. */}}
+
+func __marshal{{.GoName}}(v *{{.GoName}}) ([]byte, error) {
+    {{/* Determine the GraphQL typename, which the unmarshaler will need should
+         it be called on our output. */}}
+    var typename string
+    switch v := (*v).(type) {
+    {{range .Implementations -}}
+    case *{{.GoName}}:
+        typename = "{{.GraphQLName}}"
+
+        {{/* Now actually do the marshal, with the concrete type.
(Go only + marshals embeds the way we want if they're structs.) Except that + won't work right if the implementation-type has its own + MarshalJSON method (maybe it in turn has an interface-typed + field), so we call the helper __premarshalJSON directly (see + marshal.go.tmpl). */}} + {{if .NeedsMarshaling -}} + premarshaled, err := v.__premarshalJSON() + if err != nil { + return nil, err + } + result := struct { + TypeName string `json:"__typename"` + *__premarshal{{.GoName}} + }{typename, premarshaled} + {{else -}} + result := struct { + TypeName string `json:"__typename"` + *{{.GoName}} + }{typename, v} + {{end -}} + return json.Marshal(result) + {{end -}} + case nil: + return []byte("null"), nil + default: + return nil, {{ref "fmt.Errorf"}}( + `unexpected concrete type for {{.GoName}}: "%T"`, v) + } +} diff --git a/vendor/github.com/Khan/genqlient/generate/names.go b/vendor/github.com/Khan/genqlient/generate/names.go new file mode 100644 index 00000000..6410bc3e --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/names.go @@ -0,0 +1,180 @@ +package generate + +// This file generates the names for genqlient's generated types. This is +// somewhat tricky because the names need to be unique, stable, and, to the +// extent possible, human-readable and -writable. See docs/DESIGN.md for an +// overview of the considerations; in short, we need long names. +// +// Specifically, the names we generate are of the form: +// MyOperationMyFieldMyTypeMySubFieldMySubType +// We need the "MyOperation" prefix because different operations may have +// different fields selected in the same "location" within the query. We need +// to include the field-path, because even within the same query, the same +// GraphQL type may have different selections in different locations. +// Including the types along the path is only strictly necessary in the case of +// interfaces, where, in a query like +// query Q { +// f { # type: I +// ... on T { g { h } } +// ... on U { g { h } } +// } +// } +// the type of .f.g may be different depending on whether the +// concrete type is a T or a U; if we simply called the types QFG they'd +// collide. We could in principle omit the types where there are no interfaces +// in sight, but having the last thing in the name be the actual GraphQL type +// name (MySubType in the first example) makes things more readable, and the +// value of consistency seems greater than the value of brevity, given the +// types are quite verbose either way. Note that in all cases the "MyField" is +// the alias of the field -- the name it has in this query -- since you could +// have `query Q { a: f { b }, c: f { d } }` and Q.A and Q.C must have +// different types. +// +// One subtlety in the above description is: is the "MyType" the interface or +// the implementation? When it's a suffix, the answer is both: we generate +// both MyFieldMyInterface and MyFieldMyImplementation, and the latter, in Go, +// implements the former. (See docs/DESIGN.md for more.) But as an infix, we +// use the type on which the field is requested. Concretely, the following +// schema and query: +// type Query { f: I } +// interface I { g: G } +// type T implements I { g: G, h: H } +// type U implements I { g: G, h: H } +// type G { g1: String, g2: String } +// type H { h1: String, h2: String, h3: String, h4: String } +// +// query Q { +// f { +// g { g1 g2 } +// ... on T { h { h1 h2 } } +// ... 
on U { h { h3 h4 } } +// } +// } +// The field g must have type QFIG (not QFTG and QFHG), so that QFI's method +// GetG() can return a consistent type. But the fields h must have types QFTH +// and QFUH (not QFIH), because the two are different: the former has fields h1 +// and h2, whereas the latter has fields h3 and h4. So, in summary, since `g` +// is selected in a context of type I, it uses that (interface) type in its +// type-name, and `h` is selected in contexts of types T and U, they use those +// (implementation) types in their type-names. +// +// We do shorten the names in one case: if the name of a field ends with +// the name of its type, we omit the type name, avoiding types like +// MyQueryUserUser when querying a field of type user and value user. Note we +// do not do this for field names, both because it's less common, and because +// in `query Q { user { user { id } } }` we do need both QUser and QUserUser -- +// they have different fields. +// +// Note that there are a few potential collisions from this algorithm: +// - When generating Go types for GraphQL interface types, we generate both +// ...MyFieldMyInterfaceType and ...MyFieldMyImplType. If an interface's +// name is a suffix of its implementation's name, and both are suffixes of a +// field of that type, we'll shorten both, resulting in a collision. +// - Names of different casing (e.g. fields `myField` and `MyField`) can +// collide (the first is standard usage but both are legal). +// - We don't put a special character between parts, so fields like +// query Q { +// ab { ... } # type: C +// abc { ... } # type: C +// a { ... } # type: BC +// } +// can collide. +// All cases seem fairly rare in practice; eventually we'll likely allow users +// the ability to specify their own names, which they could use to avoid this +// (see [issue #12]). +// TODO(benkraft): We should probably at least try to detect it and bail. +// +// [issue #12]: https://github.com/Khan/genqlient/issues/12 +// +// To implement all of the above, as we traverse the operation (and schema) in +// convert.go, we keep track of a list of parts to prefix to our type-names. +// The list always ends with a field, not a type; and we extend it when +// traversing fields, to allow for correct handling of the interface case +// discussed above. This file implements the actual maintenance of that +// prefix, and the code to compute the actual type-name from it. +// +// Note that input objects and enums are handled separately (inline in +// convertDefinition) since the same considerations don't apply and their names +// are thus quite simple. We also specially-handle the type of the toplevel +// response object (inline in convertOperation). + +import ( + "strings" + + "github.com/vektah/gqlparser/v2/ast" +) + +// Yes, a linked list! Of name-prefixes in *reverse* order, i.e. from end to +// start. +// +// We could use a stack -- it would probably be marginally +// more efficient -- but then the caller would have to know more about how to +// manage it safely. Using a list, and treating it as immutable, makes it +// easy. 
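+//
+// To make the back-to-front representation concrete (our illustration, not
+// code from the genqlient sources): a list built as
+//
+//     l := &prefixList{"G", &prefixList{"F", &prefixList{"Q", nil}}}
+//
+// has head "G" (the *last* prefix), and joinPrefixList(l) walks the links
+// and reverses them, returning "QFG".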
+type prefixList struct {
+    head string // the list goes back-to-front, so this is the *last* prefix
+    tail *prefixList
+}
+
+// creates a new one-element list
+func newPrefixList(item string) *prefixList {
+    return &prefixList{head: item}
+}
+
+func joinPrefixList(prefix *prefixList) string {
+    var reversed []string
+    for ; prefix != nil; prefix = prefix.tail {
+        reversed = append(reversed, prefix.head)
+    }
+    l := len(reversed)
+    for i := 0; i < l/2; i++ {
+        reversed[i], reversed[l-1-i] = reversed[l-1-i], reversed[i]
+    }
+    return strings.Join(reversed, "")
+}
+
+// Given a prefix-list, and the next type-name, compute the prefix-list with
+// that type-name added (if applicable). The returned value is not a valid
+// prefix-list, since it ends with a type, not a field (see top-of-file
+// comment), but it's used to construct both the type-names from the input and
+// the next prefix-list.
+func typeNameParts(prefix *prefixList, typeName string) *prefixList {
+    // GraphQL types are conventionally UpperCamelCase, but it's not required;
+    // our names will look best if they are.
+    typeName = upperFirst(typeName)
+    // If the prefix has just one part, that's the operation-name. There's no
+    // need to add "Query" or "Mutation". (Zero should never happen.)
+    if prefix == nil || prefix.tail == nil ||
+        // If the name-so-far ends with this type's name, omit the
+        // type-name (see the "shortened" case in the top-of-file comment).
+        strings.HasSuffix(joinPrefixList(prefix), typeName) {
+        return prefix
+    }
+    return &prefixList{typeName, prefix}
+}
+
+// Given a prefix-list, and a field, compute the next prefix-list, which will
+// be used for that field's selections.
+func nextPrefix(prefix *prefixList, field *ast.Field) *prefixList {
+    // Add the type.
+    prefix = typeNameParts(prefix, field.ObjectDefinition.Name)
+    // Add the field (there's no shortening here, see top-of-file comment).
+    prefix = &prefixList{upperFirst(field.Alias), prefix}
+    return prefix
+}
+
+// Given a prefix-list, and the GraphQL name of the current type, compute the
+// name we should give it in Go.
+func makeTypeName(prefix *prefixList, typeName string) string {
+    return joinPrefixList(typeNameParts(prefix, typeName))
+}
+
+// Like makeTypeName, but append typeName unconditionally.
+//
+// This is used for when you specify a type-name for a field of interface
+// type; we use YourName for the interface, but need to do YourNameImplName for
+// the implementations.
+func makeLongTypeName(prefix *prefixList, typeName string) string {
+    typeName = upperFirst(typeName)
+    return joinPrefixList(&prefixList{typeName, prefix})
+}
diff --git a/vendor/github.com/Khan/genqlient/generate/operation.go.tmpl b/vendor/github.com/Khan/genqlient/generate/operation.go.tmpl
new file mode 100644
index 00000000..48221cda
--- /dev/null
+++ b/vendor/github.com/Khan/genqlient/generate/operation.go.tmpl
@@ -0,0 +1,49 @@
+// The query or mutation executed by {{.Name}}.
+const {{.Name}}_Operation = `{{$.Body}}` + +{{.Doc}} +func {{.Name}}( + {{if ne .Config.ContextType "-" -}} + ctx {{ref .Config.ContextType}}, + {{end}} + {{- if not .Config.ClientGetter -}} + client {{ref "github.com/Khan/genqlient/graphql.Client"}}, + {{end}} + {{- if .Input -}} + {{- range .Input.Fields -}} + {{/* the GraphQL name here is the user-specified variable-name */ -}} + {{.GraphQLName}} {{.GoType.Reference}}, + {{end -}} + {{end -}} +) (*{{.ResponseName}}, {{if .Config.Extensions -}}map[string]interface{},{{end}} error) { + req := &graphql.Request{ + OpName: "{{.Name}}", + Query: {{.Name}}_Operation, + {{if .Input -}} + Variables: &{{.Input.GoName}}{ + {{range .Input.Fields -}} + {{.GoName}}: {{.GraphQLName}}, + {{end -}} + }, + {{end -}} + } + var err error + {{if .Config.ClientGetter -}} + var client graphql.Client + + client, err = {{ref .Config.ClientGetter}}({{if ne .Config.ContextType "-"}}ctx{{else}}{{end}}) + if err != nil { + return nil, {{if .Config.Extensions -}}nil,{{end -}} err + } + {{end}} + var data {{.ResponseName}} + resp := &graphql.Response{Data: &data} + + err = client.MakeRequest( + {{if ne .Config.ContextType "-"}}ctx{{else}}nil{{end}}, + req, + resp, + ) + + return &data, {{if .Config.Extensions -}}resp.Extensions,{{end -}} err +} diff --git a/vendor/github.com/Khan/genqlient/generate/parse.go b/vendor/github.com/Khan/genqlient/generate/parse.go new file mode 100644 index 00000000..4569f25c --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/parse.go @@ -0,0 +1,220 @@ +package generate + +import ( + "fmt" + goAst "go/ast" + goParser "go/parser" + goToken "go/token" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/parser" + "github.com/vektah/gqlparser/v2/validator" + _ "github.com/vektah/gqlparser/v2/validator/rules" +) + +func getSchema(globs StringList) (*ast.Schema, error) { + filenames, err := expandFilenames(globs) + if err != nil { + return nil, err + } + + sources := make([]*ast.Source, len(filenames)) + for i, filename := range filenames { + text, err := os.ReadFile(filename) + if err != nil { + return nil, errorf(nil, "unreadable schema file %v: %v", filename, err) + } + sources[i] = &ast.Source{Name: filename, Input: string(text)} + } + + // Ideally here we'd just call gqlparser.LoadSchema. But the schema we are + // given may or may not contain the builtin types String, Int, etc. (The + // spec says it shouldn't, but introspection will return those types, and + // some introspection-to-SDL tools aren't smart enough to remove them.) So + // we inline LoadSchema and insert some checks. + document, graphqlError := parser.ParseSchemas(sources...) + if graphqlError != nil { + // Schema doesn't even parse. + return nil, errorf(nil, "invalid schema: %v", graphqlError) + } + + // Check if we have a builtin type. (String is an arbitrary choice.) 
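+    // (For example -- an illustration, not from the genqlient sources -- a
+    // schema dumped by an introspection-to-SDL tool may itself declare
+    // `scalar String`; merging the prelude into such a schema would define
+    // String twice, so we merge it only when the builtins are absent.)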
+ hasBuiltins := false + for _, def := range document.Definitions { + if def.Name == "String" { + hasBuiltins = true + break + } + } + + if !hasBuiltins { + // modified from parser.ParseSchemas + var preludeAST *ast.SchemaDocument + preludeAST, graphqlError = parser.ParseSchema(validator.Prelude) + if graphqlError != nil { + return nil, errorf(nil, "invalid prelude (probably a gqlparser bug): %v", graphqlError) + } + document.Merge(preludeAST) + } + + schema, graphqlError := validator.ValidateSchemaDocument(document) + if graphqlError != nil { + return nil, errorf(nil, "invalid schema: %v", graphqlError) + } + + return schema, nil +} + +func getAndValidateQueries(basedir string, filenames StringList, schema *ast.Schema) (*ast.QueryDocument, error) { + queryDoc, err := getQueries(basedir, filenames) + if err != nil { + return nil, err + } + + // Cf. gqlparser.LoadQuery + graphqlErrors := validator.Validate(schema, queryDoc) + if graphqlErrors != nil { + return nil, errorf(nil, "query-spec does not match schema: %v", graphqlErrors) + } + + return queryDoc, nil +} + +func expandFilenames(globs []string) ([]string, error) { + uniqFilenames := make(map[string]bool, len(globs)) + for _, glob := range globs { + matches, err := filepath.Glob(glob) + if err != nil { + return nil, errorf(nil, "can't expand file-glob %v: %v", glob, err) + } + if len(matches) == 0 { + return nil, errorf(nil, "%v did not match any files", glob) + } + for _, match := range matches { + uniqFilenames[match] = true + } + } + filenames := make([]string, 0, len(uniqFilenames)) + for filename := range uniqFilenames { + filenames = append(filenames, filename) + } + return filenames, nil +} + +func getQueries(basedir string, globs StringList) (*ast.QueryDocument, error) { + // We merge all the queries into a single query-document, since operations + // in one might reference fragments in another. + // + // TODO(benkraft): It might be better to merge just within a filename, so + // that fragment-names don't need to be unique across files. (Although + // then we may have other problems; and query-names still need to be.) + mergedQueryDoc := new(ast.QueryDocument) + addQueryDoc := func(queryDoc *ast.QueryDocument) { + mergedQueryDoc.Operations = append(mergedQueryDoc.Operations, queryDoc.Operations...) + mergedQueryDoc.Fragments = append(mergedQueryDoc.Fragments, queryDoc.Fragments...) + } + + filenames, err := expandFilenames(globs) + if err != nil { + return nil, err + } + + for _, filename := range filenames { + text, err := os.ReadFile(filename) + if err != nil { + return nil, errorf(nil, "unreadable query-spec file %v: %v", filename, err) + } + + switch filepath.Ext(filename) { + case ".graphql": + queryDoc, err := getQueriesFromString(string(text), basedir, filename) + if err != nil { + return nil, err + } + + addQueryDoc(queryDoc) + + case ".go": + queryDocs, err := getQueriesFromGo(string(text), basedir, filename) + if err != nil { + return nil, err + } + + for _, queryDoc := range queryDocs { + addQueryDoc(queryDoc) + } + + default: + return nil, errorf(nil, "unknown file type: %v", filename) + } + } + + return mergedQueryDoc, nil +} + +func getQueriesFromString(text string, basedir, filename string) (*ast.QueryDocument, error) { + // make path relative to the config-directory + relname, err := filepath.Rel(basedir, filename) + if err == nil { + filename = relname + } + + // Cf. 
gqlparser.LoadQuery
+    document, graphqlError := parser.ParseQuery(
+        &ast.Source{Name: filename, Input: text})
+    if graphqlError != nil { // ParseQuery returns type *graphql.Error, yuck
+        return nil, errorf(nil, "invalid query-spec file %v: %v", filename, graphqlError)
+    }
+
+    return document, nil
+}
+
+func getQueriesFromGo(text string, basedir, filename string) ([]*ast.QueryDocument, error) {
+    fset := goToken.NewFileSet()
+    f, err := goParser.ParseFile(fset, filename, text, 0)
+    if err != nil {
+        return nil, errorf(nil, "invalid Go file %v: %v", filename, err)
+    }
+
+    var retval []*ast.QueryDocument
+    goAst.Inspect(f, func(node goAst.Node) bool {
+        if err != nil {
+            return false // don't bother to recurse if something already failed
+        }
+
+        basicLit, ok := node.(*goAst.BasicLit)
+        if !ok || basicLit.Kind != goToken.STRING {
+            return true // recurse
+        }
+
+        var value string
+        value, err = strconv.Unquote(basicLit.Value)
+        if err != nil {
+            return false
+        }
+
+        if !strings.HasPrefix(strings.TrimSpace(value), "# @genqlient") {
+            return true
+        }
+
+        // We put the filename as <filename>:<line>, which errors.go knows
+        // how to parse back out (since it's what gqlparser will give to us in
+        // our errors).
+        pos := fset.Position(basicLit.Pos())
+        fakeFilename := fmt.Sprintf("%v:%v", pos.Filename, pos.Line)
+        var query *ast.QueryDocument
+        query, err = getQueriesFromString(value, basedir, fakeFilename)
+        if err != nil {
+            return false
+        }
+        retval = append(retval, query)
+
+        return true
+    })
+
+    return retval, err
+}
diff --git a/vendor/github.com/Khan/genqlient/generate/stringlist.go b/vendor/github.com/Khan/genqlient/generate/stringlist.go
new file mode 100644
index 00000000..ac8785dd
--- /dev/null
+++ b/vendor/github.com/Khan/genqlient/generate/stringlist.go
@@ -0,0 +1,25 @@
+package generate
+
+// StringList provides a yaml unmarshaler to accept both `string` and `[]string` as a valid type.
+// Sourced from [gqlgen].
+//
+// [gqlgen]: https://github.com/99designs/gqlgen/blob/1a0b19feff6f02d2af6631c9d847bc243f8ede39/codegen/config/config.go#L302-L329
+type StringList []string
+
+func (a *StringList) UnmarshalYAML(unmarshal func(interface{}) error) error {
+    var single string
+    err := unmarshal(&single)
+    if err == nil {
+        *a = []string{single}
+        return nil
+    }
+
+    var multi []string
+    err = unmarshal(&multi)
+    if err != nil {
+        return err
+    }
+
+    *a = multi
+    return nil
+}
diff --git a/vendor/github.com/Khan/genqlient/generate/template.go b/vendor/github.com/Khan/genqlient/generate/template.go
new file mode 100644
index 00000000..e120e273
--- /dev/null
+++ b/vendor/github.com/Khan/genqlient/generate/template.go
@@ -0,0 +1,53 @@
+package generate
+
+import (
+    "embed"
+    "io"
+    "strings"
+    "text/template"
+)
+
+//go:embed *.tmpl
+var templates embed.FS
+
+func repeat(n int, s string) string {
+    var builder strings.Builder
+    for i := 0; i < n; i++ {
+        builder.WriteString(s)
+    }
+    return builder.String()
+}
+
+func intRange(n int) []int {
+    ret := make([]int, n)
+    for i := 0; i < n; i++ {
+        ret[i] = i
+    }
+    return ret
+}
+
+func sub(x, y int) int { return x - y }
+
+// render executes the given template with the funcs from this generator.
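+//
+// Parsed templates are memoized in g.templateCache, so each template file is
+// parsed at most once per generator run.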
+func (g *generator) render(tmplRelFilename string, w io.Writer, data interface{}) error { + tmpl := g.templateCache[tmplRelFilename] + if tmpl == nil { + funcMap := template.FuncMap{ + "ref": g.ref, + "repeat": repeat, + "intRange": intRange, + "sub": sub, + } + var err error + tmpl, err = template.New(tmplRelFilename).Funcs(funcMap).ParseFS(templates, tmplRelFilename) + if err != nil { + return errorf(nil, "could not load template %v: %v", tmplRelFilename, err) + } + g.templateCache[tmplRelFilename] = tmpl + } + err := tmpl.Execute(w, data) + if err != nil { + return errorf(nil, "could not render template: %v", err) + } + return nil +} diff --git a/vendor/github.com/Khan/genqlient/generate/types.go b/vendor/github.com/Khan/genqlient/generate/types.go new file mode 100644 index 00000000..f50df7a3 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/types.go @@ -0,0 +1,558 @@ +package generate + +// This file defines the data structures from which genqlient generates types, +// and the code to write them out as actual Go code. The main entrypoint is +// goType, which represents such a type, but convert.go also constructs each +// of the implementing types, by traversing the GraphQL operation and schema. + +import ( + "fmt" + "io" + "strings" + + "github.com/vektah/gqlparser/v2/ast" +) + +// goType represents a type for which we'll generate code. +type goType interface { + // WriteDefinition writes the code for this type into the given io.Writer. + // + // TODO(benkraft): Some of the implementations might now benefit from being + // converted to templates. + WriteDefinition(io.Writer, *generator) error + + // Reference returns the Go name of this type, e.g. []*MyStruct, and may be + // used to refer to it in Go code. + Reference() string + + // GraphQLTypeName returns the name of the GraphQL type to which this Go type + // corresponds. + GraphQLTypeName() string + + // SelectionSet returns the selection-set of the GraphQL field from which + // this type was generated, or nil if none is applicable (for GraphQL + // scalar, enum, and input types, as well as any opaque + // (non-genqlient-generated) type since those are validated upon creation). + SelectionSet() ast.SelectionSet + + // Remove slice/pointer wrappers, and return the underlying (named (or + // builtin)) type. For example, given []*MyStruct, return MyStruct. + Unwrap() goType + + // Count the number of times Unwrap() will unwrap a slice type. For + // example, given [][][]*MyStruct (or []**[][]*MyStruct, but we never + // currently generate that), return 3. + SliceDepth() int + + // True if Unwrap() will unwrap a pointer at least once. + IsPointer() bool +} + +var ( + _ goType = (*goOpaqueType)(nil) + _ goType = (*goSliceType)(nil) + _ goType = (*goPointerType)(nil) + _ goType = (*goEnumType)(nil) + _ goType = (*goStructType)(nil) + _ goType = (*goInterfaceType)(nil) +) + +type ( + // goOpaqueType represents a user-defined or builtin type, often used to + // represent a GraphQL scalar. (See Config.Bindings for more context.) + goOpaqueType struct { + GoRef string + GraphQLName string + Marshaler, Unmarshaler string + } + // goTypenameForBuiltinType represents a builtin type that was + // given a different name due to a `typename` directive. We + // create a type like `type MyString string` for it. + goTypenameForBuiltinType struct { + GoTypeName string + GoBuiltinName string + GraphQLName string + } + // goSliceType represents the Go type []Elem, used to represent GraphQL + // list types. 
+    goSliceType struct{ Elem goType }
+    // goPointerType represents the Go type *Elem, used when requested by the
+    // user (perhaps to handle nulls explicitly, or to avoid copying large
+    // structures).
+    goPointerType struct{ Elem goType }
+)
+
+// Opaque types are defined by the user; pointers and slices need no definition
+func (typ *goOpaqueType) WriteDefinition(io.Writer, *generator) error { return nil }
+
+func (typ *goTypenameForBuiltinType) WriteDefinition(w io.Writer, g *generator) error {
+    fmt.Fprintf(w, "type %s %s", typ.GoTypeName, typ.GoBuiltinName)
+    return nil
+}
+func (typ *goSliceType) WriteDefinition(io.Writer, *generator) error   { return nil }
+func (typ *goPointerType) WriteDefinition(io.Writer, *generator) error { return nil }
+
+func (typ *goOpaqueType) Reference() string             { return typ.GoRef }
+func (typ *goTypenameForBuiltinType) Reference() string { return typ.GoTypeName }
+func (typ *goSliceType) Reference() string              { return "[]" + typ.Elem.Reference() }
+func (typ *goPointerType) Reference() string            { return "*" + typ.Elem.Reference() }
+
+func (typ *goOpaqueType) SelectionSet() ast.SelectionSet             { return nil }
+func (typ *goTypenameForBuiltinType) SelectionSet() ast.SelectionSet { return nil }
+func (typ *goSliceType) SelectionSet() ast.SelectionSet              { return typ.Elem.SelectionSet() }
+func (typ *goPointerType) SelectionSet() ast.SelectionSet            { return typ.Elem.SelectionSet() }
+
+func (typ *goOpaqueType) GraphQLTypeName() string             { return typ.GraphQLName }
+func (typ *goTypenameForBuiltinType) GraphQLTypeName() string { return typ.GraphQLName }
+func (typ *goSliceType) GraphQLTypeName() string              { return typ.Elem.GraphQLTypeName() }
+func (typ *goPointerType) GraphQLTypeName() string            { return typ.Elem.GraphQLTypeName() }
+
+// goEnumType represents a Go named-string type used to represent a GraphQL
+// enum. In this case, we generate both the type (`type T string`) and also a
+// list of consts representing the values.
+type goEnumType struct {
+    GoName      string
+    GraphQLName string
+    Description string
+    Values      []goEnumValue
+}
+
+type goEnumValue struct {
+    Name        string
+    Description string
+}
+
+func (typ *goEnumType) WriteDefinition(w io.Writer, g *generator) error {
+    // All GraphQL enums have underlying type string (in the Go sense).
+    writeDescription(w, typ.Description)
+    fmt.Fprintf(w, "type %s string\n", typ.GoName)
+    fmt.Fprintf(w, "const (\n")
+    for _, val := range typ.Values {
+        writeDescription(w, val.Description)
+        fmt.Fprintf(w, "%s %s = \"%s\"\n",
+            typ.GoName+goConstName(val.Name),
+            typ.GoName, val.Name)
+    }
+    fmt.Fprintf(w, ")\n")
+    return nil
+}
+
+func (typ *goEnumType) Reference() string              { return typ.GoName }
+func (typ *goEnumType) SelectionSet() ast.SelectionSet { return nil }
+func (typ *goEnumType) GraphQLTypeName() string        { return typ.GraphQLName }
+
+// goStructType represents a Go struct type used to represent a GraphQL object
+// or input-object type.
+type goStructType struct {
+    GoName    string
+    Fields    []*goStructField
+    IsInput   bool
+    Selection ast.SelectionSet
+    descriptionInfo
+    Generator *generator // for the convenience of the template
+}
+
+type goStructField struct {
+    GoName      string
+    GoType      goType
+    JSONName    string // i.e. the field's alias in this query
+    GraphQLName string // i.e. the field's name in its type-def
+    Omitempty   bool   // only used on input types
+    Description string
+}
+
+// IsAbstract returns true if this field is of abstract type (i.e. GraphQL
+// union or interface; equivalently, represented by an interface in Go).
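+// (For example, a field whose GraphQL type is a union or interface unwraps
+// to a *goInterfaceType here, so IsAbstract reports true for it.)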
+func (field *goStructField) IsAbstract() bool {
+    _, ok := field.GoType.Unwrap().(*goInterfaceType)
+    return ok
+}
+
+// IsEmbedded returns true if this field is embedded (a.k.a. anonymous), which
+// is in practice true if it corresponds to a named fragment spread in GraphQL.
+func (field *goStructField) IsEmbedded() bool {
+    return field.GoName == ""
+}
+
+// Selector returns the field's name, which is the unqualified type-name if
+// it's embedded.
+func (field *goStructField) Selector() string {
+    if field.GoName != "" {
+        return field.GoName
+    }
+    // TODO(benkraft): This assumes the type is package-local, which is always
+    // true for embedded types for us, but isn't the most robust assumption.
+    return field.GoType.Unwrap().Reference()
+}
+
+// unmarshaler returns:
+// - the name of the function to use to unmarshal this field
+// - true if this is a fully-qualified name (false if it is a package-local
+//   unqualified name)
+// - true if we need to generate an unmarshaler at all, false if the default
+//   behavior will suffice
+func (field *goStructField) unmarshaler() (qualifiedName string, needsImport bool, needsUnmarshaler bool) {
+    switch typ := field.GoType.Unwrap().(type) {
+    case *goOpaqueType:
+        if typ.Unmarshaler != "" {
+            return typ.Unmarshaler, true, true
+        }
+    case *goInterfaceType:
+        return "__unmarshal" + typ.Reference(), false, true
+    }
+    return "encoding/json.Unmarshal", true, field.IsEmbedded()
+}
+
+// Unmarshaler returns the Go name of the function to use to unmarshal this
+// field (which may be "json.Unmarshal" if there's not a special one).
+func (field *goStructField) Unmarshaler(g *generator) (string, error) {
+    name, needsImport, _ := field.unmarshaler()
+    if needsImport {
+        return g.ref(name)
+    }
+    return name, nil
+}
+
+// marshaler returns:
+// - the fully-qualified name of the function to use to marshal this field
+// - true if we need to generate a marshaler at all, false if the default
+//   behavior will suffice
+func (field *goStructField) marshaler() (qualifiedName string, needsImport bool, needsMarshaler bool) {
+    switch typ := field.GoType.Unwrap().(type) {
+    case *goOpaqueType:
+        if typ.Marshaler != "" {
+            return typ.Marshaler, true, true
+        }
+    case *goInterfaceType:
+        return "__marshal" + typ.Reference(), false, true
+    }
+    return "encoding/json.Marshal", true, field.IsEmbedded()
+}
+
+// Marshaler returns the Go name of the function to use to marshal this
+// field (which may be "json.Marshal" if there's not a special one).
+func (field *goStructField) Marshaler(g *generator) (string, error) {
+    name, needsImport, _ := field.marshaler()
+    if needsImport {
+        return g.ref(name)
+    }
+    return name, nil
+}
+
+// NeedsMarshaling returns true if this field needs special handling when
+// marshaling and unmarshaling, e.g. if it has a user-specified custom
+// (un)marshaler. Note if it needs one, it needs the other: even if the user
+// only specified an unmarshaler, we need to add `json:"-"` to the field, which
+// means we need to specially handle it when marshaling.
+func (field *goStructField) NeedsMarshaling() bool {
+    _, _, ok1 := field.marshaler()
+    _, _, ok2 := field.unmarshaler()
+    return ok1 || ok2
+}
+
+// NeedsMarshaling returns true if any fields of this type need special
+// handling when (un)marshaling (see goStructField.NeedsMarshaling).
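+// (One such field is enough: if any field needs special handling we generate
+// MarshalJSON/UnmarshalJSON for the whole struct, and those methods handle
+// the remaining fields in the ordinary way.)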
+func (typ *goStructType) NeedsMarshaling() bool {
+    for _, f := range typ.Fields {
+        if f.NeedsMarshaling() {
+            return true
+        }
+    }
+    return false
+}
+
+// selector represents a field and the path to get there from the type in
+// question, and is used in FlattenedFields, below.
+type selector struct {
+    *goStructField
+    // e.g. "OuterEmbed.InnerEmbed.LeafField"
+    Selector string
+}
+
+// FlattenedFields returns the fields of this type and its recursive embeds,
+// and the paths to reach them (via those embeds), but with different
+// visibility rules for conflicting fields than Go.
+//
+// (Before you read further, now's a good time to review [Go's rules].
+// Done? Good.)
+//
+// To illustrate the need, consider the following query:
+//
+//   fragment A on T { id }
+//   fragment B on T { id }
+//   query Q { t { ...A ...B } }
+//
+// We generate types:
+//
+//   type A struct { Id string `json:"id"` }
+//   type B struct { Id string `json:"id"` }
+//   type QT struct { A; B }
+//
+// According to Go's embedding rules, QT has no field Id: since QT.A.Id and
+// QT.B.Id are at equal depth, neither wins and gets promoted. (Go's JSON
+// library uses similar logic to decide which field to write to JSON, except
+// with the additional rule that a field with a JSON tag wins over a field
+// without; in our case both have such a field.)
+//
+// Those rules don't work for us. When unmarshaling, we want to fill in all
+// the potentially-matching fields (QT.A.Id and QT.B.Id in this case), and when
+// marshaling, we want to always marshal exactly one potentially-conflicting
+// field; we're happy to use the Go visibility rules when they apply but we
+// need to always marshal one field, even if there's not a clear best choice.
+// For unmarshaling, our QT.UnmarshalJSON ends up unmarshaling the same JSON
+// object into QT, QT.A, and QT.B, which gives us the behavior we want. But
+// for marshaling, we need to resolve the conflicts: if we simply marshaled QT,
+// QT.A, and QT.B, we'd have to do some JSON-surgery to join them, and we'd
+// probably end up with duplicate fields, which leads to unpredictable behavior
+// based on the reader. That's no good.
+//
+// So: instead, we have our own rules, which work like the Go rules, except
+// that if there's a tie we choose the first field (in source order). (In
+// practice, hopefully, they all match, but validating that is even more work
+// for a fairly rare case.) This function returns, for each JSON-name, the Go
+// field we want to use. In the example above, it would return:
+//
+//   []selector{{<the field QT.A.Id>, "A.Id"}}
+//
+// [Go's rules]: https://golang.org/ref/spec#Selectors
+func (typ *goStructType) FlattenedFields() ([]*selector, error) {
+    seenJSONNames := map[string]bool{}
+    retval := make([]*selector, 0, len(typ.Fields))
+
+    queue := make([]*selector, len(typ.Fields))
+    for i, field := range typ.Fields {
+        queue[i] = &selector{field, field.Selector()}
+    }
+
+    // Since our (non-embedded) fields always have JSON tags, the logic we want
+    // is simply: do a breadth-first search through the recursively embedded
+    // fields, and take the first one we see with a given JSON tag.
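+    // (Walking the doc comment's example: the queue starts as [A, B];
+    // dequeuing the embeds enqueues A.Id and then B.Id; A.Id is the first
+    // selector seen for JSON name "id", so B.Id is skipped.)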
+ for len(queue) > 0 { + field := queue[0] + queue = queue[1:] + if field.IsEmbedded() { + typ, ok := field.GoType.(*goStructType) + if !ok { + // Should never happen: embeds correspond to named fragments, + // and even if the fragment is of interface type in GraphQL, + // either it's spread into a concrete type, or we are writing + // one of the implementations of the interface into which it's + // spread; either way we embed the corresponding implementation + // of the fragment. + return nil, errorf(nil, + "genqlient internal error: embedded field %s.%s was not a struct", + typ.GoName, field.GoName) + } + + // Enqueue the embedded fields for our BFS. + for _, subField := range typ.Fields { + queue = append(queue, + &selector{subField, field.Selector + "." + subField.Selector()}) + } + continue + } + + if seenJSONNames[field.JSONName] { + // We already chose a selector for this JSON field. Skip it. + continue + } + + // Else, we are the selector we are looking for. + seenJSONNames[field.JSONName] = true + retval = append(retval, field) + } + return retval, nil +} + +func (typ *goStructType) WriteDefinition(w io.Writer, g *generator) error { + writeDescription(w, structDescription(typ)) + + fmt.Fprintf(w, "type %s struct {\n", typ.GoName) + for _, field := range typ.Fields { + writeDescription(w, field.Description) + jsonTag := `"` + field.JSONName + if field.Omitempty { + jsonTag += ",omitempty" + } + jsonTag += `"` + if field.NeedsMarshaling() { + // certain types are handled in our (Un)MarshalJSON (see below) + jsonTag = `"-"` + } + // Note for embedded types field.GoName is "", which produces the code + // we want! + fmt.Fprintf(w, "\t%s %s `json:%s`\n", + field.GoName, field.GoType.Reference(), jsonTag) + } + fmt.Fprintf(w, "}\n") + + // Write out getter methods for each field. These are most useful for + // shared fields of an interface -- the methods will be included in the + // interface. But they can be useful in other cases, for example where you + // have a union several of whose members have a shared field (and can + // thereby be handled together). For simplicity's sake, we just write the + // methods always. + // + // Note we use the *flattened* fields here, which ensures we avoid + // conflicts in the case where multiple embedded types include the same + // field. + flattened, err := typ.FlattenedFields() + if err != nil { + return err + } + for _, field := range flattened { + description := fmt.Sprintf( + "Get%s returns %s.%s, and is useful for accessing the field via an interface.", + field.GoName, typ.GoName, field.GoName) + writeDescription(w, description) + fmt.Fprintf(w, "func (v *%s) Get%s() %s { return v.%s }\n", + typ.GoName, field.GoName, field.GoType.Reference(), field.Selector) + } + + // Now, if needed, write the marshaler/unmarshaler. We need one if we have + // any interface-typed fields, or any embedded fields. + // + // For interface-typed fields, ideally we'd write an UnmarshalJSON method + // on the field, but you can't add a method to an interface. So we write a + // per-interface-type helper, but we have to call it (with a little + // boilerplate) everywhere the type is referenced. + // + // For embedded fields (from fragments), mostly the JSON library would just + // do what we want, but there are two problems. First, if the embedded + // type has its own UnmarshalJSON, naively that would be promoted to + // become our UnmarshalJSON, which is no good. 
But we don't want to just + // hide that method and inline its fields, either; we need to call its + // UnmarshalJSON (on the same object we unmarshal into this struct). + // Second, if the embedded type duplicates any fields of the embedding type + // -- maybe both the fragment and the selection into which it's spread + // select the same field, or several fragments select the same field -- the + // JSON library will only fill one of those (the least-nested one); we want + // to fill them all. + // + // For fields with a custom marshaler or unmarshaler, we do basically the + // same thing as interface-typed fields, except the user has defined the + // helper. + // + // Note that genqlient itself only uses unmarshalers for output types, and + // marshalers for input types. But we write both in case you want to write + // your data to JSON for some reason (say to put it in a cache). (And we + // need to write both if we need to write either, because in such cases we + // write a `json:"-"` tag on the field.) + // + // TODO(benkraft): If/when proposal #5901 is implemented (Go 1.18 at the + // earliest), we may be able to do some of this a simpler way. + if typ.NeedsMarshaling() { + err := g.render("unmarshal.go.tmpl", w, typ) + if err != nil { + return err + } + err = g.render("marshal.go.tmpl", w, typ) + if err != nil { + return err + } + } + return nil +} + +func (typ *goStructType) Reference() string { return typ.GoName } +func (typ *goStructType) SelectionSet() ast.SelectionSet { return typ.Selection } +func (typ *goStructType) GraphQLTypeName() string { return typ.GraphQLName } + +// goInterfaceType represents a Go interface type, used to represent a GraphQL +// interface or union type. +type goInterfaceType struct { + GoName string + // Fields shared by all the interface's implementations; + // we'll generate getter methods for each. + SharedFields []*goStructField + Implementations []*goStructType + Selection ast.SelectionSet + descriptionInfo +} + +func (typ *goInterfaceType) WriteDefinition(w io.Writer, g *generator) error { + writeDescription(w, interfaceDescription(typ)) + + // Write the interface. + fmt.Fprintf(w, "type %s interface {\n", typ.GoName) + implementsMethodName := fmt.Sprintf("implementsGraphQLInterface%v", typ.GoName) + fmt.Fprintf(w, "\t%s()\n", implementsMethodName) + for _, sharedField := range typ.SharedFields { + if sharedField.GoName == "" { // embedded type + fmt.Fprintf(w, "\t%s\n", sharedField.GoType.Reference()) + continue + } + + methodName := "Get" + sharedField.GoName + description := "" + if sharedField.GraphQLName == "__typename" { + description = fmt.Sprintf( + "%s returns the receiver's concrete GraphQL type-name "+ + "(see interface doc for possible values).", methodName) + } else { + description = fmt.Sprintf( + `%s returns the interface-field %q from its implementation.`, + methodName, sharedField.GraphQLName) + if sharedField.Description != "" { + description = fmt.Sprintf( + "%s\nThe GraphQL interface field's documentation follows.\n\n%s", + description, sharedField.Description) + } + } + + writeDescription(w, description) + fmt.Fprintf(w, "\t%s() %s\n", methodName, sharedField.GoType.Reference()) + } + fmt.Fprintf(w, "}\n") + + // Now, write out the implementations. 
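+    // (For a hypothetical GraphQL interface Animal implemented by Dog and
+    // Cat, this loop emits
+    //     func (v *Dog) implementsGraphQLInterfaceAnimal() {}
+    //     func (v *Cat) implementsGraphQLInterfaceAnimal() {}
+    // so that only those structs satisfy the generated Go interface.)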
+ for _, impl := range typ.Implementations { + fmt.Fprintf(w, "func (v *%s) %s() {}\n", + impl.Reference(), implementsMethodName) + } + + // Finally, write the marshal- and unmarshal-helpers, which + // will be called by struct fields referencing this type (see + // goStructType.WriteDefinition). + err := g.render("unmarshal_helper.go.tmpl", w, typ) + if err != nil { + return err + } + return g.render("marshal_helper.go.tmpl", w, typ) +} + +func (typ *goInterfaceType) Reference() string { return typ.GoName } +func (typ *goInterfaceType) SelectionSet() ast.SelectionSet { return typ.Selection } +func (typ *goInterfaceType) GraphQLTypeName() string { return typ.GraphQLName } + +func (typ *goOpaqueType) Unwrap() goType { return typ } +func (typ *goTypenameForBuiltinType) Unwrap() goType { return typ } +func (typ *goSliceType) Unwrap() goType { return typ.Elem.Unwrap() } +func (typ *goPointerType) Unwrap() goType { return typ.Elem.Unwrap() } +func (typ *goEnumType) Unwrap() goType { return typ } +func (typ *goStructType) Unwrap() goType { return typ } +func (typ *goInterfaceType) Unwrap() goType { return typ } + +func (typ *goOpaqueType) SliceDepth() int { return 0 } +func (typ *goTypenameForBuiltinType) SliceDepth() int { return 0 } +func (typ *goSliceType) SliceDepth() int { return typ.Elem.SliceDepth() + 1 } +func (typ *goPointerType) SliceDepth() int { return 0 } +func (typ *goEnumType) SliceDepth() int { return 0 } +func (typ *goStructType) SliceDepth() int { return 0 } +func (typ *goInterfaceType) SliceDepth() int { return 0 } + +func (typ *goOpaqueType) IsPointer() bool { return false } +func (typ *goTypenameForBuiltinType) IsPointer() bool { return false } +func (typ *goSliceType) IsPointer() bool { return typ.Elem.IsPointer() } +func (typ *goPointerType) IsPointer() bool { return true } +func (typ *goEnumType) IsPointer() bool { return false } +func (typ *goStructType) IsPointer() bool { return false } +func (typ *goInterfaceType) IsPointer() bool { return false } + +func writeDescription(w io.Writer, desc string) { + if desc != "" { + for _, line := range strings.Split(desc, "\n") { + fmt.Fprintf(w, "// %s\n", strings.TrimLeft(line, " \t")) + } + } +} diff --git a/vendor/github.com/Khan/genqlient/generate/unmarshal.go.tmpl b/vendor/github.com/Khan/genqlient/generate/unmarshal.go.tmpl new file mode 100644 index 00000000..1db59677 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/unmarshal.go.tmpl @@ -0,0 +1,154 @@ +{{/* We need to generate UnmarshalJSON methods for some types that we want to + handle specially. Specifically, we generate an UnmarshalJSON for each + struct with a field meeting any of these criteria: + - a field whose type is configured with a custom unmarshaler + - an embedded (anonymous) field; technically we only need an + UnmarshalJSON if the embedded type needs one, but it's easier to + generate it unconditionally + - a field of interface type + Additionally, since we add `json:"-"` to fields we handle specially, for + any field which requires a MarshalJSON, we also generate an UnmarshalJSON, + and vice versa. + + Given that, we want to specially handle the above-described fields, but + unmarshal everything else normally. To handle fields with custom + unmarshalers, first we unmarshal them into a json.RawMessage, then call + the custom unmarshaler. Interface-typed fields are similar, except + instead of a custom unmarshaler we call the helper we've generated + (see unmarshal_helper.go.tmpl). 
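+   (As an illustration with assumed names, not generated output: for a field
+   `friends []Character` where Character is a GraphQL interface, the first
+   pass reads `friends` into a []json.RawMessage, and we then call the
+   generated __unmarshalCharacter helper on each element.)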
Embedded fields don't need the
+   json.RawMessage; we just unmarshal our input again into the embedded
+   field. */}}
+
+func (v *{{.GoName}}) UnmarshalJSON(b []byte) error {
+    {{/* Standard convention for unmarshalers is to no-op on null. */}}
+    if string(b) == "null" {
+        return nil
+    }
+
+    {{/* For our first pass, we ignore embedded fields, unmarshal all the
+         custom-unmarshaler or abstract fields into json.RawMessage, and
+         unmarshal everything else normally. To do this, we want to call
+         json.Unmarshal on the receiver (v). But if we do that naively on a
+         value of type <.GoName>, it will call this function again, and
+         recurse infinitely. So we make a wrapper type which embeds both this
+         type and NoUnmarshalJSON, which prevents either's UnmarshalJSON
+         method from being promoted. For more on why this is so difficult, see
+         https://github.com/benjaminjkraft/notes/blob/master/go-json-interfaces.md.
+         (Note there are a few different ways to "hide" the method, but this
+         one seems to be the best option that works if this type has embedded
+         types with UnmarshalJSON methods.)
+    */}}
+
+    {{/* TODO(benkraft): Omit/simplify the first pass if all fields are
+         embedded/abstract. */ -}}
+    var firstPass struct{
+        *{{.GoName}}
+        {{range .Fields -}}
+        {{if and .NeedsMarshaling (not .IsEmbedded) -}}
+        {{.GoName}} {{repeat .GoType.SliceDepth "[]"}}{{ref "encoding/json.RawMessage"}} `json:"{{.JSONName}}"`
+        {{end -}}
+        {{end -}}
+        {{/* TODO(benkraft): In principle you might have a field-name that
+             conflicts with this one; avoid that. */ -}}
+        {{ref "github.com/Khan/genqlient/graphql.NoUnmarshalJSON"}}
+    }
+    firstPass.{{.GoName}} = v
+
+    err := {{ref "encoding/json.Unmarshal"}}(b, &firstPass)
+    if err != nil {
+        return err
+    }
+
+    {{/* Now, handle the fields needing special handling. */}}
+    {{range $field := .Fields -}}
+    {{if $field.NeedsMarshaling -}}
+    {{if $field.IsEmbedded -}}
+    {{/* Embedded fields are easier: we just unmarshal the same input into
+         them. (They're also easier because they can't be lists, since they
+         arise from GraphQL fragment spreads.)
+
+         Note that our behavior if you have two fields of the same name via
+         different embeds differs from ordinary json-unmarshaling: we
+         unmarshal into *all* of the fields. See goStructType.FlattenedFields
+         in types.go for more discussion of embedding and visibility. */ -}}
+    err = {{$field.Unmarshaler $.Generator}}(
+        b, &v.{{$field.GoType.Unwrap.Reference}})
+    if err != nil {
+        return err
+    }
+    {{else -}}
+    {{/* For other fields (abstract or custom unmarshaler), first, call the
+         unmarshaler (our unmarshal-helper, or the user-specified one,
+         respectively). This gets a little complicated because we may have
+         a slice field. So what we do is basically, for each field of type
+         `[][]...[]MyType`:
+
+            dst := &v.MyField        // *[][]...[]MyType
+            src := firstPass.MyField // [][]...[]json.RawMessage
+
+            // repeat the following three lines n times; each time, inside
+            // the loop we have one less layer of slice on src and dst
+            *dst = make([][]...[]MyType, len(src))
+            for i, src := range src {
+                // We need the &(*dst)[i] because at each stage we want to
+                // keep dst as a pointer. (It only really has to be a
+                // pointer at the innermost level, but it's easiest to be
+                // consistent.)
+                dst := &(*dst)[i]
+
+                // (now we have `dst *MyType` and `src json.RawMessage`)
+                __unmarshalMyType(dst, src)
+
+            } // (also n times)
+
+         Note that if the field also uses a pointer (`[][]...[]*MyType`), we
+         now pass around `*[][]...[]*MyType`; again in principle
+         `[][]...[]*MyType` would work but require more special-casing. Thus
+         in the innermost loop, `dst` is of type `**MyType`, so we have to
+         pass `*dst` to the unmarshaler. Of course, since MyType is an
+         interface, I'm not sure why you'd do any of that anyway.
+
+         One additional trick is we wrap everything above in a block
+         ({ ... }), so that the variables dst and src may take on different
+         types for each field we are handling, which would otherwise conflict.
+         (We could instead suffix the names, but that makes things much harder
+         to read.)
+    */}}
+    {
+        dst := &v.{{$field.GoName}}
+        src := firstPass.{{$field.GoName}}
+        {{range $i := intRange $field.GoType.SliceDepth -}}
+        *dst = make(
+            {{repeat (sub $field.GoType.SliceDepth $i) "[]"}}{{if $field.GoType.IsPointer}}*{{end}}{{$field.GoType.Unwrap.Reference}},
+            len(src))
+        for i, src := range src {
+            dst := &(*dst)[i]
+        {{end -}}
+        {{/* dst now has type *<elem>; src is json.RawMessage */ -}}
+        {{/* If the field is null in the input, skip calling unmarshaler.
+             (This matches json.Unmarshal.) If the field is missing entirely
+             from the input, we will have an uninitialized json.RawMessage;
+             handle that likewise. */ -}}
+        if len(src) != 0 && string(src) != "null" {
+            {{if $field.GoType.IsPointer -}}
+            {{/* In this case, the parent for loop did `make([]*MyType, ...)`
+                 and we have a pointer into that list. But we actually still
+                 need to initialize the *elements* of the list. */ -}}
+            *dst = new({{$field.GoType.Unwrap.Reference}})
+            {{end -}}
+            err = {{$field.Unmarshaler $.Generator}}(
+                src, {{if $field.GoType.IsPointer}}*{{end}}dst)
+            if err != nil {
+                return fmt.Errorf(
+                    "unable to unmarshal {{$.GoName}}.{{$field.GoName}}: %w", err)
+            }
+        }
+        {{range $i := intRange $field.GoType.SliceDepth -}}
+        }
+        {{end -}}
+    }
+    {{end}}{{/* end if/else .IsEmbedded */ -}}
+    {{end}}{{/* end if .NeedsMarshaling */ -}}
+    {{end}}{{/* end range .Fields */ -}}
+
+    return nil
+}
diff --git a/vendor/github.com/Khan/genqlient/generate/unmarshal_helper.go.tmpl b/vendor/github.com/Khan/genqlient/generate/unmarshal_helper.go.tmpl
new file mode 100644
index 00000000..bab2710e
--- /dev/null
+++ b/vendor/github.com/Khan/genqlient/generate/unmarshal_helper.go.tmpl
@@ -0,0 +1,36 @@
+{{/* This template generates a helper for each interface type genqlient must
+     unmarshal. This helper is called by the UnmarshalJSON of each (struct)
+     type with a field of the interface type, similar to how it calls custom
+     unmarshalers. The helper itself is fairly simple: it just parses out and
+     switches on __typename, then unmarshals into the relevant struct. */}}
+
+func __unmarshal{{.GoName}}(b []byte, v *{{.GoName}}) error {
+    if string(b) == "null" {
+        return nil
+    }
+
+    var tn struct {
+        TypeName string `json:"__typename"`
+    }
+    err := {{ref "encoding/json.Unmarshal"}}(b, &tn)
+    if err != nil {
+        return err
+    }
+
+    switch tn.TypeName {
+    {{range .Implementations -}}
+    case "{{.GraphQLName}}":
+        *v = new({{.GoName}})
+        return {{ref "encoding/json.Unmarshal"}}(b, *v)
+    {{end -}}
+    case "":
+        {{/* Likely if we're making a request to a mock server and the author
+             of the mock didn't know to add __typename, so give a special
+             error.
*/ -}} + return {{ref "fmt.Errorf"}}( + "response was missing {{.GraphQLName}}.__typename") + default: + return {{ref "fmt.Errorf"}}( + `unexpected concrete type for {{.GoName}}: "%v"`, tn.TypeName) + } +} diff --git a/vendor/github.com/Khan/genqlient/generate/util.go b/vendor/github.com/Khan/genqlient/generate/util.go new file mode 100644 index 00000000..c424c7f1 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/util.go @@ -0,0 +1,77 @@ +package generate + +import ( + "strings" + "unicode" + "unicode/utf8" +) + +func reverse(slice []string) { + for left, right := 0, len(slice)-1; left < right; left, right = left+1, right-1 { + slice[left], slice[right] = slice[right], slice[left] + } +} + +func changeFirst(s string, f func(rune) rune) string { + c, n := utf8.DecodeRuneInString(s) + if c == utf8.RuneError { // empty or invalid + return s + } + return string(f(c)) + s[n:] +} + +func lowerFirst(s string) string { + return changeFirst(strings.TrimLeft(s, "_"), unicode.ToLower) +} + +func upperFirst(s string) string { + return changeFirst(strings.TrimLeft(s, "_"), unicode.ToUpper) +} + +func goConstName(s string) string { + if strings.TrimLeft(s, "_") == "" { + return s + } + var prev rune + return strings.Map(func(r rune) rune { + var ret rune + if r == '_' { + ret = -1 + } else if prev == '_' || prev == 0 { + ret = unicode.ToUpper(r) + } else { + ret = unicode.ToLower(r) + } + prev = r + return ret + }, s) +} + +// https://go.dev/ref/spec#Keywords +var goKeywords = map[string]bool{ + "break": true, + "default": true, + "func": true, + "interface": true, + "select": true, + "case": true, + "defer": true, + "go": true, + "map": true, + "struct": true, + "chan": true, + "else": true, + "goto": true, + "package": true, + "switch": true, + "const": true, + "fallthrough": true, + "if": true, + "range": true, + "type": true, + "continue": true, + "for": true, + "import": true, + "return": true, + "var": true, +} diff --git a/vendor/github.com/Khan/genqlient/generate/validation.go b/vendor/github.com/Khan/genqlient/generate/validation.go new file mode 100644 index 00000000..048996a8 --- /dev/null +++ b/vendor/github.com/Khan/genqlient/generate/validation.go @@ -0,0 +1,112 @@ +package generate + +// This file contains helpers to do various bits of validation in the process +// of converting types to Go, notably, for cases where we need to check that +// two types match. + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/parser" +) + +// selectionsMatch recursively compares the two selection-sets, and returns an +// error if they differ. +// +// It does not check arguments and directives, only field names, aliases, +// order, and fragment-structure. It does not recurse into named fragments, it +// only checks that their names match. +// +// If both selection-sets are nil/empty, they compare equal. 
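+//
+// (Illustration: the selection `{ id name }` matches `{ id name }`, but not
+// `{ name id }`, where the order differs, and not `{ id }`, where the length
+// differs.)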
+func selectionsMatch( + pos *ast.Position, + expectedSelectionSet, actualSelectionSet ast.SelectionSet, +) error { + if len(expectedSelectionSet) != len(actualSelectionSet) { + return errorf( + pos, "expected %d fields, got %d", + len(expectedSelectionSet), len(actualSelectionSet)) + } + + for i, expected := range expectedSelectionSet { + switch expected := expected.(type) { + case *ast.Field: + actual, ok := actualSelectionSet[i].(*ast.Field) + switch { + case !ok: + return errorf(actual.Position, + "expected selection #%d to be field, got %T", + i, actualSelectionSet[i]) + case actual.Name != expected.Name: + return errorf(actual.Position, + "expected field %d to be %s, got %s", + i, expected.Name, actual.Name) + case actual.Alias != expected.Alias: + return errorf(actual.Position, + "expected field %d's alias to be %s, got %s", + i, expected.Alias, actual.Alias) + } + err := selectionsMatch(actual.Position, expected.SelectionSet, actual.SelectionSet) + if err != nil { + return fmt.Errorf("in %s sub-selection: %w", actual.Alias, err) + } + case *ast.InlineFragment: + actual, ok := actualSelectionSet[i].(*ast.InlineFragment) + switch { + case !ok: + return errorf(actual.Position, + "expected selection %d to be inline fragment, got %T", + i, actualSelectionSet[i]) + case actual.TypeCondition != expected.TypeCondition: + return errorf(actual.Position, + "expected fragment %d to be on type %s, got %s", + i, expected.TypeCondition, actual.TypeCondition) + } + err := selectionsMatch(actual.Position, expected.SelectionSet, actual.SelectionSet) + if err != nil { + return fmt.Errorf("in inline fragment on %s: %w", actual.TypeCondition, err) + } + case *ast.FragmentSpread: + actual, ok := actualSelectionSet[i].(*ast.FragmentSpread) + switch { + case !ok: + return errorf(actual.Position, + "expected selection %d to be fragment spread, got %T", + i, actualSelectionSet[i]) + case actual.Name != expected.Name: + return errorf(actual.Position, + "expected fragment %d to be ...%s, got ...%s", + i, expected.Name, actual.Name) + } + } + } + return nil +} + +// validateBindingSelection checks that if you requested in your type-binding +// that this type must always request certain fields, then in fact it does. +func (g *generator) validateBindingSelection( + typeName string, + binding *TypeBinding, + pos *ast.Position, + selectionSet ast.SelectionSet, +) error { + if binding.ExpectExactFields == "" { + return nil // no validation requested + } + + // HACK: we parse the selection as if it were a query, which is basically + // the same (for syntax purposes; it of course wouldn't validate) + doc, gqlErr := parser.ParseQuery(&ast.Source{Input: binding.ExpectExactFields}) + if gqlErr != nil { + return errorf( + nil, "invalid type-binding %s.expect_exact_fields: %w", typeName, gqlErr) + } + + err := selectionsMatch(pos, doc.Operations[0].SelectionSet, selectionSet) + if err != nil { + return fmt.Errorf("invalid selection for type-binding %s: %w", typeName, err) + } + return nil +} diff --git a/vendor/github.com/Khan/genqlient/main.go b/vendor/github.com/Khan/genqlient/main.go new file mode 100644 index 00000000..7c3cd8ff --- /dev/null +++ b/vendor/github.com/Khan/genqlient/main.go @@ -0,0 +1,19 @@ +// genqlient is a GraphQL client generator for Go. +// +// To run genqlient: +// +// go run github.com/Khan/genqlient +// +// For programmatic access, see the "generate" package, below. For +// user documentation, see the project [GitHub]. 
+// +// [GitHub]: https://github.com/Khan/genqlient +package main + +import ( + "github.com/Khan/genqlient/generate" +) + +func main() { + generate.Main() +} diff --git a/vendor/github.com/agnivade/levenshtein/.gitignore b/vendor/github.com/agnivade/levenshtein/.gitignore new file mode 100644 index 00000000..345780a4 --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/.gitignore @@ -0,0 +1,5 @@ +coverage.txt +fuzz/fuzz-fuzz.zip +fuzz/corpus/corpus/* +fuzz/corpus/suppressions/* +fuzz/corpus/crashes/* diff --git a/vendor/github.com/agnivade/levenshtein/.travis.yml b/vendor/github.com/agnivade/levenshtein/.travis.yml new file mode 100644 index 00000000..0873fa98 --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/.travis.yml @@ -0,0 +1,23 @@ +language: go + +# See https://travis-ci.community/t/goos-js-goarch-wasm-go-run-fails-panic-newosproc-not-implemented/1651 +#addons: +# chrome: stable + +before_install: +- export GO111MODULE=on + +#install: +#- go get github.com/agnivade/wasmbrowsertest +#- mv $GOPATH/bin/wasmbrowsertest $GOPATH/bin/go_js_wasm_exec +#- export PATH=$GOPATH/bin:$PATH + +go: +- 1.13.x +- 1.14.x +- 1.15.x +- tip + +script: +#- GOOS=js GOARCH=wasm go test -v +- go test -v diff --git a/vendor/github.com/agnivade/levenshtein/License.txt b/vendor/github.com/agnivade/levenshtein/License.txt new file mode 100644 index 00000000..54b51f49 --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/License.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Agniva De Sarker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/agnivade/levenshtein/Makefile b/vendor/github.com/agnivade/levenshtein/Makefile new file mode 100644 index 00000000..5f6890d6 --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/Makefile @@ -0,0 +1,15 @@ +all: test install + +install: + go install + +lint: + gofmt -l -s -w . && go vet . && golint -set_exit_status=1 . + +test: # The first 2 go gets are to support older Go versions + go get github.com/arbovm/levenshtein + go get github.com/dgryski/trifles/leven + GO111MODULE=on go test -race -v -coverprofile=coverage.txt -covermode=atomic + +bench: + go test -run=XXX -bench=. 
-benchmem -count=5 diff --git a/vendor/github.com/agnivade/levenshtein/README.md b/vendor/github.com/agnivade/levenshtein/README.md new file mode 100644 index 00000000..13c52a21 --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/README.md @@ -0,0 +1,80 @@ +levenshtein [![Build Status](https://travis-ci.org/agnivade/levenshtein.svg?branch=master)](https://travis-ci.org/agnivade/levenshtein) [![Go Report Card](https://goreportcard.com/badge/github.com/agnivade/levenshtein)](https://goreportcard.com/report/github.com/agnivade/levenshtein) [![PkgGoDev](https://pkg.go.dev/badge/github.com/agnivade/levenshtein)](https://pkg.go.dev/github.com/agnivade/levenshtein) +=========== + +[Go](http://golang.org) package to calculate the [Levenshtein Distance](http://en.wikipedia.org/wiki/Levenshtein_distance) + +The library is fully capable of working with non-ascii strings. But the strings are not normalized. That is left as a user-dependant use case. Please normalize the strings before passing it to the library if you have such a requirement. +- https://blog.golang.org/normalization + +#### Limitation + +As a performance optimization, the library can handle strings only up to 65536 characters (runes). If you need to handle strings larger than that, please pin to version 1.0.3. + +Install +------- + + go get github.com/agnivade/levenshtein + +Example +------- + +```go +package main + +import ( + "fmt" + "github.com/agnivade/levenshtein" +) + +func main() { + s1 := "kitten" + s2 := "sitting" + distance := levenshtein.ComputeDistance(s1, s2) + fmt.Printf("The distance between %s and %s is %d.\n", s1, s2, distance) + // Output: + // The distance between kitten and sitting is 3. +} + +``` + +Benchmarks +---------- + +``` +name time/op +Simple/ASCII-4 330ns ± 2% +Simple/French-4 617ns ± 2% +Simple/Nordic-4 1.16µs ± 4% +Simple/Tibetan-4 1.05µs ± 1% + +name alloc/op +Simple/ASCII-4 96.0B ± 0% +Simple/French-4 128B ± 0% +Simple/Nordic-4 192B ± 0% +Simple/Tibetan-4 144B ± 0% + +name allocs/op +Simple/ASCII-4 1.00 ± 0% +Simple/French-4 1.00 ± 0% +Simple/Nordic-4 1.00 ± 0% +Simple/Tibetan-4 1.00 ± 0% +``` + +Comparisons with other libraries +-------------------------------- + +``` +name time/op +Leven/ASCII/agniva-4 353ns ± 1% +Leven/ASCII/arbovm-4 485ns ± 1% +Leven/ASCII/dgryski-4 395ns ± 0% +Leven/French/agniva-4 648ns ± 1% +Leven/French/arbovm-4 791ns ± 0% +Leven/French/dgryski-4 682ns ± 0% +Leven/Nordic/agniva-4 1.28µs ± 1% +Leven/Nordic/arbovm-4 1.52µs ± 1% +Leven/Nordic/dgryski-4 1.32µs ± 1% +Leven/Tibetan/agniva-4 1.12µs ± 1% +Leven/Tibetan/arbovm-4 1.31µs ± 0% +Leven/Tibetan/dgryski-4 1.16µs ± 0% +``` diff --git a/vendor/github.com/agnivade/levenshtein/levenshtein.go b/vendor/github.com/agnivade/levenshtein/levenshtein.go new file mode 100644 index 00000000..f727a66f --- /dev/null +++ b/vendor/github.com/agnivade/levenshtein/levenshtein.go @@ -0,0 +1,89 @@ +// Package levenshtein is a Go implementation to calculate Levenshtein Distance. +// +// Implementation taken from +// https://gist.github.com/andrei-m/982927#gistcomment-1931258 +package levenshtein + +import "unicode/utf8" + +// minLengthThreshold is the length of the string beyond which +// an allocation will be made. Strings smaller than this will be +// zero alloc. +const minLengthThreshold = 32 + +// ComputeDistance computes the levenshtein distance between the two +// strings passed as an argument. The return value is the levenshtein distance +// +// Works on runes (Unicode code points) but does not normalize +// the input strings. 
See https://blog.golang.org/normalization +// and the golang.org/x/text/unicode/norm package. +func ComputeDistance(a, b string) int { + if len(a) == 0 { + return utf8.RuneCountInString(b) + } + + if len(b) == 0 { + return utf8.RuneCountInString(a) + } + + if a == b { + return 0 + } + + // We need to convert to []rune if the strings are non-ASCII. + // This could be avoided by using utf8.RuneCountInString + // and then doing some juggling with rune indices, + // but leads to far more bounds checks. It is a reasonable trade-off. + s1 := []rune(a) + s2 := []rune(b) + + // swap to save some memory O(min(a,b)) instead of O(a) + if len(s1) > len(s2) { + s1, s2 = s2, s1 + } + lenS1 := len(s1) + lenS2 := len(s2) + + // Init the row. + var x []uint16 + if lenS1+1 > minLengthThreshold { + x = make([]uint16, lenS1+1) + } else { + // We make a small optimization here for small strings. + // Because a slice of constant length is effectively an array, + // it does not allocate. So we can re-slice it to the right length + // as long as it is below a desired threshold. + x = make([]uint16, minLengthThreshold) + x = x[:lenS1+1] + } + + // we start from 1 because index 0 is already 0. + for i := 1; i < len(x); i++ { + x[i] = uint16(i) + } + + // make a dummy bounds check to prevent the 2 bounds check down below. + // The one inside the loop is particularly costly. + _ = x[lenS1] + // fill in the rest + for i := 1; i <= lenS2; i++ { + prev := uint16(i) + for j := 1; j <= lenS1; j++ { + current := x[j-1] // match + if s2[i-1] != s1[j-1] { + current = min(min(x[j-1]+1, prev+1), x[j]+1) + } + x[j-1] = prev + prev = current + } + x[lenS1] = prev + } + return int(x[lenS1]) +} + +func min(a, b uint16) uint16 { + if a < b { + return a + } + return b +} diff --git a/vendor/github.com/alexflint/go-arg/.gitignore b/vendor/github.com/alexflint/go-arg/.gitignore new file mode 100644 index 00000000..daf913b1 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/alexflint/go-arg/LICENSE b/vendor/github.com/alexflint/go-arg/LICENSE new file mode 100644 index 00000000..a50c4942 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2015, Alex Flint +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/alexflint/go-arg/README.md b/vendor/github.com/alexflint/go-arg/README.md new file mode 100644 index 00000000..48fa2f07 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/README.md @@ -0,0 +1,525 @@ +

+# go-arg
+
+Struct-based argument parsing for Go
+
+Badges: Sourcegraph, Documentation, Build Status, Coverage Status, Go Report Card
+ +Declare command line arguments for your program by defining a struct. + +```go +var args struct { + Foo string + Bar bool +} +arg.MustParse(&args) +fmt.Println(args.Foo, args.Bar) +``` + +```shell +$ ./example --foo=hello --bar +hello true +``` + +### Installation + +```shell +go get github.com/alexflint/go-arg +``` + +### Required arguments + +```go +var args struct { + ID int `arg:"required"` + Timeout time.Duration +} +arg.MustParse(&args) +``` + +```shell +$ ./example +Usage: example --id ID [--timeout TIMEOUT] +error: --id is required +``` + +### Positional arguments + +```go +var args struct { + Input string `arg:"positional"` + Output []string `arg:"positional"` +} +arg.MustParse(&args) +fmt.Println("Input:", args.Input) +fmt.Println("Output:", args.Output) +``` + +``` +$ ./example src.txt x.out y.out z.out +Input: src.txt +Output: [x.out y.out z.out] +``` + +### Environment variables + +```go +var args struct { + Workers int `arg:"env"` +} +arg.MustParse(&args) +fmt.Println("Workers:", args.Workers) +``` + +``` +$ WORKERS=4 ./example +Workers: 4 +``` + +``` +$ WORKERS=4 ./example --workers=6 +Workers: 6 +``` + +You can also override the name of the environment variable: + +```go +var args struct { + Workers int `arg:"env:NUM_WORKERS"` +} +arg.MustParse(&args) +fmt.Println("Workers:", args.Workers) +``` + +``` +$ NUM_WORKERS=4 ./example +Workers: 4 +``` + +You can provide multiple values using the CSV (RFC 4180) format: + +```go +var args struct { + Workers []int `arg:"env"` +} +arg.MustParse(&args) +fmt.Println("Workers:", args.Workers) +``` + +``` +$ WORKERS='1,99' ./example +Workers: [1 99] +``` + +### Usage strings +```go +var args struct { + Input string `arg:"positional"` + Output []string `arg:"positional"` + Verbose bool `arg:"-v,--verbose" help:"verbosity level"` + Dataset string `help:"dataset to use"` + Optimize int `arg:"-O" help:"optimization level"` +} +arg.MustParse(&args) +``` + +```shell +$ ./example -h +Usage: [--verbose] [--dataset DATASET] [--optimize OPTIMIZE] [--help] INPUT [OUTPUT [OUTPUT ...]] + +Positional arguments: + INPUT + OUTPUT + +Options: + --verbose, -v verbosity level + --dataset DATASET dataset to use + --optimize OPTIMIZE, -O OPTIMIZE + optimization level + --help, -h print this help message +``` + +### Default values + +```go +var args struct { + Foo string `default:"abc"` + Bar bool +} +arg.MustParse(&args) +``` + +### Default values (before v1.2) + +```go +var args struct { + Foo string + Bar bool +} +arg.Foo = "abc" +arg.MustParse(&args) +``` + +### Arguments with multiple values +```go +var args struct { + Database string + IDs []int64 +} +arg.MustParse(&args) +fmt.Printf("Fetching the following IDs from %s: %q", args.Database, args.IDs) +``` + +```shell +./example -database foo -ids 1 2 3 +Fetching the following IDs from foo: [1 2 3] +``` + +### Arguments that can be specified multiple times, mixed with positionals +```go +var args struct { + Commands []string `arg:"-c,separate"` + Files []string `arg:"-f,separate"` + Databases []string `arg:"positional"` +} +arg.MustParse(&args) +``` + +```shell +./example -c cmd1 db1 -f file1 db2 -c cmd2 -f file2 -f file3 db3 -c cmd3 +Commands: [cmd1 cmd2 cmd3] +Files [file1 file2 file3] +Databases [db1 db2 db3] +``` + +### Arguments with keys and values +```go +var args struct { + UserIDs map[string]int +} +arg.MustParse(&args) +fmt.Println(args.UserIDs) +``` + +```shell +./example --userids john=123 mary=456 +map[john:123 mary:456] +``` + +### Custom validation +```go +var args struct { + Foo string + 
Bar string +} +p := arg.MustParse(&args) +if args.Foo == "" && args.Bar == "" { + p.Fail("you must provide either --foo or --bar") +} +``` + +```shell +./example +Usage: samples [--foo FOO] [--bar BAR] +error: you must provide either --foo or --bar +``` + +### Version strings + +```go +type args struct { + ... +} + +func (args) Version() string { + return "someprogram 4.3.0" +} + +func main() { + var args args + arg.MustParse(&args) +} +``` + +```shell +$ ./example --version +someprogram 4.3.0 +``` + +### Overriding option names + +```go +var args struct { + Short string `arg:"-s"` + Long string `arg:"--custom-long-option"` + ShortAndLong string `arg:"-x,--my-option"` + OnlyShort string `arg:"-o,--"` +} +arg.MustParse(&args) +``` + +```shell +$ ./example --help +Usage: example [-o ONLYSHORT] [--short SHORT] [--custom-long-option CUSTOM-LONG-OPTION] [--my-option MY-OPTION] + +Options: + --short SHORT, -s SHORT + --custom-long-option CUSTOM-LONG-OPTION + --my-option MY-OPTION, -x MY-OPTION + -o ONLYSHORT + --help, -h display this help and exit +``` + + +### Embedded structs + +The fields of embedded structs are treated just like regular fields: + +```go + +type DatabaseOptions struct { + Host string + Username string + Password string +} + +type LogOptions struct { + LogFile string + Verbose bool +} + +func main() { + var args struct { + DatabaseOptions + LogOptions + } + arg.MustParse(&args) +} +``` + +As usual, any field tagged with `arg:"-"` is ignored. + +### Custom parsing + +Implement `encoding.TextUnmarshaler` to define your own parsing logic. + +```go +// Accepts command line arguments of the form "head.tail" +type NameDotName struct { + Head, Tail string +} + +func (n *NameDotName) UnmarshalText(b []byte) error { + s := string(b) + pos := strings.Index(s, ".") + if pos == -1 { + return fmt.Errorf("missing period in %s", s) + } + n.Head = s[:pos] + n.Tail = s[pos+1:] + return nil +} + +func main() { + var args struct { + Name NameDotName + } + arg.MustParse(&args) + fmt.Printf("%#v\n", args.Name) +} +``` +```shell +$ ./example --name=foo.bar +main.NameDotName{Head:"foo", Tail:"bar"} + +$ ./example --name=oops +Usage: example [--name NAME] +error: error processing --name: missing period in "oops" +``` + +### Custom parsing with default values + +Implement `encoding.TextMarshaler` to define your own default value strings: + +```go +// Accepts command line arguments of the form "head.tail" +type NameDotName struct { + Head, Tail string +} + +func (n *NameDotName) UnmarshalText(b []byte) error { + // same as previous example +} + +// this is only needed if you want to display a default value in the usage string +func (n *NameDotName) MarshalText() ([]byte, error) { + return []byte(fmt.Sprintf("%s.%s", n.Head, n.Tail)), nil +} + +func main() { + var args struct { + Name NameDotName `default:"file.txt"` + } + arg.MustParse(&args) + fmt.Printf("%#v\n", args.Name) +} +``` +```shell +$ ./example --help +Usage: test [--name NAME] + +Options: + --name NAME [default: file.txt] + --help, -h display this help and exit + +$ ./example +main.NameDotName{Head:"file", Tail:"txt"} +``` + +### Custom placeholders + +*Introduced in version 1.3.0* + +Use the `placeholder` tag to control which placeholder text is used in the usage text. 
+
+```go
+var args struct {
+	Input    string   `arg:"positional" placeholder:"SRC"`
+	Output   []string `arg:"positional" placeholder:"DST"`
+	Optimize int      `arg:"-O" help:"optimization level" placeholder:"LEVEL"`
+	MaxJobs  int      `arg:"-j" help:"maximum number of simultaneous jobs" placeholder:"N"`
+}
+arg.MustParse(&args)
+```
+```shell
+$ ./example -h
+Usage: example [--optimize LEVEL] [--maxjobs N] SRC [DST [DST ...]]
+
+Positional arguments:
+  SRC
+  DST
+
+Options:
+  --optimize LEVEL, -O LEVEL
+                         optimization level
+  --maxjobs N, -j N      maximum number of simultaneous jobs
+  --help, -h             display this help and exit
+```
+
+### Description strings
+
+```go
+type args struct {
+	Foo string
+}
+
+func (args) Description() string {
+	return "this program does this and that"
+}
+
+func main() {
+	var args args
+	arg.MustParse(&args)
+}
+```
+
+```shell
+$ ./example -h
+this program does this and that
+Usage: example [--foo FOO]
+
+Options:
+  --foo FOO
+  --help, -h             display this help and exit
+```
+
+### Subcommands
+
+*Introduced in version 1.1.0*
+
+Subcommands are commonly used in tools that wish to group multiple functions into a single program. An example is the `git` tool:
+```shell
+$ git checkout [arguments specific to checking out code]
+$ git commit [arguments specific to committing]
+$ git push [arguments specific to pushing]
+```
+
+The strings "checkout", "commit", and "push" are different from simple positional arguments because the options available to the user change depending on which subcommand they choose.
+
+This can be implemented with `go-arg` as follows:
+
+```go
+type CheckoutCmd struct {
+	Branch string `arg:"positional"`
+	Track  bool   `arg:"-t"`
+}
+type CommitCmd struct {
+	All     bool   `arg:"-a"`
+	Message string `arg:"-m"`
+}
+type PushCmd struct {
+	Remote      string `arg:"positional"`
+	Branch      string `arg:"positional"`
+	SetUpstream bool   `arg:"-u"`
+}
+var args struct {
+	Checkout *CheckoutCmd `arg:"subcommand:checkout"`
+	Commit   *CommitCmd   `arg:"subcommand:commit"`
+	Push     *PushCmd     `arg:"subcommand:push"`
+	Quiet    bool         `arg:"-q"` // this flag is global to all subcommands
+}
+
+arg.MustParse(&args)
+
+switch {
+case args.Checkout != nil:
+	fmt.Printf("checkout requested for branch %s\n", args.Checkout.Branch)
+case args.Commit != nil:
+	fmt.Printf("commit requested with message \"%s\"\n", args.Commit.Message)
+case args.Push != nil:
+	fmt.Printf("push requested from %s to %s\n", args.Push.Branch, args.Push.Remote)
+}
+```
+
+Some additional rules apply when working with subcommands:
+* The `subcommand` tag can only be used with fields that are pointers to structs
+* Any struct that contains a subcommand must not contain any positionals
+
+This package allows you to write a program that accepts subcommands but also
+does something else when no subcommand is specified.
+If, on the other hand, you want the program to terminate when no subcommand
+is specified, the recommended way is:
+
+```go
+p := arg.MustParse(&args)
+if p.Subcommand() == nil {
+	p.Fail("missing subcommand")
+}
+```
+
+### API Documentation
+
+https://godoc.org/github.com/alexflint/go-arg
+
+### Rationale
+
+There are many command line argument parsing libraries for Go, including one in the standard library, so why build another?
+
+The `flag` library that ships in the standard library seems awkward to me. Options must precede positional arguments, so `./prog --foo=1 x` does what you expect but `./prog x --foo=1` does not. It also does not allow arguments to have both long (`--foo`) and short (`-f`) forms.
+ +Many third-party argument parsing libraries are great for writing sophisticated command line interfaces, but feel to me like overkill for a simple script with a few flags. + +The idea behind `go-arg` is that Go already has an excellent way to describe data structures using structs, so there is no need to develop additional levels of abstraction. Instead of one API to specify which arguments your program accepts, and then another API to get the values of those arguments, `go-arg` replaces both with a single struct. + +### Backward compatibility notes + +Earlier versions of this library required the help text to be part of the `arg` tag. This is still supported but is now deprecated. Instead, you should use a separate `help` tag, described above, which removes most of the limits on the text you can write. In particular, you will need to use the new `help` tag if your help text includes any commas. diff --git a/vendor/github.com/alexflint/go-arg/doc.go b/vendor/github.com/alexflint/go-arg/doc.go new file mode 100644 index 00000000..3b0bafd4 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/doc.go @@ -0,0 +1,39 @@ +// Package arg parses command line arguments using the fields from a struct. +// +// For example, +// +// var args struct { +// Iter int +// Debug bool +// } +// arg.MustParse(&args) +// +// defines two command line arguments, which can be set using any of +// +// ./example --iter=1 --debug // debug is a boolean flag so its value is set to true +// ./example -iter 1 // debug defaults to its zero value (false) +// ./example --debug=true // iter defaults to its zero value (zero) +// +// The fastest way to see how to use go-arg is to read the examples below. +// +// Fields can be bool, string, any float type, or any signed or unsigned integer type. +// They can also be slices of any of the above, or slices of pointers to any of the above. +// +// Tags can be specified using the `arg` and `help` tag names: +// +// var args struct { +// Input string `arg:"positional"` +// Log string `arg:"positional,required"` +// Debug bool `arg:"-d" help:"turn on debug mode"` +// RealMode bool `arg:"--real" +// Wr io.Writer `arg:"-"` +// } +// +// Any tag string that starts with a single hyphen is the short form for an argument +// (e.g. `./example -d`), and any tag string that starts with two hyphens is the long +// form for the argument (instead of the field name). +// +// Other valid tag strings are `positional` and `required`. +// +// Fields can be excluded from processing with `arg:"-"`. +package arg diff --git a/vendor/github.com/alexflint/go-arg/parse.go b/vendor/github.com/alexflint/go-arg/parse.go new file mode 100644 index 00000000..d76ef0fb --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/parse.go @@ -0,0 +1,725 @@ +package arg + +import ( + "encoding" + "encoding/csv" + "errors" + "fmt" + "os" + "path/filepath" + "reflect" + "strings" + + scalar "github.com/alexflint/go-scalar" +) + +// path represents a sequence of steps to find the output location for an +// argument or subcommand in the final destination struct +type path struct { + root int // index of the destination struct + fields []reflect.StructField // sequence of struct fields to traverse +} + +// String gets a string representation of the given path +func (p path) String() string { + s := "args" + for _, f := range p.fields { + s += "." + f.Name + } + return s +} + +// Child gets a new path representing a child of this path. 
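+//
+// For example, if p refers to args.Foo and f is a struct field named Bar,
+// then p.Child(f) refers to args.Foo.Bar and its String method returns
+// "args.Foo.Bar".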
+func (p path) Child(f reflect.StructField) path { + // copy the entire slice of fields to avoid possible slice overwrite + subfields := make([]reflect.StructField, len(p.fields)+1) + copy(subfields, p.fields) + subfields[len(subfields)-1] = f + return path{ + root: p.root, + fields: subfields, + } +} + +// spec represents a command line option +type spec struct { + dest path + field reflect.StructField // the struct field from which this option was created + long string // the --long form for this option, or empty if none + short string // the -s short form for this option, or empty if none + cardinality cardinality // determines how many tokens will be present (possible values: zero, one, multiple) + required bool // if true, this option must be present on the command line + positional bool // if true, this option will be looked for in the positional flags + separate bool // if true, each slice and map entry will have its own --flag + help string // the help text for this option + env string // the name of the environment variable for this option, or empty for none + defaultVal string // default value for this option + placeholder string // name of the data in help +} + +// command represents a named subcommand, or the top-level command +type command struct { + name string + help string + dest path + specs []*spec + subcommands []*command + parent *command +} + +// ErrHelp indicates that -h or --help were provided +var ErrHelp = errors.New("help requested by user") + +// ErrVersion indicates that --version was provided +var ErrVersion = errors.New("version requested by user") + +// MustParse processes command line arguments and exits upon failure +func MustParse(dest ...interface{}) *Parser { + p, err := NewParser(Config{}, dest...) + if err != nil { + fmt.Fprintln(stdout, err) + osExit(-1) + return nil // just in case osExit was monkey-patched + } + + err = p.Parse(flags()) + switch { + case err == ErrHelp: + p.writeHelpForCommand(stdout, p.lastCmd) + osExit(0) + case err == ErrVersion: + fmt.Fprintln(stdout, p.version) + osExit(0) + case err != nil: + p.failWithCommand(err.Error(), p.lastCmd) + } + + return p +} + +// Parse processes command line arguments and stores them in dest +func Parse(dest ...interface{}) error { + p, err := NewParser(Config{}, dest...) + if err != nil { + return err + } + return p.Parse(flags()) +} + +// flags gets all command line arguments other than the first (program name) +func flags() []string { + if len(os.Args) == 0 { // os.Args could be empty + return nil + } + return os.Args[1:] +} + +// Config represents configuration options for an argument parser +type Config struct { + // Program is the name of the program used in the help text + Program string + + // IgnoreEnv instructs the library not to read environment variables + IgnoreEnv bool +} + +// Parser represents a set of command line options with destination values +type Parser struct { + cmd *command + roots []reflect.Value + config Config + version string + description string + + // the following field changes during processing of command line arguments + lastCmd *command +} + +// Versioned is the interface that the destination struct should implement to +// make a version string appear at the top of the help message. +type Versioned interface { + // Version returns the version string that will be printed on a line by itself + // at the top of the help message. 
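+	//
+	// A minimal implementation, mirroring the README example:
+	//
+	//	func (args) Version() string { return "someprogram 4.3.0" }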
+ Version() string +} + +// Described is the interface that the destination struct should implement to +// make a description string appear at the top of the help message. +type Described interface { + // Description returns the string that will be printed on a line by itself + // at the top of the help message. + Description() string +} + +// walkFields calls a function for each field of a struct, recursively expanding struct fields. +func walkFields(t reflect.Type, visit func(field reflect.StructField, owner reflect.Type) bool) { + walkFieldsImpl(t, visit, nil) +} + +func walkFieldsImpl(t reflect.Type, visit func(field reflect.StructField, owner reflect.Type) bool, path []int) { + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + field.Index = make([]int, len(path)+1) + copy(field.Index, append(path, i)) + expand := visit(field, t) + if expand && field.Type.Kind() == reflect.Struct { + var subpath []int + if field.Anonymous { + subpath = append(path, i) + } + walkFieldsImpl(field.Type, visit, subpath) + } + } +} + +// NewParser constructs a parser from a list of destination structs +func NewParser(config Config, dests ...interface{}) (*Parser, error) { + // first pick a name for the command for use in the usage text + var name string + switch { + case config.Program != "": + name = config.Program + case len(os.Args) > 0: + name = filepath.Base(os.Args[0]) + default: + name = "program" + } + + // construct a parser + p := Parser{ + cmd: &command{name: name}, + config: config, + } + + // make a list of roots + for _, dest := range dests { + p.roots = append(p.roots, reflect.ValueOf(dest)) + } + + // process each of the destination values + for i, dest := range dests { + t := reflect.TypeOf(dest) + if t.Kind() != reflect.Ptr { + panic(fmt.Sprintf("%s is not a pointer (did you forget an ampersand?)", t)) + } + + cmd, err := cmdFromStruct(name, path{root: i}, t) + if err != nil { + return nil, err + } + + // add nonzero field values as defaults + for _, spec := range cmd.specs { + if v := p.val(spec.dest); v.IsValid() && !isZero(v) { + if defaultVal, ok := v.Interface().(encoding.TextMarshaler); ok { + str, err := defaultVal.MarshalText() + if err != nil { + return nil, fmt.Errorf("%v: error marshaling default value to string: %v", spec.dest, err) + } + spec.defaultVal = string(str) + } else { + spec.defaultVal = fmt.Sprintf("%v", v) + } + } + } + + p.cmd.specs = append(p.cmd.specs, cmd.specs...) + p.cmd.subcommands = append(p.cmd.subcommands, cmd.subcommands...) 
+ + if dest, ok := dest.(Versioned); ok { + p.version = dest.Version() + } + if dest, ok := dest.(Described); ok { + p.description = dest.Description() + } + } + + return &p, nil +} + +func cmdFromStruct(name string, dest path, t reflect.Type) (*command, error) { + // commands can only be created from pointers to structs + if t.Kind() != reflect.Ptr { + return nil, fmt.Errorf("subcommands must be pointers to structs but %s is a %s", + dest, t.Kind()) + } + + t = t.Elem() + if t.Kind() != reflect.Struct { + return nil, fmt.Errorf("subcommands must be pointers to structs but %s is a pointer to %s", + dest, t.Kind()) + } + + cmd := command{ + name: name, + dest: dest, + } + + var errs []string + walkFields(t, func(field reflect.StructField, t reflect.Type) bool { + // check for the ignore switch in the tag + tag := field.Tag.Get("arg") + if tag == "-" { + return false + } + + // if this is an embedded struct then recurse into its fields, even if + // it is unexported, because exported fields on unexported embedded + // structs are still writable + if field.Anonymous && field.Type.Kind() == reflect.Struct { + return true + } + + // ignore any other unexported field + if !isExported(field.Name) { + return false + } + + // duplicate the entire path to avoid slice overwrites + subdest := dest.Child(field) + spec := spec{ + dest: subdest, + field: field, + long: strings.ToLower(field.Name), + } + + help, exists := field.Tag.Lookup("help") + if exists { + spec.help = help + } + + defaultVal, hasDefault := field.Tag.Lookup("default") + if hasDefault { + spec.defaultVal = defaultVal + } + + // Look at the tag + var isSubcommand bool // tracks whether this field is a subcommand + for _, key := range strings.Split(tag, ",") { + if key == "" { + continue + } + key = strings.TrimLeft(key, " ") + var value string + if pos := strings.Index(key, ":"); pos != -1 { + value = key[pos+1:] + key = key[:pos] + } + + switch { + case strings.HasPrefix(key, "---"): + errs = append(errs, fmt.Sprintf("%s.%s: too many hyphens", t.Name(), field.Name)) + case strings.HasPrefix(key, "--"): + spec.long = key[2:] + case strings.HasPrefix(key, "-"): + if len(key) != 2 { + errs = append(errs, fmt.Sprintf("%s.%s: short arguments must be one character only", + t.Name(), field.Name)) + return false + } + spec.short = key[1:] + case key == "required": + if hasDefault { + errs = append(errs, fmt.Sprintf("%s.%s: 'required' cannot be used when a default value is specified", + t.Name(), field.Name)) + return false + } + spec.required = true + case key == "positional": + spec.positional = true + case key == "separate": + spec.separate = true + case key == "help": // deprecated + spec.help = value + case key == "env": + // Use override name if provided + if value != "" { + spec.env = value + } else { + spec.env = strings.ToUpper(field.Name) + } + case key == "subcommand": + // decide on a name for the subcommand + cmdname := value + if cmdname == "" { + cmdname = strings.ToLower(field.Name) + } + + // parse the subcommand recursively + subcmd, err := cmdFromStruct(cmdname, subdest, field.Type) + if err != nil { + errs = append(errs, err.Error()) + return false + } + + subcmd.parent = &cmd + subcmd.help = field.Tag.Get("help") + + cmd.subcommands = append(cmd.subcommands, subcmd) + isSubcommand = true + default: + errs = append(errs, fmt.Sprintf("unrecognized tag '%s' on field %s", key, tag)) + return false + } + } + + placeholder, hasPlaceholder := field.Tag.Lookup("placeholder") + if hasPlaceholder { + spec.placeholder = placeholder + 
} else if spec.long != "" { + spec.placeholder = strings.ToUpper(spec.long) + } else { + spec.placeholder = strings.ToUpper(spec.field.Name) + } + + // Check whether this field is supported. It's good to do this here rather than + // wait until ParseValue because it means that a program with invalid argument + // fields will always fail regardless of whether the arguments it received + // exercised those fields. + if !isSubcommand { + cmd.specs = append(cmd.specs, &spec) + + var err error + spec.cardinality, err = cardinalityOf(field.Type) + if err != nil { + errs = append(errs, fmt.Sprintf("%s.%s: %s fields are not supported", + t.Name(), field.Name, field.Type.String())) + return false + } + if spec.cardinality == multiple && hasDefault { + errs = append(errs, fmt.Sprintf("%s.%s: default values are not supported for slice or map fields", + t.Name(), field.Name)) + return false + } + } + + // if this was an embedded field then we already returned true up above + return false + }) + + if len(errs) > 0 { + return nil, errors.New(strings.Join(errs, "\n")) + } + + // check that we don't have both positionals and subcommands + var hasPositional bool + for _, spec := range cmd.specs { + if spec.positional { + hasPositional = true + } + } + if hasPositional && len(cmd.subcommands) > 0 { + return nil, fmt.Errorf("%s cannot have both subcommands and positional arguments", dest) + } + + return &cmd, nil +} + +// Parse processes the given command line option, storing the results in the field +// of the structs from which NewParser was constructed +func (p *Parser) Parse(args []string) error { + err := p.process(args) + if err != nil { + // If -h or --help were specified then make sure help text supercedes other errors + for _, arg := range args { + if arg == "-h" || arg == "--help" { + return ErrHelp + } + if arg == "--" { + break + } + } + } + return err +} + +// process environment vars for the given arguments +func (p *Parser) captureEnvVars(specs []*spec, wasPresent map[*spec]bool) error { + for _, spec := range specs { + if spec.env == "" { + continue + } + + value, found := os.LookupEnv(spec.env) + if !found { + continue + } + + if spec.cardinality == multiple { + // expect a CSV string in an environment + // variable in the case of multiple values + var values []string + var err error + if len(strings.TrimSpace(value)) > 0 { + values, err = csv.NewReader(strings.NewReader(value)).Read() + if err != nil { + return fmt.Errorf( + "error reading a CSV string from environment variable %s with multiple values: %v", + spec.env, + err, + ) + } + } + if err = setSliceOrMap(p.val(spec.dest), values, !spec.separate); err != nil { + return fmt.Errorf( + "error processing environment variable %s with multiple values: %v", + spec.env, + err, + ) + } + } else { + if err := scalar.ParseValue(p.val(spec.dest), value); err != nil { + return fmt.Errorf("error processing environment variable %s: %v", spec.env, err) + } + } + wasPresent[spec] = true + } + + return nil +} + +// process goes through arguments one-by-one, parses them, and assigns the result to +// the underlying struct field +func (p *Parser) process(args []string) error { + // track the options we have seen + wasPresent := make(map[*spec]bool) + + // union of specs for the chain of subcommands encountered so far + curCmd := p.cmd + p.lastCmd = curCmd + + // make a copy of the specs because we will add to this list each time we expand a subcommand + specs := make([]*spec, len(curCmd.specs)) + copy(specs, curCmd.specs) + + // deal with environment 
vars + if !p.config.IgnoreEnv { + err := p.captureEnvVars(specs, wasPresent) + if err != nil { + return err + } + } + + // process each string from the command line + var allpositional bool + var positionals []string + + // must use explicit for loop, not range, because we manipulate i inside the loop + for i := 0; i < len(args); i++ { + arg := args[i] + if arg == "--" { + allpositional = true + continue + } + + if !isFlag(arg) || allpositional { + // each subcommand can have either subcommands or positionals, but not both + if len(curCmd.subcommands) == 0 { + positionals = append(positionals, arg) + continue + } + + // if we have a subcommand then make sure it is valid for the current context + subcmd := findSubcommand(curCmd.subcommands, arg) + if subcmd == nil { + return fmt.Errorf("invalid subcommand: %s", arg) + } + + // instantiate the field to point to a new struct + v := p.val(subcmd.dest) + v.Set(reflect.New(v.Type().Elem())) // we already checked that all subcommands are struct pointers + + // add the new options to the set of allowed options + specs = append(specs, subcmd.specs...) + + // capture environment vars for these new options + if !p.config.IgnoreEnv { + err := p.captureEnvVars(subcmd.specs, wasPresent) + if err != nil { + return err + } + } + + curCmd = subcmd + p.lastCmd = curCmd + continue + } + + // check for special --help and --version flags + switch arg { + case "-h", "--help": + return ErrHelp + case "--version": + return ErrVersion + } + + // check for an equals sign, as in "--foo=bar" + var value string + opt := strings.TrimLeft(arg, "-") + if pos := strings.Index(opt, "="); pos != -1 { + value = opt[pos+1:] + opt = opt[:pos] + } + + // lookup the spec for this option (note that the "specs" slice changes as + // we expand subcommands so it is better not to use a map) + spec := findOption(specs, opt) + if spec == nil { + return fmt.Errorf("unknown argument %s", arg) + } + wasPresent[spec] = true + + // deal with the case of multiple values + if spec.cardinality == multiple { + var values []string + if value == "" { + for i+1 < len(args) && !isFlag(args[i+1]) && args[i+1] != "--" { + values = append(values, args[i+1]) + i++ + if spec.separate { + break + } + } + } else { + values = append(values, value) + } + err := setSliceOrMap(p.val(spec.dest), values, !spec.separate) + if err != nil { + return fmt.Errorf("error processing %s: %v", arg, err) + } + continue + } + + // if it's a flag and it has no value then set the value to true + // use boolean because this takes account of TextUnmarshaler + if spec.cardinality == zero && value == "" { + value = "true" + } + + // if we have something like "--foo" then the value is the next argument + if value == "" { + if i+1 == len(args) { + return fmt.Errorf("missing value for %s", arg) + } + if !nextIsNumeric(spec.field.Type, args[i+1]) && isFlag(args[i+1]) { + return fmt.Errorf("missing value for %s", arg) + } + value = args[i+1] + i++ + } + + err := scalar.ParseValue(p.val(spec.dest), value) + if err != nil { + return fmt.Errorf("error processing %s: %v", arg, err) + } + } + + // process positionals + for _, spec := range specs { + if !spec.positional { + continue + } + if len(positionals) == 0 { + break + } + wasPresent[spec] = true + if spec.cardinality == multiple { + err := setSliceOrMap(p.val(spec.dest), positionals, true) + if err != nil { + return fmt.Errorf("error processing %s: %v", spec.field.Name, err) + } + positionals = nil + } else { + err := scalar.ParseValue(p.val(spec.dest), positionals[0]) + if err != 
nil { + return fmt.Errorf("error processing %s: %v", spec.field.Name, err) + } + positionals = positionals[1:] + } + } + if len(positionals) > 0 { + return fmt.Errorf("too many positional arguments at '%s'", positionals[0]) + } + + // fill in defaults and check that all the required args were provided + for _, spec := range specs { + if wasPresent[spec] { + continue + } + + name := strings.ToLower(spec.field.Name) + if spec.long != "" && !spec.positional { + name = "--" + spec.long + } + + if spec.required { + return fmt.Errorf("%s is required", name) + } + if spec.defaultVal != "" { + err := scalar.ParseValue(p.val(spec.dest), spec.defaultVal) + if err != nil { + return fmt.Errorf("error processing default value for %s: %v", name, err) + } + } + } + + return nil +} + +func nextIsNumeric(t reflect.Type, s string) bool { + switch t.Kind() { + case reflect.Ptr: + return nextIsNumeric(t.Elem(), s) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Float32, reflect.Float64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + v := reflect.New(t) + err := scalar.ParseValue(v, s) + return err == nil + default: + return false + } +} + +// isFlag returns true if a token is a flag such as "-v" or "--user" but not "-" or "--" +func isFlag(s string) bool { + return strings.HasPrefix(s, "-") && strings.TrimLeft(s, "-") != "" +} + +// val returns a reflect.Value corresponding to the current value for the +// given path +func (p *Parser) val(dest path) reflect.Value { + v := p.roots[dest.root] + for _, field := range dest.fields { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return reflect.Value{} + } + v = v.Elem() + } + + v = v.FieldByIndex(field.Index) + } + return v +} + +// findOption finds an option from its name, or returns null if no spec is found +func findOption(specs []*spec, name string) *spec { + for _, spec := range specs { + if spec.positional { + continue + } + if spec.long == name || spec.short == name { + return spec + } + } + return nil +} + +// findSubcommand finds a subcommand using its name, or returns null if no subcommand is found +func findSubcommand(cmds []*command, name string) *command { + for _, cmd := range cmds { + if cmd.name == name { + return cmd + } + } + return nil +} diff --git a/vendor/github.com/alexflint/go-arg/reflect.go b/vendor/github.com/alexflint/go-arg/reflect.go new file mode 100644 index 00000000..cd80be7c --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/reflect.go @@ -0,0 +1,107 @@ +package arg + +import ( + "encoding" + "fmt" + "reflect" + "unicode" + "unicode/utf8" + + scalar "github.com/alexflint/go-scalar" +) + +var textUnmarshalerType = reflect.TypeOf([]encoding.TextUnmarshaler{}).Elem() + +// cardinality tracks how many tokens are expected for a given spec +// - zero is a boolean, which does to expect any value +// - one is an ordinary option that will be parsed from a single token +// - multiple is a slice or map that can accept zero or more tokens +type cardinality int + +const ( + zero cardinality = iota + one + multiple + unsupported +) + +func (k cardinality) String() string { + switch k { + case zero: + return "zero" + case one: + return "one" + case multiple: + return "multiple" + case unsupported: + return "unsupported" + default: + return fmt.Sprintf("unknown(%d)", int(k)) + } +} + +// cardinalityOf returns true if the type can be parsed from a string +func cardinalityOf(t reflect.Type) (cardinality, error) { + if scalar.CanParse(t) { + if isBoolean(t) 
{
+			return zero, nil
+		}
+		return one, nil
+	}
+
+	// look inside pointer types
+	if t.Kind() == reflect.Ptr {
+		t = t.Elem()
+	}
+
+	// look inside slice and map types
+	switch t.Kind() {
+	case reflect.Slice:
+		if !scalar.CanParse(t.Elem()) {
+			return unsupported, fmt.Errorf("cannot parse into %v because element type %v not supported", t, t.Elem())
+		}
+		return multiple, nil
+	case reflect.Map:
+		if !scalar.CanParse(t.Key()) {
+			return unsupported, fmt.Errorf("cannot parse into %v because key type %v not supported", t, t.Key())
+		}
+		if !scalar.CanParse(t.Elem()) {
+			return unsupported, fmt.Errorf("cannot parse into %v because value type %v not supported", t, t.Elem())
+		}
+		return multiple, nil
+	default:
+		return unsupported, fmt.Errorf("cannot parse into %v", t)
+	}
+}
+
+// isBoolean returns true if the type is a boolean, or a pointer to a
+// boolean, and does not implement encoding.TextUnmarshaler
+func isBoolean(t reflect.Type) bool {
+	switch {
+	case t.Implements(textUnmarshalerType):
+		return false
+	case t.Kind() == reflect.Bool:
+		return true
+	case t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Bool:
+		return true
+	default:
+		return false
+	}
+}
+
+// isExported returns true if the struct field name is exported
+func isExported(field string) bool {
+	r, _ := utf8.DecodeRuneInString(field) // returns RuneError for empty string or invalid UTF8
+	return unicode.IsLetter(r) && unicode.IsUpper(r)
+}
+
+// isZero returns true if v contains the zero value for its type
+func isZero(v reflect.Value) bool {
+	t := v.Type()
+	if t.Kind() == reflect.Slice || t.Kind() == reflect.Map {
+		return v.IsNil()
+	}
+	if !t.Comparable() {
+		return false
+	}
+	return v.Interface() == reflect.Zero(t).Interface()
+}
diff --git a/vendor/github.com/alexflint/go-arg/sequence.go b/vendor/github.com/alexflint/go-arg/sequence.go
new file mode 100644
index 00000000..35a3614e
--- /dev/null
+++ b/vendor/github.com/alexflint/go-arg/sequence.go
@@ -0,0 +1,123 @@
+package arg
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+
+	scalar "github.com/alexflint/go-scalar"
+)
+
+// setSliceOrMap parses a sequence of strings into a slice or map. If clear is
+// true then any values already in the slice or map are first removed.
+func setSliceOrMap(dest reflect.Value, values []string, clear bool) error {
+	if !dest.CanSet() {
+		return fmt.Errorf("field is not writable")
+	}
+
+	t := dest.Type()
+	if t.Kind() == reflect.Ptr {
+		dest = dest.Elem()
+		t = t.Elem()
+	}
+
+	switch t.Kind() {
+	case reflect.Slice:
+		return setSlice(dest, values, clear)
+	case reflect.Map:
+		return setMap(dest, values, clear)
+	default:
+		return fmt.Errorf("setSliceOrMap cannot insert values into a %v", t)
+	}
+}
+
+// setSlice parses a sequence of strings and inserts them into a slice. If clear
+// is true then any values already in the slice are removed.
+func setSlice(dest reflect.Value, values []string, clear bool) error {
+	var ptr bool
+	elem := dest.Type().Elem()
+	if elem.Kind() == reflect.Ptr && !elem.Implements(textUnmarshalerType) {
+		ptr = true
+		elem = elem.Elem()
+	}
+
+	// clear the slice in case default values exist
+	if clear && !dest.IsNil() {
+		dest.SetLen(0)
+	}
+
+	// parse the values one-by-one
+	for _, s := range values {
+		v := reflect.New(elem)
+		if err := scalar.ParseValue(v.Elem(), s); err != nil {
+			return err
+		}
+		if !ptr {
+			v = v.Elem()
+		}
+		dest.Set(reflect.Append(dest, v))
+	}
+	return nil
+}
+
+// setMap parses a sequence of name=value strings and inserts them into a map.
+// If clear is true then any values already in the map are removed.
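+//
+// For example, values of []string{"john=123", "mary=456"} parsed into a
+// map[string]int destination produce map[john:123 mary:456].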
+func setMap(dest reflect.Value, values []string, clear bool) error { + // determine the key and value type + var keyIsPtr bool + keyType := dest.Type().Key() + if keyType.Kind() == reflect.Ptr && !keyType.Implements(textUnmarshalerType) { + keyIsPtr = true + keyType = keyType.Elem() + } + + var valIsPtr bool + valType := dest.Type().Elem() + if valType.Kind() == reflect.Ptr && !valType.Implements(textUnmarshalerType) { + valIsPtr = true + valType = valType.Elem() + } + + // clear the slice in case default values exist + if clear && !dest.IsNil() { + for _, k := range dest.MapKeys() { + dest.SetMapIndex(k, reflect.Value{}) + } + } + + // allocate the map if it is not allocated + if dest.IsNil() { + dest.Set(reflect.MakeMap(dest.Type())) + } + + // parse the values one-by-one + for _, s := range values { + // split at the first equals sign + pos := strings.Index(s, "=") + if pos == -1 { + return fmt.Errorf("cannot parse %q into a map, expected format key=value", s) + } + + // parse the key + k := reflect.New(keyType) + if err := scalar.ParseValue(k.Elem(), s[:pos]); err != nil { + return err + } + if !keyIsPtr { + k = k.Elem() + } + + // parse the value + v := reflect.New(valType) + if err := scalar.ParseValue(v.Elem(), s[pos+1:]); err != nil { + return err + } + if !valIsPtr { + v = v.Elem() + } + + // add it to the map + dest.SetMapIndex(k, v) + } + return nil +} diff --git a/vendor/github.com/alexflint/go-arg/subcommand.go b/vendor/github.com/alexflint/go-arg/subcommand.go new file mode 100644 index 00000000..dff732c0 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/subcommand.go @@ -0,0 +1,37 @@ +package arg + +// Subcommand returns the user struct for the subcommand selected by +// the command line arguments most recently processed by the parser. +// The return value is always a pointer to a struct. If no subcommand +// was specified then it returns the top-level arguments struct. If +// no command line arguments have been processed by this parser then it +// returns nil. +func (p *Parser) Subcommand() interface{} { + if p.lastCmd == nil || p.lastCmd.parent == nil { + return nil + } + return p.val(p.lastCmd.dest).Interface() +} + +// SubcommandNames returns the sequence of subcommands specified by the +// user. If no subcommands were given then it returns an empty slice. 
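+//
+// For example, after parsing "./example commit -m hi" with a Commit field
+// tagged `arg:"subcommand:commit"`, SubcommandNames returns
+// []string{"commit"}; nested subcommands are returned outermost first.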
+func (p *Parser) SubcommandNames() []string { + if p.lastCmd == nil { + return nil + } + + // make a list of ancestor commands + var ancestors []string + cur := p.lastCmd + for cur.parent != nil { // we want to exclude the root + ancestors = append(ancestors, cur.name) + cur = cur.parent + } + + // reverse the list + out := make([]string, len(ancestors)) + for i := 0; i < len(ancestors); i++ { + out[i] = ancestors[len(ancestors)-i-1] + } + return out +} diff --git a/vendor/github.com/alexflint/go-arg/usage.go b/vendor/github.com/alexflint/go-arg/usage.go new file mode 100644 index 00000000..c121c453 --- /dev/null +++ b/vendor/github.com/alexflint/go-arg/usage.go @@ -0,0 +1,260 @@ +package arg + +import ( + "fmt" + "io" + "os" + "strings" +) + +// the width of the left column +const colWidth = 25 + +// to allow monkey patching in tests +var ( + stdout io.Writer = os.Stdout + stderr io.Writer = os.Stderr + osExit = os.Exit +) + +// Fail prints usage information to stderr and exits with non-zero status +func (p *Parser) Fail(msg string) { + p.failWithCommand(msg, p.cmd) +} + +// failWithCommand prints usage information for the given subcommand to stderr and exits with non-zero status +func (p *Parser) failWithCommand(msg string, cmd *command) { + p.writeUsageForCommand(stderr, cmd) + fmt.Fprintln(stderr, "error:", msg) + osExit(-1) +} + +// WriteUsage writes usage information to the given writer +func (p *Parser) WriteUsage(w io.Writer) { + cmd := p.cmd + if p.lastCmd != nil { + cmd = p.lastCmd + } + p.writeUsageForCommand(w, cmd) +} + +// writeUsageForCommand writes usage information for the given subcommand +func (p *Parser) writeUsageForCommand(w io.Writer, cmd *command) { + var positionals, longOptions, shortOptions []*spec + for _, spec := range cmd.specs { + switch { + case spec.positional: + positionals = append(positionals, spec) + case spec.long != "": + longOptions = append(longOptions, spec) + case spec.short != "": + shortOptions = append(shortOptions, spec) + } + } + + if p.version != "" { + fmt.Fprintln(w, p.version) + } + + // make a list of ancestor commands so that we print with full context + var ancestors []string + ancestor := cmd + for ancestor != nil { + ancestors = append(ancestors, ancestor.name) + ancestor = ancestor.parent + } + + // print the beginning of the usage string + fmt.Fprint(w, "Usage:") + for i := len(ancestors) - 1; i >= 0; i-- { + fmt.Fprint(w, " "+ancestors[i]) + } + + // write the option component of the usage message + for _, spec := range shortOptions { + // prefix with a space + fmt.Fprint(w, " ") + if !spec.required { + fmt.Fprint(w, "[") + } + fmt.Fprint(w, synopsis(spec, "-"+spec.short)) + if !spec.required { + fmt.Fprint(w, "]") + } + } + + for _, spec := range longOptions { + // prefix with a space + fmt.Fprint(w, " ") + if !spec.required { + fmt.Fprint(w, "[") + } + fmt.Fprint(w, synopsis(spec, "--"+spec.long)) + if !spec.required { + fmt.Fprint(w, "]") + } + } + + // write the positional component of the usage message + for _, spec := range positionals { + // prefix with a space + fmt.Fprint(w, " ") + if spec.cardinality == multiple { + if !spec.required { + fmt.Fprint(w, "[") + } + fmt.Fprintf(w, "%s [%s ...]", spec.placeholder, spec.placeholder) + if !spec.required { + fmt.Fprint(w, "]") + } + } else { + fmt.Fprint(w, spec.placeholder) + } + } + + // if the program supports subcommands, give a hint to the user about their existence + if len(cmd.subcommands) > 0 { + fmt.Fprint(w, " []") + } + + fmt.Fprint(w, "\n") +} + +func printTwoCols(w 
io.Writer, left, help string, defaultVal string, envVal string) { + lhs := " " + left + fmt.Fprint(w, lhs) + if help != "" { + if len(lhs)+2 < colWidth { + fmt.Fprint(w, strings.Repeat(" ", colWidth-len(lhs))) + } else { + fmt.Fprint(w, "\n"+strings.Repeat(" ", colWidth)) + } + fmt.Fprint(w, help) + } + + bracketsContent := []string{} + + if defaultVal != "" { + bracketsContent = append(bracketsContent, + fmt.Sprintf("default: %s", defaultVal), + ) + } + + if envVal != "" { + bracketsContent = append(bracketsContent, + fmt.Sprintf("env: %s", envVal), + ) + } + + if len(bracketsContent) > 0 { + fmt.Fprintf(w, " [%s]", strings.Join(bracketsContent, ", ")) + } + fmt.Fprint(w, "\n") +} + +// WriteHelp writes the usage string followed by the full help string for each option +func (p *Parser) WriteHelp(w io.Writer) { + cmd := p.cmd + if p.lastCmd != nil { + cmd = p.lastCmd + } + p.writeHelpForCommand(w, cmd) +} + +// writeHelp writes the usage string for the given subcommand +func (p *Parser) writeHelpForCommand(w io.Writer, cmd *command) { + var positionals, longOptions, shortOptions []*spec + for _, spec := range cmd.specs { + switch { + case spec.positional: + positionals = append(positionals, spec) + case spec.long != "": + longOptions = append(longOptions, spec) + case spec.short != "": + shortOptions = append(shortOptions, spec) + } + } + + if p.description != "" { + fmt.Fprintln(w, p.description) + } + p.writeUsageForCommand(w, cmd) + + // write the list of positionals + if len(positionals) > 0 { + fmt.Fprint(w, "\nPositional arguments:\n") + for _, spec := range positionals { + printTwoCols(w, spec.placeholder, spec.help, "", "") + } + } + + // write the list of options with the short-only ones first to match the usage string + if len(shortOptions)+len(longOptions) > 0 || cmd.parent == nil { + fmt.Fprint(w, "\nOptions:\n") + for _, spec := range shortOptions { + p.printOption(w, spec) + } + for _, spec := range longOptions { + p.printOption(w, spec) + } + } + + // obtain a flattened list of options from all ancestors + var globals []*spec + ancestor := cmd.parent + for ancestor != nil { + globals = append(globals, ancestor.specs...) 
+ ancestor = ancestor.parent + } + + // write the list of global options + if len(globals) > 0 { + fmt.Fprint(w, "\nGlobal options:\n") + for _, spec := range globals { + p.printOption(w, spec) + } + } + + // write the list of built in options + p.printOption(w, &spec{ + cardinality: zero, + long: "help", + short: "h", + help: "display this help and exit", + }) + if p.version != "" { + p.printOption(w, &spec{ + cardinality: zero, + long: "version", + help: "display version and exit", + }) + } + + // write the list of subcommands + if len(cmd.subcommands) > 0 { + fmt.Fprint(w, "\nCommands:\n") + for _, subcmd := range cmd.subcommands { + printTwoCols(w, subcmd.name, subcmd.help, "", "") + } + } +} + +func (p *Parser) printOption(w io.Writer, spec *spec) { + ways := make([]string, 0, 2) + if spec.long != "" { + ways = append(ways, synopsis(spec, "--"+spec.long)) + } + if spec.short != "" { + ways = append(ways, synopsis(spec, "-"+spec.short)) + } + if len(ways) > 0 { + printTwoCols(w, strings.Join(ways, ", "), spec.help, spec.defaultVal, spec.env) + } +} + +func synopsis(spec *spec, form string) string { + if spec.cardinality == zero { + return form + } + return form + " " + spec.placeholder +} diff --git a/vendor/github.com/alexflint/go-scalar/.gitignore b/vendor/github.com/alexflint/go-scalar/.gitignore new file mode 100644 index 00000000..daf913b1 --- /dev/null +++ b/vendor/github.com/alexflint/go-scalar/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/alexflint/go-scalar/.travis.yml b/vendor/github.com/alexflint/go-scalar/.travis.yml new file mode 100644 index 00000000..d4e2c014 --- /dev/null +++ b/vendor/github.com/alexflint/go-scalar/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - tip +before_install: + - go get github.com/axw/gocov/gocov + - go get github.com/mattn/goveralls + - if ! go get github.com/golang/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi +script: + - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/alexflint/go-scalar/LICENSE b/vendor/github.com/alexflint/go-scalar/LICENSE new file mode 100644 index 00000000..a50c4942 --- /dev/null +++ b/vendor/github.com/alexflint/go-scalar/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2015, Alex Flint +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/alexflint/go-scalar/README.md b/vendor/github.com/alexflint/go-scalar/README.md new file mode 100644 index 00000000..e6f510e8 --- /dev/null +++ b/vendor/github.com/alexflint/go-scalar/README.md @@ -0,0 +1,28 @@ +[![GoDoc](https://godoc.org/github.com/alexflint/go-scalar?status.svg)](https://godoc.org/github.com/alexflint/go-scalar) +[![Build Status](https://travis-ci.org/alexflint/go-scalar.svg?branch=master)](https://travis-ci.org/alexflint/go-scalar) +[![Coverage Status](https://coveralls.io/repos/alexflint/go-scalar/badge.svg?branch=master&service=github)](https://coveralls.io/github/alexflint/go-scalar?branch=master) +[![Report Card](https://goreportcard.com/badge/github.com/alexflint/go-scalar)](https://goreportcard.com/badge/github.com/alexflint/go-scalar) + +## Scalar parsing library + +Scalar is a library for parsing strings into arbitrary scalars (integers, +floats, strings, booleans, etc). It is helpful for tasks such as parsing +strings passed as environment variables or command line arguments. + +```shell +go get github.com/alexflint/go-scalar +``` + +The main API works as follows: + +```go +var value int +err := scalar.Parse(&value, "123") +``` + +There is also a variant that takes a `reflect.Value`: + +```go +var value int +err := scalar.ParseValue(reflect.ValueOf(&value), "123") +``` diff --git a/vendor/github.com/alexflint/go-scalar/scalar.go b/vendor/github.com/alexflint/go-scalar/scalar.go new file mode 100644 index 00000000..073392cd --- /dev/null +++ b/vendor/github.com/alexflint/go-scalar/scalar.go @@ -0,0 +1,153 @@ +// Package scalar parses strings into values of scalar type. + +package scalar + +import ( + "encoding" + "errors" + "fmt" + "net" + "net/mail" + "reflect" + "strconv" + "time" +) + +// The reflected form of some special types +var ( + textUnmarshalerType = reflect.TypeOf([]encoding.TextUnmarshaler{}).Elem() + durationType = reflect.TypeOf(time.Duration(0)) + mailAddressType = reflect.TypeOf(mail.Address{}) + macType = reflect.TypeOf(net.HardwareAddr{}) +) + +var ( + errNotSettable = errors.New("value is not settable") + errPtrNotSettable = errors.New("value is a nil pointer and is not settable") +) + +// Parse assigns a value to v by parsing s. +func Parse(dest interface{}, s string) error { + return ParseValue(reflect.ValueOf(dest), s) +} + +// ParseValue assigns a value to v by parsing s. 
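+// If v is a nil pointer it is allocated first, and any type implementing
+// encoding.TextUnmarshaler is handled via its UnmarshalText method before
+// the built-in scalar kinds are tried.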
+func ParseValue(v reflect.Value, s string) error { + // If we have a nil pointer then allocate a new object + if v.Kind() == reflect.Ptr && v.IsNil() { + if !v.CanSet() { + return errPtrNotSettable + } + + v.Set(reflect.New(v.Type().Elem())) + } + + // If it implements encoding.TextUnmarshaler then use that + if scalar, ok := v.Interface().(encoding.TextUnmarshaler); ok { + return scalar.UnmarshalText([]byte(s)) + } + // If it's a value instead of a pointer, check that we can unmarshal it + // via TextUnmarshaler as well + if v.CanAddr() { + if scalar, ok := v.Addr().Interface().(encoding.TextUnmarshaler); ok { + return scalar.UnmarshalText([]byte(s)) + } + } + + // If we have a pointer then dereference it + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + + if !v.CanSet() { + return errNotSettable + } + + // Switch on concrete type + switch scalar := v.Interface(); scalar.(type) { + case time.Duration: + duration, err := time.ParseDuration(s) + if err != nil { + return err + } + v.Set(reflect.ValueOf(duration)) + return nil + case mail.Address: + addr, err := mail.ParseAddress(s) + if err != nil { + return err + } + v.Set(reflect.ValueOf(*addr)) + return nil + case net.HardwareAddr: + ip, err := net.ParseMAC(s) + if err != nil { + return err + } + v.Set(reflect.ValueOf(ip)) + return nil + } + + // Switch on kind so that we can handle derived types + switch v.Kind() { + case reflect.String: + v.SetString(s) + case reflect.Bool: + x, err := strconv.ParseBool(s) + if err != nil { + return err + } + v.SetBool(x) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + x, err := strconv.ParseInt(s, 10, v.Type().Bits()) + if err != nil { + return err + } + v.SetInt(x) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + x, err := strconv.ParseUint(s, 10, v.Type().Bits()) + if err != nil { + return err + } + v.SetUint(x) + case reflect.Float32, reflect.Float64: + x, err := strconv.ParseFloat(s, v.Type().Bits()) + if err != nil { + return err + } + v.SetFloat(x) + default: + return fmt.Errorf("cannot parse into %v", v.Type()) + } + return nil +} + +// CanParse returns true if the type can be parsed from a string. 
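+// It mirrors the cases handled by ParseValue: encoding.TextUnmarshaler
+// implementations, the special time.Duration, mail.Address, and
+// net.HardwareAddr types, and the basic string, boolean, and numeric kinds.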
+func CanParse(t reflect.Type) bool { + // If it implements encoding.TextUnmarshaler then use that + if t.Implements(textUnmarshalerType) || reflect.PtrTo(t).Implements(textUnmarshalerType) { + return true + } + + // If we have a pointer then dereference it + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + + // Check for other special types + switch t { + case durationType, mailAddressType, macType: + return true + } + + // Fall back to checking the kind + switch t.Kind() { + case reflect.Bool: + return true + case reflect.String, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Float32, reflect.Float64: + return true + } + return false +} diff --git a/vendor/github.com/vektah/gqlparser/v2/formatter/formatter.go b/vendor/github.com/vektah/gqlparser/v2/formatter/formatter.go new file mode 100644 index 00000000..44f97325 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/formatter/formatter.go @@ -0,0 +1,636 @@ +package formatter + +import ( + "fmt" + "io" + "sort" + "strings" + + "github.com/vektah/gqlparser/v2/ast" +) + +type Formatter interface { + FormatSchema(schema *ast.Schema) + FormatSchemaDocument(doc *ast.SchemaDocument) + FormatQueryDocument(doc *ast.QueryDocument) +} + +type FormatterOption func(*formatter) + +func WithIndent(indent string) FormatterOption { + return func(f *formatter) { + f.indent = indent + } +} + +func NewFormatter(w io.Writer, options ...FormatterOption) Formatter { + f := &formatter{ + indent: "\t", + writer: w, + } + for _, opt := range options { + opt(f) + } + return f +} + +type formatter struct { + writer io.Writer + + indent string + indentSize int + emitBuiltin bool + + padNext bool + lineHead bool +} + +func (f *formatter) writeString(s string) { + _, _ = f.writer.Write([]byte(s)) +} + +func (f *formatter) writeIndent() *formatter { + if f.lineHead { + f.writeString(strings.Repeat(f.indent, f.indentSize)) + } + f.lineHead = false + f.padNext = false + + return f +} + +func (f *formatter) WriteNewline() *formatter { + f.writeString("\n") + f.lineHead = true + f.padNext = false + + return f +} + +func (f *formatter) WriteWord(word string) *formatter { + if f.lineHead { + f.writeIndent() + } + if f.padNext { + f.writeString(" ") + } + f.writeString(strings.TrimSpace(word)) + f.padNext = true + + return f +} + +func (f *formatter) WriteString(s string) *formatter { + if f.lineHead { + f.writeIndent() + } + if f.padNext { + f.writeString(" ") + } + f.writeString(s) + f.padNext = false + + return f +} + +func (f *formatter) WriteDescription(s string) *formatter { + if s == "" { + return f + } + + f.WriteString(`"""`) + if ss := strings.Split(s, "\n"); len(ss) > 1 { + f.WriteNewline() + for _, s := range ss { + f.WriteString(s).WriteNewline() + } + } else { + f.WriteString(s) + } + + f.WriteString(`"""`).WriteNewline() + + return f +} + +func (f *formatter) IncrementIndent() { + f.indentSize++ +} + +func (f *formatter) DecrementIndent() { + f.indentSize-- +} + +func (f *formatter) NoPadding() *formatter { + f.padNext = false + + return f +} + +func (f *formatter) NeedPadding() *formatter { + f.padNext = true + + return f +} + +func (f *formatter) FormatSchema(schema *ast.Schema) { + if schema == nil { + return + } + + var inSchema bool + startSchema := func() { + if !inSchema { + inSchema = true + + f.WriteWord("schema").WriteString("{").WriteNewline() + f.IncrementIndent() + } + } + if schema.Query != nil && 
schema.Query.Name != "Query" { + startSchema() + f.WriteWord("query").NoPadding().WriteString(":").NeedPadding() + f.WriteWord(schema.Query.Name).WriteNewline() + } + if schema.Mutation != nil && schema.Mutation.Name != "Mutation" { + startSchema() + f.WriteWord("mutation").NoPadding().WriteString(":").NeedPadding() + f.WriteWord(schema.Mutation.Name).WriteNewline() + } + if schema.Subscription != nil && schema.Subscription.Name != "Subscription" { + startSchema() + f.WriteWord("subscription").NoPadding().WriteString(":").NeedPadding() + f.WriteWord(schema.Subscription.Name).WriteNewline() + } + if inSchema { + f.DecrementIndent() + f.WriteString("}").WriteNewline() + } + + directiveNames := make([]string, 0, len(schema.Directives)) + for name := range schema.Directives { + directiveNames = append(directiveNames, name) + } + sort.Strings(directiveNames) + for _, name := range directiveNames { + f.FormatDirectiveDefinition(schema.Directives[name]) + } + + typeNames := make([]string, 0, len(schema.Types)) + for name := range schema.Types { + typeNames = append(typeNames, name) + } + sort.Strings(typeNames) + for _, name := range typeNames { + f.FormatDefinition(schema.Types[name], false) + } +} + +func (f *formatter) FormatSchemaDocument(doc *ast.SchemaDocument) { + // TODO emit by position based order + + if doc == nil { + return + } + + f.FormatSchemaDefinitionList(doc.Schema, false) + f.FormatSchemaDefinitionList(doc.SchemaExtension, true) + + f.FormatDirectiveDefinitionList(doc.Directives) + + f.FormatDefinitionList(doc.Definitions, false) + f.FormatDefinitionList(doc.Extensions, true) +} + +func (f *formatter) FormatQueryDocument(doc *ast.QueryDocument) { + // TODO emit by position based order + + if doc == nil { + return + } + + f.FormatOperationList(doc.Operations) + f.FormatFragmentDefinitionList(doc.Fragments) +} + +func (f *formatter) FormatSchemaDefinitionList(lists ast.SchemaDefinitionList, extension bool) { + if len(lists) == 0 { + return + } + + if extension { + f.WriteWord("extend") + } + f.WriteWord("schema").WriteString("{").WriteNewline() + f.IncrementIndent() + + for _, def := range lists { + f.FormatSchemaDefinition(def) + } + + f.DecrementIndent() + f.WriteString("}").WriteNewline() +} + +func (f *formatter) FormatSchemaDefinition(def *ast.SchemaDefinition) { + f.WriteDescription(def.Description) + + f.FormatDirectiveList(def.Directives) + + f.FormatOperationTypeDefinitionList(def.OperationTypes) +} + +func (f *formatter) FormatOperationTypeDefinitionList(lists ast.OperationTypeDefinitionList) { + for _, def := range lists { + f.FormatOperationTypeDefinition(def) + } +} + +func (f *formatter) FormatOperationTypeDefinition(def *ast.OperationTypeDefinition) { + f.WriteWord(string(def.Operation)).NoPadding().WriteString(":").NeedPadding() + f.WriteWord(def.Type) + f.WriteNewline() +} + +func (f *formatter) FormatFieldList(fieldList ast.FieldList) { + if len(fieldList) == 0 { + return + } + + f.WriteString("{").WriteNewline() + f.IncrementIndent() + + for _, field := range fieldList { + f.FormatFieldDefinition(field) + } + + f.DecrementIndent() + f.WriteString("}") +} + +func (f *formatter) FormatFieldDefinition(field *ast.FieldDefinition) { + if !f.emitBuiltin && strings.HasPrefix(field.Name, "__") { + return + } + + f.WriteDescription(field.Description) + + f.WriteWord(field.Name).NoPadding() + f.FormatArgumentDefinitionList(field.Arguments) + f.NoPadding().WriteString(":").NeedPadding() + f.FormatType(field.Type) + + if field.DefaultValue != nil { + f.WriteWord("=") + 
f.FormatValue(field.DefaultValue) + } + + f.FormatDirectiveList(field.Directives) + + f.WriteNewline() +} + +func (f *formatter) FormatArgumentDefinitionList(lists ast.ArgumentDefinitionList) { + if len(lists) == 0 { + return + } + + f.WriteString("(") + for idx, arg := range lists { + f.FormatArgumentDefinition(arg) + + // Skip emitting (insignificant) comma in case it is the + // last argument, or we printed a new line in its definition. + if idx != len(lists)-1 && arg.Description == "" { + f.NoPadding().WriteWord(",") + } + } + f.NoPadding().WriteString(")").NeedPadding() +} + +func (f *formatter) FormatArgumentDefinition(def *ast.ArgumentDefinition) { + if def.Description != "" { + f.WriteNewline().IncrementIndent() + f.WriteDescription(def.Description) + } + + f.WriteWord(def.Name).NoPadding().WriteString(":").NeedPadding() + f.FormatType(def.Type) + + if def.DefaultValue != nil { + f.WriteWord("=") + f.FormatValue(def.DefaultValue) + } + + f.NeedPadding().FormatDirectiveList(def.Directives) + + if def.Description != "" { + f.DecrementIndent() + f.WriteNewline() + } +} + +func (f *formatter) FormatDirectiveLocation(location ast.DirectiveLocation) { + f.WriteWord(string(location)) +} + +func (f *formatter) FormatDirectiveDefinitionList(lists ast.DirectiveDefinitionList) { + if len(lists) == 0 { + return + } + + for _, dec := range lists { + f.FormatDirectiveDefinition(dec) + } +} + +func (f *formatter) FormatDirectiveDefinition(def *ast.DirectiveDefinition) { + if !f.emitBuiltin { + if def.Position.Src.BuiltIn { + return + } + } + + f.WriteDescription(def.Description) + f.WriteWord("directive").WriteString("@").WriteWord(def.Name) + + if len(def.Arguments) != 0 { + f.NoPadding() + f.FormatArgumentDefinitionList(def.Arguments) + } + + if len(def.Locations) != 0 { + f.WriteWord("on") + + for idx, dirLoc := range def.Locations { + f.FormatDirectiveLocation(dirLoc) + + if idx != len(def.Locations)-1 { + f.WriteWord("|") + } + } + } + + f.WriteNewline() +} + +func (f *formatter) FormatDefinitionList(lists ast.DefinitionList, extend bool) { + if len(lists) == 0 { + return + } + + for _, dec := range lists { + f.FormatDefinition(dec, extend) + } +} + +func (f *formatter) FormatDefinition(def *ast.Definition, extend bool) { + if !f.emitBuiltin && def.BuiltIn { + return + } + + f.WriteDescription(def.Description) + + if extend { + f.WriteWord("extend") + } + + switch def.Kind { + case ast.Scalar: + f.WriteWord("scalar").WriteWord(def.Name) + + case ast.Object: + f.WriteWord("type").WriteWord(def.Name) + + case ast.Interface: + f.WriteWord("interface").WriteWord(def.Name) + + case ast.Union: + f.WriteWord("union").WriteWord(def.Name) + + case ast.Enum: + f.WriteWord("enum").WriteWord(def.Name) + + case ast.InputObject: + f.WriteWord("input").WriteWord(def.Name) + } + + if len(def.Interfaces) != 0 { + f.WriteWord("implements").WriteWord(strings.Join(def.Interfaces, " & ")) + } + + f.FormatDirectiveList(def.Directives) + + if len(def.Types) != 0 { + f.WriteWord("=").WriteWord(strings.Join(def.Types, " | ")) + } + + f.FormatFieldList(def.Fields) + + f.FormatEnumValueList(def.EnumValues) + + f.WriteNewline() +} + +func (f *formatter) FormatEnumValueList(lists ast.EnumValueList) { + if len(lists) == 0 { + return + } + + f.WriteString("{").WriteNewline() + f.IncrementIndent() + + for _, v := range lists { + f.FormatEnumValueDefinition(v) + } + + f.DecrementIndent() + f.WriteString("}") +} + +func (f *formatter) FormatEnumValueDefinition(def *ast.EnumValueDefinition) { + 
f.WriteDescription(def.Description) + + f.WriteWord(def.Name) + f.FormatDirectiveList(def.Directives) + + f.WriteNewline() +} + +func (f *formatter) FormatOperationList(lists ast.OperationList) { + for _, def := range lists { + f.FormatOperationDefinition(def) + } +} + +func (f *formatter) FormatOperationDefinition(def *ast.OperationDefinition) { + f.WriteWord(string(def.Operation)) + if def.Name != "" { + f.WriteWord(def.Name) + } + f.FormatVariableDefinitionList(def.VariableDefinitions) + f.FormatDirectiveList(def.Directives) + + if len(def.SelectionSet) != 0 { + f.FormatSelectionSet(def.SelectionSet) + f.WriteNewline() + } +} + +func (f *formatter) FormatDirectiveList(lists ast.DirectiveList) { + if len(lists) == 0 { + return + } + + for _, dir := range lists { + f.FormatDirective(dir) + } +} + +func (f *formatter) FormatDirective(dir *ast.Directive) { + f.WriteString("@").WriteWord(dir.Name) + f.FormatArgumentList(dir.Arguments) +} + +func (f *formatter) FormatArgumentList(lists ast.ArgumentList) { + if len(lists) == 0 { + return + } + f.NoPadding().WriteString("(") + for idx, arg := range lists { + f.FormatArgument(arg) + + if idx != len(lists)-1 { + f.NoPadding().WriteWord(",") + } + } + f.WriteString(")").NeedPadding() +} + +func (f *formatter) FormatArgument(arg *ast.Argument) { + f.WriteWord(arg.Name).NoPadding().WriteString(":").NeedPadding() + f.WriteString(arg.Value.String()) +} + +func (f *formatter) FormatFragmentDefinitionList(lists ast.FragmentDefinitionList) { + for _, def := range lists { + f.FormatFragmentDefinition(def) + } +} + +func (f *formatter) FormatFragmentDefinition(def *ast.FragmentDefinition) { + f.WriteWord("fragment").WriteWord(def.Name) + f.FormatVariableDefinitionList(def.VariableDefinition) + f.WriteWord("on").WriteWord(def.TypeCondition) + f.FormatDirectiveList(def.Directives) + + if len(def.SelectionSet) != 0 { + f.FormatSelectionSet(def.SelectionSet) + f.WriteNewline() + } +} + +func (f *formatter) FormatVariableDefinitionList(lists ast.VariableDefinitionList) { + if len(lists) == 0 { + return + } + + f.WriteString("(") + for idx, def := range lists { + f.FormatVariableDefinition(def) + + if idx != len(lists)-1 { + f.NoPadding().WriteWord(",") + } + } + f.NoPadding().WriteString(")").NeedPadding() +} + +func (f *formatter) FormatVariableDefinition(def *ast.VariableDefinition) { + f.WriteString("$").WriteWord(def.Variable).NoPadding().WriteString(":").NeedPadding() + f.FormatType(def.Type) + + if def.DefaultValue != nil { + f.WriteWord("=") + f.FormatValue(def.DefaultValue) + } + + // TODO https://github.com/vektah/gqlparser/v2/issues/102 + // VariableDefinition : Variable : Type DefaultValue? Directives[Const]? 
+} + +func (f *formatter) FormatSelectionSet(sets ast.SelectionSet) { + if len(sets) == 0 { + return + } + + f.WriteString("{").WriteNewline() + f.IncrementIndent() + + for _, sel := range sets { + f.FormatSelection(sel) + } + + f.DecrementIndent() + f.WriteString("}") +} + +func (f *formatter) FormatSelection(selection ast.Selection) { + switch v := selection.(type) { + case *ast.Field: + f.FormatField(v) + + case *ast.FragmentSpread: + f.FormatFragmentSpread(v) + + case *ast.InlineFragment: + f.FormatInlineFragment(v) + + default: + panic(fmt.Errorf("unknown Selection type: %T", selection)) + } + + f.WriteNewline() +} + +func (f *formatter) FormatField(field *ast.Field) { + if field.Alias != "" && field.Alias != field.Name { + f.WriteWord(field.Alias).NoPadding().WriteString(":").NeedPadding() + } + f.WriteWord(field.Name) + + if len(field.Arguments) != 0 { + f.NoPadding() + f.FormatArgumentList(field.Arguments) + f.NeedPadding() + } + + f.FormatDirectiveList(field.Directives) + + f.FormatSelectionSet(field.SelectionSet) +} + +func (f *formatter) FormatFragmentSpread(spread *ast.FragmentSpread) { + f.WriteWord("...").WriteWord(spread.Name) + + f.FormatDirectiveList(spread.Directives) +} + +func (f *formatter) FormatInlineFragment(inline *ast.InlineFragment) { + f.WriteWord("...") + if inline.TypeCondition != "" { + f.WriteWord("on").WriteWord(inline.TypeCondition) + } + + f.FormatDirectiveList(inline.Directives) + + f.FormatSelectionSet(inline.SelectionSet) +} + +func (f *formatter) FormatType(t *ast.Type) { + f.WriteWord(t.String()) +} + +func (f *formatter) FormatValue(value *ast.Value) { + f.WriteString(value.String()) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go b/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go new file mode 100644 index 00000000..4065a610 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go @@ -0,0 +1,58 @@ +package lexer + +import ( + "math" + "strings" +) + +// blockStringValue produces the value of a block string from its parsed raw value, similar to +// Coffeescript's block string, Python's docstring trim or Ruby's strip_heredoc. +// +// This implements the GraphQL spec's BlockStringValue() static algorithm. +func blockStringValue(raw string) string { + lines := strings.Split(raw, "\n") + + commonIndent := math.MaxInt32 + for _, line := range lines { + indent := leadingWhitespace(line) + if indent < len(line) && indent < commonIndent { + commonIndent = indent + if commonIndent == 0 { + break + } + } + } + + if commonIndent != math.MaxInt32 && len(lines) > 0 { + for i := 1; i < len(lines); i++ { + if len(lines[i]) < commonIndent { + lines[i] = "" + } else { + lines[i] = lines[i][commonIndent:] + } + } + } + + start := 0 + end := len(lines) + + for start < end && leadingWhitespace(lines[start]) == math.MaxInt32 { + start++ + } + + for start < end && leadingWhitespace(lines[end-1]) == math.MaxInt32 { + end-- + } + + return strings.Join(lines[start:end], "\n") +} + +func leadingWhitespace(str string) int { + for i, r := range str { + if r != ' ' && r != '\t' { + return i + } + } + // this line is made up entirely of whitespace, its leading whitespace doesnt count. 
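+	// (blockStringValue checks for this MaxInt32 sentinel when trimming leading and trailing blank lines)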
+ return math.MaxInt32 +} diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go new file mode 100644 index 00000000..25dcdcb9 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go @@ -0,0 +1,515 @@ +package lexer + +import ( + "bytes" + "unicode/utf8" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +// Lexer turns graphql request and schema strings into tokens +type Lexer struct { + *ast.Source + // An offset into the string in bytes + start int + // An offset into the string in runes + startRunes int + // An offset into the string in bytes + end int + // An offset into the string in runes + endRunes int + // the current line number + line int + // An offset into the string in rune + lineStartRunes int +} + +func New(src *ast.Source) Lexer { + return Lexer{ + Source: src, + line: 1, + } +} + +// take one rune from input and advance end +func (s *Lexer) peek() (rune, int) { + return utf8.DecodeRuneInString(s.Input[s.end:]) +} + +func (s *Lexer) makeToken(kind Type) (Token, error) { + return s.makeValueToken(kind, s.Input[s.start:s.end]) +} + +func (s *Lexer) makeValueToken(kind Type, value string) (Token, error) { + return Token{ + Kind: kind, + Value: value, + Pos: ast.Position{ + Start: s.startRunes, + End: s.endRunes, + Line: s.line, + Column: s.startRunes - s.lineStartRunes + 1, + Src: s.Source, + }, + }, nil +} + +func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { + column := s.endRunes - s.lineStartRunes + 1 + return Token{ + Kind: Invalid, + Pos: ast.Position{ + Start: s.startRunes, + End: s.endRunes, + Line: s.line, + Column: column, + Src: s.Source, + }, + }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...) +} + +// ReadToken gets the next token from the source starting at the given position. +// +// This skips over whitespace and comments until it finds the next lexable +// token, then lexes punctuators immediately or calls the appropriate helper +// function for more complicated tokens. +func (s *Lexer) ReadToken() (token Token, err error) { + + s.ws() + s.start = s.end + s.startRunes = s.endRunes + + if s.end >= len(s.Input) { + return s.makeToken(EOF) + } + r := s.Input[s.start] + s.end++ + s.endRunes++ + switch r { + case '!': + return s.makeValueToken(Bang, "") + + case '$': + return s.makeValueToken(Dollar, "") + case '&': + return s.makeValueToken(Amp, "") + case '(': + return s.makeValueToken(ParenL, "") + case ')': + return s.makeValueToken(ParenR, "") + case '.': + if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == "..." 
{ + s.end += 2 + s.endRunes += 2 + return s.makeValueToken(Spread, "") + } + case ':': + return s.makeValueToken(Colon, "") + case '=': + return s.makeValueToken(Equals, "") + case '@': + return s.makeValueToken(At, "") + case '[': + return s.makeValueToken(BracketL, "") + case ']': + return s.makeValueToken(BracketR, "") + case '{': + return s.makeValueToken(BraceL, "") + case '}': + return s.makeValueToken(BraceR, "") + case '|': + return s.makeValueToken(Pipe, "") + case '#': + s.readComment() + return s.ReadToken() + + case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': + return s.readName() + + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return s.readNumber() + + case '"': + if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == `"""` { + return s.readBlockString() + } + + return s.readString() + } + + s.end-- + s.endRunes-- + + if r < 0x0020 && r != 0x0009 && r != 0x000a && r != 0x000d { + return s.makeError(`Cannot contain the invalid character "\u%04d"`, r) + } + + if r == '\'' { + return s.makeError(`Unexpected single quote character ('), did you mean to use a double quote (")?`) + } + + return s.makeError(`Cannot parse the unexpected character "%s".`, string(r)) +} + +// ws reads from body starting at startPosition until it finds a non-whitespace +// or commented character, and updates the token end to include all whitespace +func (s *Lexer) ws() { + for s.end < len(s.Input) { + switch s.Input[s.end] { + case '\t', ' ', ',': + s.end++ + s.endRunes++ + case '\n': + s.end++ + s.endRunes++ + s.line++ + s.lineStartRunes = s.endRunes + case '\r': + s.end++ + s.endRunes++ + s.line++ + s.lineStartRunes = s.endRunes + // skip the following newline if its there + if s.end < len(s.Input) && s.Input[s.end] == '\n' { + s.end++ + s.endRunes++ + } + // byte order mark, given ws is hot path we aren't relying on the unicode package here. + case 0xef: + if s.end+2 < len(s.Input) && s.Input[s.end+1] == 0xBB && s.Input[s.end+2] == 0xBF { + s.end += 3 + s.endRunes++ + } else { + return + } + default: + return + } + } +} + +// readComment from the input +// +// #[\u0009\u0020-\uFFFF]* +func (s *Lexer) readComment() (Token, error) { + for s.end < len(s.Input) { + r, w := s.peek() + + // SourceCharacter but not LineTerminator + if r > 0x001f || r == '\t' { + s.end += w + s.endRunes++ + } else { + break + } + } + + return s.makeToken(Comment) +} + +// readNumber from the input, either a float +// or an int depending on whether a decimal point appears. +// +// Int: -?(0|[1-9][0-9]*) +// Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)? 
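+//
+// ReadToken has already consumed the number's first byte by the time this is
+// called, so the implementation first backs up one byte before scanning.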
+func (s *Lexer) readNumber() (Token, error) {
+	float := false
+
+	// backup to the first digit
+	s.end--
+	s.endRunes--
+
+	s.acceptByte('-')
+
+	if s.acceptByte('0') {
+		if consumed := s.acceptDigits(); consumed != 0 {
+			s.end -= consumed
+			s.endRunes -= consumed
+			return s.makeError("Invalid number, unexpected digit after 0: %s.", s.describeNext())
+		}
+	} else {
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if s.acceptByte('.') {
+		float = true
+
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if s.acceptByte('e', 'E') {
+		float = true
+
+		s.acceptByte('-', '+')
+
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if float {
+		return s.makeToken(Float)
+	} else {
+		return s.makeToken(Int)
+	}
+}
+
+// acceptByte if it matches any of given bytes, returning true if it found anything
+func (s *Lexer) acceptByte(bytes ...uint8) bool {
+	if s.end >= len(s.Input) {
+		return false
+	}
+
+	for _, accepted := range bytes {
+		if s.Input[s.end] == accepted {
+			s.end++
+			s.endRunes++
+			return true
+		}
+	}
+	return false
+}
+
+// acceptDigits from the input, returning the number of digits it found
+func (s *Lexer) acceptDigits() int {
+	consumed := 0
+	for s.end < len(s.Input) && s.Input[s.end] >= '0' && s.Input[s.end] <= '9' {
+		s.end++
+		s.endRunes++
+		consumed++
+	}
+
+	return consumed
+}
+
+// describeNext peeks at the input and returns a human readable string. This will alloc
+// and should only be used in errors
+func (s *Lexer) describeNext() string {
+	if s.end < len(s.Input) {
+		return `"` + string(s.Input[s.end]) + `"`
+	}
+	return "<EOF>"
+}
+
+// readString from the input
+//
+// "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
+func (s *Lexer) readString() (Token, error) {
+	inputLen := len(s.Input)
+
+	// this buffer is lazily created only if there are escape characters.
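+	// (when it stays nil, the token value is sliced directly from the input)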
+ var buf *bytes.Buffer + + // skip the opening quote + s.start++ + s.startRunes++ + + for s.end < inputLen { + r := s.Input[s.end] + if r == '\n' || r == '\r' { + break + } + if r < 0x0020 && r != '\t' { + return s.makeError(`Invalid character within String: "\u%04d".`, r) + } + switch r { + default: + var char = rune(r) + var w = 1 + + // skip unicode overhead if we are in the ascii range + if r >= 127 { + char, w = utf8.DecodeRuneInString(s.Input[s.end:]) + } + s.end += w + s.endRunes++ + + if buf != nil { + buf.WriteRune(char) + } + + case '"': + t, err := s.makeToken(String) + // the token should not include the quotes in its value, but should cover them in its position + t.Pos.Start-- + t.Pos.End++ + + if buf != nil { + t.Value = buf.String() + } + + // skip the close quote + s.end++ + s.endRunes++ + + return t, err + + case '\\': + if s.end+1 >= inputLen { + s.end++ + s.endRunes++ + return s.makeError(`Invalid character escape sequence.`) + } + + if buf == nil { + buf = bytes.NewBufferString(s.Input[s.start:s.end]) + } + + escape := s.Input[s.end+1] + + if escape == 'u' { + if s.end+6 >= inputLen { + s.end++ + s.endRunes++ + return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:]) + } + + r, ok := unhex(s.Input[s.end+2 : s.end+6]) + if !ok { + s.end++ + s.endRunes++ + return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:s.end+5]) + } + buf.WriteRune(r) + s.end += 6 + s.endRunes += 6 + } else { + switch escape { + case '"', '/', '\\': + buf.WriteByte(escape) + case 'b': + buf.WriteByte('\b') + case 'f': + buf.WriteByte('\f') + case 'n': + buf.WriteByte('\n') + case 'r': + buf.WriteByte('\r') + case 't': + buf.WriteByte('\t') + default: + s.end += 1 + s.endRunes += 1 + return s.makeError("Invalid character escape sequence: \\%s.", string(escape)) + } + s.end += 2 + s.endRunes += 2 + } + } + } + + return s.makeError("Unterminated string.") +} + +// readBlockString from the input +// +// """("?"?(\\"""|\\(?!=""")|[^"\\]))*""" +func (s *Lexer) readBlockString() (Token, error) { + inputLen := len(s.Input) + + var buf bytes.Buffer + + // skip the opening quote + s.start += 3 + s.startRunes += 3 + s.end += 2 + s.endRunes += 2 + + for s.end < inputLen { + r := s.Input[s.end] + + // Closing triple quote (""") + if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` { + t, err := s.makeValueToken(BlockString, blockStringValue(buf.String())) + + // the token should not include the quotes in its value, but should cover them in its position + t.Pos.Start -= 3 + t.Pos.End += 3 + + // skip the close quote + s.end += 3 + s.endRunes += 3 + return t, err + } + + // SourceCharacter + if r < 0x0020 && r != '\t' && r != '\n' && r != '\r' { + return s.makeError(`Invalid character within String: "\u%04d".`, r) + } + + if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` { + buf.WriteString(`"""`) + s.end += 4 + s.endRunes += 4 + } else if r == '\r' { + if s.end+1 < inputLen && s.Input[s.end+1] == '\n' { + s.end++ + s.endRunes++ + } + + buf.WriteByte('\n') + s.end++ + s.endRunes++ + s.line++ + s.lineStartRunes = s.endRunes + } else { + var char = rune(r) + var w = 1 + + // skip unicode overhead if we are in the ascii range + if r >= 127 { + char, w = utf8.DecodeRuneInString(s.Input[s.end:]) + } + s.end += w + s.endRunes++ + buf.WriteRune(char) + if r == '\n' { + s.line++ + s.lineStartRunes = s.endRunes + } + } + } + + return s.makeError("Unterminated string.") +} + +func unhex(b string) (v rune, ok bool) { + for _, c := range b 
{ + v <<= 4 + switch { + case '0' <= c && c <= '9': + v |= c - '0' + case 'a' <= c && c <= 'f': + v |= c - 'a' + 10 + case 'A' <= c && c <= 'F': + v |= c - 'A' + 10 + default: + return 0, false + } + } + + return v, true +} + +// readName from the input +// +// [_A-Za-z][_0-9A-Za-z]* +func (s *Lexer) readName() (Token, error) { + for s.end < len(s.Input) { + r, w := s.peek() + + if (r >= '0' && r <= '9') || (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' { + s.end += w + s.endRunes++ + } else { + break + } + } + + return s.makeToken(Name) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml new file mode 100644 index 00000000..5c4d5f0f --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml @@ -0,0 +1,692 @@ +encoding: + - name: disallows uncommon control characters + input: "\u0007" + error: + message: 'Cannot contain the invalid character "\u0007"' + locations: [{line: 1, column: 1}] + + - name: accepts BOM header + input: "\uFEFF foo" + tokens: + - + kind: NAME + start: 2 + end: 5 + value: 'foo' + +simple tokens: + - name: records line and column + input: "\n \r\n \r foo\n" + tokens: + - + kind: NAME + start: 8 + end: 11 + line: 4 + column: 3 + value: 'foo' + + - name: skips whitespace + input: "\n\n foo\n\n\n" + tokens: + - + kind: NAME + start: 6 + end: 9 + value: 'foo' + + - name: skips comments + input: "\n #comment\n foo#comment\n" + tokens: + - + kind: NAME + start: 18 + end: 21 + value: 'foo' + + - name: skips commas + input: ",,,foo,,," + tokens: + - + kind: NAME + start: 3 + end: 6 + value: 'foo' + + - name: errors respect whitespace + input: "\n\n ?\n\n\n" + error: + message: 'Cannot parse the unexpected character "?".' + locations: [{line: 3, column: 5}] + string: | + Syntax Error: Cannot parse the unexpected character "?". + GraphQL request (3:5) + 2: + 3: ? + ^ + 4: + + - name: lex reports useful information for dashes in names + input: "a-b" + error: + message: 'Invalid number, expected digit but got: "b".' + locations: [{ line: 1, column: 3 }] + tokens: + - + kind: Name + start: 0 + end: 1 + value: a + +lexes strings: + - name: basic + input: '"simple"' + tokens: + - + kind: STRING + start: 0 + end: 8 + value: 'simple' + + - name: whitespace + input: '" white space "' + tokens: + - + kind: STRING + start: 0 + end: 15 + value: ' white space ' + + - name: quote + input: '"quote \""' + tokens: + - + kind: STRING + start: 0 + end: 10 + value: 'quote "' + + - name: escaped + input: '"escaped \n\r\b\t\f"' + tokens: + - + kind: STRING + start: 0 + end: 20 + value: "escaped \n\r\b\t\f" + + - name: slashes + input: '"slashes \\ \/"' + tokens: + - + kind: STRING + start: 0 + end: 15 + value: 'slashes \ /' + + - name: unicode + input: '"unicode \u1234\u5678\u90AB\uCDEF"' + tokens: + - + kind: STRING + start: 0 + end: 34 + value: "unicode \u1234\u5678\u90AB\uCDEF" + +lex reports useful string errors: + - name: unterminated + input: '"' + error: + message: "Unterminated string." + locations: [{ line: 1, column: 2 }] + + - name: no end quote + input: '"no end quote' + error: + message: 'Unterminated string.' + locations: [{ line: 1, column: 14 }] + + - name: single quotes + input: "'single quotes'" + error: + message: "Unexpected single quote character ('), did you mean to use a double quote (\")?" 
+ locations: [{ line: 1, column: 1 }] + + - name: control characters + input: "\"contains unescaped \u0007 control char\"" + error: + message: 'Invalid character within String: "\u0007".' + locations: [{ line: 1, column: 21 }] + + - name: null byte + input: "\"null-byte is not \u0000 end of file\"" + error: + message: 'Invalid character within String: "\u0000".' + locations: [{ line: 1, column: 19 }] + + - name: unterminated newline + input: "\"multi\nline\"" + error: + message: 'Unterminated string.' + locations: [{line: 1, column: 7 }] + + - name: unterminated carriage return + input: "\"multi\rline\"" + error: + message: 'Unterminated string.' + locations: [{ line: 1, column: 7 }] + + - name: bad escape character + input: '"bad \z esc"' + error: + message: 'Invalid character escape sequence: \z.' + locations: [{ line: 1, column: 7 }] + + - name: hex escape sequence + input: '"bad \x esc"' + error: + message: 'Invalid character escape sequence: \x.' + locations: [{ line: 1, column: 7 }] + + - name: short escape sequence + input: '"bad \u1 esc"' + error: + message: 'Invalid character escape sequence: \u1 es.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 1 + input: '"bad \u0XX1 esc"' + error: + message: 'Invalid character escape sequence: \u0XX1.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 2 + input: '"bad \uXXXX esc"' + error: + message: 'Invalid character escape sequence: \uXXXX.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 3 + input: '"bad \uFXXX esc"' + error: + message: 'Invalid character escape sequence: \uFXXX.' + locations: [{ line: 1, column: 7 }] + + - name: invalid character escape sequence + input: '"bad \uXXXF esc"' + error: + message: 'Invalid character escape sequence: \uXXXF.' 
+ locations: [{ line: 1, column: 7 }] + +lexes block strings: + - name: simple + input: '"""simple"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 12 + value: 'simple' + + - name: white space + input: '""" white space """' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 19 + value: ' white space ' + + - name: contains quote + input: '"""contains " quote"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 22 + value: 'contains " quote' + + - name: contains triplequote + input: "\"\"\"contains \\\"\"\" triplequote\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 31 + value: 'contains """ triplequote' + + - name: multi line + input: "\"\"\"multi\nline\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 16 + value: "multi\nline" + + - name: multi line normalized + input: "\"\"\"multi\rline\r\nnormalized\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 28 + value: "multi\nline\nnormalized" + + - name: unescaped + input: '"""unescaped \n\r\b\t\f\u1234"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 32 + value: 'unescaped \n\r\b\t\f\u1234' + + - name: slashes + input: '"""slashes \\ \/"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 19 + value: 'slashes \\ \/' + + - name: multiple lines + input: | + """ + + spans + multiple + lines + + """ + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 36 + value: "spans\n multiple\n lines" + + - name: records correct line and column after block string + input: | + """ + + some + description + + """ foo + tokens: + - + kind: BLOCK_STRING + value: "some\ndescription" + - + kind: NAME + start: 27 + end: 30 + line: 6 + column: 5 + value: 'foo' + +lex reports useful block string errors: + - name: unterminated string + input: '"""' + error: + message: "Unterminated string." + locations: [{ line: 1, column: 4 }] + + - name: unescaped control characters + input: "\"\"\"contains unescaped \u0007 control char\"\"\"" + error: + message: 'Invalid character within String: "\u0007".' + locations: [{ line: 1, column: 23 }] + + - name: null byte + input: "\"\"\"null-byte is not \u0000 end of file\"\"\"" + error: + message: 'Invalid character within String: "\u0000".' 
+      locations: [{ line: 1, column: 21 }]
+
+lexes numbers:
+  - name: integer
+    input: "4"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '4'
+
+  - name: float
+    input: "4.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '4.123'
+
+  - name: negative
+    input: "-4"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 2
+        value: '-4'
+
+  - name: nine
+    input: "9"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '9'
+
+  - name: zero
+    input: "0"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '0'
+
+  - name: negative float
+    input: "-4.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '-4.123'
+
+  - name: float leading zero
+    input: "0.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '0.123'
+
+  - name: exponent whole
+    input: "123e4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '123e4'
+
+  - name: exponent uppercase
+    input: "123E4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '123E4'
+
+  - name: exponent negative power
+    input: "123e-4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '123e-4'
+
+  - name: exponent positive power
+    input: "123e+4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '123e+4'
+
+  - name: exponent negative base
+    input: "-1.123e4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 8
+        value: '-1.123e4'
+
+  - name: exponent negative base upper
+    input: "-1.123E4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 8
+        value: '-1.123E4'
+
+  - name: exponent negative base negative power
+    input: "-1.123e-4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 9
+        value: '-1.123e-4'
+
+  - name: exponent negative base positive power
+    input: "-1.123e+4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 9
+        value: '-1.123e+4'
+
+  - name: exponent negative base large power
+    input: "-1.123e4567"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 11
+        value: '-1.123e4567'
+
+lex reports useful number errors:
+  - name: zero
+    input: "00"
+    error:
+      message: 'Invalid number, unexpected digit after 0: "0".'
+      locations: [{ line: 1, column: 2 }]
+
+  - name: positive
+    input: "+1"
+    error:
+      message: 'Cannot parse the unexpected character "+".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: trailing dot
+    input: "1."
+    error:
+      message: 'Invalid number, expected digit but got: <EOF>.'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: trailing dot exponent
+    input: "1.e1"
+    error:
+      message: 'Invalid number, expected digit but got: "e".'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: missing leading zero
+    input: ".123"
+    error:
+      message: 'Cannot parse the unexpected character ".".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: characters
+    input: "1.A"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: negative characters
+    input: "-A"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 2 }]
+
+  - name: missing exponent
+    input: '1.0e'
+    error:
+      message: 'Invalid number, expected digit but got: <EOF>.'
+      locations: [{ line: 1, column: 5 }]
+
+  - name: character exponent
+    input: "1.0eA"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 5 }]
+
+lexes punctuation:
+  - name: bang
+    input: "!"
+    tokens:
+      -
+        kind: BANG
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: dollar
+    input: "$"
+    tokens:
+      -
+        kind: DOLLAR
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open paren
+    input: "("
+    tokens:
+      -
+        kind: PAREN_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close paren
+    input: ")"
+    tokens:
+      -
+        kind: PAREN_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: spread
+    input: "..."
+    tokens:
+      -
+        kind: SPREAD
+        start: 0
+        end: 3
+        value: undefined
+
+  - name: colon
+    input: ":"
+    tokens:
+      -
+        kind: COLON
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: equals
+    input: "="
+    tokens:
+      -
+        kind: EQUALS
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: at
+    input: "@"
+    tokens:
+      -
+        kind: AT
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open bracket
+    input: "["
+    tokens:
+      -
+        kind: BRACKET_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close bracket
+    input: "]"
+    tokens:
+      -
+        kind: BRACKET_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open brace
+    input: "{"
+    tokens:
+      -
+        kind: BRACE_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close brace
+    input: "}"
+    tokens:
+      -
+        kind: BRACE_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: pipe
+    input: "|"
+    tokens:
+      -
+        kind: PIPE
+        start: 0
+        end: 1
+        value: undefined
+
+lex reports useful unknown character error:
+  - name: not a spread
+    input: ".."
+    error:
+      message: 'Cannot parse the unexpected character ".".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: question mark
+    input: "?"
+    error:
+      message: 'Cannot parse the unexpected character "?".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: unicode 203
+    input: "\u203B"
+    error:
+      message: 'Cannot parse the unexpected character "â".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: unicode 200
+    input: "\u200b"
+    error:
+      message: 'Cannot parse the unexpected character "â".'
+      locations: [{ line: 1, column: 1 }]
+
diff --git a/vendor/github.com/vektah/gqlparser/v2/lexer/token.go b/vendor/github.com/vektah/gqlparser/v2/lexer/token.go
new file mode 100644
index 00000000..8985a7ef
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/v2/lexer/token.go
@@ -0,0 +1,148 @@
+package lexer
+
+import (
+	"strconv"
+
+	"github.com/vektah/gqlparser/v2/ast"
+)
+
+const (
+	Invalid Type = iota
+	EOF
+	Bang
+	Dollar
+	Amp
+	ParenL
+	ParenR
+	Spread
+	Colon
+	Equals
+	At
+	BracketL
+	BracketR
+	BraceL
+	BraceR
+	Pipe
+	Name
+	Int
+	Float
+	String
+	BlockString
+	Comment
+)
+
+func (t Type) Name() string {
+	switch t {
+	case Invalid:
+		return "Invalid"
+	case EOF:
+		return "EOF"
+	case Bang:
+		return "Bang"
+	case Dollar:
+		return "Dollar"
+	case Amp:
+		return "Amp"
+	case ParenL:
+		return "ParenL"
+	case ParenR:
+		return "ParenR"
+	case Spread:
+		return "Spread"
+	case Colon:
+		return "Colon"
+	case Equals:
+		return "Equals"
+	case At:
+		return "At"
+	case BracketL:
+		return "BracketL"
+	case BracketR:
+		return "BracketR"
+	case BraceL:
+		return "BraceL"
+	case BraceR:
+		return "BraceR"
+	case Pipe:
+		return "Pipe"
+	case Name:
+		return "Name"
+	case Int:
+		return "Int"
+	case Float:
+		return "Float"
+	case String:
+		return "String"
+	case BlockString:
+		return "BlockString"
+	case Comment:
+		return "Comment"
+	}
+	return "Unknown " + strconv.Itoa(int(t))
+}
+
+func (t Type) String() string {
+	switch t {
+	case Invalid:
+		return "<Invalid>"
+	case EOF:
+		return "<EOF>"
+	case Bang:
+		return "!"
+ case Dollar: + return "$" + case Amp: + return "&" + case ParenL: + return "(" + case ParenR: + return ")" + case Spread: + return "..." + case Colon: + return ":" + case Equals: + return "=" + case At: + return "@" + case BracketL: + return "[" + case BracketR: + return "]" + case BraceL: + return "{" + case BraceR: + return "}" + case Pipe: + return "|" + case Name: + return "Name" + case Int: + return "Int" + case Float: + return "Float" + case String: + return "String" + case BlockString: + return "BlockString" + case Comment: + return "Comment" + } + return "Unknown " + strconv.Itoa(int(t)) +} + +// Kind represents a type of token. The types are predefined as constants. +type Type int + +type Token struct { + Kind Type // The token type. + Value string // The literal value consumed. + Pos ast.Position // The file and line this token was read from +} + +func (t Token) String() string { + if t.Value != "" { + return t.Kind.String() + " " + strconv.Quote(t.Value) + } + return t.Kind.String() +} diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/parser.go b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go new file mode 100644 index 00000000..68eb51ed --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go @@ -0,0 +1,136 @@ +package parser + +import ( + "strconv" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/lexer" +) + +type parser struct { + lexer lexer.Lexer + err error + + peeked bool + peekToken lexer.Token + peekError error + + prev lexer.Token +} + +func (p *parser) peekPos() *ast.Position { + if p.err != nil { + return nil + } + + peek := p.peek() + return &peek.Pos +} + +func (p *parser) peek() lexer.Token { + if p.err != nil { + return p.prev + } + + if !p.peeked { + p.peekToken, p.peekError = p.lexer.ReadToken() + p.peeked = true + } + + return p.peekToken +} + +func (p *parser) error(tok lexer.Token, format string, args ...interface{}) { + if p.err != nil { + return + } + p.err = gqlerror.ErrorLocf(tok.Pos.Src.Name, tok.Pos.Line, tok.Pos.Column, format, args...) 
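+	// the guard above makes later calls no-ops, so only the first error encountered is reported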
+} + +func (p *parser) next() lexer.Token { + if p.err != nil { + return p.prev + } + if p.peeked { + p.peeked = false + p.prev, p.err = p.peekToken, p.peekError + } else { + p.prev, p.err = p.lexer.ReadToken() + } + return p.prev +} + +func (p *parser) expectKeyword(value string) lexer.Token { + tok := p.peek() + if tok.Kind == lexer.Name && tok.Value == value { + return p.next() + } + + p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String()) + return tok +} + +func (p *parser) expect(kind lexer.Type) lexer.Token { + tok := p.peek() + if tok.Kind == kind { + return p.next() + } + + p.error(tok, "Expected %s, found %s", kind, tok.Kind.String()) + return tok +} + +func (p *parser) skip(kind lexer.Type) bool { + if p.err != nil { + return false + } + + tok := p.peek() + + if tok.Kind != kind { + return false + } + p.next() + return true +} + +func (p *parser) unexpectedError() { + p.unexpectedToken(p.peek()) +} + +func (p *parser) unexpectedToken(tok lexer.Token) { + p.error(tok, "Unexpected %s", tok.String()) +} + +func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) { + hasDef := p.skip(start) + if !hasDef { + return + } + + for p.peek().Kind != end && p.err == nil { + cb() + } + p.next() +} + +func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) { + hasDef := p.skip(start) + if !hasDef { + return + } + + called := false + for p.peek().Kind != end && p.err == nil { + called = true + cb() + } + + if !called { + p.error(p.peek(), "expected at least one definition, found %s", p.peek().Kind.String()) + return + } + + p.next() +} diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/query.go b/vendor/github.com/vektah/gqlparser/v2/parser/query.go new file mode 100644 index 00000000..a38d982a --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query.go @@ -0,0 +1,348 @@ +package parser + +import ( + "github.com/vektah/gqlparser/v2/lexer" + + . 
"github.com/vektah/gqlparser/v2/ast" +) + +func ParseQuery(source *Source) (*QueryDocument, error) { + p := parser{ + lexer: lexer.New(source), + } + return p.parseQueryDocument(), p.err +} + +func (p *parser) parseQueryDocument() *QueryDocument { + var doc QueryDocument + for p.peek().Kind != lexer.EOF { + if p.err != nil { + return &doc + } + doc.Position = p.peekPos() + switch p.peek().Kind { + case lexer.Name: + switch p.peek().Value { + case "query", "mutation", "subscription": + doc.Operations = append(doc.Operations, p.parseOperationDefinition()) + case "fragment": + doc.Fragments = append(doc.Fragments, p.parseFragmentDefinition()) + default: + p.unexpectedError() + } + case lexer.BraceL: + doc.Operations = append(doc.Operations, p.parseOperationDefinition()) + default: + p.unexpectedError() + } + } + + return &doc +} + +func (p *parser) parseOperationDefinition() *OperationDefinition { + if p.peek().Kind == lexer.BraceL { + return &OperationDefinition{ + Position: p.peekPos(), + Operation: Query, + SelectionSet: p.parseRequiredSelectionSet(), + } + } + + var od OperationDefinition + od.Position = p.peekPos() + od.Operation = p.parseOperationType() + + if p.peek().Kind == lexer.Name { + od.Name = p.next().Value + } + + od.VariableDefinitions = p.parseVariableDefinitions() + od.Directives = p.parseDirectives(false) + od.SelectionSet = p.parseRequiredSelectionSet() + + return &od +} + +func (p *parser) parseOperationType() Operation { + tok := p.next() + switch tok.Value { + case "query": + return Query + case "mutation": + return Mutation + case "subscription": + return Subscription + } + p.unexpectedToken(tok) + return "" +} + +func (p *parser) parseVariableDefinitions() VariableDefinitionList { + var defs []*VariableDefinition + p.many(lexer.ParenL, lexer.ParenR, func() { + defs = append(defs, p.parseVariableDefinition()) + }) + + return defs +} + +func (p *parser) parseVariableDefinition() *VariableDefinition { + var def VariableDefinition + def.Position = p.peekPos() + def.Variable = p.parseVariable() + + p.expect(lexer.Colon) + + def.Type = p.parseTypeReference() + + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + + def.Directives = p.parseDirectives(false) + + return &def +} + +func (p *parser) parseVariable() string { + p.expect(lexer.Dollar) + return p.parseName() +} + +func (p *parser) parseOptionalSelectionSet() SelectionSet { + var selections []Selection + p.some(lexer.BraceL, lexer.BraceR, func() { + selections = append(selections, p.parseSelection()) + }) + + return SelectionSet(selections) +} + +func (p *parser) parseRequiredSelectionSet() SelectionSet { + if p.peek().Kind != lexer.BraceL { + p.error(p.peek(), "Expected %s, found %s", lexer.BraceL, p.peek().Kind.String()) + return nil + } + + var selections []Selection + p.some(lexer.BraceL, lexer.BraceR, func() { + selections = append(selections, p.parseSelection()) + }) + + return SelectionSet(selections) +} + +func (p *parser) parseSelection() Selection { + if p.peek().Kind == lexer.Spread { + return p.parseFragment() + } + return p.parseField() +} + +func (p *parser) parseField() *Field { + var field Field + field.Position = p.peekPos() + field.Alias = p.parseName() + + if p.skip(lexer.Colon) { + field.Name = p.parseName() + } else { + field.Name = field.Alias + } + + field.Arguments = p.parseArguments(false) + field.Directives = p.parseDirectives(false) + if p.peek().Kind == lexer.BraceL { + field.SelectionSet = p.parseOptionalSelectionSet() + } + + return &field +} + +func (p 
*parser) parseArguments(isConst bool) ArgumentList { + var arguments ArgumentList + p.many(lexer.ParenL, lexer.ParenR, func() { + arguments = append(arguments, p.parseArgument(isConst)) + }) + + return arguments +} + +func (p *parser) parseArgument(isConst bool) *Argument { + arg := Argument{} + arg.Position = p.peekPos() + arg.Name = p.parseName() + p.expect(lexer.Colon) + + arg.Value = p.parseValueLiteral(isConst) + return &arg +} + +func (p *parser) parseFragment() Selection { + p.expect(lexer.Spread) + + if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" { + return &FragmentSpread{ + Position: p.peekPos(), + Name: p.parseFragmentName(), + Directives: p.parseDirectives(false), + } + } + + var def InlineFragment + def.Position = p.peekPos() + if p.peek().Value == "on" { + p.next() // "on" + + def.TypeCondition = p.parseName() + } + + def.Directives = p.parseDirectives(false) + def.SelectionSet = p.parseRequiredSelectionSet() + return &def +} + +func (p *parser) parseFragmentDefinition() *FragmentDefinition { + var def FragmentDefinition + def.Position = p.peekPos() + p.expectKeyword("fragment") + + def.Name = p.parseFragmentName() + def.VariableDefinition = p.parseVariableDefinitions() + + p.expectKeyword("on") + + def.TypeCondition = p.parseName() + def.Directives = p.parseDirectives(false) + def.SelectionSet = p.parseRequiredSelectionSet() + return &def +} + +func (p *parser) parseFragmentName() string { + if p.peek().Value == "on" { + p.unexpectedError() + return "" + } + + return p.parseName() +} + +func (p *parser) parseValueLiteral(isConst bool) *Value { + token := p.peek() + + var kind ValueKind + switch token.Kind { + case lexer.BracketL: + return p.parseList(isConst) + case lexer.BraceL: + return p.parseObject(isConst) + case lexer.Dollar: + if isConst { + p.unexpectedError() + return nil + } + return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable} + case lexer.Int: + kind = IntValue + case lexer.Float: + kind = FloatValue + case lexer.String: + kind = StringValue + case lexer.BlockString: + kind = BlockValue + case lexer.Name: + switch token.Value { + case "true", "false": + kind = BooleanValue + case "null": + kind = NullValue + default: + kind = EnumValue + } + default: + p.unexpectedError() + return nil + } + + p.next() + + return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind} +} + +func (p *parser) parseList(isConst bool) *Value { + var values ChildValueList + pos := p.peekPos() + p.many(lexer.BracketL, lexer.BracketR, func() { + values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)}) + }) + + return &Value{Children: values, Kind: ListValue, Position: pos} +} + +func (p *parser) parseObject(isConst bool) *Value { + var fields ChildValueList + pos := p.peekPos() + p.many(lexer.BraceL, lexer.BraceR, func() { + fields = append(fields, p.parseObjectField(isConst)) + }) + + return &Value{Children: fields, Kind: ObjectValue, Position: pos} +} + +func (p *parser) parseObjectField(isConst bool) *ChildValue { + field := ChildValue{} + field.Position = p.peekPos() + field.Name = p.parseName() + + p.expect(lexer.Colon) + + field.Value = p.parseValueLiteral(isConst) + return &field +} + +func (p *parser) parseDirectives(isConst bool) []*Directive { + var directives []*Directive + + for p.peek().Kind == lexer.At { + if p.err != nil { + break + } + directives = append(directives, p.parseDirective(isConst)) + } + return directives +} + +func (p *parser) parseDirective(isConst bool) *Directive { + p.expect(lexer.At) + + 
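+	// the "@" itself has been consumed; what remains is the directive name and an optional argument list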
return &Directive{ + Position: p.peekPos(), + Name: p.parseName(), + Arguments: p.parseArguments(isConst), + } +} + +func (p *parser) parseTypeReference() *Type { + var typ Type + + if p.skip(lexer.BracketL) { + typ.Position = p.peekPos() + typ.Elem = p.parseTypeReference() + p.expect(lexer.BracketR) + } else { + typ.Position = p.peekPos() + typ.NamedType = p.parseName() + } + + if p.skip(lexer.Bang) { + typ.NonNull = true + } + return &typ +} + +func (p *parser) parseName() string { + token := p.expect(lexer.Name) + + return token.Value +} diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml new file mode 100644 index 00000000..a46a01e7 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml @@ -0,0 +1,544 @@ +parser provides useful errors: + - name: unclosed paren + input: '{' + error: + message: "Expected Name, found " + locations: [{line: 1, column: 2}] + + - name: missing on in fragment + input: | + { ...MissingOn } + fragment MissingOn Type + error: + message: 'Expected "on", found Name "Type"' + locations: [{ line: 2, column: 20 }] + + - name: missing name after alias + input: '{ field: {} }' + error: + message: "Expected Name, found {" + locations: [{ line: 1, column: 10 }] + + - name: not an operation + input: 'notanoperation Foo { field }' + error: + message: 'Unexpected Name "notanoperation"' + locations: [{ line: 1, column: 1 }] + + - name: a wild splat appears + input: '...' + error: + message: 'Unexpected ...' + locations: [{ line: 1, column: 1}] + +variables: + - name: are allowed in args + input: '{ field(complex: { a: { b: [ $var ] } }) }' + + - name: are not allowed in default args + input: 'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }' + error: + message: 'Unexpected $' + locations: [{ line: 1, column: 37 }] + + - name: can have directives + input: 'query ($withDirective: String @first @second, $withoutDirective: String) { f }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + VariableDefinitions: [VariableDefinition] + - + Variable: "withDirective" + Type: String + Directives: [Directive] + - + Name: "first" + - + Name: "second" + - + Variable: "withoutDirective" + Type: String + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + +fragments: + - name: can not be named 'on' + input: 'fragment on on on { on }' + error: + message: 'Unexpected Name "on"' + locations: [{ line: 1, column: 10 }] + + - name: can not spread fragments called 'on' + input: '{ ...on }' + error: + message: 'Expected Name, found }' + locations: [{ line: 1, column: 9 }] + +encoding: + - name: multibyte characters are supported + input: | + # This comment has a ਊ multi-byte character. + { field(arg: "Has a ਊ multi-byte character.") } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "field" + Name: "field" + Arguments: [Argument] + - + Name: "arg" + Value: "Has a ਊ multi-byte character." + +keywords are allowed anywhere a name is: + - name: on + input: | + query on { + ... a + ... on on { field } + } + fragment a on Type { + on(on: $on) + @on(on: on) + } + + - name: subscription + input: | + query subscription { + ... subscription + ... on subscription { field } + } + fragment subscription on Type { + subscription(subscription: $subscription) + @subscription(subscription: subscription) + } + + - name: true + input: | + query true { + ... true + ... 
on true { field } + } + fragment true on Type { + true(true: $true) + @true(true: true) + } + +operations: + - name: anonymous mutation + input: 'mutation { mutationField }' + + - name: named mutation + input: 'mutation Foo { mutationField }' + + - name: anonymous subscription + input: 'subscription { subscriptionField }' + + - name: named subscription + input: 'subscription Foo { subscriptionField }' + + +ast: + - name: simple query + input: | + { + node(id: 4) { + id, + name + } + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "node" + Name: "node" + Arguments: [Argument] + - + Name: "id" + Value: 4 + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Alias: "name" + Name: "name" + + - name: nameless query with no variables + input: | + query { + node { + id + } + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "node" + Name: "node" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + + - name: fragment defined variables + input: 'fragment a($v: Boolean = false) on t { f(v: $v) }' + ast: | + + Fragments: [FragmentDefinition] + - + Name: "a" + VariableDefinition: [VariableDefinition] + - + Variable: "v" + Type: Boolean + DefaultValue: false + TypeCondition: "t" + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "v" + Value: $v + + +values: + - name: null + input: '{ f(id: null) }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "id" + Value: null + + - name: strings + input: '{ f(long: """long""", short: "short") } ' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "long" + Value: "long" + - + Name: "short" + Value: "short" + + - name: list + input: '{ f(id: [1,2]) }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "id" + Value: [1,2] + +types: + - name: common types + input: 'query ($string: String, $int: Int, $arr: [Arr], $notnull: [Arr!]!) { f }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + VariableDefinitions: [VariableDefinition] + - + Variable: "string" + Type: String + - + Variable: "int" + Type: Int + - + Variable: "arr" + Type: [Arr] + - + Variable: "notnull" + Type: [Arr!]! + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + +large queries: + - name: kitchen sink + input: | + # Copyright (c) 2015-present, Facebook, Inc. + # + # This source code is licensed under the MIT license found in the + # LICENSE file in the root directory of this source tree. + + query queryName($foo: ComplexType, $site: Site = MOBILE) { + whoever123is: node(id: [123, 456]) { + id , + ... on User @defer { + field2 { + id , + alias: field1(first:10, after:$foo,) @include(if: $foo) { + id, + ...frag + } + } + } + ... @skip(unless: $foo) { + id + } + ... 
{ + id + } + } + } + + mutation likeStory { + like(story: 123) @defer { + story { + id + } + } + } + + subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) { + storyLikeSubscribe(input: $input) { + story { + likers { + count + } + likeSentence { + text + } + } + } + } + + fragment frag on Friend { + foo(size: $size, bar: $b, obj: {key: "value", block: """ + block string uses \""" + """}) + } + + { + unnamed(truthy: true, falsey: false, nullish: null), + query + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + Name: "queryName" + VariableDefinitions: [VariableDefinition] + - + Variable: "foo" + Type: ComplexType + - + Variable: "site" + Type: Site + DefaultValue: MOBILE + SelectionSet: [Selection] + - + Alias: "whoever123is" + Name: "node" + Arguments: [Argument] + - + Name: "id" + Value: [123,456] + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + TypeCondition: "User" + Directives: [Directive] + - + Name: "defer" + SelectionSet: [Selection] + - + Alias: "field2" + Name: "field2" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Alias: "alias" + Name: "field1" + Arguments: [Argument] + - + Name: "first" + Value: 10 + - + Name: "after" + Value: $foo + Directives: [Directive] + - + Name: "include" + Arguments: [Argument] + - + Name: "if" + Value: $foo + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Name: "frag" + - + Directives: [Directive] + - + Name: "skip" + Arguments: [Argument] + - + Name: "unless" + Value: $foo + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Operation: Operation("mutation") + Name: "likeStory" + SelectionSet: [Selection] + - + Alias: "like" + Name: "like" + Arguments: [Argument] + - + Name: "story" + Value: 123 + Directives: [Directive] + - + Name: "defer" + SelectionSet: [Selection] + - + Alias: "story" + Name: "story" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Operation: Operation("subscription") + Name: "StoryLikeSubscription" + VariableDefinitions: [VariableDefinition] + - + Variable: "input" + Type: StoryLikeSubscribeInput + SelectionSet: [Selection] + - + Alias: "storyLikeSubscribe" + Name: "storyLikeSubscribe" + Arguments: [Argument] + - + Name: "input" + Value: $input + SelectionSet: [Selection] + - + Alias: "story" + Name: "story" + SelectionSet: [Selection] + - + Alias: "likers" + Name: "likers" + SelectionSet: [Selection] + - + Alias: "count" + Name: "count" + - + Alias: "likeSentence" + Name: "likeSentence" + SelectionSet: [Selection] + - + Alias: "text" + Name: "text" + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "unnamed" + Name: "unnamed" + Arguments: [Argument] + - + Name: "truthy" + Value: true + - + Name: "falsey" + Value: false + - + Name: "nullish" + Value: null + - + Alias: "query" + Name: "query" + Fragments: [FragmentDefinition] + - + Name: "frag" + TypeCondition: "Friend" + SelectionSet: [Selection] + - + Alias: "foo" + Name: "foo" + Arguments: [Argument] + - + Name: "size" + Value: $size + - + Name: "bar" + Value: $b + - + Name: "obj" + Value: {key:"value",block:"block string uses \"\"\""} + +fuzzer: +- name: 01 + input: '{__typename{...}}' + error: + message: 'Expected {, found }' + locations: [{ line: 1, column: 16 }] + +- name: 02 + input: '{...{__typename{...{}}}}' + error: + message: 'expected at least one definition, found }' + locations: [{ line: 1, column: 21 }] diff --git 
a/vendor/github.com/vektah/gqlparser/v2/parser/schema.go b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go new file mode 100644 index 00000000..aec08a97 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go @@ -0,0 +1,534 @@ +package parser + +import ( + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/lexer" +) + +func ParseSchema(source *Source) (*SchemaDocument, error) { + p := parser{ + lexer: lexer.New(source), + } + ast, err := p.parseSchemaDocument(), p.err + if err != nil { + return nil, err + } + + for _, def := range ast.Definitions { + def.BuiltIn = source.BuiltIn + } + for _, def := range ast.Extensions { + def.BuiltIn = source.BuiltIn + } + + return ast, nil +} + +func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) { + ast := &SchemaDocument{} + for _, input := range inputs { + inputAst, err := ParseSchema(input) + if err != nil { + return nil, err + } + ast.Merge(inputAst) + } + return ast, nil +} + +func (p *parser) parseSchemaDocument() *SchemaDocument { + var doc SchemaDocument + doc.Position = p.peekPos() + for p.peek().Kind != lexer.EOF { + if p.err != nil { + return nil + } + + var description string + if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String { + description = p.parseDescription() + } + + if p.peek().Kind != lexer.Name { + p.unexpectedError() + break + } + + switch p.peek().Value { + case "scalar", "type", "interface", "union", "enum", "input": + doc.Definitions = append(doc.Definitions, p.parseTypeSystemDefinition(description)) + case "schema": + doc.Schema = append(doc.Schema, p.parseSchemaDefinition(description)) + case "directive": + doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description)) + case "extend": + if description != "" { + p.unexpectedToken(p.prev) + } + p.parseTypeSystemExtension(&doc) + default: + p.unexpectedError() + return nil + } + } + + return &doc +} + +func (p *parser) parseDescription() string { + token := p.peek() + + if token.Kind != lexer.BlockString && token.Kind != lexer.String { + return "" + } + + return p.next().Value +} + +func (p *parser) parseTypeSystemDefinition(description string) *Definition { + tok := p.peek() + if tok.Kind != lexer.Name { + p.unexpectedError() + return nil + } + + switch tok.Value { + case "scalar": + return p.parseScalarTypeDefinition(description) + case "type": + return p.parseObjectTypeDefinition(description) + case "interface": + return p.parseInterfaceTypeDefinition(description) + case "union": + return p.parseUnionTypeDefinition(description) + case "enum": + return p.parseEnumTypeDefinition(description) + case "input": + return p.parseInputObjectTypeDefinition(description) + default: + p.unexpectedError() + return nil + } +} + +func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { + p.expectKeyword("schema") + + var def SchemaDefinition + def.Position = p.peekPos() + def.Description = description + def.Directives = p.parseDirectives(true) + + p.some(lexer.BraceL, lexer.BraceR, func() { + def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) + }) + return &def +} + +func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition { + var op OperationTypeDefinition + op.Position = p.peekPos() + op.Operation = p.parseOperationType() + p.expect(lexer.Colon) + op.Type = p.parseName() + return &op +} + +func (p *parser) parseScalarTypeDefinition(description string) *Definition { + p.expectKeyword("scalar") + + var 
def Definition + def.Position = p.peekPos() + def.Kind = Scalar + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseObjectTypeDefinition(description string) *Definition { + p.expectKeyword("type") + + var def Definition + def.Position = p.peekPos() + def.Kind = Object + def.Description = description + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + return &def +} + +func (p *parser) parseImplementsInterfaces() []string { + var types []string + if p.peek().Value == "implements" { + p.next() + // optional leading ampersand + p.skip(lexer.Amp) + + types = append(types, p.parseName()) + for p.skip(lexer.Amp) && p.err == nil { + types = append(types, p.parseName()) + } + } + return types +} + +func (p *parser) parseFieldsDefinition() FieldList { + var defs FieldList + p.some(lexer.BraceL, lexer.BraceR, func() { + defs = append(defs, p.parseFieldDefinition()) + }) + return defs +} + +func (p *parser) parseFieldDefinition() *FieldDefinition { + var def FieldDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + def.Arguments = p.parseArgumentDefs() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + def.Directives = p.parseDirectives(true) + + return &def +} + +func (p *parser) parseArgumentDefs() ArgumentDefinitionList { + var args ArgumentDefinitionList + p.some(lexer.ParenL, lexer.ParenR, func() { + args = append(args, p.parseArgumentDef()) + }) + return args +} + +func (p *parser) parseArgumentDef() *ArgumentDefinition { + var def ArgumentDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseInputValueDef() *FieldDefinition { + var def FieldDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseInterfaceTypeDefinition(description string) *Definition { + p.expectKeyword("interface") + + var def Definition + def.Position = p.peekPos() + def.Kind = Interface + def.Description = description + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + return &def +} + +func (p *parser) parseUnionTypeDefinition(description string) *Definition { + p.expectKeyword("union") + + var def Definition + def.Position = p.peekPos() + def.Kind = Union + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Types = p.parseUnionMemberTypes() + return &def +} + +func (p *parser) parseUnionMemberTypes() []string { + var types []string + if p.skip(lexer.Equals) { + // optional leading pipe + p.skip(lexer.Pipe) + + types = append(types, p.parseName()) + for p.skip(lexer.Pipe) && p.err == nil { + types = append(types, p.parseName()) + } + } + return types +} + +func (p *parser) parseEnumTypeDefinition(description string) *Definition { + 
p.expectKeyword("enum") + + var def Definition + def.Position = p.peekPos() + def.Kind = Enum + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.EnumValues = p.parseEnumValuesDefinition() + return &def +} + +func (p *parser) parseEnumValuesDefinition() EnumValueList { + var values EnumValueList + p.some(lexer.BraceL, lexer.BraceR, func() { + values = append(values, p.parseEnumValueDefinition()) + }) + return values +} + +func (p *parser) parseEnumValueDefinition() *EnumValueDefinition { + return &EnumValueDefinition{ + Position: p.peekPos(), + Description: p.parseDescription(), + Name: p.parseName(), + Directives: p.parseDirectives(true), + } +} + +func (p *parser) parseInputObjectTypeDefinition(description string) *Definition { + p.expectKeyword("input") + + var def Definition + def.Position = p.peekPos() + def.Kind = InputObject + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseInputFieldsDefinition() + return &def +} + +func (p *parser) parseInputFieldsDefinition() FieldList { + var values FieldList + p.some(lexer.BraceL, lexer.BraceR, func() { + values = append(values, p.parseInputValueDef()) + }) + return values +} + +func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) { + p.expectKeyword("extend") + + switch p.peek().Value { + case "schema": + doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension()) + case "scalar": + doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension()) + case "type": + doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension()) + case "interface": + doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension()) + case "union": + doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension()) + case "enum": + doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension()) + case "input": + doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension()) + default: + p.unexpectedError() + } +} + +func (p *parser) parseSchemaExtension() *SchemaDefinition { + p.expectKeyword("schema") + + var def SchemaDefinition + def.Position = p.peekPos() + def.Directives = p.parseDirectives(true) + p.some(lexer.BraceL, lexer.BraceR, func() { + def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) + }) + if len(def.Directives) == 0 && len(def.OperationTypes) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseScalarTypeExtension() *Definition { + p.expectKeyword("scalar") + + var def Definition + def.Position = p.peekPos() + def.Kind = Scalar + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + if len(def.Directives) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseObjectTypeExtension() *Definition { + p.expectKeyword("type") + + var def Definition + def.Position = p.peekPos() + def.Kind = Object + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseInterfaceTypeExtension() *Definition { + p.expectKeyword("interface") + + var def Definition + def.Position = p.peekPos() + def.Kind = Interface + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + if 
len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseUnionTypeExtension() *Definition { + p.expectKeyword("union") + + var def Definition + def.Position = p.peekPos() + def.Kind = Union + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Types = p.parseUnionMemberTypes() + + if len(def.Directives) == 0 && len(def.Types) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseEnumTypeExtension() *Definition { + p.expectKeyword("enum") + + var def Definition + def.Position = p.peekPos() + def.Kind = Enum + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.EnumValues = p.parseEnumValuesDefinition() + if len(def.Directives) == 0 && len(def.EnumValues) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseInputObjectTypeExtension() *Definition { + p.expectKeyword("input") + + var def Definition + def.Position = p.peekPos() + def.Kind = InputObject + def.Name = p.parseName() + def.Directives = p.parseDirectives(false) + def.Fields = p.parseInputFieldsDefinition() + if len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition { + p.expectKeyword("directive") + p.expect(lexer.At) + + var def DirectiveDefinition + def.Position = p.peekPos() + def.Description = description + def.Name = p.parseName() + def.Arguments = p.parseArgumentDefs() + + if peek := p.peek(); peek.Kind == lexer.Name && peek.Value == "repeatable" { + def.IsRepeatable = true + p.skip(lexer.Name) + } + + p.expectKeyword("on") + def.Locations = p.parseDirectiveLocations() + return &def +} + +func (p *parser) parseDirectiveLocations() []DirectiveLocation { + p.skip(lexer.Pipe) + + locations := []DirectiveLocation{p.parseDirectiveLocation()} + + for p.skip(lexer.Pipe) && p.err == nil { + locations = append(locations, p.parseDirectiveLocation()) + } + + return locations +} + +func (p *parser) parseDirectiveLocation() DirectiveLocation { + name := p.expect(lexer.Name) + + switch name.Value { + case `QUERY`: + return LocationQuery + case `MUTATION`: + return LocationMutation + case `SUBSCRIPTION`: + return LocationSubscription + case `FIELD`: + return LocationField + case `FRAGMENT_DEFINITION`: + return LocationFragmentDefinition + case `FRAGMENT_SPREAD`: + return LocationFragmentSpread + case `INLINE_FRAGMENT`: + return LocationInlineFragment + case `VARIABLE_DEFINITION`: + return LocationVariableDefinition + case `SCHEMA`: + return LocationSchema + case `SCALAR`: + return LocationScalar + case `OBJECT`: + return LocationObject + case `FIELD_DEFINITION`: + return LocationFieldDefinition + case `ARGUMENT_DEFINITION`: + return LocationArgumentDefinition + case `INTERFACE`: + return LocationInterface + case `UNION`: + return LocationUnion + case `ENUM`: + return LocationEnum + case `ENUM_VALUE`: + return LocationEnumValue + case `INPUT_OBJECT`: + return LocationInputObject + case `INPUT_FIELD_DEFINITION`: + return LocationInputFieldDefinition + } + + p.unexpectedToken(name) + return "" +} diff --git a/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml new file mode 100644 index 00000000..8b6a5d0c --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml @@ -0,0 +1,646 @@ +object types: + - name: simple + input: | + type Hello { + world: String + } + ast: | + + Definitions: 
[Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: with description + input: | + "Description" + type Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "Description" + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: with block description + input: | + """ + Description + """ + # Even with comments between them + type Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "Description" + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + - name: with field arg + input: | + type Hello { + world(flag: Boolean): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "flag" + Type: Boolean + Type: String + + - name: with field arg and default value + input: | + type Hello { + world(flag: Boolean = true): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "flag" + DefaultValue: true + Type: Boolean + Type: String + + - name: with field list arg + input: | + type Hello { + world(things: [String]): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "things" + Type: [String] + Type: String + + - name: with two args + input: | + type Hello { + world(argOne: Boolean, argTwo: Int): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "argOne" + Type: Boolean + - + Name: "argTwo" + Type: Int + Type: String + - name: must define one or more fields + input: | + type Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 13 }] + +type extensions: + - name: Object extension + input: | + extend type Hello { + world: String + } + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: without any fields + input: "extend type Hello implements Greeting" + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Greeting" + + - name: without fields twice + input: | + extend type Hello implements Greeting + extend type Hello implements SecondGreeting + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Greeting" + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "SecondGreeting" + + - name: without anything errors + input: "extend type Hello" + error: + message: "Unexpected " + locations: [{ line: 1, column: 18 }] + + - name: can have descriptions # hmm, this might not be spec compliant... 
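+ # (the GraphQL spec grammar does not attach descriptions to type extensions, so the parser error below is arguably the compliant behavior)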
+ input: | + "Description" + extend type Hello { + world: String + } + error: + message: 'Unexpected String "Description"' + locations: [{ line: 1, column: 2 }] + + - name: can not have descriptions on types + input: | + extend "Description" type Hello { + world: String + } + error: + message: Unexpected String "Description" + locations: [{ line: 1, column: 9 }] + + - name: all can have directives + input: | + extend scalar Foo @deprecated + extend type Foo @deprecated + extend interface Foo @deprecated + extend union Foo @deprecated + extend enum Foo @deprecated + extend input Foo @deprecated + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("SCALAR") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("OBJECT") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("INTERFACE") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("UNION") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("ENUM") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("INPUT_OBJECT") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + +schema definition: + - name: simple + input: | + schema { + query: Query + } + ast: | + + Schema: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("query") + Type: "Query" + +schema extensions: + - name: simple + input: | + extend schema { + mutation: Mutation + } + ast: | + + SchemaExtension: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("mutation") + Type: "Mutation" + + - name: directive only + input: "extend schema @directive" + ast: | + + SchemaExtension: [SchemaDefinition] + - + Directives: [Directive] + - + Name: "directive" + + - name: without anything errors + input: "extend schema" + error: + message: "Unexpected " + locations: [{ line: 1, column: 14}] + +inheritance: + - name: single + input: "type Hello implements World { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "World" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + + - name: multi + input: "type Hello implements Wo & rld { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Wo" + - "rld" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + + - name: multi with leading amp + input: "type Hello implements & Wo & rld { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Wo" + - "rld" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + +enums: + - name: single value + input: "enum Hello { WORLD }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("ENUM") + Name: "Hello" + EnumValues: [EnumValueDefinition] + - + Name: "WORLD" + + - name: double value + input: "enum Hello { WO, RLD }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("ENUM") + Name: "Hello" + EnumValues: [EnumValueDefinition] + - + Name: "WO" + - + Name: "RLD" + - name: must define one or more unique enum values + input: | + enum Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 13 }] + +interface: + - name: simple + input: | + interface Hello { + 
world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INTERFACE") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + - name: must define one or more fields + input: | + interface Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 18 }] + + - name: may define intermediate interfaces + input: | + interface IA { + id: ID! + } + + interface IIA implements IA { + id: ID! + } + + type A implements IIA { + id: ID! + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INTERFACE") + Name: "IA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + - + Kind: DefinitionKind("INTERFACE") + Name: "IIA" + Interfaces: [string] + - "IA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + - + Kind: DefinitionKind("OBJECT") + Name: "A" + Interfaces: [string] + - "IIA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + +unions: + - name: simple + input: "union Hello = World" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "World" + + - name: with two types + input: "union Hello = Wo | Rld" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "Wo" + - "Rld" + + - name: with leading pipe + input: "union Hello = | Wo | Rld" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "Wo" + - "Rld" + + - name: cant be empty + input: "union Hello = || Wo | Rld" + error: + message: "Expected Name, found |" + locations: [{ line: 1, column: 16 }] + + - name: cant double pipe + input: "union Hello = Wo || Rld" + error: + message: "Expected Name, found |" + locations: [{ line: 1, column: 19 }] + + - name: cant have trailing pipe + input: "union Hello = | Wo | Rld |" + error: + message: "Expected Name, found " + locations: [{ line: 1, column: 27 }] + +scalar: + - name: simple + input: "scalar Hello" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("SCALAR") + Name: "Hello" + +input object: + - name: simple + input: | + input Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INPUT_OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: can not have args + input: | + input Hello { + world(foo: Int): String + } + error: + message: "Expected :, found (" + locations: [{ line: 2, column: 8 }] + - name: must define one or more input fields + input: | + input Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 14 }] + +directives: + - name: simple + input: directive @foo on FIELD + ast: | + + Directives: [DirectiveDefinition] + - + Name: "foo" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: false + + - name: executable + input: | + directive @onQuery on QUERY + directive @onMutation on MUTATION + directive @onSubscription on SUBSCRIPTION + directive @onField on FIELD + directive @onFragmentDefinition on FRAGMENT_DEFINITION + directive @onFragmentSpread on FRAGMENT_SPREAD + directive @onInlineFragment on INLINE_FRAGMENT + directive @onVariableDefinition on VARIABLE_DEFINITION + ast: | + + Directives: [DirectiveDefinition] + - + Name: "onQuery" + Locations: [DirectiveLocation] + - DirectiveLocation("QUERY") + IsRepeatable: false + - + Name: "onMutation" + Locations: [DirectiveLocation] + - 
DirectiveLocation("MUTATION") + IsRepeatable: false + - + Name: "onSubscription" + Locations: [DirectiveLocation] + - DirectiveLocation("SUBSCRIPTION") + IsRepeatable: false + - + Name: "onField" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: false + - + Name: "onFragmentDefinition" + Locations: [DirectiveLocation] + - DirectiveLocation("FRAGMENT_DEFINITION") + IsRepeatable: false + - + Name: "onFragmentSpread" + Locations: [DirectiveLocation] + - DirectiveLocation("FRAGMENT_SPREAD") + IsRepeatable: false + - + Name: "onInlineFragment" + Locations: [DirectiveLocation] + - DirectiveLocation("INLINE_FRAGMENT") + IsRepeatable: false + - + Name: "onVariableDefinition" + Locations: [DirectiveLocation] + - DirectiveLocation("VARIABLE_DEFINITION") + IsRepeatable: false + + - name: repeatable + input: directive @foo repeatable on FIELD + ast: | + + Directives: [DirectiveDefinition] + - + Name: "foo" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: true + + - name: invalid location + input: "directive @foo on FIELD | INCORRECT_LOCATION" + error: + message: 'Unexpected Name "INCORRECT_LOCATION"' + locations: [{ line: 1, column: 27 }] + +fuzzer: + - name: 1 + input: "type o{d(g:[" + error: + message: 'Expected Name, found ' + locations: [{ line: 1, column: 13 }] + - name: 2 + input: "\"\"\"\r" + error: + message: 'Unexpected ' + locations: [{ line: 2, column: 1 }] diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/error.go b/vendor/github.com/vektah/gqlparser/v2/validator/error.go new file mode 100644 index 00000000..f8f76055 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/error.go @@ -0,0 +1,55 @@ +package validator + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +type ErrorOption func(err *gqlerror.Error) + +func Message(msg string, args ...interface{}) ErrorOption { + return func(err *gqlerror.Error) { + err.Message += fmt.Sprintf(msg, args...) + } +} + +func At(position *ast.Position) ErrorOption { + return func(err *gqlerror.Error) { + if position == nil { + return + } + err.Locations = append(err.Locations, gqlerror.Location{ + Line: position.Line, + Column: position.Column, + }) + if position.Src.Name != "" { + err.SetFile(position.Src.Name) + } + } +} + +func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption { + suggested := SuggestionList(typed, suggestions) + return func(err *gqlerror.Error) { + if len(suggested) > 0 { + err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?" + } + } +} + +func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption { + suggested := SuggestionList(typed, suggestions) + return func(err *gqlerror.Error) { + if len(suggested) > 0 { + err.Message += " " + prefix + " " + OrList(suggested...) + "?" + } + } +} + +func Suggestf(suggestion string, args ...interface{}) ErrorOption { + return func(err *gqlerror.Error) { + err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?" + } +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go b/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go new file mode 100644 index 00000000..f1ab5873 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go @@ -0,0 +1,39 @@ +package validator + +import "bytes" + +// Given [ A, B, C ] return '"A", "B", or "C"'. 
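+// For example, QuotedOrList("A") returns '"A"', QuotedOrList("A", "B") returns '"A" or "B"', and anything past the first five items is dropped.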
+func QuotedOrList(items ...string) string { + itemsQuoted := make([]string, len(items)) + for i, item := range items { + itemsQuoted[i] = `"` + item + `"` + } + return OrList(itemsQuoted...) +} + +// Given [ A, B, C ] return 'A, B, or C'. +func OrList(items ...string) string { + var buf bytes.Buffer + + if len(items) > 5 { + items = items[:5] + } + if len(items) == 2 { + buf.WriteString(items[0]) + buf.WriteString(" or ") + buf.WriteString(items[1]) + return buf.String() + } + + for i, item := range items { + if i != 0 { + if i == len(items)-1 { + buf.WriteString(", or ") + } else { + buf.WriteString(", ") + } + } + buf.WriteString(item) + } + return buf.String() +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go new file mode 100644 index 00000000..c354ec0d --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go @@ -0,0 +1,15 @@ +package validator + +import ( + _ "embed" + "github.com/vektah/gqlparser/v2/ast" +) + +//go:embed prelude.graphql +var preludeGraphql string + +var Prelude = &ast.Source{ + Name: "prelude.graphql", + Input: preludeGraphql, + BuiltIn: true, +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql new file mode 100644 index 00000000..bdca0096 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql @@ -0,0 +1,121 @@ +# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema + +"The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1." +scalar Int + +"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point)." +scalar Float + +"The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text." +scalar String + +"The `Boolean` scalar type represents `true` or `false`." +scalar Boolean + +"""The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.""" +scalar ID + +"The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument." +directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument." +directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"The @deprecated built-in directive is used within the type system definition language to indicate deprecated portions of a GraphQL service's schema, such as deprecated fields on a type, arguments on a field, input fields on an input type, or values of an enum type." 
+directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE + +"The @specifiedBy built-in directive is used within the type system definition language to provide a scalar specification URL for specifying the behavior of custom scalar types." +directive @specifiedBy(url: String!) on SCALAR + +type __Schema { + description: String + types: [__Type!]! + queryType: __Type! + mutationType: __Type + subscriptionType: __Type + directives: [__Directive!]! +} + +type __Type { + kind: __TypeKind! + name: String + description: String + # must be non-null for OBJECT and INTERFACE, otherwise null. + fields(includeDeprecated: Boolean = false): [__Field!] + # must be non-null for OBJECT and INTERFACE, otherwise null. + interfaces: [__Type!] + # must be non-null for INTERFACE and UNION, otherwise null. + possibleTypes: [__Type!] + # must be non-null for ENUM, otherwise null. + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + # must be non-null for INPUT_OBJECT, otherwise null. + inputFields: [__InputValue!] + # must be non-null for NON_NULL and LIST, otherwise null. + ofType: __Type + # may be non-null for custom SCALAR, otherwise null. + specifiedByURL: String +} + +type __Field { + name: String! + description: String + args: [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String +} + +type __InputValue { + name: String! + description: String + type: __Type! + defaultValue: String +} + +type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String +} + +enum __TypeKind { + SCALAR + OBJECT + INTERFACE + UNION + ENUM + INPUT_OBJECT + LIST + NON_NULL +} + +type __Directive { + name: String! + description: String + locations: [__DirectiveLocation!]! + args: [__InputValue!]! + isRepeatable: Boolean! +} + +enum __DirectiveLocation { + QUERY + MUTATION + SUBSCRIPTION + FIELD + FRAGMENT_DEFINITION + FRAGMENT_SPREAD + INLINE_FRAGMENT + VARIABLE_DEFINITION + SCHEMA + SCALAR + OBJECT + FIELD_DEFINITION + ARGUMENT_DEFINITION + INTERFACE + UNION + ENUM + ENUM_VALUE + INPUT_OBJECT + INPUT_FIELD_DEFINITION +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go new file mode 100644 index 00000000..aa83c696 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go @@ -0,0 +1,94 @@ +package validator + +import ( + "fmt" + "sort" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.ObjectDefinition == nil || field.Definition != nil { + return + } + + message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name) + + if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil { + message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?" + } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil { + message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?" 
+ } + + addError( + Message(message), + At(field.Position), + ) + }) + }) +} + +// Go through all of the implementations of type, as well as the interfaces +// that they implement. If any of those types include the provided field, +// suggest them, sorted by how often the type is referenced, starting +// with Interfaces. +func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string { + if !parent.IsAbstractType() { + return nil + } + + var suggestedObjectTypes []string + var suggestedInterfaceTypes []string + interfaceUsageCount := map[string]int{} + + for _, possibleType := range walker.Schema.GetPossibleTypes(parent) { + field := possibleType.Fields.ForName(name) + if field == nil { + continue + } + + suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name) + + for _, possibleInterface := range possibleType.Interfaces { + interfaceField := walker.Schema.Types[possibleInterface] + if interfaceField != nil && interfaceField.Fields.ForName(name) != nil { + if interfaceUsageCount[possibleInterface] == 0 { + suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface) + } + interfaceUsageCount[possibleInterface]++ + } + } + } + + suggestedTypes := append(suggestedInterfaceTypes, suggestedObjectTypes...) + + sort.SliceStable(suggestedTypes, func(i, j int) bool { + typeA, typeB := suggestedTypes[i], suggestedTypes[j] + diff := interfaceUsageCount[typeB] - interfaceUsageCount[typeA] + if diff != 0 { + return diff < 0 + } + return strings.Compare(typeA, typeB) < 0 + }) + + return suggestedTypes +} + +// For the field name provided, determine if there are any similar field names +// that may be the result of a typo. +func getSuggestedFieldNames(parent *ast.Definition, name string) []string { + if parent.Kind != ast.Object && parent.Kind != ast.Interface { + return nil + } + + var possibleFieldNames []string + for _, field := range parent.Fields { + possibleFieldNames = append(possibleFieldNames, field.Name) + } + + return SuggestionList(name, possibleFieldNames) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go new file mode 100644 index 00000000..5215f697 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go @@ -0,0 +1,39 @@ +package validator + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) { + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + fragmentType := walker.Schema.Types[inlineFragment.TypeCondition] + if fragmentType == nil || fragmentType.IsCompositeType() { + return + } + + message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition) + + addError( + Message(message), + At(inlineFragment.Position), + ) + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() { + return + } + + message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition) + + addError( + Message(message), + At(fragment.Position), + ) + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go new file mode 100644 index 00000000..da5a7962 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go @@ -0,0 +1,57 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) { + // A GraphQL field is only valid if all supplied arguments are defined by that field. + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil || field.ObjectDefinition == nil { + return + } + for _, arg := range field.Arguments { + def := field.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + var suggestions []string + for _, argDef := range field.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + + addError( + Message(`Unknown argument "%s" on field "%s.%s".`, arg.Name, field.ObjectDefinition.Name, field.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + At(field.Position), + ) + } + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + return + } + for _, arg := range directive.Arguments { + def := directive.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + var suggestions []string + for _, argDef := range directive.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + + addError( + Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + At(directive.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go new file mode 100644 index 00000000..18fe41fd --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go @@ -0,0 +1,47 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) { + type mayNotBeUsedDirective struct { + Name string + Line int + Column int + } + var seen map[mayNotBeUsedDirective]bool = map[mayNotBeUsedDirective]bool{} + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + addError( + Message(`Unknown directive "@%s".`, directive.Name), + At(directive.Position), + ) + return + } + + for _, loc := range directive.Definition.Locations { + if loc == directive.Location { + return + } + } + + // position must be exists if directive.Definition != nil + tmp := mayNotBeUsedDirective{ + Name: directive.Name, + Line: directive.Position.Line, + Column: directive.Position.Column, + } + + if !seen[tmp] { + addError( + Message(`Directive "@%s" may not be used on %s.`, directive.Name, directive.Location), + At(directive.Position), + ) + seen[tmp] = true + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go new file mode 100644 index 00000000..b7427d0d --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go @@ -0,0 +1,19 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) { + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if fragmentSpread.Definition == nil { + addError( + Message(`Unknown fragment "%s".`, fragmentSpread.Name), + At(fragmentSpread.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go new file mode 100644 index 00000000..4c60c8d8 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go @@ -0,0 +1,35 @@ +package validator + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownRootType", func(observers *Events, addError AddErrFunc) { + // A query's root must be a valid type. Surprisingly, this isn't + // checked anywhere else! + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + var def *ast.Definition + switch operation.Operation { + case ast.Query, "": + def = walker.Schema.Query + case ast.Mutation: + def = walker.Schema.Mutation + case ast.Subscription: + def = walker.Schema.Subscription + default: + // This shouldn't even parse; if it did we probably need to + // update this switch block to add the new operation type. + panic(fmt.Sprintf(`got unknown operation type "%s"`, operation.Operation)) + } + if def == nil { + addError( + Message(`Schema does not support operation type "%s"`, operation.Operation), + At(operation.Position)) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go new file mode 100644 index 00000000..7abfbf62 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go @@ -0,0 +1,59 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) { + observers.OnVariable(func(walker *Walker, variable *ast.VariableDefinition) { + typeName := variable.Type.Name() + typdef := walker.Schema.Types[typeName] + if typdef != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typeName), + At(variable.Position), + ) + }) + + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + typedName := inlineFragment.TypeCondition + if typedName == "" { + return + } + + def := walker.Schema.Types[typedName] + if def != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typedName), + At(inlineFragment.Position), + ) + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + typeName := fragment.TypeCondition + def := walker.Schema.Types[typeName] + if def != nil { + return + } + + var possibleTypes []string + for _, t := range walker.Schema.Types { + possibleTypes = append(possibleTypes, t.Name) + } + + addError( + Message(`Unknown type "%s".`, typeName), + SuggestListQuoted("Did you mean", typeName, possibleTypes), + At(fragment.Position), + ) + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go new file mode 100644 index 00000000..d2752854 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go @@ -0,0 +1,19 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if operation.Name == "" && len(walker.Document.Operations) > 1 { + addError( + Message(`This anonymous operation must be the only defined operation.`), + At(operation.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go new file mode 100644 index 00000000..da73f349 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go @@ -0,0 +1,93 @@ +package validator + +import ( + "fmt" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) { + visitedFrags := make(map[string]bool) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + var spreadPath []*ast.FragmentSpread + spreadPathIndexByName := make(map[string]int) + + var recursive func(fragment *ast.FragmentDefinition) + recursive = func(fragment *ast.FragmentDefinition) { + if visitedFrags[fragment.Name] { + return + } + + visitedFrags[fragment.Name] = true + + spreadNodes := getFragmentSpreads(fragment.SelectionSet) + if len(spreadNodes) == 0 { + return + } + spreadPathIndexByName[fragment.Name] = len(spreadPath) + + for _, spreadNode := range spreadNodes { + spreadName := spreadNode.Name + + cycleIndex, ok := spreadPathIndexByName[spreadName] + + spreadPath = append(spreadPath, spreadNode) + if !ok { + spreadFragment := walker.Document.Fragments.ForName(spreadName) + if spreadFragment != nil { + recursive(spreadFragment) + } + } else { + cyclePath := spreadPath[cycleIndex : len(spreadPath)-1] + var fragmentNames []string + for _, fs := range cyclePath { + fragmentNames = append(fragmentNames, fmt.Sprintf(`"%s"`, fs.Name)) + } + var via string + if len(fragmentNames) != 0 { + via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", ")) + } + addError( + Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via), + At(spreadNode.Position), + ) + } + + spreadPath = spreadPath[:len(spreadPath)-1] + } + + delete(spreadPathIndexByName, fragment.Name) + } + + recursive(fragment) + }) + }) +} + +func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread { + var spreads []*ast.FragmentSpread + + setsToVisit := []ast.SelectionSet{node} + + for len(setsToVisit) != 0 { + set := setsToVisit[len(setsToVisit)-1] + setsToVisit = setsToVisit[:len(setsToVisit)-1] + + for _, selection := range set { + switch selection := selection.(type) { + case *ast.FragmentSpread: + spreads = append(spreads, selection) + case *ast.Field: + setsToVisit = append(setsToVisit, selection.SelectionSet) + case *ast.InlineFragment: + setsToVisit = append(setsToVisit, selection.SelectionSet) + } + } + } + + return spreads +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go new file mode 100644 index 00000000..91df727a --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go @@ -0,0 +1,28 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil { + return + } + + if walker.CurrentOperation.Name != "" { + addError( + Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name), + At(value.Position), + ) + } else { + addError( + Message(`Variable "%s" is not defined.`, value), + At(value.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go new file mode 100644 index 00000000..dfc89672 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go @@ -0,0 +1,30 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) { + + inFragmentDefinition := false + fragmentNameUsed := make(map[string]bool) + + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if !inFragmentDefinition { + fragmentNameUsed[fragmentSpread.Name] = true + } + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + inFragmentDefinition = true + if !fragmentNameUsed[fragment.Name] { + addError( + Message(`Fragment "%s" is never used.`, fragment.Name), + At(fragment.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go new file mode 100644 index 00000000..df2e5f4b --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go @@ -0,0 +1,30 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, varDef := range operation.VariableDefinitions { + if varDef.Used { + continue + } + + if operation.Name != "" { + addError( + Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name), + At(varDef.Position), + ) + } else { + addError( + Message(`Variable "$%s" is never used.`, varDef.Variable), + At(varDef.Position), + ) + } + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go new file mode 100644 index 00000000..38e1efa1 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go @@ -0,0 +1,560 @@ +package validator + +import ( + "bytes" + "fmt" + "reflect" + + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + + AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) { + /** + * Algorithm: + * + * Conflicts occur when two fields exist in a query which will produce the same + * response name, but represent differing values, thus creating a conflict. 
+ * The algorithm below finds all conflicts via making a series of comparisons + * between fields. In order to compare as few fields as possible, this makes + * a series of comparisons "within" sets of fields and "between" sets of fields. + * + * Given any selection set, a collection produces both a set of fields by + * also including all inline fragments, as well as a list of fragments + * referenced by fragment spreads. + * + * A) Each selection set represented in the document first compares "within" its + * collected set of fields, finding any conflicts between every pair of + * overlapping fields. + * Note: This is the *only time* that the fields "within" a set are compared + * to each other. After this only fields "between" sets are compared. + * + * B) Also, if any fragment is referenced in a selection set, then a + * comparison is made "between" the original set of fields and the + * referenced fragment. + * + * C) Also, if multiple fragments are referenced, then comparisons + * are made "between" each referenced fragment. + * + * D) When comparing "between" a set of fields and a referenced fragment, first + * a comparison is made between each field in the original set of fields and + * each field in the referenced set of fields. + * + * E) Also, if any fragment is referenced in the referenced selection set, + * then a comparison is made "between" the original set of fields and the + * referenced fragment (recursively referring to step D). + * + * F) When comparing "between" two fragments, first a comparison is made between + * each field in the first referenced set of fields and each field in the + * second referenced set of fields. + * + * G) Also, any fragments referenced by the first must be compared to the + * second, and any fragments referenced by the second must be compared to the + * first (recursively referring to step F). + * + * H) When comparing two fields, if both have selection sets, then a comparison + * is made "between" both selection sets, first comparing the set of fields in + * the first selection set with the set of fields in the second. + * + * I) Also, if any fragment is referenced in either selection set, then a + * comparison is made "between" the other set of fields and the + * referenced fragment. + * + * J) Also, if two fragments are referenced in both selection sets, then a + * comparison is made "between" the two fragments.
+ * + */ + + m := &overlappingFieldsCanBeMergedManager{ + comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)}, + } + + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + m.walker = walker + conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet) + for _, conflict := range conflicts { + conflict.addFieldsConflictMessage(addError) + } + }) + observers.OnField(func(walker *Walker, field *ast.Field) { + if walker.CurrentOperation == nil { + // When checking both Operation and Fragment, errors are duplicated when processing FragmentDefinition referenced from Operation + return + } + m.walker = walker + conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet) + for _, conflict := range conflicts { + conflict.addFieldsConflictMessage(addError) + } + }) + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + m.walker = walker + conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet) + for _, conflict := range conflicts { + conflict.addFieldsConflictMessage(addError) + } + }) + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + m.walker = walker + conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet) + for _, conflict := range conflicts { + conflict.addFieldsConflictMessage(addError) + } + }) + }) +} + +type pairSet struct { + data map[string]map[string]bool +} + +func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) { + add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) { + m := pairSet.data[a.Name] + if m == nil { + m = make(map[string]bool) + pairSet.data[a.Name] = m + } + m[b.Name] = areMutuallyExclusive + } + add(a, b) + add(b, a) +} + +func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool { + am, ok := pairSet.data[a.Name] + if !ok { + return false + } + result, ok := am[b.Name] + if !ok { + return false + } + + // areMutuallyExclusive being false is a superset of being true, + // hence if we want to know if this PairSet "has" these two with no + // exclusivity, we have to ensure it was added as such. + if !areMutuallyExclusive { + return !result + } + + return true +} + +type sequentialFieldsMap struct { + // We can't use map[string][]*ast.Field. because map is not stable... 
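+ // (Go deliberately randomizes map iteration order, so seq records insertion order to keep rule output deterministic.)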
+ seq []string + data map[string][]*ast.Field +} + +type fieldIterateEntry struct { + ResponseName string + Fields []*ast.Field +} + +func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) { + fields, ok := m.data[responseName] + if !ok { + m.seq = append(m.seq, responseName) + } + fields = append(fields, field) + m.data[responseName] = fields +} + +func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) { + fields, ok := m.data[responseName] + return fields, ok +} + +func (m *sequentialFieldsMap) Iterator() [][]*ast.Field { + fieldsList := make([][]*ast.Field, 0, len(m.seq)) + for _, responseName := range m.seq { + fields := m.data[responseName] + fieldsList = append(fieldsList, fields) + } + return fieldsList +} + +func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry { + fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq)) + for _, responseName := range m.seq { + fields := m.data[responseName] + fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{ + ResponseName: responseName, + Fields: fields, + }) + } + return fieldEntriesList +} + +type conflictMessageContainer struct { + Conflicts []*ConflictMessage +} + +type ConflictMessage struct { + Message string + ResponseName string + Names []string + SubMessage []*ConflictMessage + Position *ast.Position +} + +func (m *ConflictMessage) String(buf *bytes.Buffer) { + if len(m.SubMessage) == 0 { + buf.WriteString(m.Message) + return + } + + for idx, subMessage := range m.SubMessage { + buf.WriteString(`subfields "`) + buf.WriteString(subMessage.ResponseName) + buf.WriteString(`" conflict because `) + subMessage.String(buf) + if idx != len(m.SubMessage)-1 { + buf.WriteString(" and ") + } + } +} + +func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) { + var buf bytes.Buffer + m.String(&buf) + addError( + Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()), + At(m.Position), + ) +} + +type overlappingFieldsCanBeMergedManager struct { + walker *Walker + + // per walker + comparedFragmentPairs pairSet + // cachedFieldsAndFragmentNames interface{} + + // per selectionSet + comparedFragments map[string]bool +} + +func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage { + if len(selectionSet) == 0 { + return nil + } + + fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet) + + var conflicts conflictMessageContainer + + // (A) Find all conflicts "within" the fieldMap of this selection set. + // Note: this is the *only place* `collectConflictsWithin` is called. + m.collectConflictsWithin(&conflicts, fieldsMap) + + m.comparedFragments = make(map[string]bool) + for idx, fragmentSpreadA := range fragmentSpreads { + // (B) Then collect conflicts between this fieldMap and those represented by + // each spread fragment name found. + m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA) + + for _, fragmentSpreadB := range fragmentSpreads[idx+1:] { + // (C) Then compare this fragment with all other fragments found in this + // selection set to collect conflicts between fragments spread together. + // This compares each item in the list of fragment names to every other + // item in that same list (except for itself). + m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB) + } + } + + return conflicts.Conflicts +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) { + if m.comparedFragments[fragmentSpread.Name] { + return + } + m.comparedFragments[fragmentSpread.Name] = true + + if fragmentSpread.Definition == nil { + return + } + + fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet) + + // Do not compare a fragment's fieldMap to itself. + if reflect.DeepEqual(fieldsMap, fieldsMapB) { + return + } + + // (D) First collect any conflicts between the provided collection of fields + // and the collection of fields represented by the given fragment. + m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB) + + // (E) Then collect any conflicts between the provided collection of fields + // and any fragment names found in the given fragment. + baseFragmentSpread := fragmentSpread + for _, fragmentSpread := range fragmentSpreads { + if fragmentSpread.Name == baseFragmentSpread.Name { + continue + } + m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread) + } +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { + + var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) + check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { + + if fragmentSpreadA.Name == fragmentSpreadB.Name { + return + } + + if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) { + return + } + m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) + + if fragmentSpreadA.Definition == nil { + return + } + if fragmentSpreadB.Definition == nil { + return + } + + fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet) + fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet) + + // (F) First, collect all conflicts between these two collections of fields + // (not including any nested fragments). + m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB) + + // (G) Then collect conflicts between the first fragment and any nested + // fragments spread in the second fragment. + for _, fragmentSpread := range fragmentSpreadsB { + check(fragmentSpreadA, fragmentSpread) + } + // (G) Then collect conflicts between the second fragment and any nested + // fragments spread in the first fragment. + for _, fragmentSpread := range fragmentSpreadsA { + check(fragmentSpread, fragmentSpreadB) + } + } + + check(fragmentSpreadA, fragmentSpreadB) +} + +func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer { + var conflicts conflictMessageContainer + + fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA) + fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB) + + // (H) First, collect all conflicts between these two collections of fields.
+ m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB) + + // (I) Then collect conflicts between the first collection of fields and + // those referenced by each fragment name associated with the second. + for _, fragmentSpread := range fragmentSpreadsB { + m.comparedFragments = make(map[string]bool) + m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread) + } + + // (I) Then collect conflicts between the second collection of fields and + // those referenced by each fragment name associated with the first. + for _, fragmentSpread := range fragmentSpreadsA { + m.comparedFragments = make(map[string]bool) + m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread) + } + + // (J) Also collect conflicts between any fragment names by the first and + // fragment names by the second. This compares each item in the first set of + // names to each item in the second set of names. + for _, fragmentSpreadA := range fragmentSpreadsA { + for _, fragmentSpreadB := range fragmentSpreadsB { + m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB) + } + } + + if len(conflicts.Conflicts) == 0 { + return nil + } + + return &conflicts +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) { + for _, fields := range fieldsMap.Iterator() { + for idx, fieldA := range fields { + for _, fieldB := range fields[idx+1:] { + conflict := m.findConflict(false, fieldA, fieldB) + if conflict != nil { + conflicts.Conflicts = append(conflicts.Conflicts, conflict) + } + } + } + } +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) { + for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() { + fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName) + if !ok { + continue + } + for _, fieldA := range fieldsEntryA.Fields { + for _, fieldB := range fieldsB { + conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB) + if conflict != nil { + conflicts.Conflicts = append(conflicts.Conflicts, conflict) + } + } + } + } +} + +func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage { + if fieldA.ObjectDefinition == nil || fieldB.ObjectDefinition == nil { + return nil + } + + areMutuallyExclusive := parentFieldsAreMutuallyExclusive + if !areMutuallyExclusive { + tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name + tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object + tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object + tmp = tmp && fieldA.Definition != nil && fieldB.Definition != nil + areMutuallyExclusive = tmp + } + + fieldNameA := fieldA.Name + if fieldA.Alias != "" { + fieldNameA = fieldA.Alias + } + + if !areMutuallyExclusive { + // Two aliases must refer to the same field. + if fieldA.Name != fieldB.Name { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: fmt.Sprintf(`"%s" and "%s" are different fields`, fieldA.Name, fieldB.Name), + Position: fieldB.Position, + } + } + + // Two field calls must have the same arguments. 
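+ // e.g. `user(id: 1)` and `user(id: 2)` selected under the same response name cannot both be satisfied by one response entry.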
+ if !sameArguments(fieldA.Arguments, fieldB.Arguments) { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: "they have differing arguments", + Position: fieldB.Position, + } + } + } + + if fieldA.Definition != nil && fieldB.Definition != nil && doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: fmt.Sprintf(`they return conflicting types "%s" and "%s"`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()), + Position: fieldB.Position, + } + } + + // Collect and compare sub-fields. Use the same "visited fragment names" list + // for both collections so fields in a fragment reference are never + // compared to themselves. + conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet) + if conflicts == nil { + return nil + } + return &ConflictMessage{ + ResponseName: fieldNameA, + SubMessage: conflicts.Conflicts, + Position: fieldB.Position, + } +} + +func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool { + if len(args1) != len(args2) { + return false + } + for _, arg1 := range args1 { + var matched bool + for _, arg2 := range args2 { + if arg1.Name == arg2.Name && sameValue(arg1.Value, arg2.Value) { + matched = true + break + } + } + if !matched { + return false + } + } + return true +} + +func sameValue(value1 *ast.Value, value2 *ast.Value) bool { + if value1.Kind != value2.Kind { + return false + } + if value1.Raw != value2.Raw { + return false + } + return true +} + +func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool { + if type1.Elem != nil { + if type2.Elem != nil { + return doTypesConflict(walker, type1.Elem, type2.Elem) + } + return true + } + if type2.Elem != nil { + return true + } + if type1.NonNull && !type2.NonNull { + return true + } + if !type1.NonNull && type2.NonNull { + return true + } + + t1 := walker.Schema.Types[type1.NamedType] + t2 := walker.Schema.Types[type2.NamedType] + if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) { + return t1.Name != t2.Name + } + + return false +} + +func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) { + fieldsMap := sequentialFieldsMap{ + data: make(map[string][]*ast.Field), + } + var fragmentSpreads []*ast.FragmentSpread + + var walk func(selectionSet ast.SelectionSet) + walk = func(selectionSet ast.SelectionSet) { + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + responseName := selection.Name + if selection.Alias != "" { + responseName = selection.Alias + } + fieldsMap.Push(responseName, selection) + + case *ast.InlineFragment: + walk(selection.SelectionSet) + + case *ast.FragmentSpread: + fragmentSpreads = append(fragmentSpreads, selection) + } + } + } + walk(selectionSet) + + return &fieldsMap, fragmentSpreads +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go new file mode 100644 index 00000000..a3f795c9 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go @@ -0,0 +1,68 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) { + + validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) { + if parentDef == nil { + return + } + + var parentDefs []*ast.Definition + switch parentDef.Kind { + case ast.Object: + parentDefs = []*ast.Definition{parentDef} + case ast.Interface, ast.Union: + parentDefs = walker.Schema.GetPossibleTypes(parentDef) + default: + return + } + + fragmentDefType := walker.Schema.Types[fragmentName] + if fragmentDefType == nil { + return + } + if !fragmentDefType.IsCompositeType() { + // checked by FragmentsOnCompositeTypes + return + } + fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType) + + for _, fragmentDef := range fragmentDefs { + for _, parentDef := range parentDefs { + if parentDef.Name == fragmentDef.Name { + return + } + } + } + + emitError() + } + + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() { + addError( + Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition), + At(inlineFragment.Position), + ) + }) + }) + + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if fragmentSpread.Definition == nil { + return + } + validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() { + addError( + Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition), + At(fragmentSpread.Position), + ) + }) + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go new file mode 100644 index 00000000..d8ed6520 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go @@ -0,0 +1,62 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil { + return + } + + argDef: + for _, argDef := range field.Definition.Arguments { + if !argDef.Type.NonNull { + continue + } + if argDef.DefaultValue != nil { + continue + } + for _, arg := range field.Arguments { + if arg.Name == argDef.Name { + continue argDef + } + } + + addError( + Message(`Field "%s" argument "%s" of type "%s" is required, but it was not provided.`, field.Name, argDef.Name, argDef.Type.String()), + At(field.Position), + ) + } + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + return + } + + argDef: + for _, argDef := range directive.Definition.Arguments { + if !argDef.Type.NonNull { + continue + } + if argDef.DefaultValue != nil { + continue + } + for _, arg := range directive.Arguments { + if arg.Name == argDef.Name { + continue argDef + } + } + + addError( + Message(`Directive "@%s" argument "%s" of type "%s" is required, but it was not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()), + At(directive.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go new file mode 100644 index 00000000..718bc683 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go @@ -0,0 +1,36 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil { + return + } + + fieldType := walker.Schema.Types[field.Definition.Type.Name()] + if fieldType == nil { + return + } + + if fieldType.IsLeafType() && len(field.SelectionSet) > 0 { + addError( + Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name), + At(field.Position), + ) + } + + if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 { + addError( + Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()), + Suggestf(`"%s { ... }"`, field.Name), + At(field.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go new file mode 100644 index 00000000..a9e5bf63 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go @@ -0,0 +1,86 @@ +package validator + +import ( + "strconv" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if walker.Schema.Subscription == nil || operation.Operation != ast.Subscription { + return + } + + fields := retrieveTopFieldNames(operation.SelectionSet) + + name := "Anonymous Subscription" + if operation.Name != "" { + name = `Subscription ` + strconv.Quote(operation.Name) + } + + if len(fields) > 1 { + addError( + Message(`%s must select only one top level field.`, name), + At(fields[1].position), + ) + } + + for _, field := range fields { + if strings.HasPrefix(field.name, "__") { + addError( + Message(`%s must not select an introspection top level field.`, name), + At(field.position), + ) + } + } + }) + }) +} + +type topField struct { + name string + position *ast.Position +} + +func retrieveTopFieldNames(selectionSet ast.SelectionSet) []*topField { + fields := []*topField{} + inFragmentRecursive := map[string]bool{} + var walk func(selectionSet ast.SelectionSet) + walk = func(selectionSet ast.SelectionSet) { + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + fields = append(fields, &topField{ + name: selection.Name, + position: selection.GetPosition(), + }) + case *ast.InlineFragment: + walk(selection.SelectionSet) + case *ast.FragmentSpread: + if selection.Definition == nil { + return + } + fragment := selection.Definition.Name + if !inFragmentRecursive[fragment] { + inFragmentRecursive[fragment] = true + walk(selection.Definition.SelectionSet) + } + } + } + } + walk(selectionSet) + + seen := make(map[string]bool, len(fields)) + uniquedFields := make([]*topField, 0, len(fields)) + for _, field := range fields { + if !seen[field.name] { + uniquedFields = append(uniquedFields, field) + } + seen[field.name] = true + } + return uniquedFields +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go new file mode 100644 index 00000000..1d9a50ab --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go @@ -0,0 +1,33 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + checkUniqueArgs(field.Arguments, addError) + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + checkUniqueArgs(directive.Arguments, addError) + }) + }) +} + +func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) { + knownArgNames := map[string]int{} + + for _, arg := range args { + if knownArgNames[arg.Name] == 1 { + addError( + Message(`There can be only one argument named "%s".`, arg.Name), + At(arg.Position), + ) + } + + knownArgNames[arg.Name]++ + } +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go new file mode 100644 index 00000000..52dfb21e --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go @@ -0,0 +1,24 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) { + observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) { + seen := map[string]bool{} + + for _, dir := range directives { + if dir.Name != "repeatable" && seen[dir.Name] { + addError( + Message(`The directive "@%s" can only be used once at this location.`, dir.Name), + At(dir.Position), + ) + } + seen[dir.Name] = true + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go new file mode 100644 index 00000000..8c348aea --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go @@ -0,0 +1,22 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) { + seenFragments := map[string]bool{} + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + if seenFragments[fragment.Name] { + addError( + Message(`There can be only one fragment named "%s".`, fragment.Name), + At(fragment.Position), + ) + } + seenFragments[fragment.Name] = true + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go new file mode 100644 index 00000000..092be671 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go @@ -0,0 +1,27 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Kind != ast.ObjectValue { + return + } + + seen := map[string]bool{} + for _, field := range value.Children { + if seen[field.Name] { + addError( + Message(`There can be only one input field named "%s".`, field.Name), + At(field.Position), + ) + } + seen[field.Name] = true + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go new file mode 100644 index 00000000..4d41b60a --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go @@ -0,0 +1,22 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) { + seen := map[string]bool{} + + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if seen[operation.Name] { + addError( + Message(`There can be only one operation named "%s".`, operation.Name), + At(operation.Position), + ) + } + seen[operation.Name] = true + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go new file mode 100644 index 00000000..6481ef4c --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go @@ -0,0 +1,24 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + seen := map[string]int{} + for _, def := range operation.VariableDefinitions { + // add the same error only once per a variable. + if seen[def.Variable] == 1 { + addError( + Message(`There can be only one variable named "$%s".`, def.Variable), + At(def.Position), + ) + } + seen[def.Variable]++ + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go new file mode 100644 index 00000000..22bea771 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go @@ -0,0 +1,168 @@ +package validator + +import ( + "errors" + "fmt" + "strconv" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Definition == nil || value.ExpectedType == nil { + return + } + + if value.Kind == ast.NullValue && value.ExpectedType.NonNull { + addError( + Message(`Expected value of type "%s", found %s.`, value.ExpectedType.String(), value.String()), + At(value.Position), + ) + } + + if value.Definition.Kind == ast.Scalar { + // Skip custom validating scalars + if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") { + return + } + } + + var possibleEnums []string + if value.Definition.Kind == ast.Enum { + for _, val := range value.Definition.EnumValues { + possibleEnums = append(possibleEnums, val.Name) + } + } + + rawVal, err := value.Value(nil) + if err != nil { + unexpectedTypeMessage(addError, value) + } + + switch value.Kind { + case ast.NullValue: + return + case ast.ListValue: + if value.ExpectedType.Elem == nil { + unexpectedTypeMessage(addError, value) + return + } + + case ast.IntValue: + if !value.Definition.OneOf("Int", "Float", "ID") { + unexpectedTypeMessage(addError, value) + } + + case ast.FloatValue: + if !value.Definition.OneOf("Float") { + unexpectedTypeMessage(addError, value) + } + + case ast.StringValue, ast.BlockValue: + if value.Definition.Kind == ast.Enum { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Enum "%s" cannot represent non-enum value: %s.`, value.ExpectedType.String(), value.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } else if !value.Definition.OneOf("String", "ID") { + unexpectedTypeMessage(addError, value) + } + + case ast.EnumValue: + if value.Definition.Kind != ast.Enum { + rawValStr := fmt.Sprint(rawVal) + addError( + unexpectedTypeMessageOnly(value), + SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } else if value.Definition.EnumValues.ForName(value.Raw) == nil { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Value "%s" does not exist in "%s" enum.`, value.String(), value.ExpectedType.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } + + case ast.BooleanValue: + if !value.Definition.OneOf("Boolean") { + unexpectedTypeMessage(addError, value) + } + + case ast.ObjectValue: + + for _, field := range value.Definition.Fields { + if field.Type.NonNull { + fieldValue := value.Children.ForName(field.Name) + if fieldValue == nil && field.DefaultValue == nil { + addError( + Message(`Field "%s.%s" of required type "%s" was not provided.`, value.Definition.Name, field.Name, field.Type.String()), + At(value.Position), + ) + continue + } + } + } + + for _, fieldValue := range value.Children { + if value.Definition.Fields.ForName(fieldValue.Name) == nil { + var suggestions []string + for _, fieldValue := range value.Definition.Fields { + suggestions = append(suggestions, fieldValue.Name) + } + + addError( + Message(`Field "%s" is not defined by type "%s".`, fieldValue.Name, value.Definition.Name), + SuggestListQuoted("Did you mean", fieldValue.Name, suggestions), + At(fieldValue.Position), + ) + } + } + + case ast.Variable: + return + + default: + panic(fmt.Errorf("unhandled %T", value)) + } + }) + }) +} + +func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) { + addError( + unexpectedTypeMessageOnly(v), + At(v.Position), 
+ ) +} + +func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption { + switch v.ExpectedType.String() { + case "Int", "Int!": + if _, err := strconv.ParseInt(v.Raw, 10, 32); err != nil && errors.Is(err, strconv.ErrRange) { + return Message(`Int cannot represent non 32-bit signed integer value: %s`, v.String()) + } + return Message(`Int cannot represent non-integer value: %s`, v.String()) + case "String", "String!", "[String]": + return Message(`String cannot represent a non string value: %s`, v.String()) + case "Boolean", "Boolean!": + return Message(`Boolean cannot represent a non boolean value: %s`, v.String()) + case "Float", "Float!": + return Message(`Float cannot represent non numeric value: %s`, v.String()) + case "ID", "ID!": + return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String()) + //case "Enum": + // return Message(`Enum "%s" cannot represent non-enum value: %s`, v.ExpectedType.String(), v.String()) + default: + if v.Definition.Kind == ast.Enum { + return Message(`Enum "%s" cannot represent non-enum value: %s.`, v.ExpectedType.String(), v.String()) + } + return Message(`Expected value of type "%s", found %s.`, v.ExpectedType.String(), v.String()) + } +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go new file mode 100644 index 00000000..4ea94e5a --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go @@ -0,0 +1,28 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, def := range operation.VariableDefinitions { + if def.Definition == nil { + continue + } + if !def.Definition.IsInputType() { + addError( + Message( + `Variable "$%s" cannot be non-input type "%s".`, + def.Variable, + def.Type.String(), + ), + At(def.Position), + ) + } + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go new file mode 100644 index 00000000..eef74354 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go @@ -0,0 +1,38 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil { + return + } + + tmp := *value.ExpectedType + + // todo: move me into walk + // If there is a default non nullable types can be null + if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue { + if value.ExpectedType.NonNull { + tmp.NonNull = false + } + } + + if !value.VariableDefinition.Type.IsCompatible(&tmp) { + addError( + Message( + `Variable "%s" of type "%s" used in position expecting type "%s".`, + value, + value.VariableDefinition.Type.String(), + value.ExpectedType.String(), + ), + At(value.Position), + ) + } + }) + }) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/schema.go b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go new file mode 100644 index 00000000..976ed830 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go @@ -0,0 +1,512 @@ +package validator + +import ( + "sort" + "strconv" + "strings" + + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/parser" +) + +func LoadSchema(inputs ...*Source) (*Schema, error) { + ast, err := parser.ParseSchemas(inputs...) + if err != nil { + return nil, err + } + return ValidateSchemaDocument(ast) +} + +func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { + schema := Schema{ + Types: map[string]*Definition{}, + Directives: map[string]*DirectiveDefinition{}, + PossibleTypes: map[string][]*Definition{}, + Implements: map[string][]*Definition{}, + } + + for i, def := range ast.Definitions { + if schema.Types[def.Name] != nil { + return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name) + } + schema.Types[def.Name] = ast.Definitions[i] + } + + defs := append(DefinitionList{}, ast.Definitions...) + + for _, ext := range ast.Extensions { + def := schema.Types[ext.Name] + if def == nil { + schema.Types[ext.Name] = &Definition{ + Kind: ext.Kind, + Name: ext.Name, + Position: ext.Position, + } + def = schema.Types[ext.Name] + defs = append(defs, def) + } + + if def.Kind != ext.Kind { + return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind) + } + + def.Directives = append(def.Directives, ext.Directives...) + def.Interfaces = append(def.Interfaces, ext.Interfaces...) + def.Fields = append(def.Fields, ext.Fields...) + def.Types = append(def.Types, ext.Types...) + def.EnumValues = append(def.EnumValues, ext.EnumValues...) 
+ } + + for _, def := range defs { + switch def.Kind { + case Union: + for _, t := range def.Types { + schema.AddPossibleType(def.Name, schema.Types[t]) + schema.AddImplements(t, def) + } + case InputObject, Object: + for _, intf := range def.Interfaces { + schema.AddPossibleType(intf, def) + schema.AddImplements(def.Name, schema.Types[intf]) + } + schema.AddPossibleType(def.Name, def) + case Interface: + for _, intf := range def.Interfaces { + schema.AddPossibleType(intf, def) + schema.AddImplements(def.Name, schema.Types[intf]) + } + } + } + + for i, dir := range ast.Directives { + if schema.Directives[dir.Name] != nil { + // While the spec says SDL must not (§3.5) explicitly define builtin + // scalars, it may (§3.13) define builtin directives. Here we check for + // that, and reject doubly-defined directives otherwise. + switch dir.Name { + case "include", "skip", "deprecated", "specifiedBy": // the builtins + // In principle here we might want to validate that the + // directives are the same. But they might not be, if the + // server has an older spec than we do. (Plus, validating this + // is a lot of work.) So we just keep the first one we saw. + // That's an arbitrary choice, but in theory the only way it + // fails is if the server is using features newer than this + // version of gqlparser, in which case they're in trouble + // anyway. + default: + return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name) + } + } + schema.Directives[dir.Name] = ast.Directives[i] + } + + if len(ast.Schema) > 1 { + return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.") + } + + if len(ast.Schema) == 1 { + schema.Description = ast.Schema[0].Description + for _, entrypoint := range ast.Schema[0].OperationTypes { + def := schema.Types[entrypoint.Type] + if def == nil { + return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type) + } + switch entrypoint.Operation { + case Query: + schema.Query = def + case Mutation: + schema.Mutation = def + case Subscription: + schema.Subscription = def + } + } + } + + for _, ext := range ast.SchemaExtension { + for _, entrypoint := range ext.OperationTypes { + def := schema.Types[entrypoint.Type] + if def == nil { + return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type) + } + switch entrypoint.Operation { + case Query: + schema.Query = def + case Mutation: + schema.Mutation = def + case Subscription: + schema.Subscription = def + } + } + } + + if err := validateTypeDefinitions(&schema); err != nil { + return nil, err + } + + if err := validateDirectiveDefinitions(&schema); err != nil { + return nil, err + } + + // Root operation type names should be inferred only when a `schema` definition is + // **not** provided; when one is, `Mutation` and `Subscription` remain valid type names but are not + // assigned as root operations on the schema.
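+ // e.g. a document defining `type Query { ... }` with no `schema { ... }` block gets that type as its query root automatically; the same applies to `Mutation` and `Subscription` below.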
+ if len(ast.Schema) == 0 { + if schema.Query == nil && schema.Types["Query"] != nil { + schema.Query = schema.Types["Query"] + } + + if schema.Mutation == nil && schema.Types["Mutation"] != nil { + schema.Mutation = schema.Types["Mutation"] + } + + if schema.Subscription == nil && schema.Types["Subscription"] != nil { + schema.Subscription = schema.Types["Subscription"] + } + } + + if schema.Query != nil { + schema.Query.Fields = append( + schema.Query.Fields, + &FieldDefinition{ + Name: "__schema", + Type: NonNullNamedType("__Schema", nil), + }, + &FieldDefinition{ + Name: "__type", + Type: NamedType("__Type", nil), + Arguments: ArgumentDefinitionList{ + {Name: "name", Type: NonNullNamedType("String", nil)}, + }, + }, + ) + } + + return &schema, nil +} + +func validateTypeDefinitions(schema *Schema) *gqlerror.Error { + types := make([]string, 0, len(schema.Types)) + for typ := range schema.Types { + types = append(types, typ) + } + sort.Strings(types) + for _, typ := range types { + err := validateDefinition(schema, schema.Types[typ]) + if err != nil { + return err + } + } + return nil +} + +func validateDirectiveDefinitions(schema *Schema) *gqlerror.Error { + directives := make([]string, 0, len(schema.Directives)) + for directive := range schema.Directives { + directives = append(directives, directive) + } + sort.Strings(directives) + for _, directive := range directives { + err := validateDirective(schema, schema.Directives[directive]) + if err != nil { + return err + } + } + return nil +} + +func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error { + if err := validateName(def.Position, def.Name); err != nil { + // now, GraphQL spec doesn't have reserved directive name + return err + } + + return validateArgs(schema, def.Arguments, def) +} + +func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error { + for _, field := range def.Fields { + if err := validateName(field.Position, field.Name); err != nil { + // now, GraphQL spec doesn't have reserved field name + return err + } + if err := validateTypeRef(schema, field.Type); err != nil { + return err + } + if err := validateArgs(schema, field.Arguments, nil); err != nil { + return err + } + wantDirLocation := LocationFieldDefinition + if def.Kind == InputObject { + wantDirLocation = LocationInputFieldDefinition + } + if err := validateDirectives(schema, field.Directives, wantDirLocation, nil); err != nil { + return err + } + } + + for _, typ := range def.Types { + typDef := schema.Types[typ] + if typDef == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(typ)) + } + if !isValidKind(typDef.Kind, Object) { + return gqlerror.ErrorPosf(def.Position, "%s type %s must be %s.", def.Kind, strconv.Quote(typ), kindList(Object)) + } + } + + for _, intf := range def.Interfaces { + if err := validateImplements(schema, def, intf); err != nil { + return err + } + } + + switch def.Kind { + case Object, Interface: + if len(def.Fields) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more fields.", def.Kind, def.Name) + } + for _, field := range def.Fields { + if typ, ok := schema.Types[field.Type.Name()]; ok { + if !isValidKind(typ.Kind, Scalar, Object, Interface, Union, Enum) { + return gqlerror.ErrorPosf(field.Position, "%s %s: field must be one of %s.", def.Kind, def.Name, kindList(Scalar, Object, Interface, Union, Enum)) + } + } + } + case Enum: + if len(def.EnumValues) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more 
unique enum values.", def.Kind, def.Name) + } + for _, value := range def.EnumValues { + for _, nonEnum := range [3]string{"true", "false", "null"} { + if value.Name == nonEnum { + return gqlerror.ErrorPosf(def.Position, "%s %s: non-enum value %s.", def.Kind, def.Name, value.Name) + } + } + } + case InputObject: + if len(def.Fields) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more input fields.", def.Kind, def.Name) + } + for _, field := range def.Fields { + if typ, ok := schema.Types[field.Type.Name()]; ok { + if !isValidKind(typ.Kind, Scalar, Enum, InputObject) { + return gqlerror.ErrorPosf(field.Position, "%s %s: field must be one of %s.", typ.Kind, field.Name, kindList(Scalar, Enum, InputObject)) + } + } + } + } + + for idx, field1 := range def.Fields { + for _, field2 := range def.Fields[idx+1:] { + if field1.Name == field2.Name { + return gqlerror.ErrorPosf(field2.Position, "Field %s.%s can only be defined once.", def.Name, field2.Name) + } + } + } + + if !def.BuiltIn { + // GraphQL spec has reserved type names a lot! + err := validateName(def.Position, def.Name) + if err != nil { + return err + } + } + + return validateDirectives(schema, def.Directives, DirectiveLocation(def.Kind), nil) +} + +func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error { + if schema.Types[typ.Name()] == nil { + return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name()) + } + return nil +} + +func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error { + for _, arg := range args { + if err := validateName(arg.Position, arg.Name); err != nil { + // now, GraphQL spec doesn't have reserved argument name + return err + } + if err := validateTypeRef(schema, arg.Type); err != nil { + return err + } + def := schema.Types[arg.Type.Name()] + if !def.IsInputType() { + return gqlerror.ErrorPosf( + arg.Position, + "cannot use %s as argument %s because %s is not a valid input type", + arg.Type.String(), + arg.Name, + def.Kind, + ) + } + if err := validateDirectives(schema, arg.Directives, LocationArgumentDefinition, currentDirective); err != nil { + return err + } + } + return nil +} + +func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLocation, currentDirective *DirectiveDefinition) *gqlerror.Error { + for _, dir := range dirs { + if err := validateName(dir.Position, dir.Name); err != nil { + // now, GraphQL spec doesn't have reserved directive name + return err + } + if currentDirective != nil && dir.Name == currentDirective.Name { + return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name) + } + if schema.Directives[dir.Name] == nil { + return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name) + } + validKind := false + for _, dirLocation := range schema.Directives[dir.Name].Locations { + if dirLocation == location { + validKind = true + break + } + } + if !validKind { + return gqlerror.ErrorPosf(dir.Position, "Directive %s is not applicable on %s.", dir.Name, location) + } + dir.Definition = schema.Directives[dir.Name] + } + return nil +} + +func validateImplements(schema *Schema, def *Definition, intfName string) *gqlerror.Error { + // see validation rules at the bottom of + // https://facebook.github.io/graphql/October2021/#sec-Objects + intf := schema.Types[intfName] + if intf == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) + } + if intf.Kind != Interface { + 
return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intfName), intf.Kind) + } + for _, requiredField := range intf.Fields { + foundField := def.Fields.ForName(requiredField.Name) + if foundField == nil { + return gqlerror.ErrorPosf(def.Position, + `For %s to implement %s it must have a field called %s.`, + def.Name, intf.Name, requiredField.Name, + ) + } + + if !isCovariant(schema, requiredField.Type, foundField.Type) { + return gqlerror.ErrorPosf(foundField.Position, + `For %s to implement %s the field %s must have type %s.`, + def.Name, intf.Name, requiredField.Name, requiredField.Type.String(), + ) + } + + for _, requiredArg := range requiredField.Arguments { + foundArg := foundField.Arguments.ForName(requiredArg.Name) + if foundArg == nil { + return gqlerror.ErrorPosf(foundField.Position, + `For %s to implement %s the field %s must have the same arguments but it is missing %s.`, + def.Name, intf.Name, requiredField.Name, requiredArg.Name, + ) + } + + if !requiredArg.Type.IsCompatible(foundArg.Type) { + return gqlerror.ErrorPosf(foundArg.Position, + `For %s to implement %s the field %s must have the same arguments but %s has the wrong type.`, + def.Name, intf.Name, requiredField.Name, requiredArg.Name, + ) + } + } + for _, foundArgs := range foundField.Arguments { + if requiredField.Arguments.ForName(foundArgs.Name) == nil && foundArgs.Type.NonNull && foundArgs.DefaultValue == nil { + return gqlerror.ErrorPosf(foundArgs.Position, + `For %s to implement %s any additional arguments on %s must be optional or have a default value but %s is required.`, + def.Name, intf.Name, foundField.Name, foundArgs.Name, + ) + } + } + } + return validateTypeImplementsAncestors(schema, def, intfName) +} + +// validateTypeImplementsAncestors +// https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/type/validate.ts#L428 +func validateTypeImplementsAncestors(schema *Schema, def *Definition, intfName string) *gqlerror.Error { + intf := schema.Types[intfName] + if intf == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) + } + for _, transitive := range intf.Interfaces { + if !containsString(def.Interfaces, transitive) { + if transitive == def.Name { + return gqlerror.ErrorPosf(def.Position, + `Type %s cannot implement %s because it would create a circular reference.`, + def.Name, intfName, + ) + } + return gqlerror.ErrorPosf(def.Position, + `Type %s must implement %s because it is implemented by %s.`, + def.Name, transitive, intfName, + ) + } + } + return nil +} + +func containsString(slice []string, want string) bool { + for _, str := range slice { + if want == str { + return true + } + } + return false +} + +func isCovariant(schema *Schema, required *Type, actual *Type) bool { + if required.NonNull && !actual.NonNull { + return false + } + + if required.NamedType != "" { + if required.NamedType == actual.NamedType { + return true + } + for _, pt := range schema.PossibleTypes[required.NamedType] { + if pt.Name == actual.NamedType { + return true + } + } + return false + } + + if required.Elem != nil && actual.Elem == nil { + return false + } + + return isCovariant(schema, required.Elem, actual.Elem) +} + +func validateName(pos *Position, name string) *gqlerror.Error { + if strings.HasPrefix(name, "__") { + return gqlerror.ErrorPosf(pos, `Name "%s" must not begin with "__", which is reserved by GraphQL introspection.`, name) + } + return nil +} + +func isValidKind(kind DefinitionKind, valid 
...DefinitionKind) bool { + for _, k := range valid { + if kind == k { + return true + } + } + return false +} + +func kindList(kinds ...DefinitionKind) string { + s := make([]string, len(kinds)) + for i, k := range kinds { + s[i] = string(k) + } + return strings.Join(s, ", ") +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml new file mode 100644 index 00000000..7034a469 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml @@ -0,0 +1,678 @@ +types: + - name: cannot be redeclared + input: | + type A { + name: String + } + type A { + name: String + } + error: + message: "Cannot redeclare type A." + locations: [{line: 4, column: 6}] + - name: cannot be duplicated field at same definition 1 + input: | + type A { + name: String + name: String + } + error: + message: "Field A.name can only be defined once." + locations: [{line: 3, column: 3}] + - name: cannot be duplicated field at same definition 2 + input: | + type A { + name: String + } + extend type A { + name: String + } + error: + message: "Field A.name can only be defined once." + locations: [{line: 5, column: 3}] + - name: cannot be duplicated field at same definition 3 + input: | + type A { + name: String + } + extend type A { + age: Int + age: Int + } + error: + message: "Field A.age can only be defined once." + locations: [{line: 6, column: 3}] + +object types: + - name: must define one or more fields + input: | + directive @D on OBJECT + + # This pattern rejected by parser + # type InvalidObject1 {} + + type InvalidObject2 @D + + type ValidObject { + id: ID + } + extend type ValidObject @D + extend type ValidObject { + b: Int + } + error: + message: 'OBJECT InvalidObject2: must define one or more fields.' + locations: [{line: 6, column: 6}] + - name: check reserved names on type name + input: | + type __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 6}] + - name: check reserved names on type field + input: | + type FooBar { + __id: ID + } + error: + message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 2, column: 3}] + + - name: check reserved names on type field argument + input: | + type FooBar { + foo(__bar: ID): ID + } + error: + message: 'Name "__bar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 2, column: 7}] + + - name: must not allow input object as field type + input: | + input Input { + id: ID + } + type Query { + input: Input! + } + error: + message: 'OBJECT Query: field must be one of SCALAR, OBJECT, INTERFACE, UNION, ENUM.' + locations: [{line: 5, column: 3}] + +interfaces: + - name: must exist + input: | + type Thing implements Object { + id: ID! + } + + type Query { + Things: [Thing!]! + } + error: + message: 'Undefined type "Object".' + locations: [{line: 1, column: 6}] + + - name: must be an interface + input: | + type Thing implements Object { + id: ID! + } + + type Query { + Things: [Thing!]! + } + + type Object { + name: String + } + error: + message: '"Object" is a non interface type OBJECT.' 
+ locations: [{line: 1, column: 6}] + + - name: must define one or more fields + input: | + directive @D on INTERFACE + + # This pattern rejected by parser + # interface InvalidInterface1 {} + + interface InvalidInterface2 @D + + interface ValidInterface { + id: ID + } + extend interface ValidInterface @D + extend interface ValidInterface { + b: Int + } + error: + message: 'INTERFACE InvalidInterface2: must define one or more fields.' + locations: [{line: 6, column: 11}] + + - name: check reserved names on type name + input: | + interface __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 11}] + + - name: must not allow input object as field type + input: | + input Input { + id: ID + } + type Query { + foo: Foo! + } + interface Foo { + input: Input! + } + error: + message: 'INTERFACE Foo: field must be one of SCALAR, OBJECT, INTERFACE, UNION, ENUM.' + locations: [{line: 8, column: 3}] + + - name: must have all fields from interface + input: | + type Bar implements BarInterface { + someField: Int! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface it must have a field called id.' + locations: [{line: 1, column: 6}] + + - name: must have same type of fields + input: | + type Bar implements BarInterface { + id: Int! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have type ID!.' + locations: [{line: 2, column: 5}] + + - name: must have all required arguments + input: | + type Bar implements BarInterface { + id: ID! + } + + interface BarInterface { + id(ff: Int!): ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have the same arguments but it is missing ff.' + locations: [{line: 2, column: 5}] + + - name: must have same argument types + input: | + type Bar implements BarInterface { + id(ff: ID!): ID! + } + + interface BarInterface { + id(ff: Int!): ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have the same arguments but ff has the wrong type.' + locations: [{line: 2, column: 8}] + + - name: may defined additional nullable arguments + input: | + type Bar implements BarInterface { + id(opt: Int): ID! + } + + interface BarInterface { + id: ID! + } + + - name: may defined additional required arguments with defaults + input: | + type Bar implements BarInterface { + id(opt: Int! = 1): ID! + } + + interface BarInterface { + id: ID! + } + + - name: must not define additional required arguments without defaults + input: | + type Bar implements BarInterface { + id(opt: Int!): ID! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface any additional arguments on id must be optional or have a default value but opt is required.' + locations: [{line: 2, column: 8}] + + - name: can have covariant argument types + input: | + union U = A|B + + type A { name: String } + type B { name: String } + + type Bar implements BarInterface { + f: A! + } + + interface BarInterface { + f: U! + } + + - name: may define intermediate interfaces + input: | + interface IA { + id: ID! + } + + interface IIA implements IA { + id: ID! + } + + type A implements IIA & IA { + id: ID! 
+ } + + - name: Type Foo must implement Baz because it is implemented by Bar + input: | + interface Baz { + baz: String + } + + interface Bar implements Baz { + bar: String + baz: String + } + + type Foo implements Bar { + foo: String + bar: String + baz: String + } + error: + message: 'Type Foo must implement Baz because it is implemented by Bar.' + locations: [{line: 10, column: 6}] + + - name: circular reference error + input: | + interface Circular1 implements Circular2 { + id: ID! + } + + interface Circular2 implements Circular1 { + id: ID! + } + error: + message: 'Type Circular1 cannot implement Circular2 because it would create a circular reference.' + locations: [{line: 1, column: 11}] + +inputs: + - name: must define one or more input fields + input: | + directive @D on INPUT_OBJECT + + # This pattern rejected by parser + # input InvalidInput1 {} + + input InvalidInput2 @D + + input ValidInput { + id: ID + } + extend input ValidInput @D + extend input ValidInput { + b: Int + } + error: + message: 'INPUT_OBJECT InvalidInput2: must define one or more input fields.' + locations: [{line: 6, column: 7}] + - name: check reserved names on type name + input: | + input __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 7}] + + - name: fields cannot be Objects + input: | + type Object { id: ID } + input Foo { a: Object! } + error: + message: 'OBJECT a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 2, column: 13}] + + - name: fields cannot be Interfaces + input: | + interface Interface { id: ID! } + input Foo { a: Interface! } + error: + message: 'INTERFACE a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 2, column: 13}] + + - name: fields cannot be Unions + input: | + type Object { id: ID } + union Union = Object + input Foo { a: Union! } + error: + message: 'UNION a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 3, column: 13}] + +args: + - name: Valid arg types + input: | + input Input { id: ID } + enum Enum { A } + scalar Scalar + + type Query { + f(a: Input, b: Scalar, c: Enum): Boolean! + } + + - name: Objects not allowed + input: | + type Object { id: ID } + type Query { f(a: Object): Boolean! } + + error: + message: 'cannot use Object as argument a because OBJECT is not a valid input type' + locations: [{line: 2, column: 16}] + + - name: Union not allowed + input: | + type Object { id: ID } + union Union = Object + type Query { f(a: Union): Boolean! } + + error: + message: 'cannot use Union as argument a because UNION is not a valid input type' + locations: [{line: 3, column: 16}] + + - name: Interface not allowed + input: | + interface Interface { id: ID } + type Query { f(a: Interface): Boolean! } + + error: + message: 'cannot use Interface as argument a because INTERFACE is not a valid input type' + locations: [{line: 2, column: 16}] + +enums: + - name: must define one or more unique enum values + input: | + directive @D on ENUM + + # This pattern rejected by parser + # enum InvalidEmum1 {} + + enum InvalidEnum2 @D + + enum ValidEnum { + FOO + } + extend enum ValidEnum @D + extend enum ValidEnum { + BAR + } + error: + message: 'ENUM InvalidEnum2: must define one or more unique enum values.' 
+ locations: [{line: 6, column: 6}] + - name: check reserved names on type name + input: | + enum __FooBar { + A + B + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 6}] + +unions: + - name: union types must be defined + input: | + union Foo = Bar | Baz + type Bar { + id: ID + } + error: + message: "Undefined type \"Baz\"." + locations: [{line: 1, column: 7}] + - name: union types must be objects + input: | + union Foo = Baz + interface Baz { + id: ID + } + error: + message: "UNION type \"Baz\" must be OBJECT." + locations: [{line: 1, column: 7}] + + - name: unions of pure type extensions are valid + input: | + + type Review { + body: String! + author: User! @provides(fields: "username") + product: Product! + } + + extend type User @key(fields: "id") { + id: ID! @external + reviews: [Review] + } + + extend type Product @key(fields: "upc") { + upc: String! @external + reviews: [Review] + } + + union Foo = User | Product + scalar _Any + scalar _FieldSet + directive @external on FIELD_DEFINITION + directive @requires(fields: _FieldSet!) on FIELD_DEFINITION + directive @provides(fields: _FieldSet!) on FIELD_DEFINITION + directive @key(fields: _FieldSet!) on OBJECT | INTERFACE + directive @extends on OBJECT + + + +type extensions: + - name: can extend non existant types + input: | + extend type A { + name: String + } + + + - name: cannot extend incorret type existant types + input: | + scalar A + extend type A { + name: String + } + error: + message: "Cannot extend type A because the base type is a SCALAR, not OBJECT." + locations: [{line: 2, column: 13}] + +directives: + - name: cannot redeclare directives + input: | + directive @A on FIELD_DEFINITION + directive @A on FIELD_DEFINITION + error: + message: "Cannot redeclare directive A." + locations: [{line: 2, column: 12}] + + - name: can redeclare builtin directives + input: | + directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + - name: must be declared + input: | + type User { + name: String @foo + } + error: + message: "Undefined directive foo." + locations: [{line: 2, column: 17}] + + - name: cannot be self-referential + input: | + directive @A(foo: Int! @A) on FIELD_DEFINITION + error: + message: "Directive A cannot refer to itself." + locations: [{line: 1, column: 25}] + - name: check reserved names on type name + input: | + directive @__A on FIELD_DEFINITION + error: + message: 'Name "__A" must not begin with "__", which is reserved by GraphQL introspection.' 
+ locations: [{line: 1, column: 12}] + + - name: Valid arg types + input: | + input Input { id: ID } + enum Enum { A } + scalar Scalar + + directive @A(a: Input, b: Scalar, c: Enum) on FIELD_DEFINITION + + - name: Objects not allowed + input: | + type Object { id: ID } + directive @A(a: Object) on FIELD_DEFINITION + + error: + message: 'cannot use Object as argument a because OBJECT is not a valid input type' + locations: [{line: 2, column: 14}] + + - name: Union not allowed + input: | + type Object { id: ID } + union Union = Object + directive @A(a: Union) on FIELD_DEFINITION + + error: + message: 'cannot use Union as argument a because UNION is not a valid input type' + locations: [{line: 3, column: 14}] + + - name: Interface not allowed + input: | + interface Interface { id: ID } + directive @A(a: Interface) on FIELD_DEFINITION + + error: + message: 'cannot use Interface as argument a because INTERFACE is not a valid input type' + locations: [{line: 2, column: 14}] + + - name: Invalid location usage not allowed + input: | + directive @test on FIELD_DEFINITION + input I1 @test { f: String } + + error: + message: 'Directive test is not applicable on INPUT_OBJECT.' + locations: [{line: 2, column: 11}] + + - name: Valid location usage + input: | + directive @testInputField on INPUT_FIELD_DEFINITION + directive @testField on FIELD_DEFINITION + directive @inp on INPUT_OBJECT + input I1 @inp { f: String @testInputField } + type P { name: String @testField } + interface I { id: ID @testField } + + +entry points: + - name: multiple schema entry points + input: | + schema { + query: Query + } + schema { + query: Query + } + scalar Query + error: + message: "Cannot have multiple schema entry points, consider schema extensions instead." + locations: [{line: 4, column: 8}] + + - name: Undefined schema entrypoint + input: | + schema { + query: Query + } + error: + message: "Schema root query refers to a type Query that does not exist." + locations: [{line: 2, column: 3}] + +entry point extensions: + - name: Undefined schema entrypoint + input: | + schema { + query: Query + } + scalar Query + extend schema { + mutation: Mutation + } + error: + message: "Schema root mutation refers to a type Mutation that does not exist." + locations: [{line: 6, column: 3}] + +type references: + - name: Field types + input: | + type User { + posts: Post + } + error: + message: "Undefined type Post." + locations: [{line: 2, column: 10}] + + - name: Arg types + input: | + type User { + posts(foo: FooBar): String + } + error: + message: "Undefined type FooBar." + locations: [{line: 2, column: 14}] + + - name: Directive arg types + input: | + directive @Foo(foo: FooBar) on FIELD_DEFINITION + + error: + message: "Undefined type FooBar." + locations: [{line: 1, column: 21}] + + - name: Invalid enum value + input: | + enum Enum { true } + + error: + message: "ENUM Enum: non-enum value true." + locations: [{line: 1, column: 6}] diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go new file mode 100644 index 00000000..f0bbc327 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go @@ -0,0 +1,69 @@ +package validator + +import ( + "math" + "sort" + "strings" + + "github.com/agnivade/levenshtein" +) + +// Given an invalid input string and a list of valid options, returns a filtered +// list of valid options sorted based on their similarity with the input. 
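+// For example (illustrative, not part of the upstream comment): given the
+// input "Quary" and the options ["Query", "Mutation"], only "Query" falls
+// within the edit-distance threshold, so the result is ["Query"].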
+func SuggestionList(input string, options []string) []string {
+	var results []string
+	optionsByDistance := map[string]int{}
+
+	for _, option := range options {
+		distance := lexicalDistance(input, option)
+		threshold := calcThreshold(input)
+		if distance <= threshold {
+			results = append(results, option)
+			optionsByDistance[option] = distance
+		}
+	}
+
+	sort.Slice(results, func(i, j int) bool {
+		return optionsByDistance[results[i]] < optionsByDistance[results[j]]
+	})
+	return results
+}
+
+func calcThreshold(a string) (threshold int) {
+	// the logic is copied from here
+	// https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/jsutils/suggestionList.ts#L14
+	threshold = int(math.Floor(float64(len(a))*0.4) + 1)
+
+	if threshold < 1 {
+		threshold = 1
+	}
+	return
+}
+
+// Computes the lexical distance between strings A and B.
+//
+// The "distance" between two strings is given by counting the minimum number
+// of edits needed to transform string A into string B. An edit can be an
+// insertion, deletion, or substitution of a single character, or a swap of two
+// adjacent characters.
+//
+// Includes a custom alteration from Damerau-Levenshtein to treat case changes
+// as a single edit, which helps identify mis-cased values with an edit distance
+// of 1.
+//
+// This distance can be useful for detecting typos in input, or for sorting
+// options by their similarity to the input.
+func lexicalDistance(a, b string) int {
+	if a == b {
+		return 0
+	}
+
+	a = strings.ToLower(a)
+	b = strings.ToLower(b)
+
+	// Any case change counts as a single edit
+	if a == b {
+		return 1
+	}
+
+	return levenshtein.ComputeDistance(a, b)
+}
diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/validator.go b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go
new file mode 100644
index 00000000..34bf93db
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go
@@ -0,0 +1,44 @@
+package validator
+
+import (
+	. "github.com/vektah/gqlparser/v2/ast"
+	"github.com/vektah/gqlparser/v2/gqlerror"
+)
+
+type AddErrFunc func(options ...ErrorOption)
+
+type ruleFunc func(observers *Events, addError AddErrFunc)
+
+type rule struct {
+	name string
+	rule ruleFunc
+}
+
+var rules []rule
+
+// AddRule adds a rule to the rule set.
+// f is called once each time `Validate` is executed.
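+//
+// A typical registration, as done by the rule files in this package
+// (illustrative sketch; "MyRule" is a hypothetical name):
+//
+//	func init() {
+//		AddRule("MyRule", func(observers *Events, addError AddErrFunc) {
+//			observers.OnField(func(walker *Walker, field *ast.Field) {
+//				// inspect the field and call addError(...) on violations
+//			})
+//		})
+//	}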
+func AddRule(name string, f ruleFunc) {
+	rules = append(rules, rule{name: name, rule: f})
+}
+
+func Validate(schema *Schema, doc *QueryDocument) gqlerror.List {
+	var errs gqlerror.List
+
+	observers := &Events{}
+	for i := range rules {
+		rule := rules[i]
+		rule.rule(observers, func(options ...ErrorOption) {
+			err := &gqlerror.Error{
+				Rule: rule.name,
+			}
+			for _, o := range options {
+				o(err)
+			}
+			errs = append(errs, err)
+		})
+	}
+
+	Walk(schema, doc, observers)
+	return errs
+}
diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/vars.go b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go
new file mode 100644
index 00000000..8dbb05bc
--- /dev/null
+++ b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go
@@ -0,0 +1,258 @@
+package validator
+
+import (
+	"encoding/json"
+	"fmt"
+	"reflect"
+	"strconv"
+	"strings"
+
+	"github.com/vektah/gqlparser/v2/ast"
+	"github.com/vektah/gqlparser/v2/gqlerror"
+)
+
+var UnexpectedType = fmt.Errorf("Unexpected Type")
+
+// VariableValues coerces and validates variable values
+func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, error) {
+	coercedVars := map[string]interface{}{}
+
+	validator := varValidator{
+		path:   ast.Path{ast.PathName("variable")},
+		schema: schema,
+	}
+
+	for _, v := range op.VariableDefinitions {
+		validator.path = append(validator.path, ast.PathName(v.Variable))
+
+		if !v.Definition.IsInputType() {
+			return nil, gqlerror.ErrorPathf(validator.path, "must be an input type")
+		}
+
+		val, hasValue := variables[v.Variable]
+
+		if !hasValue {
+			if v.DefaultValue != nil {
+				var err error
+				val, err = v.DefaultValue.Value(nil)
+				if err != nil {
+					return nil, gqlerror.WrapPath(validator.path, err)
+				}
+				hasValue = true
+			} else if v.Type.NonNull {
+				return nil, gqlerror.ErrorPathf(validator.path, "must be defined")
+			}
+		}
+
+		if hasValue {
+			if val == nil {
+				if v.Type.NonNull {
+					return nil, gqlerror.ErrorPathf(validator.path, "cannot be null")
+				}
+				coercedVars[v.Variable] = nil
+			} else {
+				rv := reflect.ValueOf(val)
+
+				jsonNumber, isJsonNumber := val.(json.Number)
+				if isJsonNumber {
+					if v.Type.NamedType == "Int" {
+						n, err := jsonNumber.Int64()
+						if err != nil {
+							return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %d as %s", n, v.Type.NamedType)
+						}
+						rv = reflect.ValueOf(n)
+					} else if v.Type.NamedType == "Float" {
+						f, err := jsonNumber.Float64()
+						if err != nil {
+							return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %f as %s", f, v.Type.NamedType)
+						}
+						rv = reflect.ValueOf(f)
+
+					}
+				}
+				if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
+					rv = rv.Elem()
+				}
+
+				rval, err := validator.validateVarType(v.Type, rv)
+				if err != nil {
+					return nil, err
+				}
+				coercedVars[v.Variable] = rval.Interface()
+			}
+		}
+
+		validator.path = validator.path[0 : len(validator.path)-1]
+	}
+	return coercedVars, nil
+}
+
+type varValidator struct {
+	path   ast.Path
+	schema *ast.Schema
+}
+
+func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflect.Value, *gqlerror.Error) {
+	currentPath := v.path
+	resetPath := func() {
+		v.path = currentPath
+	}
+	defer resetPath()
+	if typ.Elem != nil {
+		if val.Kind() != reflect.Slice {
+			// GraphQL spec says that non-null values should be coerced to an array when possible.
+			// Hence if the value is not a slice, we create a slice and add val to it.
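+			// For example (illustrative): a variable declared as `[Int!]`
+			// that is given the plain value 5 is coerced to the list [5].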
+			slc := reflect.MakeSlice(reflect.SliceOf(val.Type()), 0, 0)
+			slc = reflect.Append(slc, val)
+			val = slc
+		}
+		for i := 0; i < val.Len(); i++ {
+			resetPath()
+			v.path = append(v.path, ast.PathIndex(i))
+			field := val.Index(i)
+			if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+				if typ.Elem.NonNull && field.IsNil() {
+					return val, gqlerror.ErrorPathf(v.path, "cannot be null")
+				}
+				field = field.Elem()
+			}
+			_, err := v.validateVarType(typ.Elem, field)
+			if err != nil {
+				return val, err
+			}
+		}
+		return val, nil
+	}
+	def := v.schema.Types[typ.NamedType]
+	if def == nil {
+		panic(fmt.Errorf("missing def for %s", typ.NamedType))
+	}
+
+	if !typ.NonNull && !val.IsValid() {
+		// If the type is nullable and we were given an invalid (null/nil) value, that is valid
+		return val, nil
+	}
+
+	switch def.Kind {
+	case ast.Enum:
+		kind := val.Type().Kind()
+		if kind != reflect.Int && kind != reflect.Int32 && kind != reflect.Int64 && kind != reflect.String {
+			return val, gqlerror.ErrorPathf(v.path, "enums must be ints or strings")
+		}
+		isValidEnum := false
+		for _, enumVal := range def.EnumValues {
+			if strings.EqualFold(val.String(), enumVal.Name) {
+				isValidEnum = true
+			}
+		}
+		if !isValidEnum {
+			return val, gqlerror.ErrorPathf(v.path, "%s is not a valid %s", val.String(), def.Name)
+		}
+		return val, nil
+	case ast.Scalar:
+		kind := val.Type().Kind()
+		switch typ.NamedType {
+		case "Int":
+			if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.Float32 || kind == reflect.Float64 || IsValidIntString(val, kind) {
+				return val, nil
+			}
+		case "Float":
+			if kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || IsValidFloatString(val, kind) {
+				return val, nil
+			}
+		case "String":
+			if kind == reflect.String {
+				return val, nil
+			}
+
+		case "Boolean":
+			if kind == reflect.Bool {
+				return val, nil
+			}
+
+		case "ID":
+			if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String {
+				return val, nil
+			}
+		default:
+			// assume custom scalars are ok
+			return val, nil
+		}
+		return val, gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType)
+	case ast.InputObject:
+		if val.Kind() != reflect.Map {
+			return val, gqlerror.ErrorPathf(v.path, "must be a %s", def.Name)
+		}
+
+		// check for unknown fields
+		for _, name := range val.MapKeys() {
+			val.MapIndex(name)
+			fieldDef := def.Fields.ForName(name.String())
+			resetPath()
+			v.path = append(v.path, ast.PathName(name.String()))
+
+			switch {
+			case name.String() == "__typename":
+				continue
+			case fieldDef == nil:
+				return val, gqlerror.ErrorPathf(v.path, "unknown field")
+			}
+		}
+
+		for _, fieldDef := range def.Fields {
+			resetPath()
+			v.path = append(v.path, ast.PathName(fieldDef.Name))
+
+			field := val.MapIndex(reflect.ValueOf(fieldDef.Name))
+			if !field.IsValid() {
+				if fieldDef.Type.NonNull {
+					if fieldDef.DefaultValue != nil {
+						var err error
+						_, err = fieldDef.DefaultValue.Value(nil)
+						if err == nil {
+							continue
+						}
+					}
+					return val, gqlerror.ErrorPathf(v.path, "must be defined")
+				}
+				continue
+			}
+
+			if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface {
+				if fieldDef.Type.NonNull && field.IsNil() {
+					return val, gqlerror.ErrorPathf(v.path, "cannot be null")
+				}
+				// allow null object field and skip it
+				if !fieldDef.Type.NonNull && field.IsNil() {
+					continue
+				}
+				field = field.Elem()
+			}
+			cval, err := v.validateVarType(fieldDef.Type, field)
+			if
err != nil { + return val, err + } + val.SetMapIndex(reflect.ValueOf(fieldDef.Name), cval) + } + default: + panic(fmt.Errorf("unsupported type %s", def.Kind)) + } + return val, nil +} + +func IsValidIntString(val reflect.Value, kind reflect.Kind) bool { + if kind != reflect.String { + return false + } + _, e := strconv.ParseInt(fmt.Sprintf("%v", val.Interface()), 10, 64) + + return e == nil +} + +func IsValidFloatString(val reflect.Value, kind reflect.Kind) bool { + if kind != reflect.String { + return false + } + _, e := strconv.ParseFloat(fmt.Sprintf("%v", val.Interface()), 64) + return e == nil +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/walk.go b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go new file mode 100644 index 00000000..6ee69e4c --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go @@ -0,0 +1,292 @@ +package validator + +import ( + "context" + "fmt" + + "github.com/vektah/gqlparser/v2/ast" +) + +type Events struct { + operationVisitor []func(walker *Walker, operation *ast.OperationDefinition) + field []func(walker *Walker, field *ast.Field) + fragment []func(walker *Walker, fragment *ast.FragmentDefinition) + inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment) + fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread) + directive []func(walker *Walker, directive *ast.Directive) + directiveList []func(walker *Walker, directives []*ast.Directive) + value []func(walker *Walker, value *ast.Value) + variable []func(walker *Walker, variable *ast.VariableDefinition) +} + +func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) { + o.operationVisitor = append(o.operationVisitor, f) +} +func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) { + o.field = append(o.field, f) +} +func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) { + o.fragment = append(o.fragment, f) +} +func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) { + o.inlineFragment = append(o.inlineFragment, f) +} +func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) { + o.fragmentSpread = append(o.fragmentSpread, f) +} +func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) { + o.directive = append(o.directive, f) +} +func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) { + o.directiveList = append(o.directiveList, f) +} +func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) { + o.value = append(o.value, f) +} +func (o *Events) OnVariable(f func(walker *Walker, variable *ast.VariableDefinition)) { + o.variable = append(o.variable, f) +} + +func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) { + w := Walker{ + Observers: observers, + Schema: schema, + Document: document, + } + + w.walk() +} + +type Walker struct { + Context context.Context + Observers *Events + Schema *ast.Schema + Document *ast.QueryDocument + + validatedFragmentSpreads map[string]bool + CurrentOperation *ast.OperationDefinition +} + +func (w *Walker) walk() { + for _, child := range w.Document.Operations { + w.validatedFragmentSpreads = make(map[string]bool) + w.walkOperation(child) + } + for _, child := range w.Document.Fragments { + w.validatedFragmentSpreads = make(map[string]bool) + w.walkFragment(child) + } +} + +func (w *Walker) walkOperation(operation *ast.OperationDefinition) { + w.CurrentOperation = 
operation + for _, varDef := range operation.VariableDefinitions { + varDef.Definition = w.Schema.Types[varDef.Type.Name()] + for _, v := range w.Observers.variable { + v(w, varDef) + } + if varDef.DefaultValue != nil { + varDef.DefaultValue.ExpectedType = varDef.Type + varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()] + } + } + + var def *ast.Definition + var loc ast.DirectiveLocation + switch operation.Operation { + case ast.Query, "": + def = w.Schema.Query + loc = ast.LocationQuery + case ast.Mutation: + def = w.Schema.Mutation + loc = ast.LocationMutation + case ast.Subscription: + def = w.Schema.Subscription + loc = ast.LocationSubscription + } + + for _, varDef := range operation.VariableDefinitions { + if varDef.DefaultValue != nil { + w.walkValue(varDef.DefaultValue) + } + w.walkDirectives(varDef.Definition, varDef.Directives, ast.LocationVariableDefinition) + } + + w.walkDirectives(def, operation.Directives, loc) + w.walkSelectionSet(def, operation.SelectionSet) + + for _, v := range w.Observers.operationVisitor { + v(w, operation) + } + w.CurrentOperation = nil +} + +func (w *Walker) walkFragment(it *ast.FragmentDefinition) { + def := w.Schema.Types[it.TypeCondition] + + it.Definition = def + + w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition) + w.walkSelectionSet(def, it.SelectionSet) + + for _, v := range w.Observers.fragment { + v(w, it) + } +} + +func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) { + for _, dir := range directives { + def := w.Schema.Directives[dir.Name] + dir.Definition = def + dir.ParentDefinition = parentDef + dir.Location = location + + for _, arg := range dir.Arguments { + var argDef *ast.ArgumentDefinition + if def != nil { + argDef = def.Arguments.ForName(arg.Name) + } + + w.walkArgument(argDef, arg) + } + + for _, v := range w.Observers.directive { + v(w, dir) + } + } + + for _, v := range w.Observers.directiveList { + v(w, directives) + } +} + +func (w *Walker) walkValue(value *ast.Value) { + if value.Kind == ast.Variable && w.CurrentOperation != nil { + value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw) + if value.VariableDefinition != nil { + value.VariableDefinition.Used = true + } + } + + if value.Kind == ast.ObjectValue { + for _, child := range value.Children { + if value.Definition != nil { + fieldDef := value.Definition.Fields.ForName(child.Name) + if fieldDef != nil { + child.Value.ExpectedType = fieldDef.Type + child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()] + } + } + w.walkValue(child.Value) + } + } + + if value.Kind == ast.ListValue { + for _, child := range value.Children { + if value.ExpectedType != nil && value.ExpectedType.Elem != nil { + child.Value.ExpectedType = value.ExpectedType.Elem + child.Value.Definition = value.Definition + } + + w.walkValue(child.Value) + } + } + + for _, v := range w.Observers.value { + v(w, value) + } +} + +func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) { + if argDef != nil { + arg.Value.ExpectedType = argDef.Type + arg.Value.Definition = w.Schema.Types[argDef.Type.Name()] + } + + w.walkValue(arg.Value) +} + +func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) { + for _, child := range it { + w.walkSelection(parentDef, child) + } +} + +func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) { + switch it := it.(type) { + case *ast.Field: + var def *ast.FieldDefinition + 
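+		// __typename is the introspection meta-field available on every
+		// object type; it has no entry in the schema, so a definition is
+		// synthesized for it below.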
if it.Name == "__typename" { + def = &ast.FieldDefinition{ + Name: "__typename", + Type: ast.NamedType("String", nil), + } + } else if parentDef != nil { + def = parentDef.Fields.ForName(it.Name) + } + + it.Definition = def + it.ObjectDefinition = parentDef + + var nextParentDef *ast.Definition + if def != nil { + nextParentDef = w.Schema.Types[def.Type.Name()] + } + + for _, arg := range it.Arguments { + var argDef *ast.ArgumentDefinition + if def != nil { + argDef = def.Arguments.ForName(arg.Name) + } + + w.walkArgument(argDef, arg) + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationField) + w.walkSelectionSet(nextParentDef, it.SelectionSet) + + for _, v := range w.Observers.field { + v(w, it) + } + + case *ast.InlineFragment: + it.ObjectDefinition = parentDef + + nextParentDef := parentDef + if it.TypeCondition != "" { + nextParentDef = w.Schema.Types[it.TypeCondition] + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment) + w.walkSelectionSet(nextParentDef, it.SelectionSet) + + for _, v := range w.Observers.inlineFragment { + v(w, it) + } + + case *ast.FragmentSpread: + def := w.Document.Fragments.ForName(it.Name) + it.Definition = def + it.ObjectDefinition = parentDef + + var nextParentDef *ast.Definition + if def != nil { + nextParentDef = w.Schema.Types[def.TypeCondition] + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread) + + if def != nil && !w.validatedFragmentSpreads[def.Name] { + // prevent inifinite recursion + w.validatedFragmentSpreads[def.Name] = true + w.walkSelectionSet(nextParentDef, def.SelectionSet) + } + + for _, v := range w.Observers.fragmentSpread { + v(w, it) + } + + default: + panic(fmt.Errorf("unsupported %T", it)) + } +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go new file mode 100644 index 00000000..9fa5aa19 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -0,0 +1,636 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +// This file defines utilities for working with source positions. + +import ( + "fmt" + "go/ast" + "go/token" + "sort" + + "golang.org/x/tools/internal/typeparams" +) + +// PathEnclosingInterval returns the node that encloses the source +// interval [start, end), and all its ancestors up to the AST root. +// +// The definition of "enclosing" used by this function considers +// additional whitespace abutting a node to be enclosed by it. +// In this example: +// +// z := x + y // add them +// <-A-> +// <----B-----> +// +// the ast.BinaryExpr(+) node is considered to enclose interval B +// even though its [Pos()..End()) is actually only interval A. +// This behaviour makes user interfaces more tolerant of imperfect +// input. +// +// This function treats tokens as nodes, though they are not included +// in the result. e.g. PathEnclosingInterval("+") returns the +// enclosing ast.BinaryExpr("x + y"). +// +// If start==end, the 1-char interval following start is used instead. +// +// The 'exact' result is true if the interval contains only path[0] +// and perhaps some adjacent whitespace. It is false if the interval +// overlaps multiple children of path[0], or if it contains only +// interior whitespace of path[0]. 
+// In this example: +// +// z := x + y // add them +// <--C--> <---E--> +// ^ +// D +// +// intervals C, D and E are inexact. C is contained by the +// z-assignment statement, because it spans three of its children (:=, +// x, +). So too is the 1-char interval D, because it contains only +// interior whitespace of the assignment. E is considered interior +// whitespace of the BlockStmt containing the assignment. +// +// The resulting path is never empty; it always contains at least the +// 'root' *ast.File. Ideally PathEnclosingInterval would reject +// intervals that lie wholly or partially outside the range of the +// file, but unfortunately ast.File records only the token.Pos of +// the 'package' keyword, but not of the start of the file itself. +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging + + // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). + var visit func(node ast.Node) bool + visit = func(node ast.Node) bool { + path = append(path, node) + + nodePos := node.Pos() + nodeEnd := node.End() + + // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging + + // Intersect [start, end) with interval of node. + if start < nodePos { + start = nodePos + } + if end > nodeEnd { + end = nodeEnd + } + + // Find sole child that contains [start, end). + children := childrenOf(node) + l := len(children) + for i, child := range children { + // [childPos, childEnd) is unaugmented interval of child. + childPos := child.Pos() + childEnd := child.End() + + // [augPos, augEnd) is whitespace-augmented interval of child. + augPos := childPos + augEnd := childEnd + if i > 0 { + augPos = children[i-1].End() // start of preceding whitespace + } + if i < l-1 { + nextChildPos := children[i+1].Pos() + // Does [start, end) lie between child and next child? + if start >= augEnd && end <= nextChildPos { + return false // inexact match + } + augEnd = nextChildPos // end of following whitespace + } + + // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", + // i, augPos, augEnd, start, end) // debugging + + // Does augmented child strictly contain [start, end)? + if augPos <= start && end <= augEnd { + _, isToken := child.(tokenNode) + return isToken || visit(child) + } + + // Does [start, end) overlap multiple children? + // i.e. left-augmented child contains start + // but LR-augmented child does not contain end. + if start < childEnd && end > augEnd { + break + } + } + + // No single child contained [start, end), + // so node is the result. Is it exact? + + // (It's tempting to put this condition before the + // child loop, but it gives the wrong result in the + // case where a node (e.g. ExprStmt) and its sole + // child have equal intervals.) + if start == nodePos && end == nodeEnd { + return true // exact match + } + + return false // inexact: overlaps multiple children + } + + // Ensure [start,end) is nondecreasing. + if start > end { + start, end = end, start + } + + if start < root.End() && end > root.Pos() { + if start == end { + end = start + 1 // empty interval => interval of size 1 + } + exact = visit(root) + + // Reverse the path: + for i, l := 0, len(path); i < l/2; i++ { + path[i], path[l-1-i] = path[l-1-i], path[i] + } + } else { + // Selection lies within whitespace preceding the + // first (or following the last) declaration in the file. + // The result nonetheless always includes the ast.File. 
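+		// For example (illustrative): a position inside a comment that
+		// precedes the package clause yields path = [*ast.File] with
+		// exact = false.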
+ path = append(path, root) + } + + return +} + +// tokenNode is a dummy implementation of ast.Node for a single token. +// They are used transiently by PathEnclosingInterval but never escape +// this package. +type tokenNode struct { + pos token.Pos + end token.Pos +} + +func (n tokenNode) Pos() token.Pos { + return n.pos +} + +func (n tokenNode) End() token.Pos { + return n.end +} + +func tok(pos token.Pos, len int) ast.Node { + return tokenNode{pos, pos + token.Pos(len)} +} + +// childrenOf returns the direct non-nil children of ast.Node n. +// It may include fake ast.Node implementations for bare tokens. +// it is not safe to call (e.g.) ast.Walk on such nodes. +func childrenOf(n ast.Node) []ast.Node { + var children []ast.Node + + // First add nodes for all true subtrees. + ast.Inspect(n, func(node ast.Node) bool { + if node == n { // push n + return true // recur + } + if node != nil { // push child + children = append(children, node) + } + return false // no recursion + }) + + // Then add fake Nodes for bare tokens. + switch n := n.(type) { + case *ast.ArrayType: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Elt.End(), len("]"))) + + case *ast.AssignStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.BasicLit: + children = append(children, + tok(n.ValuePos, len(n.Value))) + + case *ast.BinaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.BlockStmt: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("}"))) + + case *ast.BranchStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.CallExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + if n.Ellipsis != 0 { + children = append(children, tok(n.Ellipsis, len("..."))) + } + + case *ast.CaseClause: + if n.List == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.ChanType: + switch n.Dir { + case ast.RECV: + children = append(children, tok(n.Begin, len("<-chan"))) + case ast.SEND: + children = append(children, tok(n.Begin, len("chan<-"))) + case ast.RECV | ast.SEND: + children = append(children, tok(n.Begin, len("chan"))) + } + + case *ast.CommClause: + if n.Comm == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.Comment: + // nop + + case *ast.CommentGroup: + // nop + + case *ast.CompositeLit: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("{"))) + + case *ast.DeclStmt: + // nop + + case *ast.DeferStmt: + children = append(children, + tok(n.Defer, len("defer"))) + + case *ast.Ellipsis: + children = append(children, + tok(n.Ellipsis, len("..."))) + + case *ast.EmptyStmt: + // nop + + case *ast.ExprStmt: + // nop + + case *ast.Field: + // TODO(adonovan): Field.{Doc,Comment,Tag}? + + case *ast.FieldList: + children = append(children, + tok(n.Opening, len("(")), // or len("[") + tok(n.Closing, len(")"))) // or len("]") + + case *ast.File: + // TODO test: Doc + children = append(children, + tok(n.Package, len("package"))) + + case *ast.ForStmt: + children = append(children, + tok(n.For, len("for"))) + + case *ast.FuncDecl: + // TODO(adonovan): FuncDecl.Comment? 
+ + // Uniquely, FuncDecl breaks the invariant that + // preorder traversal yields tokens in lexical order: + // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. + // + // As a workaround, we inline the case for FuncType + // here and order things correctly. + // + children = nil // discard ast.Walk(FuncDecl) info subtrees + children = append(children, tok(n.Type.Func, len("func"))) + if n.Recv != nil { + children = append(children, n.Recv) + } + children = append(children, n.Name) + if tparams := typeparams.ForFuncType(n.Type); tparams != nil { + children = append(children, tparams) + } + if n.Type.Params != nil { + children = append(children, n.Type.Params) + } + if n.Type.Results != nil { + children = append(children, n.Type.Results) + } + if n.Body != nil { + children = append(children, n.Body) + } + + case *ast.FuncLit: + // nop + + case *ast.FuncType: + if n.Func != 0 { + children = append(children, + tok(n.Func, len("func"))) + } + + case *ast.GenDecl: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + if n.Lparen != 0 { + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + } + + case *ast.GoStmt: + children = append(children, + tok(n.Go, len("go"))) + + case *ast.Ident: + children = append(children, + tok(n.NamePos, len(n.Name))) + + case *ast.IfStmt: + children = append(children, + tok(n.If, len("if"))) + + case *ast.ImportSpec: + // TODO(adonovan): ImportSpec.{Doc,EndPos}? + + case *ast.IncDecStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.IndexExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *typeparams.IndexListExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.InterfaceType: + children = append(children, + tok(n.Interface, len("interface"))) + + case *ast.KeyValueExpr: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.LabeledStmt: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.MapType: + children = append(children, + tok(n.Map, len("map"))) + + case *ast.ParenExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.RangeStmt: + children = append(children, + tok(n.For, len("for")), + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.ReturnStmt: + children = append(children, + tok(n.Return, len("return"))) + + case *ast.SelectStmt: + children = append(children, + tok(n.Select, len("select"))) + + case *ast.SelectorExpr: + // nop + + case *ast.SendStmt: + children = append(children, + tok(n.Arrow, len("<-"))) + + case *ast.SliceExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.StarExpr: + children = append(children, tok(n.Star, len("*"))) + + case *ast.StructType: + children = append(children, tok(n.Struct, len("struct"))) + + case *ast.SwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.TypeAssertExpr: + children = append(children, + tok(n.Lparen-1, len(".")), + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.TypeSpec: + // TODO(adonovan): TypeSpec.{Doc,Comment}? + + case *ast.TypeSwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.UnaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.ValueSpec: + // TODO(adonovan): ValueSpec.{Doc,Comment}? 
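+
+	// Illustrative note (not part of the upstream source): for the
+	// expression `x + y`, this switch adds a fake node for the bare "+"
+	// token, so childrenOf(*ast.BinaryExpr) yields the equivalent of
+	// [Ident(x), tokenNode("+"), Ident(y)] once sorted by position.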
+ + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + // nop + } + + // TODO(adonovan): opt: merge the logic of ast.Inspect() into + // the switch above so we can make interleaved callbacks for + // both Nodes and Tokens in the right order and avoid the need + // to sort. + sort.Sort(byPos(children)) + + return children +} + +type byPos []ast.Node + +func (sl byPos) Len() int { + return len(sl) +} +func (sl byPos) Less(i, j int) bool { + return sl[i].Pos() < sl[j].Pos() +} +func (sl byPos) Swap(i, j int) { + sl[i], sl[j] = sl[j], sl[i] +} + +// NodeDescription returns a description of the concrete type of n suitable +// for a user interface. +// +// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, +// StarExpr) we could be much more specific given the path to the AST +// root. Perhaps we should do that. +func NodeDescription(n ast.Node) string { + switch n := n.(type) { + case *ast.ArrayType: + return "array type" + case *ast.AssignStmt: + return "assignment" + case *ast.BadDecl: + return "bad declaration" + case *ast.BadExpr: + return "bad expression" + case *ast.BadStmt: + return "bad statement" + case *ast.BasicLit: + return "basic literal" + case *ast.BinaryExpr: + return fmt.Sprintf("binary %s operation", n.Op) + case *ast.BlockStmt: + return "block" + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + return "break statement" + case token.CONTINUE: + return "continue statement" + case token.GOTO: + return "goto statement" + case token.FALLTHROUGH: + return "fall-through statement" + } + case *ast.CallExpr: + if len(n.Args) == 1 && !n.Ellipsis.IsValid() { + return "function call (or conversion)" + } + return "function call" + case *ast.CaseClause: + return "case clause" + case *ast.ChanType: + return "channel type" + case *ast.CommClause: + return "communication clause" + case *ast.Comment: + return "comment" + case *ast.CommentGroup: + return "comment group" + case *ast.CompositeLit: + return "composite literal" + case *ast.DeclStmt: + return NodeDescription(n.Decl) + " statement" + case *ast.DeferStmt: + return "defer statement" + case *ast.Ellipsis: + return "ellipsis" + case *ast.EmptyStmt: + return "empty statement" + case *ast.ExprStmt: + return "expression statement" + case *ast.Field: + // Can be any of these: + // struct {x, y int} -- struct field(s) + // struct {T} -- anon struct field + // interface {I} -- interface embedding + // interface {f()} -- interface method + // func (A) func(B) C -- receiver, param(s), result(s) + return "field/method/parameter" + case *ast.FieldList: + return "field/method/parameter list" + case *ast.File: + return "source file" + case *ast.ForStmt: + return "for loop" + case *ast.FuncDecl: + return "function declaration" + case *ast.FuncLit: + return "function literal" + case *ast.FuncType: + return "function type" + case *ast.GenDecl: + switch n.Tok { + case token.IMPORT: + return "import declaration" + case token.CONST: + return "constant declaration" + case token.TYPE: + return "type declaration" + case token.VAR: + return "variable declaration" + } + case *ast.GoStmt: + return "go statement" + case *ast.Ident: + return "identifier" + case *ast.IfStmt: + return "if statement" + case *ast.ImportSpec: + return "import specification" + case *ast.IncDecStmt: + if n.Tok == token.INC { + return "increment statement" + } + return "decrement statement" + case *ast.IndexExpr: + return "index expression" + case *typeparams.IndexListExpr: + return "index list expression" + case *ast.InterfaceType: + return "interface type" + case 
*ast.KeyValueExpr: + return "key/value association" + case *ast.LabeledStmt: + return "statement label" + case *ast.MapType: + return "map type" + case *ast.Package: + return "package" + case *ast.ParenExpr: + return "parenthesized " + NodeDescription(n.X) + case *ast.RangeStmt: + return "range loop" + case *ast.ReturnStmt: + return "return statement" + case *ast.SelectStmt: + return "select statement" + case *ast.SelectorExpr: + return "selector" + case *ast.SendStmt: + return "channel send" + case *ast.SliceExpr: + return "slice expression" + case *ast.StarExpr: + return "*-operation" // load/store expr or pointer type + case *ast.StructType: + return "struct type" + case *ast.SwitchStmt: + return "switch statement" + case *ast.TypeAssertExpr: + return "type assertion" + case *ast.TypeSpec: + return "type specification" + case *ast.TypeSwitchStmt: + return "type switch" + case *ast.UnaryExpr: + return fmt.Sprintf("unary %s operation", n.Op) + case *ast.ValueSpec: + return "value specification" + + } + panic(fmt.Sprintf("unexpected node type: %T", n)) +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go new file mode 100644 index 00000000..18d1adb0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -0,0 +1,485 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astutil contains common utilities for working with the Go AST. +package astutil // import "golang.org/x/tools/go/ast/astutil" + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" +) + +// AddImport adds the import path to the file f, if absent. +func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) { + return AddNamedImport(fset, f, "", path) +} + +// AddNamedImport adds the import with the given name and path to the file f, if absent. +// If name is not empty, it is used to rename the import. +// +// For example, calling +// +// AddNamedImport(fset, f, "pathpkg", "path") +// +// adds +// +// import pathpkg "path" +func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) { + if imports(f, name, path) { + return false + } + + newImport := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(path), + }, + } + if name != "" { + newImport.Name = &ast.Ident{Name: name} + } + + // Find an import decl to add to. + // The goal is to find an existing import + // whose import path has the longest shared + // prefix with path. + var ( + bestMatch = -1 // length of longest shared prefix + lastImport = -1 // index in f.Decls of the file's final import decl + impDecl *ast.GenDecl // import decl containing the best match + impIndex = -1 // spec index in impDecl containing the best match + + isThirdPartyPath = isThirdParty(path) + ) + for i, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if ok && gen.Tok == token.IMPORT { + lastImport = i + // Do not add to import "C", to avoid disrupting the + // association with its doc comment, breaking cgo. + if declImports(gen, "C") { + continue + } + + // Match an empty import decl if that's all that is available. + if len(gen.Specs) == 0 && bestMatch == -1 { + impDecl = gen + } + + // Compute longest shared prefix with imports in this group and find best + // matched import spec. + // 1. Always prefer import spec with longest shared prefix. + // 2. 
While match length is 0, + // - for stdlib package: prefer first import spec. + // - for third party package: prefer first third party import spec. + // We cannot use last import spec as best match for third party package + // because grouped imports are usually placed last by goimports -local + // flag. + // See issue #19190. + seenAnyThirdParty := false + for j, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + p := importPath(impspec) + n := matchLen(p, path) + if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { + bestMatch = n + impDecl = gen + impIndex = j + } + seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) + } + } + } + + // If no import decl found, add one after the last import. + if impDecl == nil { + impDecl = &ast.GenDecl{ + Tok: token.IMPORT, + } + if lastImport >= 0 { + impDecl.TokPos = f.Decls[lastImport].End() + } else { + // There are no existing imports. + // Our new import, preceded by a blank line, goes after the package declaration + // and after the comment, if any, that starts on the same line as the + // package declaration. + impDecl.TokPos = f.Package + + file := fset.File(f.Package) + pkgLine := file.Line(f.Package) + for _, c := range f.Comments { + if file.Line(c.Pos()) > pkgLine { + break + } + // +2 for a blank line + impDecl.TokPos = c.End() + 2 + } + } + f.Decls = append(f.Decls, nil) + copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) + f.Decls[lastImport+1] = impDecl + } + + // Insert new import at insertAt. + insertAt := 0 + if impIndex >= 0 { + // insert after the found import + insertAt = impIndex + 1 + } + impDecl.Specs = append(impDecl.Specs, nil) + copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) + impDecl.Specs[insertAt] = newImport + pos := impDecl.Pos() + if insertAt > 0 { + // If there is a comment after an existing import, preserve the comment + // position by adding the new import after the comment. + if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { + pos = spec.Comment.End() + } else { + // Assign same position as the previous import, + // so that the sorter sees it as being in the same block. + pos = impDecl.Specs[insertAt-1].Pos() + } + } + if newImport.Name != nil { + newImport.Name.NamePos = pos + } + newImport.Path.ValuePos = pos + newImport.EndPos = pos + + // Clean up parens. impDecl contains at least one spec. + if len(impDecl.Specs) == 1 { + // Remove unneeded parens. + impDecl.Lparen = token.NoPos + } else if !impDecl.Lparen.IsValid() { + // impDecl needs parens added. + impDecl.Lparen = impDecl.Specs[0].Pos() + } + + f.Imports = append(f.Imports, newImport) + + if len(f.Decls) <= 1 { + return true + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) 
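+		// the declaration list shrank by one, so step back and revisit
+		// index i on the next iteration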
+ i-- + } + + return true +} + +func isThirdParty(importPath string) bool { + // Third party package import path usually contains "." (".com", ".org", ...) + // This logic is taken from golang.org/x/tools/imports package. + return strings.Contains(importPath, ".") +} + +// DeleteImport deletes the import path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. +func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { + return DeleteNamedImport(fset, f, "", path) +} + +// DeleteNamedImport deletes the import with the given name and path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. +func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { + var delspecs []*ast.ImportSpec + var delcomments []*ast.CommentGroup + + // Find the import nodes that import path, if any. + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT { + continue + } + for j := 0; j < len(gen.Specs); j++ { + spec := gen.Specs[j] + impspec := spec.(*ast.ImportSpec) + if importName(impspec) != name || importPath(impspec) != path { + continue + } + + // We found an import spec that imports path. + // Delete it. + delspecs = append(delspecs, impspec) + deleted = true + copy(gen.Specs[j:], gen.Specs[j+1:]) + gen.Specs = gen.Specs[:len(gen.Specs)-1] + + // If this was the last import spec in this decl, + // delete the decl, too. + if len(gen.Specs) == 0 { + copy(f.Decls[i:], f.Decls[i+1:]) + f.Decls = f.Decls[:len(f.Decls)-1] + i-- + break + } else if len(gen.Specs) == 1 { + if impspec.Doc != nil { + delcomments = append(delcomments, impspec.Doc) + } + if impspec.Comment != nil { + delcomments = append(delcomments, impspec.Comment) + } + for _, cg := range f.Comments { + // Found comment on the same line as the import spec. + if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { + delcomments = append(delcomments, cg) + break + } + } + + spec := gen.Specs[0].(*ast.ImportSpec) + + // Move the documentation right after the import decl. + if spec.Doc != nil { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + } + for _, cg := range f.Comments { + if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + break + } + } + } + if j > 0 { + lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) + lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line + line := fset.PositionFor(impspec.Path.ValuePos, false).Line + + // We deleted an entry but now there may be + // a blank line-sized hole where the import was. + if line-lastLine > 1 || !gen.Rparen.IsValid() { + // There was a blank line immediately preceding the deleted import, + // so there's no need to close the hole. The right parenthesis is + // invalid after AddImport to an import statement without parenthesis. + // Do nothing. + } else if line != fset.File(gen.Rparen).LineCount() { + // There was no blank line. Close the hole. + fset.File(gen.Rparen).MergeLine(line) + } + } + j-- + } + } + + // Delete imports from f.Imports. 
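+	// Every spec removed from f.Decls above was recorded in delspecs; the
+	// loop below drops the matching entries from f.Imports so both views of
+	// the import set stay consistent. Anything left in delspecs afterwards
+	// signals a mismatch between Decls and Imports, which is reported by the
+	// panic further down.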
+ for i := 0; i < len(f.Imports); i++ { + imp := f.Imports[i] + for j, del := range delspecs { + if imp == del { + copy(f.Imports[i:], f.Imports[i+1:]) + f.Imports = f.Imports[:len(f.Imports)-1] + copy(delspecs[j:], delspecs[j+1:]) + delspecs = delspecs[:len(delspecs)-1] + i-- + break + } + } + } + + // Delete comments from f.Comments. + for i := 0; i < len(f.Comments); i++ { + cg := f.Comments[i] + for j, del := range delcomments { + if cg == del { + copy(f.Comments[i:], f.Comments[i+1:]) + f.Comments = f.Comments[:len(f.Comments)-1] + copy(delcomments[j:], delcomments[j+1:]) + delcomments = delcomments[:len(delcomments)-1] + i-- + break + } + } + } + + if len(delspecs) > 0 { + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) + } + + return +} + +// RewriteImport rewrites any import of path oldPath to path newPath. +func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { + for _, imp := range f.Imports { + if importPath(imp) == oldPath { + rewrote = true + // record old End, because the default is to compute + // it using the length of imp.Path.Value. + imp.EndPos = imp.End() + imp.Path.Value = strconv.Quote(newPath) + } + } + return +} + +// UsesImport reports whether a given import is used. +func UsesImport(f *ast.File, path string) (used bool) { + spec := importSpec(f, path) + if spec == nil { + return + } + + name := spec.Name.String() + switch name { + case "": + // If the package name is not explicitly specified, + // make an educated guess. This is not guaranteed to be correct. + lastSlash := strings.LastIndex(path, "/") + if lastSlash == -1 { + name = path + } else { + name = path[lastSlash+1:] + } + case "_", ".": + // Not sure if this import is used - err on the side of caution. + return true + } + + ast.Walk(visitFn(func(n ast.Node) { + sel, ok := n.(*ast.SelectorExpr) + if ok && isTopName(sel.X, name) { + used = true + } + }), f) + + return +} + +type visitFn func(node ast.Node) + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + fn(node) + return fn +} + +// imports reports whether f has an import with the specified name and path. +func imports(f *ast.File, name, path string) bool { + for _, s := range f.Imports { + if importName(s) == name && importPath(s) == path { + return true + } + } + return false +} + +// importSpec returns the import spec if f imports path, +// or nil otherwise. +func importSpec(f *ast.File, path string) *ast.ImportSpec { + for _, s := range f.Imports { + if importPath(s) == path { + return s + } + } + return nil +} + +// importName returns the name of s, +// or "" if the import is not named. +func importName(s *ast.ImportSpec) string { + if s.Name == nil { + return "" + } + return s.Name.Name +} + +// importPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err != nil { + return "" + } + return t +} + +// declImports reports whether gen contains an import of path. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +// matchLen returns the length of the longest path segment prefix shared by x and y. 
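+// For example, matchLen("github.com/foo/bar", "github.com/foo/baz") is 2:
+// the shared prefix "github.com/foo/ba" contains two '/' separators.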
+func matchLen(x, y string) int { + n := 0 + for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { + if x[i] == '/' { + n++ + } + } + return n +} + +// isTopName returns true if n is a top-level unresolved identifier with the given name. +func isTopName(n ast.Expr, name string) bool { + id, ok := n.(*ast.Ident) + return ok && id.Name == name && id.Obj == nil +} + +// Imports returns the file imports grouped by paragraph. +func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { + var groups [][]*ast.ImportSpec + + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.IMPORT { + break + } + + group := []*ast.ImportSpec{} + + var lastLine int + for _, spec := range genDecl.Specs { + importSpec := spec.(*ast.ImportSpec) + pos := importSpec.Path.ValuePos + line := fset.Position(pos).Line + if lastLine > 0 && pos > 0 && line-lastLine > 1 { + groups = append(groups, group) + group = []*ast.ImportSpec{} + } + group = append(group, importSpec) + lastLine = line + } + groups = append(groups, group) + } + + return groups +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go new file mode 100644 index 00000000..f430b21b --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -0,0 +1,488 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "reflect" + "sort" + + "golang.org/x/tools/internal/typeparams" +) + +// An ApplyFunc is invoked by Apply for each node n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +// +// Only fields that refer to AST nodes are considered children; +// i.e., token.Pos, Scopes, Objects, and fields of basic types +// (strings, etc.) are ignored. +// +// Children are traversed in the order in which they appear in the +// respective node's struct definition. A package's files are +// traversed in the filenames' alphabetical order. +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + parent := &struct{ ast.Node }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Node + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Node", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. 
+// +// If p is a variable of type and value of the current parent node +// c.Parent(), and f is the field identifier with name c.Name(), +// the following invariants hold: +// +// p.f == c.Node() if c.Index() < 0 +// p.f[c.Index()] == c.Node() if c.Index() >= 0 +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. +type Cursor struct { + parent ast.Node + name string + iter *iterator // valid if non-nil + node ast.Node +} + +// Node returns the current Node. +func (c *Cursor) Node() ast.Node { return c.node } + +// Parent returns the parent of the current Node. +func (c *Cursor) Parent() ast.Node { return c.parent } + +// Name returns the name of the parent Node field that contains the current Node. +// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns +// the filename for the current Node. +func (c *Cursor) Name() string { return c.name } + +// Index reports the index >= 0 of the current Node in the slice of Nodes that +// contains it, or a value < 0 if the current Node is not part of a slice. +// The index of the current node changes if InsertBefore is called while +// processing the current node. +func (c *Cursor) Index() int { + if c.iter != nil { + return c.iter.index + } + return -1 +} + +// field returns the current node's parent field value. +func (c *Cursor) field() reflect.Value { + return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) +} + +// Replace replaces the current Node with n. +// The replacement node is not walked by Apply. +func (c *Cursor) Replace(n ast.Node) { + if _, ok := c.node.(*ast.File); ok { + file, ok := n.(*ast.File) + if !ok { + panic("attempt to replace *ast.File with non-*ast.File") + } + c.parent.(*ast.Package).Files[c.name] = file + return + } + + v := c.field() + if i := c.Index(); i >= 0 { + v = v.Index(i) + } + v.Set(reflect.ValueOf(n)) +} + +// Delete deletes the current Node from its containing slice. +// If the current Node is not part of a slice, Delete panics. +// As a special case, if the current node is a package file, +// Delete removes it from the package's Files map. +func (c *Cursor) Delete() { + if _, ok := c.node.(*ast.File); ok { + delete(c.parent.(*ast.Package).Files, c.name) + return + } + + i := c.Index() + if i < 0 { + panic("Delete node not contained in slice") + } + v := c.field() + l := v.Len() + reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) + v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) + v.SetLen(l - 1) + c.iter.step-- +} + +// InsertAfter inserts n after the current Node in its containing slice. +// If the current Node is not part of a slice, InsertAfter panics. +// Apply does not walk n. +func (c *Cursor) InsertAfter(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertAfter node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) + v.Index(i + 1).Set(reflect.ValueOf(n)) + c.iter.step++ +} + +// InsertBefore inserts n before the current Node in its containing slice. +// If the current Node is not part of a slice, InsertBefore panics. +// Apply will not walk n. 
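+//
+// A minimal illustrative sketch (not part of the upstream source): insert an
+// empty statement before every return found during an Apply traversal. The
+// Index() >= 0 check guards the requirement that the node live in a slice:
+//
+//	astutil.Apply(root, func(c *astutil.Cursor) bool {
+//		if _, ok := c.Node().(*ast.ReturnStmt); ok && c.Index() >= 0 {
+//			c.InsertBefore(&ast.EmptyStmt{})
+//		}
+//		return true
+//	}, nil)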
+func (c *Cursor) InsertBefore(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertBefore node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) + v.Index(i).Set(reflect.ValueOf(n)) + c.iter.index++ +} + +// application carries all the shared data so we can pass it around cheaply. +type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { + // convert typed nil into untyped nil + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + n = nil + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // walk children + // (the order of the cases matches the order of the corresponding node types in go/ast) + switch n := n.(type) { + case nil: + // nothing to do + + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + if n != nil { + a.applyList(n, "List") + } + + case *ast.Field: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.FieldList: + a.applyList(n, "List") + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + a.apply(n, "Elt", nil, n.Elt) + + case *ast.FuncLit: + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + case *ast.CompositeLit: + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Elts") + + case *ast.ParenExpr: + a.apply(n, "X", nil, n.X) + + case *ast.SelectorExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Sel", nil, n.Sel) + + case *ast.IndexExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Index", nil, n.Index) + + case *typeparams.IndexListExpr: + a.apply(n, "X", nil, n.X) + a.applyList(n, "Indices") + + case *ast.SliceExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Low", nil, n.Low) + a.apply(n, "High", nil, n.High) + a.apply(n, "Max", nil, n.Max) + + case *ast.TypeAssertExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Type", nil, n.Type) + + case *ast.CallExpr: + a.apply(n, "Fun", nil, n.Fun) + a.applyList(n, "Args") + + case *ast.StarExpr: + a.apply(n, "X", nil, n.X) + + case *ast.UnaryExpr: + a.apply(n, "X", nil, n.X) + + case *ast.BinaryExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Y", nil, n.Y) + + case *ast.KeyValueExpr: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + // Types + case *ast.ArrayType: + a.apply(n, "Len", nil, n.Len) + a.apply(n, "Elt", nil, n.Elt) + + case *ast.StructType: + a.apply(n, "Fields", nil, n.Fields) + + case *ast.FuncType: + if tparams := typeparams.ForFuncType(n); tparams != nil { + a.apply(n, "TypeParams", nil, tparams) + } + a.apply(n, "Params", nil, n.Params) + a.apply(n, "Results", nil, n.Results) + + case *ast.InterfaceType: + a.apply(n, "Methods", nil, n.Methods) + + case *ast.MapType: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + case *ast.ChanType: + a.apply(n, "Value", nil, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + a.apply(n, "Decl", nil, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + 
a.apply(n, "Label", nil, n.Label) + a.apply(n, "Stmt", nil, n.Stmt) + + case *ast.ExprStmt: + a.apply(n, "X", nil, n.X) + + case *ast.SendStmt: + a.apply(n, "Chan", nil, n.Chan) + a.apply(n, "Value", nil, n.Value) + + case *ast.IncDecStmt: + a.apply(n, "X", nil, n.X) + + case *ast.AssignStmt: + a.applyList(n, "Lhs") + a.applyList(n, "Rhs") + + case *ast.GoStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.DeferStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.ReturnStmt: + a.applyList(n, "Results") + + case *ast.BranchStmt: + a.apply(n, "Label", nil, n.Label) + + case *ast.BlockStmt: + a.applyList(n, "List") + + case *ast.IfStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Body", nil, n.Body) + a.apply(n, "Else", nil, n.Else) + + case *ast.CaseClause: + a.applyList(n, "List") + a.applyList(n, "Body") + + case *ast.SwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Body", nil, n.Body) + + case *ast.TypeSwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Assign", nil, n.Assign) + a.apply(n, "Body", nil, n.Body) + + case *ast.CommClause: + a.apply(n, "Comm", nil, n.Comm) + a.applyList(n, "Body") + + case *ast.SelectStmt: + a.apply(n, "Body", nil, n.Body) + + case *ast.ForStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Post", nil, n.Post) + a.apply(n, "Body", nil, n.Body) + + case *ast.RangeStmt: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + a.apply(n, "X", nil, n.X) + a.apply(n, "Body", nil, n.Body) + + // Declarations + case *ast.ImportSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Path", nil, n.Path) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.ValueSpec: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Values") + a.apply(n, "Comment", nil, n.Comment) + + case *ast.TypeSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + if tparams := typeparams.ForTypeSpec(n); tparams != nil { + a.apply(n, "TypeParams", nil, tparams) + } + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Specs") + + case *ast.FuncDecl: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Recv", nil, n.Recv) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + // Files and packages + case *ast.File: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.applyList(n, "Decls") + // Don't walk n.Comments; they have either been walked already if + // they are Doc comments, or they can be easily walked explicitly. + + case *ast.Package: + // collect and sort names for reproducible behavior + var names []string + for name := range n.Files { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + a.apply(n, name, nil, n.Files[name]) + } + + default: + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + + a.cursor = saved +} + +// An iterator controls iteration over a slice of nodes. 
+type iterator struct { + index, step int +} + +func (a *application) applyList(parent ast.Node, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x ast.Node + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(ast.Node) + } + + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go new file mode 100644 index 00000000..919d5305 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -0,0 +1,18 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import "go/ast" + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/imports/forward.go b/vendor/golang.org/x/tools/imports/forward.go new file mode 100644 index 00000000..d2547c74 --- /dev/null +++ b/vendor/golang.org/x/tools/imports/forward.go @@ -0,0 +1,77 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports // import "golang.org/x/tools/imports" + +import ( + "io/ioutil" + "log" + + "golang.org/x/tools/internal/gocommand" + intimp "golang.org/x/tools/internal/imports" +) + +// Options specifies options for processing files. +type Options struct { + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Debug controls verbose logging. +var Debug = false + +// LocalPrefix is a comma-separated string of import path prefixes, which, if +// set, instructs Process to sort the import paths with the given prefixes +// into another group after 3rd-party packages. +var LocalPrefix string + +// Process formats and adjusts imports for the provided file. +// If opt is nil the defaults are used, and if src is nil the source +// is read from the filesystem. +// +// Note that filename's directory influences which imports can be chosen, +// so it is important that filename be accurate. +// To process data “as if” it were in filename, pass the data as a non-nil src. 
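+//
+// A minimal illustrative call (hypothetical source text, default options):
+//
+//	src := []byte("package main\n\nfunc main() { fmt.Println(42) }\n")
+//	out, err := imports.Process("main.go", src, nil)
+//	// On success, out holds gofmt-style output with "fmt" added to the imports.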
+func Process(filename string, src []byte, opt *Options) ([]byte, error) {
+	var err error
+	if src == nil {
+		src, err = ioutil.ReadFile(filename)
+		if err != nil {
+			return nil, err
+		}
+	}
+	if opt == nil {
+		opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
+	}
+	intopt := &intimp.Options{
+		Env: &intimp.ProcessEnv{
+			GocmdRunner: &gocommand.Runner{},
+		},
+		LocalPrefix: LocalPrefix,
+		AllErrors:   opt.AllErrors,
+		Comments:    opt.Comments,
+		FormatOnly:  opt.FormatOnly,
+		Fragment:    opt.Fragment,
+		TabIndent:   opt.TabIndent,
+		TabWidth:    opt.TabWidth,
+	}
+	if Debug {
+		intopt.Env.Logf = log.Printf
+	}
+	return intimp.Process(filename, src, intopt)
+}
+
+// VendorlessPath returns the devendorized version of the import path ipath.
+// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+func VendorlessPath(ipath string) string {
+	return intimp.VendorlessPath(ipath)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
new file mode 100644
index 00000000..798fe599
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
@@ -0,0 +1,196 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fastwalk provides a faster version of filepath.Walk for file system
+// scanning tools.
+package fastwalk
+
+import (
+	"errors"
+	"os"
+	"path/filepath"
+	"runtime"
+	"sync"
+)
+
+// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
+// symlink named in the call may be traversed.
+var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
+
+// ErrSkipFiles is used as a return value from WalkFuncs to indicate that the
+// callback should not be called for any other files in the current directory.
+// Child directories will still be traversed.
+var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
+
+// Walk is a faster implementation of filepath.Walk.
+//
+// filepath.Walk's design necessarily calls os.Lstat on each file,
+// even if the caller needs less info.
+// Many tools need only the type of each file.
+// On some platforms, this information is provided directly by the readdir
+// system call, avoiding the need to stat each file individually.
+// fastwalk_unix.go contains a fork of the syscall routines.
+//
+// See golang.org/issue/16399
+//
+// Walk walks the file tree rooted at root, calling walkFn for
+// each file or directory in the tree, including root.
+//
+// If walkFn returns filepath.SkipDir, the directory is skipped.
+//
+// Unlike filepath.Walk:
+//   - file stat calls must be done by the user.
+//     The only provided metadata is the file type, which does not include
+//     any permission bits.
+//   - multiple goroutines stat the filesystem concurrently. The provided
+//     walkFn must be safe for concurrent use.
+//   - fastWalk can follow symlinks if walkFn returns the TraverseLink
+//     sentinel error. It is the walkFn's responsibility to prevent
+//     fastWalk from going into symlink cycles.
+func Walk(root string, walkFn func(path string, typ os.FileMode) error) error {
+	// TODO(bradfitz): make numWorkers configurable? We used a
+	// minimum of 4 to give the kernel more info about multiple
+	// things we want, in hopes its I/O scheduling can take
+	// advantage of that. Hopefully most are in cache. Maybe 4 is
+	// even too low of a minimum. Profile more.
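+	// Fan the walk out to a pool of workers: the loop below feeds directories
+	// to workc, workers report completion (or errors) on resc, and any
+	// subdirectories they discover arrive on enqueuec for redistribution.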
+ numWorkers := 4 + if n := runtime.NumCPU(); n > numWorkers { + numWorkers = n + } + + // Make sure to wait for all workers to finish, otherwise + // walkFn could still be called after returning. This Wait call + // runs after close(e.donec) below. + var wg sync.WaitGroup + defer wg.Wait() + + w := &walker{ + fn: walkFn, + enqueuec: make(chan walkItem, numWorkers), // buffered for performance + workc: make(chan walkItem, numWorkers), // buffered for performance + donec: make(chan struct{}), + + // buffered for correctness & not leaking goroutines: + resc: make(chan error, numWorkers), + } + defer close(w.donec) + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go w.doWork(&wg) + } + todo := []walkItem{{dir: root}} + out := 0 + for { + workc := w.workc + var workItem walkItem + if len(todo) == 0 { + workc = nil + } else { + workItem = todo[len(todo)-1] + } + select { + case workc <- workItem: + todo = todo[:len(todo)-1] + out++ + case it := <-w.enqueuec: + todo = append(todo, it) + case err := <-w.resc: + out-- + if err != nil { + return err + } + if out == 0 && len(todo) == 0 { + // It's safe to quit here, as long as the buffered + // enqueue channel isn't also readable, which might + // happen if the worker sends both another unit of + // work and its result before the other select was + // scheduled and both w.resc and w.enqueuec were + // readable. + select { + case it := <-w.enqueuec: + todo = append(todo, it) + default: + return nil + } + } + } + } +} + +// doWork reads directories as instructed (via workc) and runs the +// user's callback function. +func (w *walker) doWork(wg *sync.WaitGroup) { + defer wg.Done() + for { + select { + case <-w.donec: + return + case it := <-w.workc: + select { + case <-w.donec: + return + case w.resc <- w.walk(it.dir, !it.callbackDone): + } + } + } +} + +type walker struct { + fn func(path string, typ os.FileMode) error + + donec chan struct{} // closed on fastWalk's return + workc chan walkItem // to workers + enqueuec chan walkItem // from workers + resc chan error // from workers +} + +type walkItem struct { + dir string + callbackDone bool // callback already called; don't do it again +} + +func (w *walker) enqueue(it walkItem) { + select { + case w.enqueuec <- it: + case <-w.donec: + } +} + +func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error { + joined := dirName + string(os.PathSeparator) + baseName + if typ == os.ModeDir { + w.enqueue(walkItem{dir: joined}) + return nil + } + + err := w.fn(joined, typ) + if typ == os.ModeSymlink { + if err == ErrTraverseLink { + // Set callbackDone so we don't call it twice for both the + // symlink-as-symlink and the symlink-as-directory later: + w.enqueue(walkItem{dir: joined, callbackDone: true}) + return nil + } + if err == filepath.SkipDir { + // Permit SkipDir on symlinks too. + return nil + } + } + return err +} + +func (w *walker) walk(root string, runUserCallback bool) error { + if runUserCallback { + err := w.fn(root, os.ModeDir) + if err == filepath.SkipDir { + return nil + } + if err != nil { + return err + } + } + + return readDir(root, w.onDirEnt) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_darwin.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_darwin.go new file mode 100644 index 00000000..0ca55e0d --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_darwin.go @@ -0,0 +1,119 @@ +// Copyright 2022 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build darwin && cgo
+// +build darwin,cgo
+
+package fastwalk
+
+/*
+#include <dirent.h>
+
+// fastwalk_readdir_r wraps readdir_r so that we don't have to pass a dirent**
+// result pointer which triggers CGO's "Go pointer to Go pointer" check unless
+// we allocate the result dirent* with malloc.
+//
+// fastwalk_readdir_r returns 0 on success, -1 upon reaching the end of the
+// directory, or a positive error number to indicate failure.
+static int fastwalk_readdir_r(DIR *fd, struct dirent *entry) {
+	struct dirent *result;
+	int ret = readdir_r(fd, entry, &result);
+	if (ret == 0 && result == NULL) {
+		ret = -1; // EOF
+	}
+	return ret;
+}
+*/
+import "C"
+
+import (
+	"os"
+	"syscall"
+	"unsafe"
+)
+
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+	fd, err := openDir(dirName)
+	if err != nil {
+		return &os.PathError{Op: "opendir", Path: dirName, Err: err}
+	}
+	defer C.closedir(fd)
+
+	skipFiles := false
+	var dirent syscall.Dirent
+	for {
+		ret := int(C.fastwalk_readdir_r(fd, (*C.struct_dirent)(unsafe.Pointer(&dirent))))
+		if ret != 0 {
+			if ret == -1 {
+				break // EOF
+			}
+			if ret == int(syscall.EINTR) {
+				continue
+			}
+			return &os.PathError{Op: "readdir", Path: dirName, Err: syscall.Errno(ret)}
+		}
+		if dirent.Ino == 0 {
+			continue
+		}
+		typ := dtToType(dirent.Type)
+		if skipFiles && typ.IsRegular() {
+			continue
+		}
+		name := (*[len(syscall.Dirent{}.Name)]byte)(unsafe.Pointer(&dirent.Name))[:]
+		name = name[:dirent.Namlen]
+		for i, c := range name {
+			if c == 0 {
+				name = name[:i]
+				break
+			}
+		}
+		// Check for useless names before allocating a string.
+		if string(name) == "." || string(name) == ".." {
+			continue
+		}
+		if err := fn(dirName, string(name), typ); err != nil {
+			if err != ErrSkipFiles {
+				return err
+			}
+			skipFiles = true
+		}
+	}
+
+	return nil
+}
+
+func dtToType(typ uint8) os.FileMode {
+	switch typ {
+	case syscall.DT_BLK:
+		return os.ModeDevice
+	case syscall.DT_CHR:
+		return os.ModeDevice | os.ModeCharDevice
+	case syscall.DT_DIR:
+		return os.ModeDir
+	case syscall.DT_FIFO:
+		return os.ModeNamedPipe
+	case syscall.DT_LNK:
+		return os.ModeSymlink
+	case syscall.DT_REG:
+		return 0
+	case syscall.DT_SOCK:
+		return os.ModeSocket
+	}
+	return ^os.FileMode(0)
+}
+
+// openDir wraps opendir(3) and handles any EINTR errors. The returned *DIR
+// needs to be closed with closedir(3).
+func openDir(path string) (*C.DIR, error) {
+	name, err := syscall.BytePtrFromString(path)
+	if err != nil {
+		return nil, err
+	}
+	for {
+		fd, err := C.opendir((*C.char)(unsafe.Pointer(name)))
+		if err != syscall.EINTR {
+			return fd, err
+		}
+	}
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
new file mode 100644
index 00000000..d58595db
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
@@ -0,0 +1,14 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +//go:build freebsd || openbsd || netbsd +// +build freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Fileno) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go new file mode 100644 index 00000000..d3922890 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go @@ -0,0 +1,15 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (linux || (darwin && !cgo)) && !appengine +// +build linux darwin,!cgo +// +build !appengine + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return dirent.Ino +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go new file mode 100644 index 00000000..38a4db6a --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go @@ -0,0 +1,14 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (darwin && !cgo) || freebsd || openbsd || netbsd +// +build darwin,!cgo freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntNamlen(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Namlen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go new file mode 100644 index 00000000..c82e57df --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build linux && !appengine +// +build linux,!appengine + +package fastwalk + +import ( + "bytes" + "syscall" + "unsafe" +) + +func direntNamlen(dirent *syscall.Dirent) uint64 { + const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name)) + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + const nameBufLen = uint16(len(nameBuf)) + limit := dirent.Reclen - fixedHdr + if limit > nameBufLen { + limit = nameBufLen + } + nameLen := bytes.IndexByte(nameBuf[:limit], 0) + if nameLen < 0 { + panic("failed to find terminating 0 byte in dirent") + } + return uint64(nameLen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go new file mode 100644 index 00000000..085d3116 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go @@ -0,0 +1,38 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build appengine || (!linux && !darwin && !freebsd && !openbsd && !netbsd) +// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd + +package fastwalk + +import ( + "io/ioutil" + "os" +) + +// readDir calls fn for each directory entry in dirName. +// It does not descend into directories or follow symlinks. 
+// If fn returns a non-nil error, readDir returns with that error +// immediately. +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fis, err := ioutil.ReadDir(dirName) + if err != nil { + return err + } + skipFiles := false + for _, fi := range fis { + if fi.Mode().IsRegular() && skipFiles { + continue + } + if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil { + if err == ErrSkipFiles { + skipFiles = true + continue + } + return err + } + } + return nil +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go new file mode 100644 index 00000000..f12f1a73 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go @@ -0,0 +1,153 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (linux || freebsd || openbsd || netbsd || (darwin && !cgo)) && !appengine +// +build linux freebsd openbsd netbsd darwin,!cgo +// +build !appengine + +package fastwalk + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const blockSize = 8 << 10 + +// unknownFileMode is a sentinel (and bogus) os.FileMode +// value used to represent a syscall.DT_UNKNOWN Dirent.Type. +const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice + +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fd, err := open(dirName, 0, 0) + if err != nil { + return &os.PathError{Op: "open", Path: dirName, Err: err} + } + defer syscall.Close(fd) + + // The buffer must be at least a block long. + buf := make([]byte, blockSize) // stack-allocated; doesn't escape + bufp := 0 // starting read position in buf + nbuf := 0 // end valid data in buf + skipFiles := false + for { + if bufp >= nbuf { + bufp = 0 + nbuf, err = readDirent(fd, buf) + if err != nil { + return os.NewSyscallError("readdirent", err) + } + if nbuf <= 0 { + return nil + } + } + consumed, name, typ := parseDirEnt(buf[bufp:nbuf]) + bufp += consumed + if name == "" || name == "." || name == ".." { + continue + } + // Fallback for filesystems (like old XFS) that don't + // support Dirent.Type and have DT_UNKNOWN (0) there + // instead. + if typ == unknownFileMode { + fi, err := os.Lstat(dirName + "/" + name) + if err != nil { + // It got deleted in the meantime. + if os.IsNotExist(err) { + continue + } + return err + } + typ = fi.Mode() & os.ModeType + } + if skipFiles && typ.IsRegular() { + continue + } + if err := fn(dirName, name, typ); err != nil { + if err == ErrSkipFiles { + skipFiles = true + continue + } + return err + } + } +} + +func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) { + // golang.org/issue/37269 + dirent := &syscall.Dirent{} + copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(dirent))[:], buf) + if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v { + panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v)) + } + if len(buf) < int(dirent.Reclen) { + panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen)) + } + consumed = int(dirent.Reclen) + if direntInode(dirent) == 0 { // File absent in directory. 
+ return + } + switch dirent.Type { + case syscall.DT_REG: + typ = 0 + case syscall.DT_DIR: + typ = os.ModeDir + case syscall.DT_LNK: + typ = os.ModeSymlink + case syscall.DT_BLK: + typ = os.ModeDevice + case syscall.DT_FIFO: + typ = os.ModeNamedPipe + case syscall.DT_SOCK: + typ = os.ModeSocket + case syscall.DT_UNKNOWN: + typ = unknownFileMode + default: + // Skip weird things. + // It's probably a DT_WHT (http://lwn.net/Articles/325369/) + // or something. Revisit if/when this package is moved outside + // of goimports. goimports only cares about regular files, + // symlinks, and directories. + return + } + + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + nameLen := direntNamlen(dirent) + + // Special cases for common things: + if nameLen == 1 && nameBuf[0] == '.' { + name = "." + } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' { + name = ".." + } else { + name = string(nameBuf[:nameLen]) + } + return +} + +// According to https://golang.org/doc/go1.14#runtime +// A consequence of the implementation of preemption is that on Unix systems, including Linux and macOS +// systems, programs built with Go 1.14 will receive more signals than programs built with earlier releases. +// +// This causes syscall.Open and syscall.ReadDirent sometimes fail with EINTR errors. +// We need to retry in this case. +func open(path string, mode int, perm uint32) (fd int, err error) { + for { + fd, err := syscall.Open(path, mode, perm) + if err != syscall.EINTR { + return fd, err + } + } +} + +func readDirent(fd int, buf []byte) (n int, err error) { + for { + nbuf, err := syscall.ReadDirent(fd, buf) + if err != syscall.EINTR { + return nbuf, err + } + } +} diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go new file mode 100644 index 00000000..16840532 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go @@ -0,0 +1,254 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gopathwalk is like filepath.Walk but specialized for finding Go +// packages, particularly in $GOPATH and $GOROOT. +package gopathwalk + +import ( + "bufio" + "bytes" + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "time" + + "golang.org/x/tools/internal/fastwalk" +) + +// Options controls the behavior of a Walk call. +type Options struct { + // If Logf is non-nil, debug logging is enabled through this function. + Logf func(format string, args ...interface{}) + // Search module caches. Also disables legacy goimports ignore rules. + ModulesEnabled bool +} + +// RootType indicates the type of a Root. +type RootType int + +const ( + RootUnknown RootType = iota + RootGOROOT + RootGOPATH + RootCurrentModule + RootModuleCache + RootOther +) + +// A Root is a starting point for a Walk. +type Root struct { + Path string + Type RootType +} + +// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. +// For each package found, add will be called (concurrently) with the absolute +// paths of the containing source directory and the package directory. +// add will be called concurrently. +func Walk(roots []Root, add func(root Root, dir string), opts Options) { + WalkSkip(roots, add, func(Root, string) bool { return false }, opts) +} + +// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. 
+// For each package found, add will be called (concurrently) with the absolute
+// paths of the containing source directory and the package directory.
+// For each directory that will be scanned, skip will be called (concurrently)
+// with the absolute paths of the containing source directory and the directory.
+// If skip returns false on a directory it will be processed.
+// add will be called concurrently.
+// skip will be called concurrently.
+func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) {
+	for _, root := range roots {
+		walkDir(root, add, skip, opts)
+	}
+}
+
+// walkDir creates a walker and starts fastwalk with this walker.
+func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
+	if _, err := os.Stat(root.Path); os.IsNotExist(err) {
+		if opts.Logf != nil {
+			opts.Logf("skipping nonexistent directory: %v", root.Path)
+		}
+		return
+	}
+	start := time.Now()
+	if opts.Logf != nil {
+		opts.Logf("gopathwalk: scanning %s", root.Path)
+	}
+	w := &walker{
+		root: root,
+		add:  add,
+		skip: skip,
+		opts: opts,
+	}
+	w.init()
+	if err := fastwalk.Walk(root.Path, w.walk); err != nil {
+		log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err)
+	}
+
+	if opts.Logf != nil {
+		opts.Logf("gopathwalk: scanned %s in %v", root.Path, time.Since(start))
+	}
+}
+
+// walker is the callback for fastwalk.Walk.
+type walker struct {
+	root Root                    // The source directory to scan.
+	add  func(Root, string)      // The callback that will be invoked for every possible Go package dir.
+	skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true.
+	opts Options                 // Options passed to Walk by the user.
+
+	ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
+}
+
+// init initializes the walker based on its Options.
+func (w *walker) init() {
+	var ignoredPaths []string
+	if w.root.Type == RootModuleCache {
+		ignoredPaths = []string{"cache"}
+	}
+	if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH {
+		ignoredPaths = w.getIgnoredDirs(w.root.Path)
+		ignoredPaths = append(ignoredPaths, "v", "mod")
+	}
+
+	for _, p := range ignoredPaths {
+		full := filepath.Join(w.root.Path, p)
+		if fi, err := os.Stat(full); err == nil {
+			w.ignoredDirs = append(w.ignoredDirs, fi)
+			if w.opts.Logf != nil {
+				w.opts.Logf("Directory added to ignore list: %s", full)
+			}
+		} else if w.opts.Logf != nil {
+			w.opts.Logf("Error statting ignored directory: %v", err)
+		}
+	}
+}
+
+// getIgnoredDirs reads an optional config file at <path>/.goimportsignore
+// of relative directories to ignore when scanning for go files.
+// The provided path is one of the $GOPATH entries with "src" appended.
+func (w *walker) getIgnoredDirs(path string) []string {
+	file := filepath.Join(path, ".goimportsignore")
+	slurp, err := ioutil.ReadFile(file)
+	if w.opts.Logf != nil {
+		if err != nil {
+			w.opts.Logf("%v", err)
+		} else {
+			w.opts.Logf("Read %s", file)
+		}
+	}
+	if err != nil {
+		return nil
+	}
+
+	var ignoredDirs []string
+	bs := bufio.NewScanner(bytes.NewReader(slurp))
+	for bs.Scan() {
+		line := strings.TrimSpace(bs.Text())
+		if line == "" || strings.HasPrefix(line, "#") {
+			continue
+		}
+		ignoredDirs = append(ignoredDirs, line)
+	}
+	return ignoredDirs
+}
+
+// shouldSkipDir reports whether the file should be skipped or not.
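+// A directory is skipped when it matches an entry loaded from a
+// .goimportsignore file or when the user-supplied skip callback returns true.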
+func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool { + for _, ignoredDir := range w.ignoredDirs { + if os.SameFile(fi, ignoredDir) { + return true + } + } + if w.skip != nil { + // Check with the user specified callback. + return w.skip(w.root, dir) + } + return false +} + +// walk walks through the given path. +func (w *walker) walk(path string, typ os.FileMode) error { + if typ.IsRegular() { + dir := filepath.Dir(path) + if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) { + // Doesn't make sense to have regular files + // directly in your $GOPATH/src or $GOROOT/src. + return fastwalk.ErrSkipFiles + } + if !strings.HasSuffix(path, ".go") { + return nil + } + + w.add(w.root, dir) + return fastwalk.ErrSkipFiles + } + if typ == os.ModeDir { + base := filepath.Base(path) + if base == "" || base[0] == '.' || base[0] == '_' || + base == "testdata" || + (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") || + (!w.opts.ModulesEnabled && base == "node_modules") { + return filepath.SkipDir + } + fi, err := os.Lstat(path) + if err == nil && w.shouldSkipDir(fi, path) { + return filepath.SkipDir + } + return nil + } + if typ == os.ModeSymlink { + base := filepath.Base(path) + if strings.HasPrefix(base, ".#") { + // Emacs noise. + return nil + } + if w.shouldTraverse(path) { + return fastwalk.ErrTraverseLink + } + } + return nil +} + +// shouldTraverse reports whether the symlink fi, found in dir, +// should be followed. It makes sure symlinks were never visited +// before to avoid symlink loops. +func (w *walker) shouldTraverse(path string) bool { + ts, err := os.Stat(path) + if err != nil { + fmt.Fprintln(os.Stderr, err) + return false + } + if !ts.IsDir() { + return false + } + if w.shouldSkipDir(ts, filepath.Dir(path)) { + return false + } + // Check for symlink loops by statting each directory component + // and seeing if any are the same file as ts. + for { + parent := filepath.Dir(path) + if parent == path { + // Made it to the root without seeing a cycle. + // Use this symlink. + return true + } + parentInfo, err := os.Stat(parent) + if err != nil { + return false + } + if os.SameFile(ts, parentInfo) { + // Cycle. Don't traverse. + return false + } + path = parent + } + +} diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go new file mode 100644 index 00000000..642a5ac2 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/fix.go @@ -0,0 +1,1738 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "os" + "path" + "path/filepath" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/gopathwalk" +) + +// importToGroup is a list of functions which map from an import path to +// a group number. 
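+// The first function (top to bottom) that claims an import path wins: paths
+// matching the configured local prefix map to group 3, "appengine" paths to
+// group 2, dotted (third-party) paths to group 1, and everything else, i.e.
+// the standard library, falls through to group 0.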
+var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){ + func(localPrefix, importPath string) (num int, ok bool) { + if localPrefix == "" { + return + } + for _, p := range strings.Split(localPrefix, ",") { + if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath { + return 3, true + } + } + return + }, + func(_, importPath string) (num int, ok bool) { + if strings.HasPrefix(importPath, "appengine") { + return 2, true + } + return + }, + func(_, importPath string) (num int, ok bool) { + firstComponent := strings.Split(importPath, "/")[0] + if strings.Contains(firstComponent, ".") { + return 1, true + } + return + }, +} + +func importGroup(localPrefix, importPath string) int { + for _, fn := range importToGroup { + if n, ok := fn(localPrefix, importPath); ok { + return n + } + } + return 0 +} + +type ImportFixType int + +const ( + AddImport ImportFixType = iota + DeleteImport + SetImportName +) + +type ImportFix struct { + // StmtInfo represents the import statement this fix will add, remove, or change. + StmtInfo ImportInfo + // IdentName is the identifier that this fix will add or remove. + IdentName string + // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName). + FixType ImportFixType + Relevance float64 // see pkg +} + +// An ImportInfo represents a single import statement. +type ImportInfo struct { + ImportPath string // import path, e.g. "crypto/rand". + Name string // import name, e.g. "crand", or "" if none. +} + +// A packageInfo represents what's known about a package. +type packageInfo struct { + name string // real package name, if known. + exports map[string]bool // known exports. +} + +// parseOtherFiles parses all the Go files in srcDir except filename, including +// test files if filename looks like a test. +func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File { + // This could use go/packages but it doesn't buy much, and it fails + // with https://golang.org/issue/26296 in LoadFiles mode in some cases. + considerTests := strings.HasSuffix(filename, "_test.go") + + fileBase := filepath.Base(filename) + packageFileInfos, err := ioutil.ReadDir(srcDir) + if err != nil { + return nil + } + + var files []*ast.File + for _, fi := range packageFileInfos { + if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") { + continue + } + if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") { + continue + } + + f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0) + if err != nil { + continue + } + + files = append(files, f) + } + + return files +} + +// addGlobals puts the names of package vars into the provided map. +func addGlobals(f *ast.File, globals map[string]bool) { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + globals[valueSpec.Names[0].Name] = true + } + } +} + +// collectReferences builds a map of selector expressions, from +// left hand side (X) to a set of right hand sides (Sel). +func collectReferences(f *ast.File) references { + refs := references{} + + var visitor visitFn + visitor = func(node ast.Node) ast.Visitor { + if node == nil { + return visitor + } + switch v := node.(type) { + case *ast.SelectorExpr: + xident, ok := v.X.(*ast.Ident) + if !ok { + break + } + if xident.Obj != nil { + // If the parser can resolve it, it's not a package ref. 
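+				// (e.g. a local variable or parameter that shadows the package name).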
+ break + } + if !ast.IsExported(v.Sel.Name) { + // Whatever this is, it's not exported from a package. + break + } + pkgName := xident.Name + r := refs[pkgName] + if r == nil { + r = make(map[string]bool) + refs[pkgName] = r + } + r[v.Sel.Name] = true + } + return visitor + } + ast.Walk(visitor, f) + return refs +} + +// collectImports returns all the imports in f. +// Unnamed imports (., _) and "C" are ignored. +func collectImports(f *ast.File) []*ImportInfo { + var imports []*ImportInfo + for _, imp := range f.Imports { + var name string + if imp.Name != nil { + name = imp.Name.Name + } + if imp.Path.Value == `"C"` || name == "_" || name == "." { + continue + } + path := strings.Trim(imp.Path.Value, `"`) + imports = append(imports, &ImportInfo{ + Name: name, + ImportPath: path, + }) + } + return imports +} + +// findMissingImport searches pass's candidates for an import that provides +// pkg, containing all of syms. +func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { + for _, candidate := range p.candidates { + pkgInfo, ok := p.knownPackages[candidate.ImportPath] + if !ok { + continue + } + if p.importIdentifier(candidate) != pkg { + continue + } + + allFound := true + for right := range syms { + if !pkgInfo.exports[right] { + allFound = false + break + } + } + + if allFound { + return candidate + } + } + return nil +} + +// references is set of references found in a Go file. The first map key is the +// left hand side of a selector expression, the second key is the right hand +// side, and the value should always be true. +type references map[string]map[string]bool + +// A pass contains all the inputs and state necessary to fix a file's imports. +// It can be modified in some ways during use; see comments below. +type pass struct { + // Inputs. These must be set before a call to load, and not modified after. + fset *token.FileSet // fset used to parse f and its siblings. + f *ast.File // the file being fixed. + srcDir string // the directory containing f. + env *ProcessEnv // the environment to use for go commands, etc. + loadRealPackageNames bool // if true, load package names from disk rather than guessing them. + otherFiles []*ast.File // sibling files. + + // Intermediate state, generated by load. + existingImports map[string]*ImportInfo + allRefs references + missingRefs references + + // Inputs to fix. These can be augmented between successive fix calls. + lastTry bool // indicates that this is the last call and fix should clean up as best it can. + candidates []*ImportInfo // candidate imports in priority order. + knownPackages map[string]*packageInfo // information about all known packages. +} + +// loadPackageNames saves the package names for everything referenced by imports. 
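+// Import paths already recorded in p.knownPackages are skipped; the remaining
+// unknown paths are resolved in a single batch through the environment's
+// resolver.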
+func (p *pass) loadPackageNames(imports []*ImportInfo) error { + if p.env.Logf != nil { + p.env.Logf("loading package names for %v packages", len(imports)) + defer func() { + p.env.Logf("done loading package names for %v packages", len(imports)) + }() + } + var unknown []string + for _, imp := range imports { + if _, ok := p.knownPackages[imp.ImportPath]; ok { + continue + } + unknown = append(unknown, imp.ImportPath) + } + + resolver, err := p.env.GetResolver() + if err != nil { + return err + } + + names, err := resolver.loadPackageNames(unknown, p.srcDir) + if err != nil { + return err + } + + for path, name := range names { + p.knownPackages[path] = &packageInfo{ + name: name, + exports: map[string]bool{}, + } + } + return nil +} + +// importIdentifier returns the identifier that imp will introduce. It will +// guess if the package name has not been loaded, e.g. because the source +// is not available. +func (p *pass) importIdentifier(imp *ImportInfo) string { + if imp.Name != "" { + return imp.Name + } + known := p.knownPackages[imp.ImportPath] + if known != nil && known.name != "" { + return known.name + } + return ImportPathToAssumedName(imp.ImportPath) +} + +// load reads in everything necessary to run a pass, and reports whether the +// file already has all the imports it needs. It fills in p.missingRefs with the +// file's missing symbols, if any, or removes unused imports if not. +func (p *pass) load() ([]*ImportFix, bool) { + p.knownPackages = map[string]*packageInfo{} + p.missingRefs = references{} + p.existingImports = map[string]*ImportInfo{} + + // Load basic information about the file in question. + p.allRefs = collectReferences(p.f) + + // Load stuff from other files in the same package: + // global variables so we know they don't need resolving, and imports + // that we might want to mimic. + globals := map[string]bool{} + for _, otherFile := range p.otherFiles { + // Don't load globals from files that are in the same directory + // but a different package. Using them to suggest imports is OK. + if p.f.Name.Name == otherFile.Name.Name { + addGlobals(otherFile, globals) + } + p.candidates = append(p.candidates, collectImports(otherFile)...) + } + + // Resolve all the import paths we've seen to package names, and store + // f's imports by the identifier they introduce. + imports := collectImports(p.f) + if p.loadRealPackageNames { + err := p.loadPackageNames(append(imports, p.candidates...)) + if err != nil { + if p.env.Logf != nil { + p.env.Logf("loading package names: %v", err) + } + return nil, false + } + } + for _, imp := range imports { + p.existingImports[p.importIdentifier(imp)] = imp + } + + // Find missing references. + for left, rights := range p.allRefs { + if globals[left] { + continue + } + _, ok := p.existingImports[left] + if !ok { + p.missingRefs[left] = rights + continue + } + } + if len(p.missingRefs) != 0 { + return nil, false + } + + return p.fix() +} + +// fix attempts to satisfy missing imports using p.candidates. If it finds +// everything, or if p.lastTry is true, it updates fixes to add the imports it found, +// delete anything unused, and update import names, and returns true. +func (p *pass) fix() ([]*ImportFix, bool) { + // Find missing imports. + var selected []*ImportInfo + for left, rights := range p.missingRefs { + if imp := p.findMissingImport(left, rights); imp != nil { + selected = append(selected, imp) + } + } + + if !p.lastTry && len(selected) != len(p.missingRefs) { + return nil, false + } + + // Found everything, or giving up. 
Add the new imports and remove any unused. + var fixes []*ImportFix + for _, imp := range p.existingImports { + // We deliberately ignore globals here, because we can't be sure + // they're in the same package. People do things like put multiple + // main packages in the same directory, and we don't want to + // remove imports if they happen to have the same name as a var in + // a different package. + if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { + fixes = append(fixes, &ImportFix{ + StmtInfo: *imp, + IdentName: p.importIdentifier(imp), + FixType: DeleteImport, + }) + continue + } + + // An existing import may need to update its import name to be correct. + if name := p.importSpecName(imp); name != imp.Name { + fixes = append(fixes, &ImportFix{ + StmtInfo: ImportInfo{ + Name: name, + ImportPath: imp.ImportPath, + }, + IdentName: p.importIdentifier(imp), + FixType: SetImportName, + }) + } + } + + for _, imp := range selected { + fixes = append(fixes, &ImportFix{ + StmtInfo: ImportInfo{ + Name: p.importSpecName(imp), + ImportPath: imp.ImportPath, + }, + IdentName: p.importIdentifier(imp), + FixType: AddImport, + }) + } + + return fixes, true +} + +// importSpecName gets the import name of imp in the import spec. +// +// When the import identifier matches the assumed import name, the import name does +// not appear in the import spec. +func (p *pass) importSpecName(imp *ImportInfo) string { + // If we did not load the real package names, or the name is already set, + // we just return the existing name. + if !p.loadRealPackageNames || imp.Name != "" { + return imp.Name + } + + ident := p.importIdentifier(imp) + if ident == ImportPathToAssumedName(imp.ImportPath) { + return "" // ident not needed since the assumed and real names are the same. + } + return ident +} + +// apply will perform the fixes on f in order. +func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) { + for _, fix := range fixes { + switch fix.FixType { + case DeleteImport: + astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case AddImport: + astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case SetImportName: + // Find the matching import path and change the name. + for _, spec := range f.Imports { + path := strings.Trim(spec.Path.Value, `"`) + if path == fix.StmtInfo.ImportPath { + spec.Name = &ast.Ident{ + Name: fix.StmtInfo.Name, + NamePos: spec.Pos(), + } + } + } + } + } +} + +// assumeSiblingImportsValid assumes that siblings' use of packages is valid, +// adding the exports they use. +func (p *pass) assumeSiblingImportsValid() { + for _, f := range p.otherFiles { + refs := collectReferences(f) + imports := collectImports(f) + importsByName := map[string]*ImportInfo{} + for _, imp := range imports { + importsByName[p.importIdentifier(imp)] = imp + } + for left, rights := range refs { + if imp, ok := importsByName[left]; ok { + if m, ok := stdlib[imp.ImportPath]; ok { + // We have the stdlib in memory; no need to guess. + rights = copyExports(m) + } + p.addCandidate(imp, &packageInfo{ + // no name; we already know it. + exports: rights, + }) + } + } + } +} + +// addCandidate adds a candidate import to p, and merges in the information +// in pkg. 
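+// Repeated sightings of the same import path are merged rather than
+// replaced: a missing name is filled in and export sets are unioned,
+// so observing {"Println"} and later {"Sprintf"} for "fmt" keeps both.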
+func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) { + p.candidates = append(p.candidates, imp) + if existing, ok := p.knownPackages[imp.ImportPath]; ok { + if existing.name == "" { + existing.name = pkg.name + } + for export := range pkg.exports { + existing.exports[export] = true + } + } else { + p.knownPackages[imp.ImportPath] = pkg + } +} + +// fixImports adds and removes imports from f so that all its references are +// satisfied and there are no unused imports. +// +// This is declared as a variable rather than a function so goimports can +// easily be extended by adding a file with an init function. +var fixImports = fixImportsDefault + +func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { + fixes, err := getFixes(fset, f, filename, env) + if err != nil { + return err + } + apply(fset, f, fixes) + return err +} + +// getFixes gets the import fixes that need to be made to f in order to fix the imports. +// It does not modify the ast. +func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { + abs, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + srcDir := filepath.Dir(abs) + if env.Logf != nil { + env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + } + + // First pass: looking only at f, and using the naive algorithm to + // derive package names from import paths, see if the file is already + // complete. We can't add any imports yet, because we don't know + // if missing references are actually package vars. + p := &pass{fset: fset, f: f, srcDir: srcDir, env: env} + if fixes, done := p.load(); done { + return fixes, nil + } + + otherFiles := parseOtherFiles(fset, srcDir, filename) + + // Second pass: add information from other files in the same package, + // like their package vars and imports. + p.otherFiles = otherFiles + if fixes, done := p.load(); done { + return fixes, nil + } + + // Now we can try adding imports from the stdlib. + p.assumeSiblingImportsValid() + addStdlibCandidates(p, p.missingRefs) + if fixes, done := p.fix(); done { + return fixes, nil + } + + // Third pass: get real package names where we had previously used + // the naive algorithm. + p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} + p.loadRealPackageNames = true + p.otherFiles = otherFiles + if fixes, done := p.load(); done { + return fixes, nil + } + + if err := addStdlibCandidates(p, p.missingRefs); err != nil { + return nil, err + } + p.assumeSiblingImportsValid() + if fixes, done := p.fix(); done { + return fixes, nil + } + + // Go look for candidates in $GOPATH, etc. We don't necessarily load + // the real exports of sibling imports, so keep assuming their contents. + if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { + return nil, err + } + + p.lastTry = true + fixes, _ := p.fix() + return fixes, nil +} + +// MaxRelevance is the highest relevance, used for the standard library. +// Chosen arbitrarily to match pre-existing gopls code. +const MaxRelevance = 7.0 + +// getCandidatePkgs works with the passed callback to find all acceptable packages. +// It deduplicates by import path, and uses a cached stdlib rather than reading +// from disk. 
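+// Standard-library packages are synthesized from the in-memory stdlib
+// table (with dir under GOROOT/src and relevance MaxRelevance), and the
+// disk scan below excludes GOROOT roots so they are not found twice.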
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error { + notSelf := func(p *pkg) bool { + return p.packageName != filePkg || p.dir != filepath.Dir(filename) + } + goenv, err := env.goEnv() + if err != nil { + return err + } + + var mu sync.Mutex // to guard asynchronous access to dupCheck + dupCheck := map[string]struct{}{} + + // Start off with the standard library. + for importPath, exports := range stdlib { + p := &pkg{ + dir: filepath.Join(goenv["GOROOT"], "src", importPath), + importPathShort: importPath, + packageName: path.Base(importPath), + relevance: MaxRelevance, + } + dupCheck[importPath] = struct{}{} + if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) { + wrappedCallback.exportsLoaded(p, exports) + } + } + + scanFilter := &scanCallback{ + rootFound: func(root gopathwalk.Root) bool { + // Exclude goroot results -- getting them is relatively expensive, not cached, + // and generally redundant with the in-memory version. + return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root) + }, + dirFound: wrappedCallback.dirFound, + packageNameLoaded: func(pkg *pkg) bool { + mu.Lock() + defer mu.Unlock() + if _, ok := dupCheck[pkg.importPathShort]; ok { + return false + } + dupCheck[pkg.importPathShort] = struct{}{} + return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg) + }, + exportsLoaded: func(pkg *pkg, exports []string) { + // If we're an x_test, load the package under test's test variant. + if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) { + var err error + _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true) + if err != nil { + return + } + } + wrappedCallback.exportsLoaded(pkg, exports) + }, + } + resolver, err := env.GetResolver() + if err != nil { + return err + } + return resolver.scan(ctx, scanFilter) +} + +func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map[string]float64, error) { + result := make(map[string]float64) + resolver, err := env.GetResolver() + if err != nil { + return nil, err + } + for _, path := range paths { + result[path] = resolver.scoreImportPath(ctx, path) + } + return result, nil +} + +func PrimeCache(ctx context.Context, env *ProcessEnv) error { + // Fully scan the disk for directories, but don't actually read any Go files. + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + return false + }, + packageNameLoaded: func(pkg *pkg) bool { + return false + }, + } + return getCandidatePkgs(ctx, callback, "", "", env) +} + +func candidateImportName(pkg *pkg) string { + if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName { + return pkg.packageName + } + return "" +} + +// GetAllCandidates calls wrapped for each package whose name starts with +// searchPrefix, and can be imported from filename with the package name filePkg. +// +// Beware that the wrapped function may be called multiple times concurrently. +// TODO(adonovan): encapsulate the concurrency. 
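+// A minimal usage sketch (collector, prefix, and paths hypothetical);
+// the mutex guards against the concurrent callbacks noted above:
+//
+//	var mu sync.Mutex
+//	var fixes []ImportFix
+//	collect := func(fix ImportFix) {
+//		mu.Lock()
+//		defer mu.Unlock()
+//		fixes = append(fixes, fix)
+//	}
+//	err := GetAllCandidates(ctx, collect, "jso", "/src/p/f.go", "p", env)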
+func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + if !canUse(filename, pkg.dir) { + return false + } + // Try the assumed package name first, then a simpler path match + // in case of packages named vN, which are not uncommon. + return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) || + strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix) + }, + packageNameLoaded: func(pkg *pkg) bool { + if !strings.HasPrefix(pkg.packageName, searchPrefix) { + return false + } + wrapped(ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }) + return false + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +// GetImportPaths calls wrapped for each package whose import path starts with +// searchPrefix, and can be imported from filename with the package name filePkg. +func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + if !canUse(filename, pkg.dir) { + return false + } + return strings.HasPrefix(pkg.importPathShort, searchPrefix) + }, + packageNameLoaded: func(pkg *pkg) bool { + wrapped(ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }) + return false + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +// A PackageExport is a package and its exports. +type PackageExport struct { + Fix *ImportFix + Exports []string +} + +// GetPackageExports returns all known packages with name pkg and their exports. +func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, references{searchPkg: nil}, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + return pkg.packageName == searchPkg + }, + exportsLoaded: func(pkg *pkg, exports []string) { + sort.Strings(exports) + wrapped(PackageExport{ + Fix: &ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }, + Exports: exports, + }) + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +var requiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB", "GOWORK"} + +// ProcessEnv contains environment variables and settings that affect the use of +// the go command, the go/build package, etc. +type ProcessEnv struct { + GocmdRunner *gocommand.Runner + + BuildFlags []string + ModFlag string + ModFile string + + // SkipPathInScan returns true if the path should be skipped from scans of + // the RootCurrentModule root type. The function argument is a clean, + // absolute path. 
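+ // For example, returning true for a hypothetical
+ // "/repo/bazel-out" subtree keeps the scan out of
+ // generated build output.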
+ SkipPathInScan func(string) bool + + // Env overrides the OS environment, and can be used to specify + // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because + // exec.Command will not honor it. + // Specifying all of RequiredGoEnvVars avoids a call to `go env`. + Env map[string]string + + WorkingDir string + + // If Logf is non-nil, debug logging is enabled through this function. + Logf func(format string, args ...interface{}) + + initialized bool + + resolver Resolver +} + +func (e *ProcessEnv) goEnv() (map[string]string, error) { + if err := e.init(); err != nil { + return nil, err + } + return e.Env, nil +} + +func (e *ProcessEnv) matchFile(dir, name string) (bool, error) { + bctx, err := e.buildContext() + if err != nil { + return false, err + } + return bctx.MatchFile(dir, name) +} + +// CopyConfig copies the env's configuration into a new env. +func (e *ProcessEnv) CopyConfig() *ProcessEnv { + copy := &ProcessEnv{ + GocmdRunner: e.GocmdRunner, + initialized: e.initialized, + BuildFlags: e.BuildFlags, + Logf: e.Logf, + WorkingDir: e.WorkingDir, + resolver: nil, + Env: map[string]string{}, + } + for k, v := range e.Env { + copy.Env[k] = v + } + return copy +} + +func (e *ProcessEnv) init() error { + if e.initialized { + return nil + } + + foundAllRequired := true + for _, k := range requiredGoEnvVars { + if _, ok := e.Env[k]; !ok { + foundAllRequired = false + break + } + } + if foundAllRequired { + e.initialized = true + return nil + } + + if e.Env == nil { + e.Env = map[string]string{} + } + + goEnv := map[string]string{} + stdout, err := e.invokeGo(context.TODO(), "env", append([]string{"-json"}, requiredGoEnvVars...)...) + if err != nil { + return err + } + if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil { + return err + } + for k, v := range goEnv { + e.Env[k] = v + } + e.initialized = true + return nil +} + +func (e *ProcessEnv) env() []string { + var env []string // the gocommand package will prepend os.Environ. + for k, v := range e.Env { + env = append(env, k+"="+v) + } + return env +} + +func (e *ProcessEnv) GetResolver() (Resolver, error) { + if e.resolver != nil { + return e.resolver, nil + } + if err := e.init(); err != nil { + return nil, err + } + if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 { + e.resolver = newGopathResolver(e) + return e.resolver, nil + } + e.resolver = newModuleResolver(e) + return e.resolver, nil +} + +func (e *ProcessEnv) buildContext() (*build.Context, error) { + ctx := build.Default + goenv, err := e.goEnv() + if err != nil { + return nil, err + } + ctx.GOROOT = goenv["GOROOT"] + ctx.GOPATH = goenv["GOPATH"] + + // As of Go 1.14, build.Context has a Dir field + // (see golang.org/issue/34860). + // Populate it only if present. + rc := reflect.ValueOf(&ctx).Elem() + dir := rc.FieldByName("Dir") + if dir.IsValid() && dir.Kind() == reflect.String { + dir.SetString(e.WorkingDir) + } + + // Since Go 1.11, go/build.Context.Import may invoke 'go list' depending on + // the value in GO111MODULE in the process's environment. We always want to + // run in GOPATH mode when calling Import, so we need to prevent this from + // happening. In Go 1.16, GO111MODULE defaults to "on", so this problem comes + // up more frequently. + // + // HACK: setting any of the Context I/O hooks prevents Import from invoking + // 'go list', regardless of GO111MODULE. This is undocumented, but it's + // unlikely to change before GOPATH support is removed. 
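+ //
+ // ReadDir is the hook used here; ioutil.ReadDir is what go/build
+ // falls back to when the hook is nil, so behavior is unchanged.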
+ ctx.ReadDir = ioutil.ReadDir + + return &ctx, nil +} + +func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) { + inv := gocommand.Invocation{ + Verb: verb, + Args: args, + BuildFlags: e.BuildFlags, + Env: e.env(), + Logf: e.Logf, + WorkingDir: e.WorkingDir, + } + return e.GocmdRunner.Run(ctx, inv) +} + +func addStdlibCandidates(pass *pass, refs references) error { + goenv, err := pass.env.goEnv() + if err != nil { + return err + } + add := func(pkg string) { + // Prevent self-imports. + if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir { + return + } + exports := copyExports(stdlib[pkg]) + pass.addCandidate( + &ImportInfo{ImportPath: pkg}, + &packageInfo{name: path.Base(pkg), exports: exports}) + } + for left := range refs { + if left == "rand" { + // Make sure we try crypto/rand before math/rand. + add("crypto/rand") + add("math/rand") + continue + } + for importPath := range stdlib { + if path.Base(importPath) == left { + add(importPath) + } + } + } + return nil +} + +// A Resolver does the build-system-specific parts of goimports. +type Resolver interface { + // loadPackageNames loads the package names in importPaths. + loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) + // scan works with callback to search for packages. See scanCallback for details. + scan(ctx context.Context, callback *scanCallback) error + // loadExports returns the set of exported symbols in the package at dir. + // loadExports may be called concurrently. + loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) + // scoreImportPath returns the relevance for an import path. + scoreImportPath(ctx context.Context, path string) float64 + + ClearForNewScan() +} + +// A scanCallback controls a call to scan and receives its results. +// In general, minor errors will be silently discarded; a user should not +// expect to receive a full series of calls for everything. +type scanCallback struct { + // rootFound is called before scanning a new root dir. If it returns true, + // the root will be scanned. Returning false will not necessarily prevent + // directories from that root making it to dirFound. + rootFound func(gopathwalk.Root) bool + // dirFound is called when a directory is found that is possibly a Go package. + // pkg will be populated with everything except packageName. + // If it returns true, the package's name will be loaded. + dirFound func(pkg *pkg) bool + // packageNameLoaded is called when a package is found and its name is loaded. + // If it returns true, the package's exports will be loaded. + packageNameLoaded func(pkg *pkg) bool + // exportsLoaded is called when a package's exports have been loaded. + exportsLoaded func(pkg *pkg, exports []string) +} + +func addExternalCandidates(pass *pass, refs references, filename string) error { + var mu sync.Mutex + found := make(map[string][]pkgDistance) + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true // We want everything. + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, refs, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + if _, want := refs[pkg.packageName]; !want { + return false + } + if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName { + // The candidate is in the same directory and has the + // same package name. Don't try to import ourselves. 
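+ // For example, while fixing a file of package "client" in
+ // /src/client, another package "client" found in that same
+ // directory is rejected here.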
+ return false + } + if !canUse(filename, pkg.dir) { + return false + } + mu.Lock() + defer mu.Unlock() + found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)}) + return false // We'll do our own loading after we sort. + }, + } + resolver, err := pass.env.GetResolver() + if err != nil { + return err + } + if err = resolver.scan(context.Background(), callback); err != nil { + return err + } + + // Search for imports matching potential package references. + type result struct { + imp *ImportInfo + pkg *packageInfo + } + results := make(chan result, len(refs)) + + ctx, cancel := context.WithCancel(context.TODO()) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + var ( + firstErr error + firstErrOnce sync.Once + ) + for pkgName, symbols := range refs { + wg.Add(1) + go func(pkgName string, symbols map[string]bool) { + defer wg.Done() + + found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename) + + if err != nil { + firstErrOnce.Do(func() { + firstErr = err + cancel() + }) + return + } + + if found == nil { + return // No matching package. + } + + imp := &ImportInfo{ + ImportPath: found.importPathShort, + } + + pkg := &packageInfo{ + name: pkgName, + exports: symbols, + } + results <- result{imp, pkg} + }(pkgName, symbols) + } + go func() { + wg.Wait() + close(results) + }() + + for result := range results { + pass.addCandidate(result.imp, result.pkg) + } + return firstErr +} + +// notIdentifier reports whether ch is an invalid identifier character. +func notIdentifier(ch rune) bool { + return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || + '0' <= ch && ch <= '9' || + ch == '_' || + ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch))) +} + +// ImportPathToAssumedName returns the assumed package name of an import path. +// It does this using only string parsing of the import path. +// It picks the last element of the path that does not look like a major +// version, and then picks the valid identifier off the start of that element. +// It is used to determine if a local rename should be added to an import for +// clarity. +// This function could be moved to a standard package and exported if we want +// for use in other tools. +func ImportPathToAssumedName(importPath string) string { + base := path.Base(importPath) + if strings.HasPrefix(base, "v") { + if _, err := strconv.Atoi(base[1:]); err == nil { + dir := path.Dir(importPath) + if dir != "." { + base = path.Base(dir) + } + } + } + base = strings.TrimPrefix(base, "go-") + if i := strings.IndexFunc(base, notIdentifier); i >= 0 { + base = base[:i] + } + return base +} + +// gopathResolver implements resolver for GOPATH workspaces. +type gopathResolver struct { + env *ProcessEnv + walked bool + cache *dirInfoCache + scanSema chan struct{} // scanSema prevents concurrent scans. 
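+ // Receiving from scanSema acquires the scan lock and sending
+ // the token back releases it; newGopathResolver seeds one token.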
+} + +func newGopathResolver(env *ProcessEnv) *gopathResolver { + r := &gopathResolver{ + env: env, + cache: &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + }, + scanSema: make(chan struct{}, 1), + } + r.scanSema <- struct{}{} + return r +} + +func (r *gopathResolver) ClearForNewScan() { + <-r.scanSema + r.cache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + r.walked = false + r.scanSema <- struct{}{} +} + +func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + names := map[string]string{} + bctx, err := r.env.buildContext() + if err != nil { + return nil, err + } + for _, path := range importPaths { + names[path] = importPathToName(bctx, path, srcDir) + } + return names, nil +} + +// importPathToName finds out the actual package name, as declared in its .go files. +func importPathToName(bctx *build.Context, importPath, srcDir string) string { + // Fast path for standard library without going to disk. + if _, ok := stdlib[importPath]; ok { + return path.Base(importPath) // stdlib packages always match their paths. + } + + buildPkg, err := bctx.Import(importPath, srcDir, build.FindOnly) + if err != nil { + return "" + } + pkgName, err := packageDirToName(buildPkg.Dir) + if err != nil { + return "" + } + return pkgName +} + +// packageDirToName is a faster version of build.Import if +// the only thing desired is the package name. Given a directory, +// packageDirToName then only parses one file in the package, +// trusting that the files in the directory are consistent. +func packageDirToName(dir string) (packageName string, err error) { + d, err := os.Open(dir) + if err != nil { + return "", err + } + names, err := d.Readdirnames(-1) + d.Close() + if err != nil { + return "", err + } + sort.Strings(names) // to have predictable behavior + var lastErr error + var nfile int + for _, name := range names { + if !strings.HasSuffix(name, ".go") { + continue + } + if strings.HasSuffix(name, "_test.go") { + continue + } + nfile++ + fullFile := filepath.Join(dir, name) + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly) + if err != nil { + lastErr = err + continue + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by ctx.MatchFile. + continue + } + if pkgName == "main" { + // Also skip package main, assuming it's a +build ignore generator or example. + // Since you can't import a package main anyway, there's no harm here. + continue + } + return pkgName, nil + } + if lastErr != nil { + return "", lastErr + } + return "", fmt.Errorf("no importable package found in %d Go files", nfile) +} + +type pkg struct { + dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http") + importPathShort string // vendorless import path ("net/http", "a/b") + packageName string // package name loaded from source if requested + relevance float64 // a weakly-defined score of how relevant a package is. 0 is most relevant. +} + +type pkgDistance struct { + pkg *pkg + distance int // relative distance to target +} + +// byDistanceOrImportPathShortLength sorts by relative distance breaking ties +// on the short import path length and then the import string itself. 
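+// For example, distances {2, -1, 0} order as 0, 2, -1: an unknown
+// distance (-1) always sorts last.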
+type byDistanceOrImportPathShortLength []pkgDistance + +func (s byDistanceOrImportPathShortLength) Len() int { return len(s) } +func (s byDistanceOrImportPathShortLength) Less(i, j int) bool { + di, dj := s[i].distance, s[j].distance + if di == -1 { + return false + } + if dj == -1 { + return true + } + if di != dj { + return di < dj + } + + vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort + if len(vi) != len(vj) { + return len(vi) < len(vj) + } + return vi < vj +} +func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func distance(basepath, targetpath string) int { + p, err := filepath.Rel(basepath, targetpath) + if err != nil { + return -1 + } + if p == "." { + return 0 + } + return strings.Count(p, string(filepath.Separator)) + 1 +} + +func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error { + add := func(root gopathwalk.Root, dir string) { + // We assume cached directories have not changed. We can skip them and their + // children. + if _, ok := r.cache.Load(dir); ok { + return + } + + importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):]) + info := directoryPackageInfo{ + status: directoryScanned, + dir: dir, + rootType: root.Type, + nonCanonicalImportPath: VendorlessPath(importpath), + } + r.cache.Store(dir, info) + } + processDir := func(info directoryPackageInfo) { + // Skip this directory if we were not able to get the package information successfully. + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return + } + + p := &pkg{ + importPathShort: info.nonCanonicalImportPath, + dir: info.dir, + relevance: MaxRelevance - 1, + } + if info.rootType == gopathwalk.RootGOROOT { + p.relevance = MaxRelevance + } + + if !callback.dirFound(p) { + return + } + var err error + p.packageName, err = r.cache.CachePackageName(info) + if err != nil { + return + } + + if !callback.packageNameLoaded(p) { + return + } + if _, exports, err := r.loadExports(ctx, p, false); err == nil { + callback.exportsLoaded(p, exports) + } + } + stop := r.cache.ScanAndListen(ctx, processDir) + defer stop() + + goenv, err := r.env.goEnv() + if err != nil { + return err + } + var roots []gopathwalk.Root + roots = append(roots, gopathwalk.Root{Path: filepath.Join(goenv["GOROOT"], "src"), Type: gopathwalk.RootGOROOT}) + for _, p := range filepath.SplitList(goenv["GOPATH"]) { + roots = append(roots, gopathwalk.Root{Path: filepath.Join(p, "src"), Type: gopathwalk.RootGOPATH}) + } + // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly. + roots = filterRoots(roots, callback.rootFound) + // We can't cancel walks, because we need them to finish to have a usable + // cache. Instead, run them in a separate goroutine and detach. 
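+ // If ctx is cancelled before the semaphore is acquired, the
+ // goroutine exits without walking; otherwise the walk runs to
+ // completion and returns the token for the next scan.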
+ scanDone := make(chan struct{}) + go func() { + select { + case <-ctx.Done(): + return + case <-r.scanSema: + } + defer func() { r.scanSema <- struct{}{} }() + gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: false}) + close(scanDone) + }() + select { + case <-ctx.Done(): + case <-scanDone: + } + return nil +} + +func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 { + if _, ok := stdlib[path]; ok { + return MaxRelevance + } + return MaxRelevance - 1 +} + +func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root { + var result []gopathwalk.Root + for _, root := range roots { + if !include(root) { + continue + } + result = append(result, root) + } + return result +} + +func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { + if info, ok := r.cache.Load(pkg.dir); ok && !includeTest { + return r.cache.CacheExports(ctx, r.env, info) + } + return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest) +} + +// VendorlessPath returns the devendorized version of the import path ipath. +// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". +func VendorlessPath(ipath string) string { + // Devendorize for use in import statement. + if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):] + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):] + } + return ipath +} + +func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) { + // Look for non-test, buildable .go files which could provide exports. + all, err := ioutil.ReadDir(dir) + if err != nil { + return "", nil, err + } + var files []os.FileInfo + for _, fi := range all { + name := fi.Name() + if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) { + continue + } + match, err := env.matchFile(dir, fi.Name()) + if err != nil || !match { + continue + } + files = append(files, fi) + } + + if len(files) == 0 { + return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir) + } + + var pkgName string + var exports []string + fset := token.NewFileSet() + for _, fi := range files { + select { + case <-ctx.Done(): + return "", nil, ctx.Err() + default: + } + + fullFile := filepath.Join(dir, fi.Name()) + f, err := parser.ParseFile(fset, fullFile, nil, 0) + if err != nil { + if env.Logf != nil { + env.Logf("error parsing %v: %v", fullFile, err) + } + continue + } + if f.Name.Name == "documentation" { + // Special case from go/build.ImportDir, not + // handled by MatchFile above. + continue + } + if includeTest && strings.HasSuffix(f.Name.Name, "_test") { + // x_test package. We want internal test files only. + continue + } + pkgName = f.Name.Name + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports = append(exports, name) + } + } + } + + if env.Logf != nil { + sortedExports := append([]string(nil), exports...) + sort.Strings(sortedExports) + env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", ")) + } + return pkgName, exports, nil +} + +// findImport searches for a package with the given symbols. 
+// If no package is found, findImport returns ("", false, nil) +func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { + // Sort the candidates by their import package length, + // assuming that shorter package names are better than long + // ones. Note that this sorts by the de-vendored name, so + // there's no "penalty" for vendoring. + sort.Sort(byDistanceOrImportPathShortLength(candidates)) + if pass.env.Logf != nil { + for i, c := range candidates { + pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir) + } + } + resolver, err := pass.env.GetResolver() + if err != nil { + return nil, err + } + + // Collect exports for packages with matching names. + rescv := make([]chan *pkg, len(candidates)) + for i := range candidates { + rescv[i] = make(chan *pkg, 1) + } + const maxConcurrentPackageImport = 4 + loadExportsSem := make(chan struct{}, maxConcurrentPackageImport) + + ctx, cancel := context.WithCancel(ctx) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + + wg.Add(1) + go func() { + defer wg.Done() + for i, c := range candidates { + select { + case loadExportsSem <- struct{}{}: + case <-ctx.Done(): + return + } + + wg.Add(1) + go func(c pkgDistance, resc chan<- *pkg) { + defer func() { + <-loadExportsSem + wg.Done() + }() + + if pass.env.Logf != nil { + pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName) + } + // If we're an x_test, load the package under test's test variant. + includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir + _, exports, err := resolver.loadExports(ctx, c.pkg, includeTest) + if err != nil { + if pass.env.Logf != nil { + pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) + } + resc <- nil + return + } + + exportsMap := make(map[string]bool, len(exports)) + for _, sym := range exports { + exportsMap[sym] = true + } + + // If it doesn't have the right + // symbols, send nil to mean no match. + for symbol := range symbols { + if !exportsMap[symbol] { + resc <- nil + return + } + } + resc <- c.pkg + }(c, rescv[i]) + } + }() + + for _, resc := range rescv { + pkg := <-resc + if pkg == nil { + continue + } + return pkg, nil + } + return nil, nil +} + +// pkgIsCandidate reports whether pkg is a candidate for satisfying the +// finding which package pkgIdent in the file named by filename is trying +// to refer to. +// +// This check is purely lexical and is meant to be as fast as possible +// because it's run over all $GOPATH directories to filter out poor +// candidates in order to limit the CPU and I/O later parsing the +// exports in candidate packages. +// +// filename is the file being formatted. +// pkgIdent is the package being searched for, like "client" (if +// searching for "client.New") +func pkgIsCandidate(filename string, refs references, pkg *pkg) bool { + // Check "internal" and "vendor" visibility: + if !canUse(filename, pkg.dir) { + return false + } + + // Speed optimization to minimize disk I/O: + // the last two components on disk must contain the + // package name somewhere. 
+ // + // This permits mismatch naming like directory + // "go-foo" being package "foo", or "pkg.v3" being "pkg", + // or directory "google.golang.org/api/cloudbilling/v1" + // being package "cloudbilling", but doesn't + // permit a directory "foo" to be package + // "bar", which is strongly discouraged + // anyway. There's no reason goimports needs + // to be slow just to accommodate that. + for pkgIdent := range refs { + lastTwo := lastTwoComponents(pkg.importPathShort) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + } + } + return false +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// canUse reports whether the package in dir is usable from filename, +// respecting the Go "internal" and "vendor" visibility rules. +func canUse(filename, dir string) bool { + // Fast path check, before any allocations. If it doesn't contain vendor + // or internal, it's not tricky: + // Note that this can false-negative on directories like "notinternal", + // but we check it correctly below. This is just a fast path. + if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") { + return true + } + + dirSlash := filepath.ToSlash(dir) + if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") { + return true + } + // Vendor or internal directory only visible from children of parent. + // That means the path from the current directory to the target directory + // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal + // or bar/vendor or bar/internal. + // After stripping all the leading ../, the only okay place to see vendor or internal + // is at the very beginning of the path. + absfile, err := filepath.Abs(filename) + if err != nil { + return false + } + absdir, err := filepath.Abs(dir) + if err != nil { + return false + } + rel, err := filepath.Rel(absfile, absdir) + if err != nil { + return false + } + relSlash := filepath.ToSlash(rel) + if i := strings.LastIndex(relSlash, "../"); i >= 0 { + relSlash = relSlash[i+len("../"):] + } + return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal") +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. 
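+// For example, lastTwoComponents("a/b/c") returns "/b/c" (the leading
+// separator is kept), and lastTwoComponents("x") returns "x".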
+func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +type visitFn func(node ast.Node) ast.Visitor + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + return fn(node) +} + +func copyExports(pkg []string) map[string]bool { + m := make(map[string]bool, len(pkg)) + for _, v := range pkg { + m[v] = true + } + return m +} diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go new file mode 100644 index 00000000..95a88383 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/imports.go @@ -0,0 +1,351 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run mkstdlib.go + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/printer" + "go/token" + "io" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/ast/astutil" +) + +// Options is golang.org/x/tools/imports.Options with extra internal-only options. +type Options struct { + Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state. + + // LocalPrefix is a comma-separated string of import path prefixes, which, if + // set, instructs Process to sort the import paths with the given prefixes + // into another group after 3rd-party packages. + LocalPrefix string + + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env. +func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) { + fileSet := token.NewFileSet() + file, adjust, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + if !opt.FormatOnly { + if err := fixImports(fileSet, file, filename, opt.Env); err != nil { + return nil, err + } + } + return formatFile(fileSet, file, src, adjust, opt) +} + +// FixImports returns a list of fixes to the imports that, when applied, +// will leave the imports in the same state as Process. src and opt must +// be specified. +// +// Note that filename's directory influences which imports can be chosen, +// so it is important that filename be accurate. +func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { + fileSet := token.NewFileSet() + file, _, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + return getFixes(fileSet, file, filename, opt.Env) +} + +// ApplyFixes applies all of the fixes to the file and formats it. extraMode +// is added in when parsing the file. src and opts must be specified, but no +// env is needed. 
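+//
+// A minimal round-trip sketch (filename and contents hypothetical;
+// FixImports still needs opt.Env set, ApplyFixes does not):
+//
+//	fixes, err := FixImports("f.go", src, opt)
+//	if err != nil {
+//		return err
+//	}
+//	out, err := ApplyFixes(fixes, "f.go", src, opt, 0)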
+func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) { + // Don't use parse() -- we don't care about fragments or statement lists + // here, and we need to work with unparseable files. + fileSet := token.NewFileSet() + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + parserMode |= extraMode + + file, err := parser.ParseFile(fileSet, filename, src, parserMode) + if file == nil { + return nil, err + } + + // Apply the fixes to the file. + apply(fileSet, file, fixes) + + return formatFile(fileSet, file, src, nil, opt) +} + +// formatFile formats the file syntax tree. +// It may mutate the token.FileSet. +// +// If an adjust function is provided, it is called after formatting +// with the original source (formatFile's src parameter) and the +// formatted file, and returns the postpocessed result. +func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) { + mergeImports(file) + sortImports(opt.LocalPrefix, fset.File(file.Pos()), file) + var spacesBefore []string // import paths we need spaces before + for _, impSection := range astutil.Imports(fset, file) { + // Within each block of contiguous imports, see if any + // import lines are in different group numbers. If so, + // we'll need to put a space between them so it's + // compatible with gofmt. + lastGroup := -1 + for _, importSpec := range impSection { + importPath, _ := strconv.Unquote(importSpec.Path.Value) + groupNum := importGroup(opt.LocalPrefix, importPath) + if groupNum != lastGroup && lastGroup != -1 { + spacesBefore = append(spacesBefore, importPath) + } + lastGroup = groupNum + } + + } + + printerMode := printer.UseSpaces + if opt.TabIndent { + printerMode |= printer.TabIndent + } + printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth} + + var buf bytes.Buffer + err := printConfig.Fprint(&buf, fset, file) + if err != nil { + return nil, err + } + out := buf.Bytes() + if adjust != nil { + out = adjust(src, out) + } + if len(spacesBefore) > 0 { + out, err = addImportSpaces(bytes.NewReader(out), spacesBefore) + if err != nil { + return nil, err + } + } + + out, err = format.Source(out) + if err != nil { + return nil, err + } + return out, nil +} + +// parse parses src, which was read from filename, +// as a Go source file or statement list. +func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + + // Try as whole source file. + file, err := parser.ParseFile(fset, filename, src, parserMode) + if err == nil { + return file, nil, nil + } + // If the error is that the source file didn't begin with a + // package line and we accept fragmented input, fall through to + // try as a source fragment. Stop and return on any other error. + if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + return nil, nil, err + } + + // If this is a declaration list, make it a source file + // by inserting a package clause. + // Insert using a ;, not a newline, so that parse errors are on + // the correct line. + const prefix = "package main;" + psrc := append([]byte(prefix), src...) 
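+ // For example, the fragment `import "os"` followed by declarations
+ // becomes `package main;import "os"` on the same line, so any parse
+ // error positions still point at the original source lines.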
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode) + if err == nil { + // Gofmt will turn the ; into a \n. + // Do that ourselves now and update the file contents, + // so that positions and line numbers are correct going forward. + psrc[len(prefix)-1] = '\n' + fset.File(file.Package).SetLinesForContent(psrc) + + // If a main function exists, we will assume this is a main + // package and leave the file. + if containsMainFunc(file) { + return file, nil, nil + } + + adjust := func(orig, src []byte) []byte { + // Remove the package clause. + src = src[len(prefix):] + return matchSpace(orig, src) + } + return file, adjust, nil + } + // If the error is that the source file didn't begin with a + // declaration, fall through to try as a statement list. + // Stop and return on any other error. + if !strings.Contains(err.Error(), "expected declaration") { + return nil, nil, err + } + + // If this is a statement list, make it a source file + // by inserting a package clause and turning the list + // into a function body. This handles expressions too. + // Insert using a ;, not a newline, so that the line numbers + // in fsrc match the ones in src. + fsrc := append(append([]byte("package p; func _() {"), src...), '}') + file, err = parser.ParseFile(fset, filename, fsrc, parserMode) + if err == nil { + adjust := func(orig, src []byte) []byte { + // Remove the wrapping. + // Gofmt has turned the ; into a \n\n. + src = src[len("package p\n\nfunc _() {"):] + src = src[:len(src)-len("}\n")] + // Gofmt has also indented the function body one level. + // Remove that indent. + src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1) + return matchSpace(orig, src) + } + return file, adjust, nil + } + + // Failed, and out of options. + return nil, nil, err +} + +// containsMainFunc checks if a file contains a function declaration with the +// function signature 'func main()' +func containsMainFunc(file *ast.File) bool { + for _, decl := range file.Decls { + if f, ok := decl.(*ast.FuncDecl); ok { + if f.Name.Name != "main" { + continue + } + + if len(f.Type.Params.List) != 0 { + continue + } + + if f.Type.Results != nil && len(f.Type.Results.List) != 0 { + continue + } + + return true + } + } + + return false +} + +func cutSpace(b []byte) (before, middle, after []byte) { + i := 0 + for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') { + i++ + } + j := len(b) + for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') { + j-- + } + if i <= j { + return b[:i], b[i:j], b[j:] + } + return nil, nil, b[j:] +} + +// matchSpace reformats src to use the same space context as orig. +// 1. If orig begins with blank lines, matchSpace inserts them at the beginning of src. +// 2. matchSpace copies the indentation of the first non-blank line in orig +// to every non-blank line in src. +// 3. matchSpace copies the trailing space from orig and uses it in place +// of src's trailing space. 
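+//
+// For example, with orig "\n\n\tfoo()\n" and src "foo()\n", the result
+// is "\n\n\tfoo()\n": the leading blank lines and tab indent carry over.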
+func matchSpace(orig []byte, src []byte) []byte { + before, _, after := cutSpace(orig) + i := bytes.LastIndex(before, []byte{'\n'}) + before, indent := before[:i+1], before[i+1:] + + _, src, _ = cutSpace(src) + + var b bytes.Buffer + b.Write(before) + for len(src) > 0 { + line := src + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, src = line[:i+1], line[i+1:] + } else { + src = nil + } + if len(line) > 0 && line[0] != '\n' { // not blank + b.Write(indent) + } + b.Write(line) + } + b.Write(after) + return b.Bytes() +} + +var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`) + +func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) { + var out bytes.Buffer + in := bufio.NewReader(r) + inImports := false + done := false + for { + s, err := in.ReadString('\n') + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + if !inImports && !done && strings.HasPrefix(s, "import") { + inImports = true + } + if inImports && (strings.HasPrefix(s, "var") || + strings.HasPrefix(s, "func") || + strings.HasPrefix(s, "const") || + strings.HasPrefix(s, "type")) { + done = true + inImports = false + } + if inImports && len(breaks) > 0 { + if m := impLine.FindStringSubmatch(s); m != nil { + if m[1] == breaks[0] { + out.WriteByte('\n') + breaks = breaks[1:] + } + } + } + + fmt.Fprint(&out, s) + } + return out.Bytes(), nil +} diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go new file mode 100644 index 00000000..7d99d04c --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mod.go @@ -0,0 +1,716 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + + "golang.org/x/mod/module" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/gopathwalk" +) + +// ModuleResolver implements resolver for modules using the go command as little +// as feasible. +type ModuleResolver struct { + env *ProcessEnv + moduleCacheDir string + dummyVendorMod *gocommand.ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory. + roots []gopathwalk.Root + scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots. + scannedRoots map[gopathwalk.Root]bool + + initialized bool + mains []*gocommand.ModuleJSON + mainByDir map[string]*gocommand.ModuleJSON + modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path... + modsByDir []*gocommand.ModuleJSON // ...or Dir. + + // moduleCacheCache stores information about the module cache. 
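+ // Entries for the module cache are reusable across scans because
+ // downloaded modules do not change, so ClearForNewScan below resets
+ // only otherCache.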
+ moduleCacheCache *dirInfoCache + otherCache *dirInfoCache +} + +func newModuleResolver(e *ProcessEnv) *ModuleResolver { + r := &ModuleResolver{ + env: e, + scanSema: make(chan struct{}, 1), + } + r.scanSema <- struct{}{} + return r +} + +func (r *ModuleResolver) init() error { + if r.initialized { + return nil + } + + goenv, err := r.env.goEnv() + if err != nil { + return err + } + inv := gocommand.Invocation{ + BuildFlags: r.env.BuildFlags, + ModFlag: r.env.ModFlag, + ModFile: r.env.ModFile, + Env: r.env.env(), + Logf: r.env.Logf, + WorkingDir: r.env.WorkingDir, + } + + vendorEnabled := false + var mainModVendor *gocommand.ModuleJSON + + // Module vendor directories are ignored in workspace mode: + // https://go.googlesource.com/proposal/+/master/design/45713-workspace.md + if len(r.env.Env["GOWORK"]) == 0 { + vendorEnabled, mainModVendor, err = gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner) + if err != nil { + return err + } + } + + if mainModVendor != nil && vendorEnabled { + // Vendor mode is on, so all the non-Main modules are irrelevant, + // and we need to search /vendor for everything. + r.mains = []*gocommand.ModuleJSON{mainModVendor} + r.dummyVendorMod = &gocommand.ModuleJSON{ + Path: "", + Dir: filepath.Join(mainModVendor.Dir, "vendor"), + } + r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod} + r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod} + } else { + // Vendor mode is off, so run go list -m ... to find everything. + err := r.initAllMods() + // We expect an error when running outside of a module with + // GO111MODULE=on. Other errors are fatal. + if err != nil { + if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") { + return err + } + } + } + + if gmc := r.env.Env["GOMODCACHE"]; gmc != "" { + r.moduleCacheDir = gmc + } else { + gopaths := filepath.SplitList(goenv["GOPATH"]) + if len(gopaths) == 0 { + return fmt.Errorf("empty GOPATH") + } + r.moduleCacheDir = filepath.Join(gopaths[0], "/pkg/mod") + } + + sort.Slice(r.modsByModPath, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByModPath[x].Path, "/") + } + return count(j) < count(i) // descending order + }) + sort.Slice(r.modsByDir, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByDir[x].Dir, "/") + } + return count(j) < count(i) // descending order + }) + + r.roots = []gopathwalk.Root{ + {Path: filepath.Join(goenv["GOROOT"], "/src"), Type: gopathwalk.RootGOROOT}, + } + r.mainByDir = make(map[string]*gocommand.ModuleJSON) + for _, main := range r.mains { + r.roots = append(r.roots, gopathwalk.Root{Path: main.Dir, Type: gopathwalk.RootCurrentModule}) + r.mainByDir[main.Dir] = main + } + if vendorEnabled { + r.roots = append(r.roots, gopathwalk.Root{Path: r.dummyVendorMod.Dir, Type: gopathwalk.RootOther}) + } else { + addDep := func(mod *gocommand.ModuleJSON) { + if mod.Replace == nil { + // This is redundant with the cache, but we'll skip it cheaply enough. + r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootModuleCache}) + } else { + r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootOther}) + } + } + // Walk dependent modules before scanning the full mod cache, direct deps first. 
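+ // Direct dependencies are likelier to be the wanted import, so
+ // their roots are queued ahead of indirect ones.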
+ for _, mod := range r.modsByModPath { + if !mod.Indirect && !mod.Main { + addDep(mod) + } + } + for _, mod := range r.modsByModPath { + if mod.Indirect && !mod.Main { + addDep(mod) + } + } + r.roots = append(r.roots, gopathwalk.Root{Path: r.moduleCacheDir, Type: gopathwalk.RootModuleCache}) + } + + r.scannedRoots = map[gopathwalk.Root]bool{} + if r.moduleCacheCache == nil { + r.moduleCacheCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + } + if r.otherCache == nil { + r.otherCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + } + r.initialized = true + return nil +} + +func (r *ModuleResolver) initAllMods() error { + stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-e", "-json", "...") + if err != nil { + return err + } + for dec := json.NewDecoder(stdout); dec.More(); { + mod := &gocommand.ModuleJSON{} + if err := dec.Decode(mod); err != nil { + return err + } + if mod.Dir == "" { + if r.env.Logf != nil { + r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path) + } + // Can't do anything with a module that's not downloaded. + continue + } + // golang/go#36193: the go command doesn't always clean paths. + mod.Dir = filepath.Clean(mod.Dir) + r.modsByModPath = append(r.modsByModPath, mod) + r.modsByDir = append(r.modsByDir, mod) + if mod.Main { + r.mains = append(r.mains, mod) + } + } + return nil +} + +func (r *ModuleResolver) ClearForNewScan() { + <-r.scanSema + r.scannedRoots = map[gopathwalk.Root]bool{} + r.otherCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + r.scanSema <- struct{}{} +} + +func (r *ModuleResolver) ClearForNewMod() { + <-r.scanSema + *r = ModuleResolver{ + env: r.env, + moduleCacheCache: r.moduleCacheCache, + otherCache: r.otherCache, + scanSema: r.scanSema, + } + r.init() + r.scanSema <- struct{}{} +} + +// findPackage returns the module and directory that contains the package at +// the given import path, or returns nil, "" if no module is in scope. +func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) { + // This can't find packages in the stdlib, but that's harmless for all + // the existing code paths. + for _, m := range r.modsByModPath { + if !strings.HasPrefix(importPath, m.Path) { + continue + } + pathInModule := importPath[len(m.Path):] + pkgDir := filepath.Join(m.Dir, pathInModule) + if r.dirIsNestedModule(pkgDir, m) { + continue + } + + if info, ok := r.cacheLoad(pkgDir); ok { + if loaded, err := info.reachedStatus(nameLoaded); loaded { + if err != nil { + continue // No package in this dir. + } + return m, pkgDir + } + if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil { + continue // Dir is unreadable, etc. + } + // This is slightly wrong: a directory doesn't have to have an + // importable package to count as a package for package-to-module + // resolution. package main or _test files should count but + // don't. + // TODO(heschi): fix this. + if _, err := r.cachePackageName(info); err == nil { + return m, pkgDir + } + } + + // Not cached. Read the filesystem. + pkgFiles, err := ioutil.ReadDir(pkgDir) + if err != nil { + continue + } + // A module only contains a package if it has buildable go + // files in that directory. If not, it could be provided by an + // outer module. See #29736. 
+ for _, fi := range pkgFiles { + if ok, _ := r.env.matchFile(pkgDir, fi.Name()); ok { + return m, pkgDir + } + } + } + return nil, "" +} + +func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) { + if info, ok := r.moduleCacheCache.Load(dir); ok { + return info, ok + } + return r.otherCache.Load(dir) +} + +func (r *ModuleResolver) cacheStore(info directoryPackageInfo) { + if info.rootType == gopathwalk.RootModuleCache { + r.moduleCacheCache.Store(info.dir, info) + } else { + r.otherCache.Store(info.dir, info) + } +} + +func (r *ModuleResolver) cacheKeys() []string { + return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...) +} + +// cachePackageName caches the package name for a dir already in the cache. +func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) { + if info.rootType == gopathwalk.RootModuleCache { + return r.moduleCacheCache.CachePackageName(info) + } + return r.otherCache.CachePackageName(info) +} + +func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { + if info.rootType == gopathwalk.RootModuleCache { + return r.moduleCacheCache.CacheExports(ctx, env, info) + } + return r.otherCache.CacheExports(ctx, env, info) +} + +// findModuleByDir returns the module that contains dir, or nil if no such +// module is in scope. +func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON { + // This is quite tricky and may not be correct. dir could be: + // - a package in the main module. + // - a replace target underneath the main module's directory. + // - a nested module in the above. + // - a replace target somewhere totally random. + // - a nested module in the above. + // - in the mod cache. + // - in /vendor/ in -mod=vendor mode. + // - nested module? Dunno. + // Rumor has it that replace targets cannot contain other replace targets. + for _, m := range r.modsByDir { + if !strings.HasPrefix(dir, m.Dir) { + continue + } + + if r.dirIsNestedModule(dir, m) { + continue + } + + return m + } + return nil +} + +// dirIsNestedModule reports if dir is contained in a nested module underneath +// mod, not actually in mod. +func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON) bool { + if !strings.HasPrefix(dir, mod.Dir) { + return false + } + if r.dirInModuleCache(dir) { + // Nested modules in the module cache are pruned, + // so it cannot be a nested module. + return false + } + if mod != nil && mod == r.dummyVendorMod { + // The /vendor pseudomodule is flattened and doesn't actually count. 
+ return false + } + modDir, _ := r.modInfo(dir) + if modDir == "" { + return false + } + return modDir != mod.Dir +} + +func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) { + readModName := func(modFile string) string { + modBytes, err := ioutil.ReadFile(modFile) + if err != nil { + return "" + } + return modulePath(modBytes) + } + + if r.dirInModuleCache(dir) { + if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 { + index := strings.Index(dir, matches[1]+"@"+matches[2]) + modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2]) + return modDir, readModName(filepath.Join(modDir, "go.mod")) + } + } + for { + if info, ok := r.cacheLoad(dir); ok { + return info.moduleDir, info.moduleName + } + f := filepath.Join(dir, "go.mod") + info, err := os.Stat(f) + if err == nil && !info.IsDir() { + return dir, readModName(f) + } + + d := filepath.Dir(dir) + if len(d) >= len(dir) { + return "", "" // reached top of file system, no go.mod + } + dir = d + } +} + +func (r *ModuleResolver) dirInModuleCache(dir string) bool { + if r.moduleCacheDir == "" { + return false + } + return strings.HasPrefix(dir, r.moduleCacheDir) +} + +func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + if err := r.init(); err != nil { + return nil, err + } + names := map[string]string{} + for _, path := range importPaths { + _, packageDir := r.findPackage(path) + if packageDir == "" { + continue + } + name, err := packageDirToName(packageDir) + if err != nil { + continue + } + names[path] = name + } + return names, nil +} + +func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error { + if err := r.init(); err != nil { + return err + } + + processDir := func(info directoryPackageInfo) { + // Skip this directory if we were not able to get the package information successfully. + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return + } + pkg, err := r.canonicalize(info) + if err != nil { + return + } + + if !callback.dirFound(pkg) { + return + } + pkg.packageName, err = r.cachePackageName(info) + if err != nil { + return + } + + if !callback.packageNameLoaded(pkg) { + return + } + _, exports, err := r.loadExports(ctx, pkg, false) + if err != nil { + return + } + callback.exportsLoaded(pkg, exports) + } + + // Start processing everything in the cache, and listen for the new stuff + // we discover in the walk below. + stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir) + defer stop1() + stop2 := r.otherCache.ScanAndListen(ctx, processDir) + defer stop2() + + // We assume cached directories are fully cached, including all their + // children, and have not changed. We can skip them. + skip := func(root gopathwalk.Root, dir string) bool { + if r.env.SkipPathInScan != nil && root.Type == gopathwalk.RootCurrentModule { + if root.Path == dir { + return false + } + + if r.env.SkipPathInScan(filepath.Clean(dir)) { + return true + } + } + + info, ok := r.cacheLoad(dir) + if !ok { + return false + } + // This directory can be skipped as long as we have already scanned it. + // Packages with errors will continue to have errors, so there is no need + // to rescan them. + packageScanned, _ := info.reachedStatus(directoryScanned) + return packageScanned + } + + // Add anything new to the cache, and process it if we're still listening. 
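+	// Newly stored directories trigger the ScanAndListen listeners above,
+	// which feed them to processDir.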
+ add := func(root gopathwalk.Root, dir string) { + r.cacheStore(r.scanDirForPackage(root, dir)) + } + + // r.roots and the callback are not necessarily safe to use in the + // goroutine below. Process them eagerly. + roots := filterRoots(r.roots, callback.rootFound) + // We can't cancel walks, because we need them to finish to have a usable + // cache. Instead, run them in a separate goroutine and detach. + scanDone := make(chan struct{}) + go func() { + select { + case <-ctx.Done(): + return + case <-r.scanSema: + } + defer func() { r.scanSema <- struct{}{} }() + // We have the lock on r.scannedRoots, and no other scans can run. + for _, root := range roots { + if ctx.Err() != nil { + return + } + + if r.scannedRoots[root] { + continue + } + gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: true}) + r.scannedRoots[root] = true + } + close(scanDone) + }() + select { + case <-ctx.Done(): + case <-scanDone: + } + return nil +} + +func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 { + if _, ok := stdlib[path]; ok { + return MaxRelevance + } + mod, _ := r.findPackage(path) + return modRelevance(mod) +} + +func modRelevance(mod *gocommand.ModuleJSON) float64 { + var relevance float64 + switch { + case mod == nil: // out of scope + return MaxRelevance - 4 + case mod.Indirect: + relevance = MaxRelevance - 3 + case !mod.Main: + relevance = MaxRelevance - 2 + default: + relevance = MaxRelevance - 1 // main module ties with stdlib + } + + _, versionString, ok := module.SplitPathVersion(mod.Path) + if ok { + index := strings.Index(versionString, "v") + if index == -1 { + return relevance + } + if versionNumber, err := strconv.ParseFloat(versionString[index+1:], 64); err == nil { + relevance += versionNumber / 1000 + } + } + + return relevance +} + +// canonicalize gets the result of canonicalizing the packages using the results +// of initializing the resolver from 'go list -m'. +func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) { + // Packages in GOROOT are already canonical, regardless of the std/cmd modules. + if info.rootType == gopathwalk.RootGOROOT { + return &pkg{ + importPathShort: info.nonCanonicalImportPath, + dir: info.dir, + packageName: path.Base(info.nonCanonicalImportPath), + relevance: MaxRelevance, + }, nil + } + + importPath := info.nonCanonicalImportPath + mod := r.findModuleByDir(info.dir) + // Check if the directory is underneath a module that's in scope. + if mod != nil { + // It is. If dir is the target of a replace directive, + // our guessed import path is wrong. Use the real one. + if mod.Dir == info.dir { + importPath = mod.Path + } else { + dirInMod := info.dir[len(mod.Dir)+len("/"):] + importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod)) + } + } else if !strings.HasPrefix(importPath, info.moduleName) { + // The module's name doesn't match the package's import path. It + // probably needs a replace directive we don't have. + return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir) + } + + res := &pkg{ + importPathShort: importPath, + dir: info.dir, + relevance: modRelevance(mod), + } + // We may have discovered a package that has a different version + // in scope already. Canonicalize to that one if possible. 
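+	// For example (hypothetical layout): a package first scanned in the
+	// module cache under example.com/m@v1.1.0/sub is re-homed to a replace
+	// target's directory when go.mod replaces example.com/m with a local copy.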
+ if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" { + res.dir = canonicalDir + } + return res, nil +} + +func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { + if err := r.init(); err != nil { + return "", nil, err + } + if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest { + return r.cacheExports(ctx, r.env, info) + } + return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest) +} + +func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo { + subdir := "" + if dir != root.Path { + subdir = dir[len(root.Path)+len("/"):] + } + importPath := filepath.ToSlash(subdir) + if strings.HasPrefix(importPath, "vendor/") { + // Only enter vendor directories if they're explicitly requested as a root. + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("unwanted vendor directory"), + } + } + switch root.Type { + case gopathwalk.RootCurrentModule: + importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir)) + case gopathwalk.RootModuleCache: + matches := modCacheRegexp.FindStringSubmatch(subdir) + if len(matches) == 0 { + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("invalid module cache path: %v", subdir), + } + } + modPath, err := module.UnescapePath(filepath.ToSlash(matches[1])) + if err != nil { + if r.env.Logf != nil { + r.env.Logf("decoding module cache path %q: %v", subdir, err) + } + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("decoding module cache path %q: %v", subdir, err), + } + } + importPath = path.Join(modPath, filepath.ToSlash(matches[3])) + } + + modDir, modName := r.modInfo(dir) + result := directoryPackageInfo{ + status: directoryScanned, + dir: dir, + rootType: root.Type, + nonCanonicalImportPath: importPath, + moduleDir: modDir, + moduleName: modName, + } + if root.Type == gopathwalk.RootGOROOT { + // stdlib packages are always in scope, despite the confusing go.mod + return result + } + return result +} + +// modCacheRegexp splits a path in a module cache into module, module version, and package. +var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) + +var ( + slashSlash = []byte("//") + moduleStr = []byte("module") +) + +// modulePath returns the module path from the gomod file text. +// If it cannot find a module path, it returns an empty string. +// It is tolerant of unrelated problems in the go.mod file. +// +// Copied from cmd/go/internal/modfile. 
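+//
+// For example, given contents that begin
+//
+//	module example.com/m
+//
+// it returns "example.com/m" (example.com/m is an illustrative module name;
+// a quoted path such as `module "example.com/m"` is unquoted first).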
+func modulePath(mod []byte) string {
+	for len(mod) > 0 {
+		line := mod
+		mod = nil
+		if i := bytes.IndexByte(line, '\n'); i >= 0 {
+			line, mod = line[:i], line[i+1:]
+		}
+		if i := bytes.Index(line, slashSlash); i >= 0 {
+			line = line[:i]
+		}
+		line = bytes.TrimSpace(line)
+		if !bytes.HasPrefix(line, moduleStr) {
+			continue
+		}
+		line = line[len(moduleStr):]
+		n := len(line)
+		line = bytes.TrimSpace(line)
+		if len(line) == n || len(line) == 0 {
+			continue
+		}
+
+		if line[0] == '"' || line[0] == '`' {
+			p, err := strconv.Unquote(string(line))
+			if err != nil {
+				return "" // malformed quoted string or multiline module path
+			}
+			return p
+		}
+
+		return string(line)
+	}
+	return "" // missing module path
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
new file mode 100644
index 00000000..18dada49
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -0,0 +1,236 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+	"context"
+	"fmt"
+	"sync"
+
+	"golang.org/x/tools/internal/gopathwalk"
+)
+
+// To find packages to import, the resolver needs to know about all of
+// the packages that could be imported. This includes packages in (1) the
+// current module, (2) replace targets, and (3) the module cache. Packages in
+// (1) and (2) may change over time, as the client may edit the current module
+// and locally replaced modules. The module cache (which includes all of the
+// packages in (3)) can only ever be added to.
+//
+// The resolver can thus save state about packages in the module cache
+// and guarantee that this will not change over time. To obtain information
+// about new modules added to the module cache, the module cache should be
+// rescanned.
+//
+// It is OK to serve information about modules that have been deleted,
+// as they do still exist.
+// TODO(suzmue): can we share information with the caller about
+// what module needs to be downloaded to import this package?
+
+type directoryPackageStatus int
+
+const (
+	_ directoryPackageStatus = iota
+	directoryScanned
+	nameLoaded
+	exportsLoaded
+)
+
+type directoryPackageInfo struct {
+	// status indicates the extent to which this struct has been filled in.
+	status directoryPackageStatus
+	// err is non-nil when there was an error trying to reach status.
+	err error
+
+	// Set when status >= directoryScanned.
+
+	// dir is the absolute directory of this package.
+	dir      string
+	rootType gopathwalk.RootType
+	// nonCanonicalImportPath is the package's expected import path. It may
+	// not actually be importable at that path.
+	nonCanonicalImportPath string
+
+	// Module-related information.
+	moduleDir  string // The directory that is the module root of this dir.
+	moduleName string // The module name that contains this dir.
+
+	// Set when status >= nameLoaded.
+
+	packageName string // the package name, as declared in the source.
+
+	// Set when status >= exportsLoaded.
+
+	exports []string
+}
+
+// reachedStatus reports whether info has reached at least the target status,
+// along with any error recorded while attempting to reach it.
+func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) { + if info.err == nil { + return info.status >= target, nil + } + if info.status == target { + return true, info.err + } + return true, nil +} + +// dirInfoCache is a concurrency safe map for storing information about +// directories that may contain packages. +// +// The information in this cache is built incrementally. Entries are initialized in scan. +// No new keys should be added in any other functions, as all directories containing +// packages are identified in scan. +// +// Other functions, including loadExports and findPackage, may update entries in this cache +// as they discover new things about the directory. +// +// The information in the cache is not expected to change for the cache's +// lifetime, so there is no protection against competing writes. Users should +// take care not to hold the cache across changes to the underlying files. +// +// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc) +type dirInfoCache struct { + mu sync.Mutex + // dirs stores information about packages in directories, keyed by absolute path. + dirs map[string]*directoryPackageInfo + listeners map[*int]cacheListener +} + +type cacheListener func(directoryPackageInfo) + +// ScanAndListen calls listener on all the items in the cache, and on anything +// newly added. The returned stop function waits for all in-flight callbacks to +// finish and blocks new ones. +func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() { + ctx, cancel := context.WithCancel(ctx) + + // Flushing out all the callbacks is tricky without knowing how many there + // are going to be. Setting an arbitrary limit makes it much easier. + const maxInFlight = 10 + sema := make(chan struct{}, maxInFlight) + for i := 0; i < maxInFlight; i++ { + sema <- struct{}{} + } + + cookie := new(int) // A unique ID we can use for the listener. + + // We can't hold mu while calling the listener. + d.mu.Lock() + var keys []string + for key := range d.dirs { + keys = append(keys, key) + } + d.listeners[cookie] = func(info directoryPackageInfo) { + select { + case <-ctx.Done(): + return + case <-sema: + } + listener(info) + sema <- struct{}{} + } + d.mu.Unlock() + + stop := func() { + cancel() + d.mu.Lock() + delete(d.listeners, cookie) + d.mu.Unlock() + for i := 0; i < maxInFlight; i++ { + <-sema + } + } + + // Process the pre-existing keys. + for _, k := range keys { + select { + case <-ctx.Done(): + return stop + default: + } + if v, ok := d.Load(k); ok { + listener(v) + } + } + + return stop +} + +// Store stores the package info for dir. +func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) { + d.mu.Lock() + _, old := d.dirs[dir] + d.dirs[dir] = &info + var listeners []cacheListener + for _, l := range d.listeners { + listeners = append(listeners, l) + } + d.mu.Unlock() + + if !old { + for _, l := range listeners { + l(info) + } + } +} + +// Load returns a copy of the directoryPackageInfo for absolute directory dir. +func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) { + d.mu.Lock() + defer d.mu.Unlock() + info, ok := d.dirs[dir] + if !ok { + return directoryPackageInfo{}, false + } + return *info, true +} + +// Keys returns the keys currently present in d. 
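+// The result is a point-in-time snapshot; the cache may gain entries as soon
+// as the lock is released.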
+func (d *dirInfoCache) Keys() (keys []string) { + d.mu.Lock() + defer d.mu.Unlock() + for key := range d.dirs { + keys = append(keys, key) + } + return keys +} + +func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) { + if loaded, err := info.reachedStatus(nameLoaded); loaded { + return info.packageName, err + } + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return "", fmt.Errorf("cannot read package name, scan error: %v", err) + } + info.packageName, info.err = packageDirToName(info.dir) + info.status = nameLoaded + d.Store(info.dir, info) + return info.packageName, info.err +} + +func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { + if reached, _ := info.reachedStatus(exportsLoaded); reached { + return info.packageName, info.exports, info.err + } + if reached, err := info.reachedStatus(nameLoaded); reached && err != nil { + return "", nil, err + } + info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false) + if info.err == context.Canceled || info.err == context.DeadlineExceeded { + return info.packageName, info.exports, info.err + } + // The cache structure wants things to proceed linearly. We can skip a + // step here, but only if we succeed. + if info.status == nameLoaded || info.err == nil { + info.status = exportsLoaded + } else { + info.status = nameLoaded + } + d.Store(info.dir, info) + return info.packageName, info.exports, info.err +} diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go new file mode 100644 index 00000000..1a0a7ebd --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go @@ -0,0 +1,297 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Hacked up copy of go/ast/import.go +// Modified to use a single token.File in preference to a FileSet. + +package imports + +import ( + "go/ast" + "go/token" + "log" + "sort" + "strconv" +) + +// sortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data loss. +// +// It may mutate the token.File. +func sortImports(localPrefix string, tokFile *token.File, f *ast.File) { + for i, d := range f.Decls { + d, ok := d.(*ast.GenDecl) + if !ok || d.Tok != token.IMPORT { + // Not an import declaration, so we're done. + // Imports are always first. + break + } + + if len(d.Specs) == 0 { + // Empty import block, remove it. + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + } + + if !d.Lparen.IsValid() { + // Not a block: sorted by default. + continue + } + + // Identify and sort runs of specs on successive lines. + i := 0 + specs := d.Specs[:0] + for j, s := range d.Specs { + if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) { + // j begins a new run. End this one. + specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...) + i = j + } + } + specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...) + d.Specs = specs + + // Deduping can leave a blank line before the rparen; clean that up. + // Ignore line directives. 
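+		// (PositionFor with adjusted=false yields positions that ignore
+		// //line directives.)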
+ if len(d.Specs) > 0 { + lastSpec := d.Specs[len(d.Specs)-1] + lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line + if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 { + tokFile.MergeLine(rParenLine - 1) // has side effects! + } + } + } +} + +// mergeImports merges all the import declarations into the first one. +// Taken from golang.org/x/tools/ast/astutil. +// This does not adjust line numbers properly +func mergeImports(f *ast.File) { + if len(f.Decls) <= 1 { + return + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } +} + +// declImports reports whether gen contains an import of path. +// Taken from golang.org/x/tools/ast/astutil. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +func importPath(s ast.Spec) string { + t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value) + if err == nil { + return t + } + return "" +} + +func importName(s ast.Spec) string { + n := s.(*ast.ImportSpec).Name + if n == nil { + return "" + } + return n.Name +} + +func importComment(s ast.Spec) string { + c := s.(*ast.ImportSpec).Comment + if c == nil { + return "" + } + return c.Text() +} + +// collapse indicates whether prev may be removed, leaving only next. +func collapse(prev, next ast.Spec) bool { + if importPath(next) != importPath(prev) || importName(next) != importName(prev) { + return false + } + return prev.(*ast.ImportSpec).Comment == nil +} + +type posSpan struct { + Start token.Pos + End token.Pos +} + +// sortSpecs sorts the import specs within each import decl. +// It may mutate the token.File. +func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. + // A lone import, however, may be safely ignored. + if len(specs) <= 1 { + return specs + } + + // Record positions for specs. + pos := make([]posSpan, len(specs)) + for i, s := range specs { + pos[i] = posSpan{s.Pos(), s.End()} + } + + // Identify comments in this range. + // Any comment from pos[0].Start to the final line counts. + lastLine := tokFile.Line(pos[len(pos)-1].End) + cstart := len(f.Comments) + cend := len(f.Comments) + for i, g := range f.Comments { + if g.Pos() < pos[0].Start { + continue + } + if i < cstart { + cstart = i + } + if tokFile.Line(g.End()) > lastLine { + cend = i + break + } + } + comments := f.Comments[cstart:cend] + + // Assign each comment to the import spec preceding it. 
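+	// A comment group is attributed to the spec it follows (e.g. a trailing
+	// same-line comment), so it moves with that import when the specs are
+	// reordered below.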
+ importComment := map[*ast.ImportSpec][]*ast.CommentGroup{} + specIndex := 0 + for _, g := range comments { + for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() { + specIndex++ + } + s := specs[specIndex].(*ast.ImportSpec) + importComment[s] = append(importComment[s], g) + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to abut the end of its spec. + // Sort the comments by new position. + sort.Sort(byImportSpec{localPrefix, specs}) + + // Dedup. Thanks to our sorting, we can just consider + // adjacent pairs of imports. + deduped := specs[:0] + for i, s := range specs { + if i == len(specs)-1 || !collapse(s, specs[i+1]) { + deduped = append(deduped, s) + } else { + p := s.Pos() + tokFile.MergeLine(tokFile.Line(p)) // has side effects! + } + } + specs = deduped + + // Fix up comment positions + for i, s := range specs { + s := s.(*ast.ImportSpec) + if s.Name != nil { + s.Name.NamePos = pos[i].Start + } + s.Path.ValuePos = pos[i].Start + s.EndPos = pos[i].End + nextSpecPos := pos[i].End + + for _, g := range importComment[s] { + for _, c := range g.List { + c.Slash = pos[i].End + nextSpecPos = c.End() + } + } + if i < len(specs)-1 { + pos[i+1].Start = nextSpecPos + pos[i+1].End = nextSpecPos + } + } + + sort.Sort(byCommentPos(comments)) + + // Fixup comments can insert blank lines, because import specs are on different lines. + // We remove those blank lines here by merging import spec to the first import spec line. + firstSpecLine := tokFile.Line(specs[0].Pos()) + for _, s := range specs[1:] { + p := s.Pos() + line := tokFile.Line(p) + for previousLine := line - 1; previousLine >= firstSpecLine; { + // MergeLine can panic. Avoid the panic at the cost of not removing the blank line + // golang/go#50329 + if previousLine > 0 && previousLine < tokFile.LineCount() { + tokFile.MergeLine(previousLine) // has side effects! + previousLine-- + } else { + // try to gather some data to diagnose how this could happen + req := "Please report what the imports section of your go file looked like." + log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. 
%s", + firstSpecLine, line, previousLine, tokFile.LineCount(), req) + } + } + } + return specs +} + +type byImportSpec struct { + localPrefix string + specs []ast.Spec // slice of *ast.ImportSpec +} + +func (x byImportSpec) Len() int { return len(x.specs) } +func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] } +func (x byImportSpec) Less(i, j int) bool { + ipath := importPath(x.specs[i]) + jpath := importPath(x.specs[j]) + + igroup := importGroup(x.localPrefix, ipath) + jgroup := importGroup(x.localPrefix, jpath) + if igroup != jgroup { + return igroup < jgroup + } + + if ipath != jpath { + return ipath < jpath + } + iname := importName(x.specs[i]) + jname := importName(x.specs[j]) + + if iname != jname { + return iname < jname + } + return importComment(x.specs[i]) < importComment(x.specs[j]) +} + +type byCommentPos []*ast.CommentGroup + +func (x byCommentPos) Len() int { return len(x) } +func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() } diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go new file mode 100644 index 00000000..31a75949 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/zstdlib.go @@ -0,0 +1,11115 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by mkstdlib.go. DO NOT EDIT. + +package imports + +var stdlib = map[string][]string{ + "archive/tar": { + "ErrFieldTooLong", + "ErrHeader", + "ErrInsecurePath", + "ErrWriteAfterClose", + "ErrWriteTooLong", + "FileInfoHeader", + "Format", + "FormatGNU", + "FormatPAX", + "FormatUSTAR", + "FormatUnknown", + "Header", + "NewReader", + "NewWriter", + "Reader", + "TypeBlock", + "TypeChar", + "TypeCont", + "TypeDir", + "TypeFifo", + "TypeGNULongLink", + "TypeGNULongName", + "TypeGNUSparse", + "TypeLink", + "TypeReg", + "TypeRegA", + "TypeSymlink", + "TypeXGlobalHeader", + "TypeXHeader", + "Writer", + }, + "archive/zip": { + "Compressor", + "Decompressor", + "Deflate", + "ErrAlgorithm", + "ErrChecksum", + "ErrFormat", + "ErrInsecurePath", + "File", + "FileHeader", + "FileInfoHeader", + "NewReader", + "NewWriter", + "OpenReader", + "ReadCloser", + "Reader", + "RegisterCompressor", + "RegisterDecompressor", + "Store", + "Writer", + }, + "bufio": { + "ErrAdvanceTooFar", + "ErrBadReadCount", + "ErrBufferFull", + "ErrFinalToken", + "ErrInvalidUnreadByte", + "ErrInvalidUnreadRune", + "ErrNegativeAdvance", + "ErrNegativeCount", + "ErrTooLong", + "MaxScanTokenSize", + "NewReadWriter", + "NewReader", + "NewReaderSize", + "NewScanner", + "NewWriter", + "NewWriterSize", + "ReadWriter", + "Reader", + "ScanBytes", + "ScanLines", + "ScanRunes", + "ScanWords", + "Scanner", + "SplitFunc", + "Writer", + }, + "bytes": { + "Buffer", + "Clone", + "Compare", + "Contains", + "ContainsAny", + "ContainsRune", + "Count", + "Cut", + "CutPrefix", + "CutSuffix", + "Equal", + "EqualFold", + "ErrTooLarge", + "Fields", + "FieldsFunc", + "HasPrefix", + "HasSuffix", + "Index", + "IndexAny", + "IndexByte", + "IndexFunc", + "IndexRune", + "Join", + "LastIndex", + "LastIndexAny", + "LastIndexByte", + "LastIndexFunc", + "Map", + "MinRead", + "NewBuffer", + "NewBufferString", + "NewReader", + "Reader", + "Repeat", + "Replace", + "ReplaceAll", + "Runes", + "Split", + "SplitAfter", + "SplitAfterN", + "SplitN", + "Title", + "ToLower", + 
"ToLowerSpecial", + "ToTitle", + "ToTitleSpecial", + "ToUpper", + "ToUpperSpecial", + "ToValidUTF8", + "Trim", + "TrimFunc", + "TrimLeft", + "TrimLeftFunc", + "TrimPrefix", + "TrimRight", + "TrimRightFunc", + "TrimSpace", + "TrimSuffix", + }, + "compress/bzip2": { + "NewReader", + "StructuralError", + }, + "compress/flate": { + "BestCompression", + "BestSpeed", + "CorruptInputError", + "DefaultCompression", + "HuffmanOnly", + "InternalError", + "NewReader", + "NewReaderDict", + "NewWriter", + "NewWriterDict", + "NoCompression", + "ReadError", + "Reader", + "Resetter", + "WriteError", + "Writer", + }, + "compress/gzip": { + "BestCompression", + "BestSpeed", + "DefaultCompression", + "ErrChecksum", + "ErrHeader", + "Header", + "HuffmanOnly", + "NewReader", + "NewWriter", + "NewWriterLevel", + "NoCompression", + "Reader", + "Writer", + }, + "compress/lzw": { + "LSB", + "MSB", + "NewReader", + "NewWriter", + "Order", + "Reader", + "Writer", + }, + "compress/zlib": { + "BestCompression", + "BestSpeed", + "DefaultCompression", + "ErrChecksum", + "ErrDictionary", + "ErrHeader", + "HuffmanOnly", + "NewReader", + "NewReaderDict", + "NewWriter", + "NewWriterLevel", + "NewWriterLevelDict", + "NoCompression", + "Resetter", + "Writer", + }, + "container/heap": { + "Fix", + "Init", + "Interface", + "Pop", + "Push", + "Remove", + }, + "container/list": { + "Element", + "List", + "New", + }, + "container/ring": { + "New", + "Ring", + }, + "context": { + "Background", + "CancelCauseFunc", + "CancelFunc", + "Canceled", + "Cause", + "Context", + "DeadlineExceeded", + "TODO", + "WithCancel", + "WithCancelCause", + "WithDeadline", + "WithTimeout", + "WithValue", + }, + "crypto": { + "BLAKE2b_256", + "BLAKE2b_384", + "BLAKE2b_512", + "BLAKE2s_256", + "Decrypter", + "DecrypterOpts", + "Hash", + "MD4", + "MD5", + "MD5SHA1", + "PrivateKey", + "PublicKey", + "RIPEMD160", + "RegisterHash", + "SHA1", + "SHA224", + "SHA256", + "SHA384", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA512", + "SHA512_224", + "SHA512_256", + "Signer", + "SignerOpts", + }, + "crypto/aes": { + "BlockSize", + "KeySizeError", + "NewCipher", + }, + "crypto/cipher": { + "AEAD", + "Block", + "BlockMode", + "NewCBCDecrypter", + "NewCBCEncrypter", + "NewCFBDecrypter", + "NewCFBEncrypter", + "NewCTR", + "NewGCM", + "NewGCMWithNonceSize", + "NewGCMWithTagSize", + "NewOFB", + "Stream", + "StreamReader", + "StreamWriter", + }, + "crypto/des": { + "BlockSize", + "KeySizeError", + "NewCipher", + "NewTripleDESCipher", + }, + "crypto/dsa": { + "ErrInvalidPublicKey", + "GenerateKey", + "GenerateParameters", + "L1024N160", + "L2048N224", + "L2048N256", + "L3072N256", + "ParameterSizes", + "Parameters", + "PrivateKey", + "PublicKey", + "Sign", + "Verify", + }, + "crypto/ecdh": { + "Curve", + "P256", + "P384", + "P521", + "PrivateKey", + "PublicKey", + "X25519", + }, + "crypto/ecdsa": { + "GenerateKey", + "PrivateKey", + "PublicKey", + "Sign", + "SignASN1", + "Verify", + "VerifyASN1", + }, + "crypto/ed25519": { + "GenerateKey", + "NewKeyFromSeed", + "Options", + "PrivateKey", + "PrivateKeySize", + "PublicKey", + "PublicKeySize", + "SeedSize", + "Sign", + "SignatureSize", + "Verify", + "VerifyWithOptions", + }, + "crypto/elliptic": { + "Curve", + "CurveParams", + "GenerateKey", + "Marshal", + "MarshalCompressed", + "P224", + "P256", + "P384", + "P521", + "Unmarshal", + "UnmarshalCompressed", + }, + "crypto/hmac": { + "Equal", + "New", + }, + "crypto/md5": { + "BlockSize", + "New", + "Size", + "Sum", + }, + "crypto/rand": { + "Int", + "Prime", + 
"Read", + "Reader", + }, + "crypto/rc4": { + "Cipher", + "KeySizeError", + "NewCipher", + }, + "crypto/rsa": { + "CRTValue", + "DecryptOAEP", + "DecryptPKCS1v15", + "DecryptPKCS1v15SessionKey", + "EncryptOAEP", + "EncryptPKCS1v15", + "ErrDecryption", + "ErrMessageTooLong", + "ErrVerification", + "GenerateKey", + "GenerateMultiPrimeKey", + "OAEPOptions", + "PKCS1v15DecryptOptions", + "PSSOptions", + "PSSSaltLengthAuto", + "PSSSaltLengthEqualsHash", + "PrecomputedValues", + "PrivateKey", + "PublicKey", + "SignPKCS1v15", + "SignPSS", + "VerifyPKCS1v15", + "VerifyPSS", + }, + "crypto/sha1": { + "BlockSize", + "New", + "Size", + "Sum", + }, + "crypto/sha256": { + "BlockSize", + "New", + "New224", + "Size", + "Size224", + "Sum224", + "Sum256", + }, + "crypto/sha512": { + "BlockSize", + "New", + "New384", + "New512_224", + "New512_256", + "Size", + "Size224", + "Size256", + "Size384", + "Sum384", + "Sum512", + "Sum512_224", + "Sum512_256", + }, + "crypto/subtle": { + "ConstantTimeByteEq", + "ConstantTimeCompare", + "ConstantTimeCopy", + "ConstantTimeEq", + "ConstantTimeLessOrEq", + "ConstantTimeSelect", + "XORBytes", + }, + "crypto/tls": { + "Certificate", + "CertificateRequestInfo", + "CertificateVerificationError", + "CipherSuite", + "CipherSuiteName", + "CipherSuites", + "Client", + "ClientAuthType", + "ClientHelloInfo", + "ClientSessionCache", + "ClientSessionState", + "Config", + "Conn", + "ConnectionState", + "CurveID", + "CurveP256", + "CurveP384", + "CurveP521", + "Dial", + "DialWithDialer", + "Dialer", + "ECDSAWithP256AndSHA256", + "ECDSAWithP384AndSHA384", + "ECDSAWithP521AndSHA512", + "ECDSAWithSHA1", + "Ed25519", + "InsecureCipherSuites", + "Listen", + "LoadX509KeyPair", + "NewLRUClientSessionCache", + "NewListener", + "NoClientCert", + "PKCS1WithSHA1", + "PKCS1WithSHA256", + "PKCS1WithSHA384", + "PKCS1WithSHA512", + "PSSWithSHA256", + "PSSWithSHA384", + "PSSWithSHA512", + "RecordHeaderError", + "RenegotiateFreelyAsClient", + "RenegotiateNever", + "RenegotiateOnceAsClient", + "RenegotiationSupport", + "RequestClientCert", + "RequireAndVerifyClientCert", + "RequireAnyClientCert", + "Server", + "SignatureScheme", + "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", + "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_RSA_WITH_RC4_128_SHA", + "TLS_FALLBACK_SCSV", + "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + "TLS_RSA_WITH_AES_128_CBC_SHA", + "TLS_RSA_WITH_AES_128_CBC_SHA256", + "TLS_RSA_WITH_AES_128_GCM_SHA256", + "TLS_RSA_WITH_AES_256_CBC_SHA", + "TLS_RSA_WITH_AES_256_GCM_SHA384", + "TLS_RSA_WITH_RC4_128_SHA", + "VerifyClientCertIfGiven", + "VersionSSL30", + "VersionTLS10", + "VersionTLS11", + "VersionTLS12", + "VersionTLS13", + "X25519", + "X509KeyPair", + }, + "crypto/x509": { + "CANotAuthorizedForExtKeyUsage", + "CANotAuthorizedForThisName", + "CertPool", + "Certificate", + "CertificateInvalidError", + 
"CertificateRequest", + "ConstraintViolationError", + "CreateCertificate", + "CreateCertificateRequest", + "CreateRevocationList", + "DSA", + "DSAWithSHA1", + "DSAWithSHA256", + "DecryptPEMBlock", + "ECDSA", + "ECDSAWithSHA1", + "ECDSAWithSHA256", + "ECDSAWithSHA384", + "ECDSAWithSHA512", + "Ed25519", + "EncryptPEMBlock", + "ErrUnsupportedAlgorithm", + "Expired", + "ExtKeyUsage", + "ExtKeyUsageAny", + "ExtKeyUsageClientAuth", + "ExtKeyUsageCodeSigning", + "ExtKeyUsageEmailProtection", + "ExtKeyUsageIPSECEndSystem", + "ExtKeyUsageIPSECTunnel", + "ExtKeyUsageIPSECUser", + "ExtKeyUsageMicrosoftCommercialCodeSigning", + "ExtKeyUsageMicrosoftKernelCodeSigning", + "ExtKeyUsageMicrosoftServerGatedCrypto", + "ExtKeyUsageNetscapeServerGatedCrypto", + "ExtKeyUsageOCSPSigning", + "ExtKeyUsageServerAuth", + "ExtKeyUsageTimeStamping", + "HostnameError", + "IncompatibleUsage", + "IncorrectPasswordError", + "InsecureAlgorithmError", + "InvalidReason", + "IsEncryptedPEMBlock", + "KeyUsage", + "KeyUsageCRLSign", + "KeyUsageCertSign", + "KeyUsageContentCommitment", + "KeyUsageDataEncipherment", + "KeyUsageDecipherOnly", + "KeyUsageDigitalSignature", + "KeyUsageEncipherOnly", + "KeyUsageKeyAgreement", + "KeyUsageKeyEncipherment", + "MD2WithRSA", + "MD5WithRSA", + "MarshalECPrivateKey", + "MarshalPKCS1PrivateKey", + "MarshalPKCS1PublicKey", + "MarshalPKCS8PrivateKey", + "MarshalPKIXPublicKey", + "NameConstraintsWithoutSANs", + "NameMismatch", + "NewCertPool", + "NotAuthorizedToSign", + "PEMCipher", + "PEMCipher3DES", + "PEMCipherAES128", + "PEMCipherAES192", + "PEMCipherAES256", + "PEMCipherDES", + "ParseCRL", + "ParseCertificate", + "ParseCertificateRequest", + "ParseCertificates", + "ParseDERCRL", + "ParseECPrivateKey", + "ParsePKCS1PrivateKey", + "ParsePKCS1PublicKey", + "ParsePKCS8PrivateKey", + "ParsePKIXPublicKey", + "ParseRevocationList", + "PublicKeyAlgorithm", + "PureEd25519", + "RSA", + "RevocationList", + "SHA1WithRSA", + "SHA256WithRSA", + "SHA256WithRSAPSS", + "SHA384WithRSA", + "SHA384WithRSAPSS", + "SHA512WithRSA", + "SHA512WithRSAPSS", + "SetFallbackRoots", + "SignatureAlgorithm", + "SystemCertPool", + "SystemRootsError", + "TooManyConstraints", + "TooManyIntermediates", + "UnconstrainedName", + "UnhandledCriticalExtension", + "UnknownAuthorityError", + "UnknownPublicKeyAlgorithm", + "UnknownSignatureAlgorithm", + "VerifyOptions", + }, + "crypto/x509/pkix": { + "AlgorithmIdentifier", + "AttributeTypeAndValue", + "AttributeTypeAndValueSET", + "CertificateList", + "Extension", + "Name", + "RDNSequence", + "RelativeDistinguishedNameSET", + "RevokedCertificate", + "TBSCertificateList", + }, + "database/sql": { + "ColumnType", + "Conn", + "DB", + "DBStats", + "Drivers", + "ErrConnDone", + "ErrNoRows", + "ErrTxDone", + "IsolationLevel", + "LevelDefault", + "LevelLinearizable", + "LevelReadCommitted", + "LevelReadUncommitted", + "LevelRepeatableRead", + "LevelSerializable", + "LevelSnapshot", + "LevelWriteCommitted", + "Named", + "NamedArg", + "NullBool", + "NullByte", + "NullFloat64", + "NullInt16", + "NullInt32", + "NullInt64", + "NullString", + "NullTime", + "Open", + "OpenDB", + "Out", + "RawBytes", + "Register", + "Result", + "Row", + "Rows", + "Scanner", + "Stmt", + "Tx", + "TxOptions", + }, + "database/sql/driver": { + "Bool", + "ColumnConverter", + "Conn", + "ConnBeginTx", + "ConnPrepareContext", + "Connector", + "DefaultParameterConverter", + "Driver", + "DriverContext", + "ErrBadConn", + "ErrRemoveArgument", + "ErrSkip", + "Execer", + "ExecerContext", + "Int32", + "IsScanValue", + 
"IsValue", + "IsolationLevel", + "NamedValue", + "NamedValueChecker", + "NotNull", + "Null", + "Pinger", + "Queryer", + "QueryerContext", + "Result", + "ResultNoRows", + "Rows", + "RowsAffected", + "RowsColumnTypeDatabaseTypeName", + "RowsColumnTypeLength", + "RowsColumnTypeNullable", + "RowsColumnTypePrecisionScale", + "RowsColumnTypeScanType", + "RowsNextResultSet", + "SessionResetter", + "Stmt", + "StmtExecContext", + "StmtQueryContext", + "String", + "Tx", + "TxOptions", + "Validator", + "Value", + "ValueConverter", + "Valuer", + }, + "debug/buildinfo": { + "BuildInfo", + "Read", + "ReadFile", + }, + "debug/dwarf": { + "AddrType", + "ArrayType", + "Attr", + "AttrAbstractOrigin", + "AttrAccessibility", + "AttrAddrBase", + "AttrAddrClass", + "AttrAlignment", + "AttrAllocated", + "AttrArtificial", + "AttrAssociated", + "AttrBaseTypes", + "AttrBinaryScale", + "AttrBitOffset", + "AttrBitSize", + "AttrByteSize", + "AttrCallAllCalls", + "AttrCallAllSourceCalls", + "AttrCallAllTailCalls", + "AttrCallColumn", + "AttrCallDataLocation", + "AttrCallDataValue", + "AttrCallFile", + "AttrCallLine", + "AttrCallOrigin", + "AttrCallPC", + "AttrCallParameter", + "AttrCallReturnPC", + "AttrCallTailCall", + "AttrCallTarget", + "AttrCallTargetClobbered", + "AttrCallValue", + "AttrCalling", + "AttrCommonRef", + "AttrCompDir", + "AttrConstExpr", + "AttrConstValue", + "AttrContainingType", + "AttrCount", + "AttrDataBitOffset", + "AttrDataLocation", + "AttrDataMemberLoc", + "AttrDecimalScale", + "AttrDecimalSign", + "AttrDeclColumn", + "AttrDeclFile", + "AttrDeclLine", + "AttrDeclaration", + "AttrDefaultValue", + "AttrDefaulted", + "AttrDeleted", + "AttrDescription", + "AttrDigitCount", + "AttrDiscr", + "AttrDiscrList", + "AttrDiscrValue", + "AttrDwoName", + "AttrElemental", + "AttrEncoding", + "AttrEndianity", + "AttrEntrypc", + "AttrEnumClass", + "AttrExplicit", + "AttrExportSymbols", + "AttrExtension", + "AttrExternal", + "AttrFrameBase", + "AttrFriend", + "AttrHighpc", + "AttrIdentifierCase", + "AttrImport", + "AttrInline", + "AttrIsOptional", + "AttrLanguage", + "AttrLinkageName", + "AttrLocation", + "AttrLoclistsBase", + "AttrLowerBound", + "AttrLowpc", + "AttrMacroInfo", + "AttrMacros", + "AttrMainSubprogram", + "AttrMutable", + "AttrName", + "AttrNamelistItem", + "AttrNoreturn", + "AttrObjectPointer", + "AttrOrdering", + "AttrPictureString", + "AttrPriority", + "AttrProducer", + "AttrPrototyped", + "AttrPure", + "AttrRanges", + "AttrRank", + "AttrRecursive", + "AttrReference", + "AttrReturnAddr", + "AttrRnglistsBase", + "AttrRvalueReference", + "AttrSegment", + "AttrSibling", + "AttrSignature", + "AttrSmall", + "AttrSpecification", + "AttrStartScope", + "AttrStaticLink", + "AttrStmtList", + "AttrStrOffsetsBase", + "AttrStride", + "AttrStrideSize", + "AttrStringLength", + "AttrStringLengthBitSize", + "AttrStringLengthByteSize", + "AttrThreadsScaled", + "AttrTrampoline", + "AttrType", + "AttrUpperBound", + "AttrUseLocation", + "AttrUseUTF8", + "AttrVarParam", + "AttrVirtuality", + "AttrVisibility", + "AttrVtableElemLoc", + "BasicType", + "BoolType", + "CharType", + "Class", + "ClassAddrPtr", + "ClassAddress", + "ClassBlock", + "ClassConstant", + "ClassExprLoc", + "ClassFlag", + "ClassLinePtr", + "ClassLocList", + "ClassLocListPtr", + "ClassMacPtr", + "ClassRangeListPtr", + "ClassReference", + "ClassReferenceAlt", + "ClassReferenceSig", + "ClassRngList", + "ClassRngListsPtr", + "ClassStrOffsetsPtr", + "ClassString", + "ClassStringAlt", + "ClassUnknown", + "CommonType", + "ComplexType", + "Data", + 
"DecodeError", + "DotDotDotType", + "Entry", + "EnumType", + "EnumValue", + "ErrUnknownPC", + "Field", + "FloatType", + "FuncType", + "IntType", + "LineEntry", + "LineFile", + "LineReader", + "LineReaderPos", + "New", + "Offset", + "PtrType", + "QualType", + "Reader", + "StructField", + "StructType", + "Tag", + "TagAccessDeclaration", + "TagArrayType", + "TagAtomicType", + "TagBaseType", + "TagCallSite", + "TagCallSiteParameter", + "TagCatchDwarfBlock", + "TagClassType", + "TagCoarrayType", + "TagCommonDwarfBlock", + "TagCommonInclusion", + "TagCompileUnit", + "TagCondition", + "TagConstType", + "TagConstant", + "TagDwarfProcedure", + "TagDynamicType", + "TagEntryPoint", + "TagEnumerationType", + "TagEnumerator", + "TagFileType", + "TagFormalParameter", + "TagFriend", + "TagGenericSubrange", + "TagImmutableType", + "TagImportedDeclaration", + "TagImportedModule", + "TagImportedUnit", + "TagInheritance", + "TagInlinedSubroutine", + "TagInterfaceType", + "TagLabel", + "TagLexDwarfBlock", + "TagMember", + "TagModule", + "TagMutableType", + "TagNamelist", + "TagNamelistItem", + "TagNamespace", + "TagPackedType", + "TagPartialUnit", + "TagPointerType", + "TagPtrToMemberType", + "TagReferenceType", + "TagRestrictType", + "TagRvalueReferenceType", + "TagSetType", + "TagSharedType", + "TagSkeletonUnit", + "TagStringType", + "TagStructType", + "TagSubprogram", + "TagSubrangeType", + "TagSubroutineType", + "TagTemplateAlias", + "TagTemplateTypeParameter", + "TagTemplateValueParameter", + "TagThrownType", + "TagTryDwarfBlock", + "TagTypeUnit", + "TagTypedef", + "TagUnionType", + "TagUnspecifiedParameters", + "TagUnspecifiedType", + "TagVariable", + "TagVariant", + "TagVariantPart", + "TagVolatileType", + "TagWithStmt", + "Type", + "TypedefType", + "UcharType", + "UintType", + "UnspecifiedType", + "UnsupportedType", + "VoidType", + }, + "debug/elf": { + "ARM_MAGIC_TRAMP_NUMBER", + "COMPRESS_HIOS", + "COMPRESS_HIPROC", + "COMPRESS_LOOS", + "COMPRESS_LOPROC", + "COMPRESS_ZLIB", + "Chdr32", + "Chdr64", + "Class", + "CompressionType", + "DF_BIND_NOW", + "DF_ORIGIN", + "DF_STATIC_TLS", + "DF_SYMBOLIC", + "DF_TEXTREL", + "DT_ADDRRNGHI", + "DT_ADDRRNGLO", + "DT_AUDIT", + "DT_AUXILIARY", + "DT_BIND_NOW", + "DT_CHECKSUM", + "DT_CONFIG", + "DT_DEBUG", + "DT_DEPAUDIT", + "DT_ENCODING", + "DT_FEATURE", + "DT_FILTER", + "DT_FINI", + "DT_FINI_ARRAY", + "DT_FINI_ARRAYSZ", + "DT_FLAGS", + "DT_FLAGS_1", + "DT_GNU_CONFLICT", + "DT_GNU_CONFLICTSZ", + "DT_GNU_HASH", + "DT_GNU_LIBLIST", + "DT_GNU_LIBLISTSZ", + "DT_GNU_PRELINKED", + "DT_HASH", + "DT_HIOS", + "DT_HIPROC", + "DT_INIT", + "DT_INIT_ARRAY", + "DT_INIT_ARRAYSZ", + "DT_JMPREL", + "DT_LOOS", + "DT_LOPROC", + "DT_MIPS_AUX_DYNAMIC", + "DT_MIPS_BASE_ADDRESS", + "DT_MIPS_COMPACT_SIZE", + "DT_MIPS_CONFLICT", + "DT_MIPS_CONFLICTNO", + "DT_MIPS_CXX_FLAGS", + "DT_MIPS_DELTA_CLASS", + "DT_MIPS_DELTA_CLASSSYM", + "DT_MIPS_DELTA_CLASSSYM_NO", + "DT_MIPS_DELTA_CLASS_NO", + "DT_MIPS_DELTA_INSTANCE", + "DT_MIPS_DELTA_INSTANCE_NO", + "DT_MIPS_DELTA_RELOC", + "DT_MIPS_DELTA_RELOC_NO", + "DT_MIPS_DELTA_SYM", + "DT_MIPS_DELTA_SYM_NO", + "DT_MIPS_DYNSTR_ALIGN", + "DT_MIPS_FLAGS", + "DT_MIPS_GOTSYM", + "DT_MIPS_GP_VALUE", + "DT_MIPS_HIDDEN_GOTIDX", + "DT_MIPS_HIPAGENO", + "DT_MIPS_ICHECKSUM", + "DT_MIPS_INTERFACE", + "DT_MIPS_INTERFACE_SIZE", + "DT_MIPS_IVERSION", + "DT_MIPS_LIBLIST", + "DT_MIPS_LIBLISTNO", + "DT_MIPS_LOCALPAGE_GOTIDX", + "DT_MIPS_LOCAL_GOTIDX", + "DT_MIPS_LOCAL_GOTNO", + "DT_MIPS_MSYM", + "DT_MIPS_OPTIONS", + "DT_MIPS_PERF_SUFFIX", + "DT_MIPS_PIXIE_INIT", + 
"DT_MIPS_PLTGOT", + "DT_MIPS_PROTECTED_GOTIDX", + "DT_MIPS_RLD_MAP", + "DT_MIPS_RLD_MAP_REL", + "DT_MIPS_RLD_TEXT_RESOLVE_ADDR", + "DT_MIPS_RLD_VERSION", + "DT_MIPS_RWPLT", + "DT_MIPS_SYMBOL_LIB", + "DT_MIPS_SYMTABNO", + "DT_MIPS_TIME_STAMP", + "DT_MIPS_UNREFEXTNO", + "DT_MOVEENT", + "DT_MOVESZ", + "DT_MOVETAB", + "DT_NEEDED", + "DT_NULL", + "DT_PLTGOT", + "DT_PLTPAD", + "DT_PLTPADSZ", + "DT_PLTREL", + "DT_PLTRELSZ", + "DT_POSFLAG_1", + "DT_PPC64_GLINK", + "DT_PPC64_OPD", + "DT_PPC64_OPDSZ", + "DT_PPC64_OPT", + "DT_PPC_GOT", + "DT_PPC_OPT", + "DT_PREINIT_ARRAY", + "DT_PREINIT_ARRAYSZ", + "DT_REL", + "DT_RELA", + "DT_RELACOUNT", + "DT_RELAENT", + "DT_RELASZ", + "DT_RELCOUNT", + "DT_RELENT", + "DT_RELSZ", + "DT_RPATH", + "DT_RUNPATH", + "DT_SONAME", + "DT_SPARC_REGISTER", + "DT_STRSZ", + "DT_STRTAB", + "DT_SYMBOLIC", + "DT_SYMENT", + "DT_SYMINENT", + "DT_SYMINFO", + "DT_SYMINSZ", + "DT_SYMTAB", + "DT_SYMTAB_SHNDX", + "DT_TEXTREL", + "DT_TLSDESC_GOT", + "DT_TLSDESC_PLT", + "DT_USED", + "DT_VALRNGHI", + "DT_VALRNGLO", + "DT_VERDEF", + "DT_VERDEFNUM", + "DT_VERNEED", + "DT_VERNEEDNUM", + "DT_VERSYM", + "Data", + "Dyn32", + "Dyn64", + "DynFlag", + "DynTag", + "EI_ABIVERSION", + "EI_CLASS", + "EI_DATA", + "EI_NIDENT", + "EI_OSABI", + "EI_PAD", + "EI_VERSION", + "ELFCLASS32", + "ELFCLASS64", + "ELFCLASSNONE", + "ELFDATA2LSB", + "ELFDATA2MSB", + "ELFDATANONE", + "ELFMAG", + "ELFOSABI_86OPEN", + "ELFOSABI_AIX", + "ELFOSABI_ARM", + "ELFOSABI_AROS", + "ELFOSABI_CLOUDABI", + "ELFOSABI_FENIXOS", + "ELFOSABI_FREEBSD", + "ELFOSABI_HPUX", + "ELFOSABI_HURD", + "ELFOSABI_IRIX", + "ELFOSABI_LINUX", + "ELFOSABI_MODESTO", + "ELFOSABI_NETBSD", + "ELFOSABI_NONE", + "ELFOSABI_NSK", + "ELFOSABI_OPENBSD", + "ELFOSABI_OPENVMS", + "ELFOSABI_SOLARIS", + "ELFOSABI_STANDALONE", + "ELFOSABI_TRU64", + "EM_386", + "EM_486", + "EM_56800EX", + "EM_68HC05", + "EM_68HC08", + "EM_68HC11", + "EM_68HC12", + "EM_68HC16", + "EM_68K", + "EM_78KOR", + "EM_8051", + "EM_860", + "EM_88K", + "EM_960", + "EM_AARCH64", + "EM_ALPHA", + "EM_ALPHA_STD", + "EM_ALTERA_NIOS2", + "EM_AMDGPU", + "EM_ARC", + "EM_ARCA", + "EM_ARC_COMPACT", + "EM_ARC_COMPACT2", + "EM_ARM", + "EM_AVR", + "EM_AVR32", + "EM_BA1", + "EM_BA2", + "EM_BLACKFIN", + "EM_BPF", + "EM_C166", + "EM_CDP", + "EM_CE", + "EM_CLOUDSHIELD", + "EM_COGE", + "EM_COLDFIRE", + "EM_COOL", + "EM_COREA_1ST", + "EM_COREA_2ND", + "EM_CR", + "EM_CR16", + "EM_CRAYNV2", + "EM_CRIS", + "EM_CRX", + "EM_CSR_KALIMBA", + "EM_CUDA", + "EM_CYPRESS_M8C", + "EM_D10V", + "EM_D30V", + "EM_DSP24", + "EM_DSPIC30F", + "EM_DXP", + "EM_ECOG1", + "EM_ECOG16", + "EM_ECOG1X", + "EM_ECOG2", + "EM_ETPU", + "EM_EXCESS", + "EM_F2MC16", + "EM_FIREPATH", + "EM_FR20", + "EM_FR30", + "EM_FT32", + "EM_FX66", + "EM_H8S", + "EM_H8_300", + "EM_H8_300H", + "EM_H8_500", + "EM_HUANY", + "EM_IA_64", + "EM_INTEL205", + "EM_INTEL206", + "EM_INTEL207", + "EM_INTEL208", + "EM_INTEL209", + "EM_IP2K", + "EM_JAVELIN", + "EM_K10M", + "EM_KM32", + "EM_KMX16", + "EM_KMX32", + "EM_KMX8", + "EM_KVARC", + "EM_L10M", + "EM_LANAI", + "EM_LATTICEMICO32", + "EM_LOONGARCH", + "EM_M16C", + "EM_M32", + "EM_M32C", + "EM_M32R", + "EM_MANIK", + "EM_MAX", + "EM_MAXQ30", + "EM_MCHP_PIC", + "EM_MCST_ELBRUS", + "EM_ME16", + "EM_METAG", + "EM_MICROBLAZE", + "EM_MIPS", + "EM_MIPS_RS3_LE", + "EM_MIPS_RS4_BE", + "EM_MIPS_X", + "EM_MMA", + "EM_MMDSP_PLUS", + "EM_MMIX", + "EM_MN10200", + "EM_MN10300", + "EM_MOXIE", + "EM_MSP430", + "EM_NCPU", + "EM_NDR1", + "EM_NDS32", + "EM_NONE", + "EM_NORC", + "EM_NS32K", + "EM_OPEN8", + "EM_OPENRISC", + "EM_PARISC", + "EM_PCP", 
+ "EM_PDP10", + "EM_PDP11", + "EM_PDSP", + "EM_PJ", + "EM_PPC", + "EM_PPC64", + "EM_PRISM", + "EM_QDSP6", + "EM_R32C", + "EM_RCE", + "EM_RH32", + "EM_RISCV", + "EM_RL78", + "EM_RS08", + "EM_RX", + "EM_S370", + "EM_S390", + "EM_SCORE7", + "EM_SEP", + "EM_SE_C17", + "EM_SE_C33", + "EM_SH", + "EM_SHARC", + "EM_SLE9X", + "EM_SNP1K", + "EM_SPARC", + "EM_SPARC32PLUS", + "EM_SPARCV9", + "EM_ST100", + "EM_ST19", + "EM_ST200", + "EM_ST7", + "EM_ST9PLUS", + "EM_STARCORE", + "EM_STM8", + "EM_STXP7X", + "EM_SVX", + "EM_TILE64", + "EM_TILEGX", + "EM_TILEPRO", + "EM_TINYJ", + "EM_TI_ARP32", + "EM_TI_C2000", + "EM_TI_C5500", + "EM_TI_C6000", + "EM_TI_PRU", + "EM_TMM_GPP", + "EM_TPC", + "EM_TRICORE", + "EM_TRIMEDIA", + "EM_TSK3000", + "EM_UNICORE", + "EM_V800", + "EM_V850", + "EM_VAX", + "EM_VIDEOCORE", + "EM_VIDEOCORE3", + "EM_VIDEOCORE5", + "EM_VISIUM", + "EM_VPP500", + "EM_X86_64", + "EM_XCORE", + "EM_XGATE", + "EM_XIMO16", + "EM_XTENSA", + "EM_Z80", + "EM_ZSP", + "ET_CORE", + "ET_DYN", + "ET_EXEC", + "ET_HIOS", + "ET_HIPROC", + "ET_LOOS", + "ET_LOPROC", + "ET_NONE", + "ET_REL", + "EV_CURRENT", + "EV_NONE", + "ErrNoSymbols", + "File", + "FileHeader", + "FormatError", + "Header32", + "Header64", + "ImportedSymbol", + "Machine", + "NT_FPREGSET", + "NT_PRPSINFO", + "NT_PRSTATUS", + "NType", + "NewFile", + "OSABI", + "Open", + "PF_MASKOS", + "PF_MASKPROC", + "PF_R", + "PF_W", + "PF_X", + "PT_AARCH64_ARCHEXT", + "PT_AARCH64_UNWIND", + "PT_ARM_ARCHEXT", + "PT_ARM_EXIDX", + "PT_DYNAMIC", + "PT_GNU_EH_FRAME", + "PT_GNU_MBIND_HI", + "PT_GNU_MBIND_LO", + "PT_GNU_PROPERTY", + "PT_GNU_RELRO", + "PT_GNU_STACK", + "PT_HIOS", + "PT_HIPROC", + "PT_INTERP", + "PT_LOAD", + "PT_LOOS", + "PT_LOPROC", + "PT_MIPS_ABIFLAGS", + "PT_MIPS_OPTIONS", + "PT_MIPS_REGINFO", + "PT_MIPS_RTPROC", + "PT_NOTE", + "PT_NULL", + "PT_OPENBSD_BOOTDATA", + "PT_OPENBSD_RANDOMIZE", + "PT_OPENBSD_WXNEEDED", + "PT_PAX_FLAGS", + "PT_PHDR", + "PT_S390_PGSTE", + "PT_SHLIB", + "PT_SUNWSTACK", + "PT_SUNW_EH_FRAME", + "PT_TLS", + "Prog", + "Prog32", + "Prog64", + "ProgFlag", + "ProgHeader", + "ProgType", + "R_386", + "R_386_16", + "R_386_32", + "R_386_32PLT", + "R_386_8", + "R_386_COPY", + "R_386_GLOB_DAT", + "R_386_GOT32", + "R_386_GOT32X", + "R_386_GOTOFF", + "R_386_GOTPC", + "R_386_IRELATIVE", + "R_386_JMP_SLOT", + "R_386_NONE", + "R_386_PC16", + "R_386_PC32", + "R_386_PC8", + "R_386_PLT32", + "R_386_RELATIVE", + "R_386_SIZE32", + "R_386_TLS_DESC", + "R_386_TLS_DESC_CALL", + "R_386_TLS_DTPMOD32", + "R_386_TLS_DTPOFF32", + "R_386_TLS_GD", + "R_386_TLS_GD_32", + "R_386_TLS_GD_CALL", + "R_386_TLS_GD_POP", + "R_386_TLS_GD_PUSH", + "R_386_TLS_GOTDESC", + "R_386_TLS_GOTIE", + "R_386_TLS_IE", + "R_386_TLS_IE_32", + "R_386_TLS_LDM", + "R_386_TLS_LDM_32", + "R_386_TLS_LDM_CALL", + "R_386_TLS_LDM_POP", + "R_386_TLS_LDM_PUSH", + "R_386_TLS_LDO_32", + "R_386_TLS_LE", + "R_386_TLS_LE_32", + "R_386_TLS_TPOFF", + "R_386_TLS_TPOFF32", + "R_390", + "R_390_12", + "R_390_16", + "R_390_20", + "R_390_32", + "R_390_64", + "R_390_8", + "R_390_COPY", + "R_390_GLOB_DAT", + "R_390_GOT12", + "R_390_GOT16", + "R_390_GOT20", + "R_390_GOT32", + "R_390_GOT64", + "R_390_GOTENT", + "R_390_GOTOFF", + "R_390_GOTOFF16", + "R_390_GOTOFF64", + "R_390_GOTPC", + "R_390_GOTPCDBL", + "R_390_GOTPLT12", + "R_390_GOTPLT16", + "R_390_GOTPLT20", + "R_390_GOTPLT32", + "R_390_GOTPLT64", + "R_390_GOTPLTENT", + "R_390_GOTPLTOFF16", + "R_390_GOTPLTOFF32", + "R_390_GOTPLTOFF64", + "R_390_JMP_SLOT", + "R_390_NONE", + "R_390_PC16", + "R_390_PC16DBL", + "R_390_PC32", + "R_390_PC32DBL", + "R_390_PC64", + 
"R_390_PLT16DBL", + "R_390_PLT32", + "R_390_PLT32DBL", + "R_390_PLT64", + "R_390_RELATIVE", + "R_390_TLS_DTPMOD", + "R_390_TLS_DTPOFF", + "R_390_TLS_GD32", + "R_390_TLS_GD64", + "R_390_TLS_GDCALL", + "R_390_TLS_GOTIE12", + "R_390_TLS_GOTIE20", + "R_390_TLS_GOTIE32", + "R_390_TLS_GOTIE64", + "R_390_TLS_IE32", + "R_390_TLS_IE64", + "R_390_TLS_IEENT", + "R_390_TLS_LDCALL", + "R_390_TLS_LDM32", + "R_390_TLS_LDM64", + "R_390_TLS_LDO32", + "R_390_TLS_LDO64", + "R_390_TLS_LE32", + "R_390_TLS_LE64", + "R_390_TLS_LOAD", + "R_390_TLS_TPOFF", + "R_AARCH64", + "R_AARCH64_ABS16", + "R_AARCH64_ABS32", + "R_AARCH64_ABS64", + "R_AARCH64_ADD_ABS_LO12_NC", + "R_AARCH64_ADR_GOT_PAGE", + "R_AARCH64_ADR_PREL_LO21", + "R_AARCH64_ADR_PREL_PG_HI21", + "R_AARCH64_ADR_PREL_PG_HI21_NC", + "R_AARCH64_CALL26", + "R_AARCH64_CONDBR19", + "R_AARCH64_COPY", + "R_AARCH64_GLOB_DAT", + "R_AARCH64_GOT_LD_PREL19", + "R_AARCH64_IRELATIVE", + "R_AARCH64_JUMP26", + "R_AARCH64_JUMP_SLOT", + "R_AARCH64_LD64_GOTOFF_LO15", + "R_AARCH64_LD64_GOTPAGE_LO15", + "R_AARCH64_LD64_GOT_LO12_NC", + "R_AARCH64_LDST128_ABS_LO12_NC", + "R_AARCH64_LDST16_ABS_LO12_NC", + "R_AARCH64_LDST32_ABS_LO12_NC", + "R_AARCH64_LDST64_ABS_LO12_NC", + "R_AARCH64_LDST8_ABS_LO12_NC", + "R_AARCH64_LD_PREL_LO19", + "R_AARCH64_MOVW_SABS_G0", + "R_AARCH64_MOVW_SABS_G1", + "R_AARCH64_MOVW_SABS_G2", + "R_AARCH64_MOVW_UABS_G0", + "R_AARCH64_MOVW_UABS_G0_NC", + "R_AARCH64_MOVW_UABS_G1", + "R_AARCH64_MOVW_UABS_G1_NC", + "R_AARCH64_MOVW_UABS_G2", + "R_AARCH64_MOVW_UABS_G2_NC", + "R_AARCH64_MOVW_UABS_G3", + "R_AARCH64_NONE", + "R_AARCH64_NULL", + "R_AARCH64_P32_ABS16", + "R_AARCH64_P32_ABS32", + "R_AARCH64_P32_ADD_ABS_LO12_NC", + "R_AARCH64_P32_ADR_GOT_PAGE", + "R_AARCH64_P32_ADR_PREL_LO21", + "R_AARCH64_P32_ADR_PREL_PG_HI21", + "R_AARCH64_P32_CALL26", + "R_AARCH64_P32_CONDBR19", + "R_AARCH64_P32_COPY", + "R_AARCH64_P32_GLOB_DAT", + "R_AARCH64_P32_GOT_LD_PREL19", + "R_AARCH64_P32_IRELATIVE", + "R_AARCH64_P32_JUMP26", + "R_AARCH64_P32_JUMP_SLOT", + "R_AARCH64_P32_LD32_GOT_LO12_NC", + "R_AARCH64_P32_LDST128_ABS_LO12_NC", + "R_AARCH64_P32_LDST16_ABS_LO12_NC", + "R_AARCH64_P32_LDST32_ABS_LO12_NC", + "R_AARCH64_P32_LDST64_ABS_LO12_NC", + "R_AARCH64_P32_LDST8_ABS_LO12_NC", + "R_AARCH64_P32_LD_PREL_LO19", + "R_AARCH64_P32_MOVW_SABS_G0", + "R_AARCH64_P32_MOVW_UABS_G0", + "R_AARCH64_P32_MOVW_UABS_G0_NC", + "R_AARCH64_P32_MOVW_UABS_G1", + "R_AARCH64_P32_PREL16", + "R_AARCH64_P32_PREL32", + "R_AARCH64_P32_RELATIVE", + "R_AARCH64_P32_TLSDESC", + "R_AARCH64_P32_TLSDESC_ADD_LO12_NC", + "R_AARCH64_P32_TLSDESC_ADR_PAGE21", + "R_AARCH64_P32_TLSDESC_ADR_PREL21", + "R_AARCH64_P32_TLSDESC_CALL", + "R_AARCH64_P32_TLSDESC_LD32_LO12_NC", + "R_AARCH64_P32_TLSDESC_LD_PREL19", + "R_AARCH64_P32_TLSGD_ADD_LO12_NC", + "R_AARCH64_P32_TLSGD_ADR_PAGE21", + "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", + "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", + "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", + "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", + "R_AARCH64_P32_TLS_DTPMOD", + "R_AARCH64_P32_TLS_DTPREL", + "R_AARCH64_P32_TLS_TPREL", + "R_AARCH64_P32_TSTBR14", + "R_AARCH64_PREL16", + "R_AARCH64_PREL32", + "R_AARCH64_PREL64", + "R_AARCH64_RELATIVE", + "R_AARCH64_TLSDESC", + "R_AARCH64_TLSDESC_ADD", + "R_AARCH64_TLSDESC_ADD_LO12_NC", + "R_AARCH64_TLSDESC_ADR_PAGE21", + "R_AARCH64_TLSDESC_ADR_PREL21", + "R_AARCH64_TLSDESC_CALL", 
+ "R_AARCH64_TLSDESC_LD64_LO12_NC", + "R_AARCH64_TLSDESC_LDR", + "R_AARCH64_TLSDESC_LD_PREL19", + "R_AARCH64_TLSDESC_OFF_G0_NC", + "R_AARCH64_TLSDESC_OFF_G1", + "R_AARCH64_TLSGD_ADD_LO12_NC", + "R_AARCH64_TLSGD_ADR_PAGE21", + "R_AARCH64_TLSGD_ADR_PREL21", + "R_AARCH64_TLSGD_MOVW_G0_NC", + "R_AARCH64_TLSGD_MOVW_G1", + "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", + "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", + "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", + "R_AARCH64_TLSLD_ADR_PAGE21", + "R_AARCH64_TLSLD_ADR_PREL21", + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12", + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", + "R_AARCH64_TLSLE_ADD_TPREL_HI12", + "R_AARCH64_TLSLE_ADD_TPREL_LO12", + "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", + "R_AARCH64_TLSLE_LDST128_TPREL_LO12", + "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G0", + "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G1", + "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G2", + "R_AARCH64_TLS_DTPMOD64", + "R_AARCH64_TLS_DTPREL64", + "R_AARCH64_TLS_TPREL64", + "R_AARCH64_TSTBR14", + "R_ALPHA", + "R_ALPHA_BRADDR", + "R_ALPHA_COPY", + "R_ALPHA_GLOB_DAT", + "R_ALPHA_GPDISP", + "R_ALPHA_GPREL32", + "R_ALPHA_GPRELHIGH", + "R_ALPHA_GPRELLOW", + "R_ALPHA_GPVALUE", + "R_ALPHA_HINT", + "R_ALPHA_IMMED_BR_HI32", + "R_ALPHA_IMMED_GP_16", + "R_ALPHA_IMMED_GP_HI32", + "R_ALPHA_IMMED_LO32", + "R_ALPHA_IMMED_SCN_HI32", + "R_ALPHA_JMP_SLOT", + "R_ALPHA_LITERAL", + "R_ALPHA_LITUSE", + "R_ALPHA_NONE", + "R_ALPHA_OP_PRSHIFT", + "R_ALPHA_OP_PSUB", + "R_ALPHA_OP_PUSH", + "R_ALPHA_OP_STORE", + "R_ALPHA_REFLONG", + "R_ALPHA_REFQUAD", + "R_ALPHA_RELATIVE", + "R_ALPHA_SREL16", + "R_ALPHA_SREL32", + "R_ALPHA_SREL64", + "R_ARM", + "R_ARM_ABS12", + "R_ARM_ABS16", + "R_ARM_ABS32", + "R_ARM_ABS32_NOI", + "R_ARM_ABS8", + "R_ARM_ALU_PCREL_15_8", + "R_ARM_ALU_PCREL_23_15", + "R_ARM_ALU_PCREL_7_0", + "R_ARM_ALU_PC_G0", + "R_ARM_ALU_PC_G0_NC", + "R_ARM_ALU_PC_G1", + "R_ARM_ALU_PC_G1_NC", + "R_ARM_ALU_PC_G2", + "R_ARM_ALU_SBREL_19_12_NC", + "R_ARM_ALU_SBREL_27_20_CK", + "R_ARM_ALU_SB_G0", + "R_ARM_ALU_SB_G0_NC", + "R_ARM_ALU_SB_G1", + "R_ARM_ALU_SB_G1_NC", + "R_ARM_ALU_SB_G2", + "R_ARM_AMP_VCALL9", + "R_ARM_BASE_ABS", + "R_ARM_CALL", + "R_ARM_COPY", + "R_ARM_GLOB_DAT", + "R_ARM_GNU_VTENTRY", + "R_ARM_GNU_VTINHERIT", + "R_ARM_GOT32", + "R_ARM_GOTOFF", + "R_ARM_GOTOFF12", + "R_ARM_GOTPC", + "R_ARM_GOTRELAX", + "R_ARM_GOT_ABS", + "R_ARM_GOT_BREL12", + "R_ARM_GOT_PREL", + "R_ARM_IRELATIVE", + "R_ARM_JUMP24", + "R_ARM_JUMP_SLOT", + "R_ARM_LDC_PC_G0", + "R_ARM_LDC_PC_G1", + "R_ARM_LDC_PC_G2", + "R_ARM_LDC_SB_G0", + "R_ARM_LDC_SB_G1", + "R_ARM_LDC_SB_G2", + "R_ARM_LDRS_PC_G0", + "R_ARM_LDRS_PC_G1", + "R_ARM_LDRS_PC_G2", + "R_ARM_LDRS_SB_G0", + "R_ARM_LDRS_SB_G1", + "R_ARM_LDRS_SB_G2", + "R_ARM_LDR_PC_G1", + "R_ARM_LDR_PC_G2", + "R_ARM_LDR_SBREL_11_10_NC", + "R_ARM_LDR_SB_G0", + "R_ARM_LDR_SB_G1", + "R_ARM_LDR_SB_G2", + "R_ARM_ME_TOO", + "R_ARM_MOVT_ABS", + "R_ARM_MOVT_BREL", + "R_ARM_MOVT_PREL", + "R_ARM_MOVW_ABS_NC", + "R_ARM_MOVW_BREL", + "R_ARM_MOVW_BREL_NC", + "R_ARM_MOVW_PREL_NC", + "R_ARM_NONE", + "R_ARM_PC13", + "R_ARM_PC24", + "R_ARM_PLT32", + "R_ARM_PLT32_ABS", + "R_ARM_PREL31", + "R_ARM_PRIVATE_0", + "R_ARM_PRIVATE_1", + "R_ARM_PRIVATE_10", + "R_ARM_PRIVATE_11", + "R_ARM_PRIVATE_12", + "R_ARM_PRIVATE_13", + "R_ARM_PRIVATE_14", + "R_ARM_PRIVATE_15", + "R_ARM_PRIVATE_2", + "R_ARM_PRIVATE_3", + "R_ARM_PRIVATE_4", + "R_ARM_PRIVATE_5", + "R_ARM_PRIVATE_6", + "R_ARM_PRIVATE_7", 
+ "R_ARM_PRIVATE_8", + "R_ARM_PRIVATE_9", + "R_ARM_RABS32", + "R_ARM_RBASE", + "R_ARM_REL32", + "R_ARM_REL32_NOI", + "R_ARM_RELATIVE", + "R_ARM_RPC24", + "R_ARM_RREL32", + "R_ARM_RSBREL32", + "R_ARM_RXPC25", + "R_ARM_SBREL31", + "R_ARM_SBREL32", + "R_ARM_SWI24", + "R_ARM_TARGET1", + "R_ARM_TARGET2", + "R_ARM_THM_ABS5", + "R_ARM_THM_ALU_ABS_G0_NC", + "R_ARM_THM_ALU_ABS_G1_NC", + "R_ARM_THM_ALU_ABS_G2_NC", + "R_ARM_THM_ALU_ABS_G3", + "R_ARM_THM_ALU_PREL_11_0", + "R_ARM_THM_GOT_BREL12", + "R_ARM_THM_JUMP11", + "R_ARM_THM_JUMP19", + "R_ARM_THM_JUMP24", + "R_ARM_THM_JUMP6", + "R_ARM_THM_JUMP8", + "R_ARM_THM_MOVT_ABS", + "R_ARM_THM_MOVT_BREL", + "R_ARM_THM_MOVT_PREL", + "R_ARM_THM_MOVW_ABS_NC", + "R_ARM_THM_MOVW_BREL", + "R_ARM_THM_MOVW_BREL_NC", + "R_ARM_THM_MOVW_PREL_NC", + "R_ARM_THM_PC12", + "R_ARM_THM_PC22", + "R_ARM_THM_PC8", + "R_ARM_THM_RPC22", + "R_ARM_THM_SWI8", + "R_ARM_THM_TLS_CALL", + "R_ARM_THM_TLS_DESCSEQ16", + "R_ARM_THM_TLS_DESCSEQ32", + "R_ARM_THM_XPC22", + "R_ARM_TLS_CALL", + "R_ARM_TLS_DESCSEQ", + "R_ARM_TLS_DTPMOD32", + "R_ARM_TLS_DTPOFF32", + "R_ARM_TLS_GD32", + "R_ARM_TLS_GOTDESC", + "R_ARM_TLS_IE12GP", + "R_ARM_TLS_IE32", + "R_ARM_TLS_LDM32", + "R_ARM_TLS_LDO12", + "R_ARM_TLS_LDO32", + "R_ARM_TLS_LE12", + "R_ARM_TLS_LE32", + "R_ARM_TLS_TPOFF32", + "R_ARM_V4BX", + "R_ARM_XPC25", + "R_INFO", + "R_INFO32", + "R_LARCH", + "R_LARCH_32", + "R_LARCH_32_PCREL", + "R_LARCH_64", + "R_LARCH_ABS64_HI12", + "R_LARCH_ABS64_LO20", + "R_LARCH_ABS_HI20", + "R_LARCH_ABS_LO12", + "R_LARCH_ADD16", + "R_LARCH_ADD24", + "R_LARCH_ADD32", + "R_LARCH_ADD64", + "R_LARCH_ADD8", + "R_LARCH_B16", + "R_LARCH_B21", + "R_LARCH_B26", + "R_LARCH_COPY", + "R_LARCH_GNU_VTENTRY", + "R_LARCH_GNU_VTINHERIT", + "R_LARCH_GOT64_HI12", + "R_LARCH_GOT64_LO20", + "R_LARCH_GOT64_PC_HI12", + "R_LARCH_GOT64_PC_LO20", + "R_LARCH_GOT_HI20", + "R_LARCH_GOT_LO12", + "R_LARCH_GOT_PC_HI20", + "R_LARCH_GOT_PC_LO12", + "R_LARCH_IRELATIVE", + "R_LARCH_JUMP_SLOT", + "R_LARCH_MARK_LA", + "R_LARCH_MARK_PCREL", + "R_LARCH_NONE", + "R_LARCH_PCALA64_HI12", + "R_LARCH_PCALA64_LO20", + "R_LARCH_PCALA_HI20", + "R_LARCH_PCALA_LO12", + "R_LARCH_RELATIVE", + "R_LARCH_RELAX", + "R_LARCH_SOP_ADD", + "R_LARCH_SOP_AND", + "R_LARCH_SOP_ASSERT", + "R_LARCH_SOP_IF_ELSE", + "R_LARCH_SOP_NOT", + "R_LARCH_SOP_POP_32_S_0_10_10_16_S2", + "R_LARCH_SOP_POP_32_S_0_5_10_16_S2", + "R_LARCH_SOP_POP_32_S_10_12", + "R_LARCH_SOP_POP_32_S_10_16", + "R_LARCH_SOP_POP_32_S_10_16_S2", + "R_LARCH_SOP_POP_32_S_10_5", + "R_LARCH_SOP_POP_32_S_5_20", + "R_LARCH_SOP_POP_32_U", + "R_LARCH_SOP_POP_32_U_10_12", + "R_LARCH_SOP_PUSH_ABSOLUTE", + "R_LARCH_SOP_PUSH_DUP", + "R_LARCH_SOP_PUSH_GPREL", + "R_LARCH_SOP_PUSH_PCREL", + "R_LARCH_SOP_PUSH_PLT_PCREL", + "R_LARCH_SOP_PUSH_TLS_GD", + "R_LARCH_SOP_PUSH_TLS_GOT", + "R_LARCH_SOP_PUSH_TLS_TPREL", + "R_LARCH_SOP_SL", + "R_LARCH_SOP_SR", + "R_LARCH_SOP_SUB", + "R_LARCH_SUB16", + "R_LARCH_SUB24", + "R_LARCH_SUB32", + "R_LARCH_SUB64", + "R_LARCH_SUB8", + "R_LARCH_TLS_DTPMOD32", + "R_LARCH_TLS_DTPMOD64", + "R_LARCH_TLS_DTPREL32", + "R_LARCH_TLS_DTPREL64", + "R_LARCH_TLS_GD_HI20", + "R_LARCH_TLS_GD_PC_HI20", + "R_LARCH_TLS_IE64_HI12", + "R_LARCH_TLS_IE64_LO20", + "R_LARCH_TLS_IE64_PC_HI12", + "R_LARCH_TLS_IE64_PC_LO20", + "R_LARCH_TLS_IE_HI20", + "R_LARCH_TLS_IE_LO12", + "R_LARCH_TLS_IE_PC_HI20", + "R_LARCH_TLS_IE_PC_LO12", + "R_LARCH_TLS_LD_HI20", + "R_LARCH_TLS_LD_PC_HI20", + "R_LARCH_TLS_LE64_HI12", + "R_LARCH_TLS_LE64_LO20", + "R_LARCH_TLS_LE_HI20", + "R_LARCH_TLS_LE_LO12", + "R_LARCH_TLS_TPREL32", + "R_LARCH_TLS_TPREL64", + 
"R_MIPS", + "R_MIPS_16", + "R_MIPS_26", + "R_MIPS_32", + "R_MIPS_64", + "R_MIPS_ADD_IMMEDIATE", + "R_MIPS_CALL16", + "R_MIPS_CALL_HI16", + "R_MIPS_CALL_LO16", + "R_MIPS_DELETE", + "R_MIPS_GOT16", + "R_MIPS_GOT_DISP", + "R_MIPS_GOT_HI16", + "R_MIPS_GOT_LO16", + "R_MIPS_GOT_OFST", + "R_MIPS_GOT_PAGE", + "R_MIPS_GPREL16", + "R_MIPS_GPREL32", + "R_MIPS_HI16", + "R_MIPS_HIGHER", + "R_MIPS_HIGHEST", + "R_MIPS_INSERT_A", + "R_MIPS_INSERT_B", + "R_MIPS_JALR", + "R_MIPS_LITERAL", + "R_MIPS_LO16", + "R_MIPS_NONE", + "R_MIPS_PC16", + "R_MIPS_PJUMP", + "R_MIPS_REL16", + "R_MIPS_REL32", + "R_MIPS_RELGOT", + "R_MIPS_SCN_DISP", + "R_MIPS_SHIFT5", + "R_MIPS_SHIFT6", + "R_MIPS_SUB", + "R_MIPS_TLS_DTPMOD32", + "R_MIPS_TLS_DTPMOD64", + "R_MIPS_TLS_DTPREL32", + "R_MIPS_TLS_DTPREL64", + "R_MIPS_TLS_DTPREL_HI16", + "R_MIPS_TLS_DTPREL_LO16", + "R_MIPS_TLS_GD", + "R_MIPS_TLS_GOTTPREL", + "R_MIPS_TLS_LDM", + "R_MIPS_TLS_TPREL32", + "R_MIPS_TLS_TPREL64", + "R_MIPS_TLS_TPREL_HI16", + "R_MIPS_TLS_TPREL_LO16", + "R_PPC", + "R_PPC64", + "R_PPC64_ADDR14", + "R_PPC64_ADDR14_BRNTAKEN", + "R_PPC64_ADDR14_BRTAKEN", + "R_PPC64_ADDR16", + "R_PPC64_ADDR16_DS", + "R_PPC64_ADDR16_HA", + "R_PPC64_ADDR16_HI", + "R_PPC64_ADDR16_HIGH", + "R_PPC64_ADDR16_HIGHA", + "R_PPC64_ADDR16_HIGHER", + "R_PPC64_ADDR16_HIGHER34", + "R_PPC64_ADDR16_HIGHERA", + "R_PPC64_ADDR16_HIGHERA34", + "R_PPC64_ADDR16_HIGHEST", + "R_PPC64_ADDR16_HIGHEST34", + "R_PPC64_ADDR16_HIGHESTA", + "R_PPC64_ADDR16_HIGHESTA34", + "R_PPC64_ADDR16_LO", + "R_PPC64_ADDR16_LO_DS", + "R_PPC64_ADDR24", + "R_PPC64_ADDR32", + "R_PPC64_ADDR64", + "R_PPC64_ADDR64_LOCAL", + "R_PPC64_COPY", + "R_PPC64_D28", + "R_PPC64_D34", + "R_PPC64_D34_HA30", + "R_PPC64_D34_HI30", + "R_PPC64_D34_LO", + "R_PPC64_DTPMOD64", + "R_PPC64_DTPREL16", + "R_PPC64_DTPREL16_DS", + "R_PPC64_DTPREL16_HA", + "R_PPC64_DTPREL16_HI", + "R_PPC64_DTPREL16_HIGH", + "R_PPC64_DTPREL16_HIGHA", + "R_PPC64_DTPREL16_HIGHER", + "R_PPC64_DTPREL16_HIGHERA", + "R_PPC64_DTPREL16_HIGHEST", + "R_PPC64_DTPREL16_HIGHESTA", + "R_PPC64_DTPREL16_LO", + "R_PPC64_DTPREL16_LO_DS", + "R_PPC64_DTPREL34", + "R_PPC64_DTPREL64", + "R_PPC64_ENTRY", + "R_PPC64_GLOB_DAT", + "R_PPC64_GNU_VTENTRY", + "R_PPC64_GNU_VTINHERIT", + "R_PPC64_GOT16", + "R_PPC64_GOT16_DS", + "R_PPC64_GOT16_HA", + "R_PPC64_GOT16_HI", + "R_PPC64_GOT16_LO", + "R_PPC64_GOT16_LO_DS", + "R_PPC64_GOT_DTPREL16_DS", + "R_PPC64_GOT_DTPREL16_HA", + "R_PPC64_GOT_DTPREL16_HI", + "R_PPC64_GOT_DTPREL16_LO_DS", + "R_PPC64_GOT_DTPREL_PCREL34", + "R_PPC64_GOT_PCREL34", + "R_PPC64_GOT_TLSGD16", + "R_PPC64_GOT_TLSGD16_HA", + "R_PPC64_GOT_TLSGD16_HI", + "R_PPC64_GOT_TLSGD16_LO", + "R_PPC64_GOT_TLSGD_PCREL34", + "R_PPC64_GOT_TLSLD16", + "R_PPC64_GOT_TLSLD16_HA", + "R_PPC64_GOT_TLSLD16_HI", + "R_PPC64_GOT_TLSLD16_LO", + "R_PPC64_GOT_TLSLD_PCREL34", + "R_PPC64_GOT_TPREL16_DS", + "R_PPC64_GOT_TPREL16_HA", + "R_PPC64_GOT_TPREL16_HI", + "R_PPC64_GOT_TPREL16_LO_DS", + "R_PPC64_GOT_TPREL_PCREL34", + "R_PPC64_IRELATIVE", + "R_PPC64_JMP_IREL", + "R_PPC64_JMP_SLOT", + "R_PPC64_NONE", + "R_PPC64_PCREL28", + "R_PPC64_PCREL34", + "R_PPC64_PCREL_OPT", + "R_PPC64_PLT16_HA", + "R_PPC64_PLT16_HI", + "R_PPC64_PLT16_LO", + "R_PPC64_PLT16_LO_DS", + "R_PPC64_PLT32", + "R_PPC64_PLT64", + "R_PPC64_PLTCALL", + "R_PPC64_PLTCALL_NOTOC", + "R_PPC64_PLTGOT16", + "R_PPC64_PLTGOT16_DS", + "R_PPC64_PLTGOT16_HA", + "R_PPC64_PLTGOT16_HI", + "R_PPC64_PLTGOT16_LO", + "R_PPC64_PLTGOT_LO_DS", + "R_PPC64_PLTREL32", + "R_PPC64_PLTREL64", + "R_PPC64_PLTSEQ", + "R_PPC64_PLTSEQ_NOTOC", + "R_PPC64_PLT_PCREL34", + 
"R_PPC64_PLT_PCREL34_NOTOC", + "R_PPC64_REL14", + "R_PPC64_REL14_BRNTAKEN", + "R_PPC64_REL14_BRTAKEN", + "R_PPC64_REL16", + "R_PPC64_REL16DX_HA", + "R_PPC64_REL16_HA", + "R_PPC64_REL16_HI", + "R_PPC64_REL16_HIGH", + "R_PPC64_REL16_HIGHA", + "R_PPC64_REL16_HIGHER", + "R_PPC64_REL16_HIGHER34", + "R_PPC64_REL16_HIGHERA", + "R_PPC64_REL16_HIGHERA34", + "R_PPC64_REL16_HIGHEST", + "R_PPC64_REL16_HIGHEST34", + "R_PPC64_REL16_HIGHESTA", + "R_PPC64_REL16_HIGHESTA34", + "R_PPC64_REL16_LO", + "R_PPC64_REL24", + "R_PPC64_REL24_NOTOC", + "R_PPC64_REL30", + "R_PPC64_REL32", + "R_PPC64_REL64", + "R_PPC64_RELATIVE", + "R_PPC64_SECTOFF", + "R_PPC64_SECTOFF_DS", + "R_PPC64_SECTOFF_HA", + "R_PPC64_SECTOFF_HI", + "R_PPC64_SECTOFF_LO", + "R_PPC64_SECTOFF_LO_DS", + "R_PPC64_TLS", + "R_PPC64_TLSGD", + "R_PPC64_TLSLD", + "R_PPC64_TOC", + "R_PPC64_TOC16", + "R_PPC64_TOC16_DS", + "R_PPC64_TOC16_HA", + "R_PPC64_TOC16_HI", + "R_PPC64_TOC16_LO", + "R_PPC64_TOC16_LO_DS", + "R_PPC64_TOCSAVE", + "R_PPC64_TPREL16", + "R_PPC64_TPREL16_DS", + "R_PPC64_TPREL16_HA", + "R_PPC64_TPREL16_HI", + "R_PPC64_TPREL16_HIGH", + "R_PPC64_TPREL16_HIGHA", + "R_PPC64_TPREL16_HIGHER", + "R_PPC64_TPREL16_HIGHERA", + "R_PPC64_TPREL16_HIGHEST", + "R_PPC64_TPREL16_HIGHESTA", + "R_PPC64_TPREL16_LO", + "R_PPC64_TPREL16_LO_DS", + "R_PPC64_TPREL34", + "R_PPC64_TPREL64", + "R_PPC64_UADDR16", + "R_PPC64_UADDR32", + "R_PPC64_UADDR64", + "R_PPC_ADDR14", + "R_PPC_ADDR14_BRNTAKEN", + "R_PPC_ADDR14_BRTAKEN", + "R_PPC_ADDR16", + "R_PPC_ADDR16_HA", + "R_PPC_ADDR16_HI", + "R_PPC_ADDR16_LO", + "R_PPC_ADDR24", + "R_PPC_ADDR32", + "R_PPC_COPY", + "R_PPC_DTPMOD32", + "R_PPC_DTPREL16", + "R_PPC_DTPREL16_HA", + "R_PPC_DTPREL16_HI", + "R_PPC_DTPREL16_LO", + "R_PPC_DTPREL32", + "R_PPC_EMB_BIT_FLD", + "R_PPC_EMB_MRKREF", + "R_PPC_EMB_NADDR16", + "R_PPC_EMB_NADDR16_HA", + "R_PPC_EMB_NADDR16_HI", + "R_PPC_EMB_NADDR16_LO", + "R_PPC_EMB_NADDR32", + "R_PPC_EMB_RELSDA", + "R_PPC_EMB_RELSEC16", + "R_PPC_EMB_RELST_HA", + "R_PPC_EMB_RELST_HI", + "R_PPC_EMB_RELST_LO", + "R_PPC_EMB_SDA21", + "R_PPC_EMB_SDA2I16", + "R_PPC_EMB_SDA2REL", + "R_PPC_EMB_SDAI16", + "R_PPC_GLOB_DAT", + "R_PPC_GOT16", + "R_PPC_GOT16_HA", + "R_PPC_GOT16_HI", + "R_PPC_GOT16_LO", + "R_PPC_GOT_TLSGD16", + "R_PPC_GOT_TLSGD16_HA", + "R_PPC_GOT_TLSGD16_HI", + "R_PPC_GOT_TLSGD16_LO", + "R_PPC_GOT_TLSLD16", + "R_PPC_GOT_TLSLD16_HA", + "R_PPC_GOT_TLSLD16_HI", + "R_PPC_GOT_TLSLD16_LO", + "R_PPC_GOT_TPREL16", + "R_PPC_GOT_TPREL16_HA", + "R_PPC_GOT_TPREL16_HI", + "R_PPC_GOT_TPREL16_LO", + "R_PPC_JMP_SLOT", + "R_PPC_LOCAL24PC", + "R_PPC_NONE", + "R_PPC_PLT16_HA", + "R_PPC_PLT16_HI", + "R_PPC_PLT16_LO", + "R_PPC_PLT32", + "R_PPC_PLTREL24", + "R_PPC_PLTREL32", + "R_PPC_REL14", + "R_PPC_REL14_BRNTAKEN", + "R_PPC_REL14_BRTAKEN", + "R_PPC_REL24", + "R_PPC_REL32", + "R_PPC_RELATIVE", + "R_PPC_SDAREL16", + "R_PPC_SECTOFF", + "R_PPC_SECTOFF_HA", + "R_PPC_SECTOFF_HI", + "R_PPC_SECTOFF_LO", + "R_PPC_TLS", + "R_PPC_TPREL16", + "R_PPC_TPREL16_HA", + "R_PPC_TPREL16_HI", + "R_PPC_TPREL16_LO", + "R_PPC_TPREL32", + "R_PPC_UADDR16", + "R_PPC_UADDR32", + "R_RISCV", + "R_RISCV_32", + "R_RISCV_32_PCREL", + "R_RISCV_64", + "R_RISCV_ADD16", + "R_RISCV_ADD32", + "R_RISCV_ADD64", + "R_RISCV_ADD8", + "R_RISCV_ALIGN", + "R_RISCV_BRANCH", + "R_RISCV_CALL", + "R_RISCV_CALL_PLT", + "R_RISCV_COPY", + "R_RISCV_GNU_VTENTRY", + "R_RISCV_GNU_VTINHERIT", + "R_RISCV_GOT_HI20", + "R_RISCV_GPREL_I", + "R_RISCV_GPREL_S", + "R_RISCV_HI20", + "R_RISCV_JAL", + "R_RISCV_JUMP_SLOT", + "R_RISCV_LO12_I", + "R_RISCV_LO12_S", + "R_RISCV_NONE", + 
"R_RISCV_PCREL_HI20", + "R_RISCV_PCREL_LO12_I", + "R_RISCV_PCREL_LO12_S", + "R_RISCV_RELATIVE", + "R_RISCV_RELAX", + "R_RISCV_RVC_BRANCH", + "R_RISCV_RVC_JUMP", + "R_RISCV_RVC_LUI", + "R_RISCV_SET16", + "R_RISCV_SET32", + "R_RISCV_SET6", + "R_RISCV_SET8", + "R_RISCV_SUB16", + "R_RISCV_SUB32", + "R_RISCV_SUB6", + "R_RISCV_SUB64", + "R_RISCV_SUB8", + "R_RISCV_TLS_DTPMOD32", + "R_RISCV_TLS_DTPMOD64", + "R_RISCV_TLS_DTPREL32", + "R_RISCV_TLS_DTPREL64", + "R_RISCV_TLS_GD_HI20", + "R_RISCV_TLS_GOT_HI20", + "R_RISCV_TLS_TPREL32", + "R_RISCV_TLS_TPREL64", + "R_RISCV_TPREL_ADD", + "R_RISCV_TPREL_HI20", + "R_RISCV_TPREL_I", + "R_RISCV_TPREL_LO12_I", + "R_RISCV_TPREL_LO12_S", + "R_RISCV_TPREL_S", + "R_SPARC", + "R_SPARC_10", + "R_SPARC_11", + "R_SPARC_13", + "R_SPARC_16", + "R_SPARC_22", + "R_SPARC_32", + "R_SPARC_5", + "R_SPARC_6", + "R_SPARC_64", + "R_SPARC_7", + "R_SPARC_8", + "R_SPARC_COPY", + "R_SPARC_DISP16", + "R_SPARC_DISP32", + "R_SPARC_DISP64", + "R_SPARC_DISP8", + "R_SPARC_GLOB_DAT", + "R_SPARC_GLOB_JMP", + "R_SPARC_GOT10", + "R_SPARC_GOT13", + "R_SPARC_GOT22", + "R_SPARC_H44", + "R_SPARC_HH22", + "R_SPARC_HI22", + "R_SPARC_HIPLT22", + "R_SPARC_HIX22", + "R_SPARC_HM10", + "R_SPARC_JMP_SLOT", + "R_SPARC_L44", + "R_SPARC_LM22", + "R_SPARC_LO10", + "R_SPARC_LOPLT10", + "R_SPARC_LOX10", + "R_SPARC_M44", + "R_SPARC_NONE", + "R_SPARC_OLO10", + "R_SPARC_PC10", + "R_SPARC_PC22", + "R_SPARC_PCPLT10", + "R_SPARC_PCPLT22", + "R_SPARC_PCPLT32", + "R_SPARC_PC_HH22", + "R_SPARC_PC_HM10", + "R_SPARC_PC_LM22", + "R_SPARC_PLT32", + "R_SPARC_PLT64", + "R_SPARC_REGISTER", + "R_SPARC_RELATIVE", + "R_SPARC_UA16", + "R_SPARC_UA32", + "R_SPARC_UA64", + "R_SPARC_WDISP16", + "R_SPARC_WDISP19", + "R_SPARC_WDISP22", + "R_SPARC_WDISP30", + "R_SPARC_WPLT30", + "R_SYM32", + "R_SYM64", + "R_TYPE32", + "R_TYPE64", + "R_X86_64", + "R_X86_64_16", + "R_X86_64_32", + "R_X86_64_32S", + "R_X86_64_64", + "R_X86_64_8", + "R_X86_64_COPY", + "R_X86_64_DTPMOD64", + "R_X86_64_DTPOFF32", + "R_X86_64_DTPOFF64", + "R_X86_64_GLOB_DAT", + "R_X86_64_GOT32", + "R_X86_64_GOT64", + "R_X86_64_GOTOFF64", + "R_X86_64_GOTPC32", + "R_X86_64_GOTPC32_TLSDESC", + "R_X86_64_GOTPC64", + "R_X86_64_GOTPCREL", + "R_X86_64_GOTPCREL64", + "R_X86_64_GOTPCRELX", + "R_X86_64_GOTPLT64", + "R_X86_64_GOTTPOFF", + "R_X86_64_IRELATIVE", + "R_X86_64_JMP_SLOT", + "R_X86_64_NONE", + "R_X86_64_PC16", + "R_X86_64_PC32", + "R_X86_64_PC32_BND", + "R_X86_64_PC64", + "R_X86_64_PC8", + "R_X86_64_PLT32", + "R_X86_64_PLT32_BND", + "R_X86_64_PLTOFF64", + "R_X86_64_RELATIVE", + "R_X86_64_RELATIVE64", + "R_X86_64_REX_GOTPCRELX", + "R_X86_64_SIZE32", + "R_X86_64_SIZE64", + "R_X86_64_TLSDESC", + "R_X86_64_TLSDESC_CALL", + "R_X86_64_TLSGD", + "R_X86_64_TLSLD", + "R_X86_64_TPOFF32", + "R_X86_64_TPOFF64", + "Rel32", + "Rel64", + "Rela32", + "Rela64", + "SHF_ALLOC", + "SHF_COMPRESSED", + "SHF_EXECINSTR", + "SHF_GROUP", + "SHF_INFO_LINK", + "SHF_LINK_ORDER", + "SHF_MASKOS", + "SHF_MASKPROC", + "SHF_MERGE", + "SHF_OS_NONCONFORMING", + "SHF_STRINGS", + "SHF_TLS", + "SHF_WRITE", + "SHN_ABS", + "SHN_COMMON", + "SHN_HIOS", + "SHN_HIPROC", + "SHN_HIRESERVE", + "SHN_LOOS", + "SHN_LOPROC", + "SHN_LORESERVE", + "SHN_UNDEF", + "SHN_XINDEX", + "SHT_DYNAMIC", + "SHT_DYNSYM", + "SHT_FINI_ARRAY", + "SHT_GNU_ATTRIBUTES", + "SHT_GNU_HASH", + "SHT_GNU_LIBLIST", + "SHT_GNU_VERDEF", + "SHT_GNU_VERNEED", + "SHT_GNU_VERSYM", + "SHT_GROUP", + "SHT_HASH", + "SHT_HIOS", + "SHT_HIPROC", + "SHT_HIUSER", + "SHT_INIT_ARRAY", + "SHT_LOOS", + "SHT_LOPROC", + "SHT_LOUSER", + "SHT_MIPS_ABIFLAGS", + "SHT_NOBITS", + 
"SHT_NOTE", + "SHT_NULL", + "SHT_PREINIT_ARRAY", + "SHT_PROGBITS", + "SHT_REL", + "SHT_RELA", + "SHT_SHLIB", + "SHT_STRTAB", + "SHT_SYMTAB", + "SHT_SYMTAB_SHNDX", + "STB_GLOBAL", + "STB_HIOS", + "STB_HIPROC", + "STB_LOCAL", + "STB_LOOS", + "STB_LOPROC", + "STB_WEAK", + "STT_COMMON", + "STT_FILE", + "STT_FUNC", + "STT_HIOS", + "STT_HIPROC", + "STT_LOOS", + "STT_LOPROC", + "STT_NOTYPE", + "STT_OBJECT", + "STT_SECTION", + "STT_TLS", + "STV_DEFAULT", + "STV_HIDDEN", + "STV_INTERNAL", + "STV_PROTECTED", + "ST_BIND", + "ST_INFO", + "ST_TYPE", + "ST_VISIBILITY", + "Section", + "Section32", + "Section64", + "SectionFlag", + "SectionHeader", + "SectionIndex", + "SectionType", + "Sym32", + "Sym32Size", + "Sym64", + "Sym64Size", + "SymBind", + "SymType", + "SymVis", + "Symbol", + "Type", + "Version", + }, + "debug/gosym": { + "DecodingError", + "Func", + "LineTable", + "NewLineTable", + "NewTable", + "Obj", + "Sym", + "Table", + "UnknownFileError", + "UnknownLineError", + }, + "debug/macho": { + "ARM64_RELOC_ADDEND", + "ARM64_RELOC_BRANCH26", + "ARM64_RELOC_GOT_LOAD_PAGE21", + "ARM64_RELOC_GOT_LOAD_PAGEOFF12", + "ARM64_RELOC_PAGE21", + "ARM64_RELOC_PAGEOFF12", + "ARM64_RELOC_POINTER_TO_GOT", + "ARM64_RELOC_SUBTRACTOR", + "ARM64_RELOC_TLVP_LOAD_PAGE21", + "ARM64_RELOC_TLVP_LOAD_PAGEOFF12", + "ARM64_RELOC_UNSIGNED", + "ARM_RELOC_BR24", + "ARM_RELOC_HALF", + "ARM_RELOC_HALF_SECTDIFF", + "ARM_RELOC_LOCAL_SECTDIFF", + "ARM_RELOC_PAIR", + "ARM_RELOC_PB_LA_PTR", + "ARM_RELOC_SECTDIFF", + "ARM_RELOC_VANILLA", + "ARM_THUMB_32BIT_BRANCH", + "ARM_THUMB_RELOC_BR22", + "Cpu", + "Cpu386", + "CpuAmd64", + "CpuArm", + "CpuArm64", + "CpuPpc", + "CpuPpc64", + "Dylib", + "DylibCmd", + "Dysymtab", + "DysymtabCmd", + "ErrNotFat", + "FatArch", + "FatArchHeader", + "FatFile", + "File", + "FileHeader", + "FlagAllModsBound", + "FlagAllowStackExecution", + "FlagAppExtensionSafe", + "FlagBindAtLoad", + "FlagBindsToWeak", + "FlagCanonical", + "FlagDeadStrippableDylib", + "FlagDyldLink", + "FlagForceFlat", + "FlagHasTLVDescriptors", + "FlagIncrLink", + "FlagLazyInit", + "FlagNoFixPrebinding", + "FlagNoHeapExecution", + "FlagNoMultiDefs", + "FlagNoReexportedDylibs", + "FlagNoUndefs", + "FlagPIE", + "FlagPrebindable", + "FlagPrebound", + "FlagRootSafe", + "FlagSetuidSafe", + "FlagSplitSegs", + "FlagSubsectionsViaSymbols", + "FlagTwoLevel", + "FlagWeakDefines", + "FormatError", + "GENERIC_RELOC_LOCAL_SECTDIFF", + "GENERIC_RELOC_PAIR", + "GENERIC_RELOC_PB_LA_PTR", + "GENERIC_RELOC_SECTDIFF", + "GENERIC_RELOC_TLV", + "GENERIC_RELOC_VANILLA", + "Load", + "LoadBytes", + "LoadCmd", + "LoadCmdDylib", + "LoadCmdDylinker", + "LoadCmdDysymtab", + "LoadCmdRpath", + "LoadCmdSegment", + "LoadCmdSegment64", + "LoadCmdSymtab", + "LoadCmdThread", + "LoadCmdUnixThread", + "Magic32", + "Magic64", + "MagicFat", + "NewFatFile", + "NewFile", + "Nlist32", + "Nlist64", + "Open", + "OpenFat", + "Regs386", + "RegsAMD64", + "Reloc", + "RelocTypeARM", + "RelocTypeARM64", + "RelocTypeGeneric", + "RelocTypeX86_64", + "Rpath", + "RpathCmd", + "Section", + "Section32", + "Section64", + "SectionHeader", + "Segment", + "Segment32", + "Segment64", + "SegmentHeader", + "Symbol", + "Symtab", + "SymtabCmd", + "Thread", + "Type", + "TypeBundle", + "TypeDylib", + "TypeExec", + "TypeObj", + "X86_64_RELOC_BRANCH", + "X86_64_RELOC_GOT", + "X86_64_RELOC_GOT_LOAD", + "X86_64_RELOC_SIGNED", + "X86_64_RELOC_SIGNED_1", + "X86_64_RELOC_SIGNED_2", + "X86_64_RELOC_SIGNED_4", + "X86_64_RELOC_SUBTRACTOR", + "X86_64_RELOC_TLV", + "X86_64_RELOC_UNSIGNED", + }, + "debug/pe": { + 
"COFFSymbol", + "COFFSymbolAuxFormat5", + "COFFSymbolSize", + "DataDirectory", + "File", + "FileHeader", + "FormatError", + "IMAGE_COMDAT_SELECT_ANY", + "IMAGE_COMDAT_SELECT_ASSOCIATIVE", + "IMAGE_COMDAT_SELECT_EXACT_MATCH", + "IMAGE_COMDAT_SELECT_LARGEST", + "IMAGE_COMDAT_SELECT_NODUPLICATES", + "IMAGE_COMDAT_SELECT_SAME_SIZE", + "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", + "IMAGE_DIRECTORY_ENTRY_BASERELOC", + "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", + "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", + "IMAGE_DIRECTORY_ENTRY_DEBUG", + "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", + "IMAGE_DIRECTORY_ENTRY_EXCEPTION", + "IMAGE_DIRECTORY_ENTRY_EXPORT", + "IMAGE_DIRECTORY_ENTRY_GLOBALPTR", + "IMAGE_DIRECTORY_ENTRY_IAT", + "IMAGE_DIRECTORY_ENTRY_IMPORT", + "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", + "IMAGE_DIRECTORY_ENTRY_RESOURCE", + "IMAGE_DIRECTORY_ENTRY_SECURITY", + "IMAGE_DIRECTORY_ENTRY_TLS", + "IMAGE_DLLCHARACTERISTICS_APPCONTAINER", + "IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", + "IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", + "IMAGE_DLLCHARACTERISTICS_GUARD_CF", + "IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", + "IMAGE_DLLCHARACTERISTICS_NO_BIND", + "IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", + "IMAGE_DLLCHARACTERISTICS_NO_SEH", + "IMAGE_DLLCHARACTERISTICS_NX_COMPAT", + "IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", + "IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", + "IMAGE_FILE_32BIT_MACHINE", + "IMAGE_FILE_AGGRESIVE_WS_TRIM", + "IMAGE_FILE_BYTES_REVERSED_HI", + "IMAGE_FILE_BYTES_REVERSED_LO", + "IMAGE_FILE_DEBUG_STRIPPED", + "IMAGE_FILE_DLL", + "IMAGE_FILE_EXECUTABLE_IMAGE", + "IMAGE_FILE_LARGE_ADDRESS_AWARE", + "IMAGE_FILE_LINE_NUMS_STRIPPED", + "IMAGE_FILE_LOCAL_SYMS_STRIPPED", + "IMAGE_FILE_MACHINE_AM33", + "IMAGE_FILE_MACHINE_AMD64", + "IMAGE_FILE_MACHINE_ARM", + "IMAGE_FILE_MACHINE_ARM64", + "IMAGE_FILE_MACHINE_ARMNT", + "IMAGE_FILE_MACHINE_EBC", + "IMAGE_FILE_MACHINE_I386", + "IMAGE_FILE_MACHINE_IA64", + "IMAGE_FILE_MACHINE_LOONGARCH32", + "IMAGE_FILE_MACHINE_LOONGARCH64", + "IMAGE_FILE_MACHINE_M32R", + "IMAGE_FILE_MACHINE_MIPS16", + "IMAGE_FILE_MACHINE_MIPSFPU", + "IMAGE_FILE_MACHINE_MIPSFPU16", + "IMAGE_FILE_MACHINE_POWERPC", + "IMAGE_FILE_MACHINE_POWERPCFP", + "IMAGE_FILE_MACHINE_R4000", + "IMAGE_FILE_MACHINE_RISCV128", + "IMAGE_FILE_MACHINE_RISCV32", + "IMAGE_FILE_MACHINE_RISCV64", + "IMAGE_FILE_MACHINE_SH3", + "IMAGE_FILE_MACHINE_SH3DSP", + "IMAGE_FILE_MACHINE_SH4", + "IMAGE_FILE_MACHINE_SH5", + "IMAGE_FILE_MACHINE_THUMB", + "IMAGE_FILE_MACHINE_UNKNOWN", + "IMAGE_FILE_MACHINE_WCEMIPSV2", + "IMAGE_FILE_NET_RUN_FROM_SWAP", + "IMAGE_FILE_RELOCS_STRIPPED", + "IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", + "IMAGE_FILE_SYSTEM", + "IMAGE_FILE_UP_SYSTEM_ONLY", + "IMAGE_SCN_CNT_CODE", + "IMAGE_SCN_CNT_INITIALIZED_DATA", + "IMAGE_SCN_CNT_UNINITIALIZED_DATA", + "IMAGE_SCN_LNK_COMDAT", + "IMAGE_SCN_MEM_DISCARDABLE", + "IMAGE_SCN_MEM_EXECUTE", + "IMAGE_SCN_MEM_READ", + "IMAGE_SCN_MEM_WRITE", + "IMAGE_SUBSYSTEM_EFI_APPLICATION", + "IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", + "IMAGE_SUBSYSTEM_EFI_ROM", + "IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", + "IMAGE_SUBSYSTEM_NATIVE", + "IMAGE_SUBSYSTEM_NATIVE_WINDOWS", + "IMAGE_SUBSYSTEM_OS2_CUI", + "IMAGE_SUBSYSTEM_POSIX_CUI", + "IMAGE_SUBSYSTEM_UNKNOWN", + "IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", + "IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", + "IMAGE_SUBSYSTEM_WINDOWS_CUI", + "IMAGE_SUBSYSTEM_WINDOWS_GUI", + "IMAGE_SUBSYSTEM_XBOX", + "ImportDirectory", + "NewFile", + "Open", + "OptionalHeader32", + "OptionalHeader64", + "Reloc", + "Section", + "SectionHeader", + "SectionHeader32", + "StringTable", + "Symbol", 
+ }, + "debug/plan9obj": { + "ErrNoSymbols", + "File", + "FileHeader", + "Magic386", + "Magic64", + "MagicAMD64", + "MagicARM", + "NewFile", + "Open", + "Section", + "SectionHeader", + "Sym", + }, + "embed": { + "FS", + }, + "encoding": { + "BinaryMarshaler", + "BinaryUnmarshaler", + "TextMarshaler", + "TextUnmarshaler", + }, + "encoding/ascii85": { + "CorruptInputError", + "Decode", + "Encode", + "MaxEncodedLen", + "NewDecoder", + "NewEncoder", + }, + "encoding/asn1": { + "BitString", + "ClassApplication", + "ClassContextSpecific", + "ClassPrivate", + "ClassUniversal", + "Enumerated", + "Flag", + "Marshal", + "MarshalWithParams", + "NullBytes", + "NullRawValue", + "ObjectIdentifier", + "RawContent", + "RawValue", + "StructuralError", + "SyntaxError", + "TagBMPString", + "TagBitString", + "TagBoolean", + "TagEnum", + "TagGeneralString", + "TagGeneralizedTime", + "TagIA5String", + "TagInteger", + "TagNull", + "TagNumericString", + "TagOID", + "TagOctetString", + "TagPrintableString", + "TagSequence", + "TagSet", + "TagT61String", + "TagUTCTime", + "TagUTF8String", + "Unmarshal", + "UnmarshalWithParams", + }, + "encoding/base32": { + "CorruptInputError", + "Encoding", + "HexEncoding", + "NewDecoder", + "NewEncoder", + "NewEncoding", + "NoPadding", + "StdEncoding", + "StdPadding", + }, + "encoding/base64": { + "CorruptInputError", + "Encoding", + "NewDecoder", + "NewEncoder", + "NewEncoding", + "NoPadding", + "RawStdEncoding", + "RawURLEncoding", + "StdEncoding", + "StdPadding", + "URLEncoding", + }, + "encoding/binary": { + "AppendByteOrder", + "AppendUvarint", + "AppendVarint", + "BigEndian", + "ByteOrder", + "LittleEndian", + "MaxVarintLen16", + "MaxVarintLen32", + "MaxVarintLen64", + "PutUvarint", + "PutVarint", + "Read", + "ReadUvarint", + "ReadVarint", + "Size", + "Uvarint", + "Varint", + "Write", + }, + "encoding/csv": { + "ErrBareQuote", + "ErrFieldCount", + "ErrQuote", + "ErrTrailingComma", + "NewReader", + "NewWriter", + "ParseError", + "Reader", + "Writer", + }, + "encoding/gob": { + "CommonType", + "Decoder", + "Encoder", + "GobDecoder", + "GobEncoder", + "NewDecoder", + "NewEncoder", + "Register", + "RegisterName", + }, + "encoding/hex": { + "Decode", + "DecodeString", + "DecodedLen", + "Dump", + "Dumper", + "Encode", + "EncodeToString", + "EncodedLen", + "ErrLength", + "InvalidByteError", + "NewDecoder", + "NewEncoder", + }, + "encoding/json": { + "Compact", + "Decoder", + "Delim", + "Encoder", + "HTMLEscape", + "Indent", + "InvalidUTF8Error", + "InvalidUnmarshalError", + "Marshal", + "MarshalIndent", + "Marshaler", + "MarshalerError", + "NewDecoder", + "NewEncoder", + "Number", + "RawMessage", + "SyntaxError", + "Token", + "Unmarshal", + "UnmarshalFieldError", + "UnmarshalTypeError", + "Unmarshaler", + "UnsupportedTypeError", + "UnsupportedValueError", + "Valid", + }, + "encoding/pem": { + "Block", + "Decode", + "Encode", + "EncodeToMemory", + }, + "encoding/xml": { + "Attr", + "CharData", + "Comment", + "CopyToken", + "Decoder", + "Directive", + "Encoder", + "EndElement", + "Escape", + "EscapeText", + "HTMLAutoClose", + "HTMLEntity", + "Header", + "Marshal", + "MarshalIndent", + "Marshaler", + "MarshalerAttr", + "Name", + "NewDecoder", + "NewEncoder", + "NewTokenDecoder", + "ProcInst", + "StartElement", + "SyntaxError", + "TagPathError", + "Token", + "TokenReader", + "Unmarshal", + "UnmarshalError", + "Unmarshaler", + "UnmarshalerAttr", + "UnsupportedTypeError", + }, + "errors": { + "As", + "Is", + "Join", + "New", + "Unwrap", + }, + "expvar": { + "Do", + "Float", + "Func", + 
"Get", + "Handler", + "Int", + "KeyValue", + "Map", + "NewFloat", + "NewInt", + "NewMap", + "NewString", + "Publish", + "String", + "Var", + }, + "flag": { + "Arg", + "Args", + "Bool", + "BoolVar", + "CommandLine", + "ContinueOnError", + "Duration", + "DurationVar", + "ErrHelp", + "ErrorHandling", + "ExitOnError", + "Flag", + "FlagSet", + "Float64", + "Float64Var", + "Func", + "Getter", + "Int", + "Int64", + "Int64Var", + "IntVar", + "Lookup", + "NArg", + "NFlag", + "NewFlagSet", + "PanicOnError", + "Parse", + "Parsed", + "PrintDefaults", + "Set", + "String", + "StringVar", + "TextVar", + "Uint", + "Uint64", + "Uint64Var", + "UintVar", + "UnquoteUsage", + "Usage", + "Value", + "Var", + "Visit", + "VisitAll", + }, + "fmt": { + "Append", + "Appendf", + "Appendln", + "Errorf", + "FormatString", + "Formatter", + "Fprint", + "Fprintf", + "Fprintln", + "Fscan", + "Fscanf", + "Fscanln", + "GoStringer", + "Print", + "Printf", + "Println", + "Scan", + "ScanState", + "Scanf", + "Scanln", + "Scanner", + "Sprint", + "Sprintf", + "Sprintln", + "Sscan", + "Sscanf", + "Sscanln", + "State", + "Stringer", + }, + "go/ast": { + "ArrayType", + "AssignStmt", + "Bad", + "BadDecl", + "BadExpr", + "BadStmt", + "BasicLit", + "BinaryExpr", + "BlockStmt", + "BranchStmt", + "CallExpr", + "CaseClause", + "ChanDir", + "ChanType", + "CommClause", + "Comment", + "CommentGroup", + "CommentMap", + "CompositeLit", + "Con", + "Decl", + "DeclStmt", + "DeferStmt", + "Ellipsis", + "EmptyStmt", + "Expr", + "ExprStmt", + "Field", + "FieldFilter", + "FieldList", + "File", + "FileExports", + "Filter", + "FilterDecl", + "FilterFile", + "FilterFuncDuplicates", + "FilterImportDuplicates", + "FilterPackage", + "FilterUnassociatedComments", + "ForStmt", + "Fprint", + "Fun", + "FuncDecl", + "FuncLit", + "FuncType", + "GenDecl", + "GoStmt", + "Ident", + "IfStmt", + "ImportSpec", + "Importer", + "IncDecStmt", + "IndexExpr", + "IndexListExpr", + "Inspect", + "InterfaceType", + "IsExported", + "KeyValueExpr", + "LabeledStmt", + "Lbl", + "MapType", + "MergeMode", + "MergePackageFiles", + "NewCommentMap", + "NewIdent", + "NewObj", + "NewPackage", + "NewScope", + "Node", + "NotNilFilter", + "ObjKind", + "Object", + "Package", + "PackageExports", + "ParenExpr", + "Pkg", + "Print", + "RECV", + "RangeStmt", + "ReturnStmt", + "SEND", + "Scope", + "SelectStmt", + "SelectorExpr", + "SendStmt", + "SliceExpr", + "SortImports", + "Spec", + "StarExpr", + "Stmt", + "StructType", + "SwitchStmt", + "Typ", + "TypeAssertExpr", + "TypeSpec", + "TypeSwitchStmt", + "UnaryExpr", + "ValueSpec", + "Var", + "Visitor", + "Walk", + }, + "go/build": { + "AllowBinary", + "ArchChar", + "Context", + "Default", + "FindOnly", + "IgnoreVendor", + "Import", + "ImportComment", + "ImportDir", + "ImportMode", + "IsLocalImport", + "MultiplePackageError", + "NoGoError", + "Package", + "ToolDir", + }, + "go/build/constraint": { + "AndExpr", + "Expr", + "IsGoBuild", + "IsPlusBuild", + "NotExpr", + "OrExpr", + "Parse", + "PlusBuildLines", + "SyntaxError", + "TagExpr", + }, + "go/constant": { + "BinaryOp", + "BitLen", + "Bool", + "BoolVal", + "Bytes", + "Compare", + "Complex", + "Denom", + "Float", + "Float32Val", + "Float64Val", + "Imag", + "Int", + "Int64Val", + "Kind", + "Make", + "MakeBool", + "MakeFloat64", + "MakeFromBytes", + "MakeFromLiteral", + "MakeImag", + "MakeInt64", + "MakeString", + "MakeUint64", + "MakeUnknown", + "Num", + "Real", + "Shift", + "Sign", + "String", + "StringVal", + "ToComplex", + "ToFloat", + "ToInt", + "Uint64Val", + "UnaryOp", + "Unknown", + "Val", + 
"Value", + }, + "go/doc": { + "AllDecls", + "AllMethods", + "Example", + "Examples", + "Filter", + "Func", + "IllegalPrefixes", + "IsPredeclared", + "Mode", + "New", + "NewFromFiles", + "Note", + "Package", + "PreserveAST", + "Synopsis", + "ToHTML", + "ToText", + "Type", + "Value", + }, + "go/doc/comment": { + "Block", + "Code", + "DefaultLookupPackage", + "Doc", + "DocLink", + "Heading", + "Italic", + "Link", + "LinkDef", + "List", + "ListItem", + "Paragraph", + "Parser", + "Plain", + "Printer", + "Text", + }, + "go/format": { + "Node", + "Source", + }, + "go/importer": { + "Default", + "For", + "ForCompiler", + "Lookup", + }, + "go/parser": { + "AllErrors", + "DeclarationErrors", + "ImportsOnly", + "Mode", + "PackageClauseOnly", + "ParseComments", + "ParseDir", + "ParseExpr", + "ParseExprFrom", + "ParseFile", + "SkipObjectResolution", + "SpuriousErrors", + "Trace", + }, + "go/printer": { + "CommentedNode", + "Config", + "Fprint", + "Mode", + "RawFormat", + "SourcePos", + "TabIndent", + "UseSpaces", + }, + "go/scanner": { + "Error", + "ErrorHandler", + "ErrorList", + "Mode", + "PrintError", + "ScanComments", + "Scanner", + }, + "go/token": { + "ADD", + "ADD_ASSIGN", + "AND", + "AND_ASSIGN", + "AND_NOT", + "AND_NOT_ASSIGN", + "ARROW", + "ASSIGN", + "BREAK", + "CASE", + "CHAN", + "CHAR", + "COLON", + "COMMA", + "COMMENT", + "CONST", + "CONTINUE", + "DEC", + "DEFAULT", + "DEFER", + "DEFINE", + "ELLIPSIS", + "ELSE", + "EOF", + "EQL", + "FALLTHROUGH", + "FLOAT", + "FOR", + "FUNC", + "File", + "FileSet", + "GEQ", + "GO", + "GOTO", + "GTR", + "HighestPrec", + "IDENT", + "IF", + "ILLEGAL", + "IMAG", + "IMPORT", + "INC", + "INT", + "INTERFACE", + "IsExported", + "IsIdentifier", + "IsKeyword", + "LAND", + "LBRACE", + "LBRACK", + "LEQ", + "LOR", + "LPAREN", + "LSS", + "Lookup", + "LowestPrec", + "MAP", + "MUL", + "MUL_ASSIGN", + "NEQ", + "NOT", + "NewFileSet", + "NoPos", + "OR", + "OR_ASSIGN", + "PACKAGE", + "PERIOD", + "Pos", + "Position", + "QUO", + "QUO_ASSIGN", + "RANGE", + "RBRACE", + "RBRACK", + "REM", + "REM_ASSIGN", + "RETURN", + "RPAREN", + "SELECT", + "SEMICOLON", + "SHL", + "SHL_ASSIGN", + "SHR", + "SHR_ASSIGN", + "STRING", + "STRUCT", + "SUB", + "SUB_ASSIGN", + "SWITCH", + "TILDE", + "TYPE", + "Token", + "UnaryPrec", + "VAR", + "XOR", + "XOR_ASSIGN", + }, + "go/types": { + "ArgumentError", + "Array", + "AssertableTo", + "AssignableTo", + "Basic", + "BasicInfo", + "BasicKind", + "Bool", + "Builtin", + "Byte", + "Chan", + "ChanDir", + "CheckExpr", + "Checker", + "Comparable", + "Complex128", + "Complex64", + "Config", + "Const", + "Context", + "ConvertibleTo", + "DefPredeclaredTestFuncs", + "Default", + "Error", + "Eval", + "ExprString", + "FieldVal", + "Float32", + "Float64", + "Func", + "Id", + "Identical", + "IdenticalIgnoreTags", + "Implements", + "ImportMode", + "Importer", + "ImporterFrom", + "Info", + "Initializer", + "Instance", + "Instantiate", + "Int", + "Int16", + "Int32", + "Int64", + "Int8", + "Interface", + "Invalid", + "IsBoolean", + "IsComplex", + "IsConstType", + "IsFloat", + "IsInteger", + "IsInterface", + "IsNumeric", + "IsOrdered", + "IsString", + "IsUnsigned", + "IsUntyped", + "Label", + "LookupFieldOrMethod", + "Map", + "MethodExpr", + "MethodSet", + "MethodVal", + "MissingMethod", + "Named", + "NewArray", + "NewChan", + "NewChecker", + "NewConst", + "NewContext", + "NewField", + "NewFunc", + "NewInterface", + "NewInterfaceType", + "NewLabel", + "NewMap", + "NewMethodSet", + "NewNamed", + "NewPackage", + "NewParam", + "NewPkgName", + "NewPointer", + "NewScope", + 
"NewSignature", + "NewSignatureType", + "NewSlice", + "NewStruct", + "NewTerm", + "NewTuple", + "NewTypeName", + "NewTypeParam", + "NewUnion", + "NewVar", + "Nil", + "Object", + "ObjectString", + "Package", + "PkgName", + "Pointer", + "Qualifier", + "RecvOnly", + "RelativeTo", + "Rune", + "Satisfies", + "Scope", + "Selection", + "SelectionKind", + "SelectionString", + "SendOnly", + "SendRecv", + "Signature", + "Sizes", + "SizesFor", + "Slice", + "StdSizes", + "String", + "Struct", + "Term", + "Tuple", + "Typ", + "Type", + "TypeAndValue", + "TypeList", + "TypeName", + "TypeParam", + "TypeParamList", + "TypeString", + "Uint", + "Uint16", + "Uint32", + "Uint64", + "Uint8", + "Uintptr", + "Union", + "Universe", + "Unsafe", + "UnsafePointer", + "UntypedBool", + "UntypedComplex", + "UntypedFloat", + "UntypedInt", + "UntypedNil", + "UntypedRune", + "UntypedString", + "Var", + "WriteExpr", + "WriteSignature", + "WriteType", + }, + "hash": { + "Hash", + "Hash32", + "Hash64", + }, + "hash/adler32": { + "Checksum", + "New", + "Size", + }, + "hash/crc32": { + "Castagnoli", + "Checksum", + "ChecksumIEEE", + "IEEE", + "IEEETable", + "Koopman", + "MakeTable", + "New", + "NewIEEE", + "Size", + "Table", + "Update", + }, + "hash/crc64": { + "Checksum", + "ECMA", + "ISO", + "MakeTable", + "New", + "Size", + "Table", + "Update", + }, + "hash/fnv": { + "New128", + "New128a", + "New32", + "New32a", + "New64", + "New64a", + }, + "hash/maphash": { + "Bytes", + "Hash", + "MakeSeed", + "Seed", + "String", + }, + "html": { + "EscapeString", + "UnescapeString", + }, + "html/template": { + "CSS", + "ErrAmbigContext", + "ErrBadHTML", + "ErrBranchEnd", + "ErrEndContext", + "ErrNoSuchTemplate", + "ErrOutputContext", + "ErrPartialCharset", + "ErrPartialEscape", + "ErrPredefinedEscaper", + "ErrRangeLoopReentry", + "ErrSlashAmbig", + "Error", + "ErrorCode", + "FuncMap", + "HTML", + "HTMLAttr", + "HTMLEscape", + "HTMLEscapeString", + "HTMLEscaper", + "IsTrue", + "JS", + "JSEscape", + "JSEscapeString", + "JSEscaper", + "JSStr", + "Must", + "New", + "OK", + "ParseFS", + "ParseFiles", + "ParseGlob", + "Srcset", + "Template", + "URL", + "URLQueryEscaper", + }, + "image": { + "Alpha", + "Alpha16", + "Black", + "CMYK", + "Config", + "Decode", + "DecodeConfig", + "ErrFormat", + "Gray", + "Gray16", + "Image", + "NRGBA", + "NRGBA64", + "NYCbCrA", + "NewAlpha", + "NewAlpha16", + "NewCMYK", + "NewGray", + "NewGray16", + "NewNRGBA", + "NewNRGBA64", + "NewNYCbCrA", + "NewPaletted", + "NewRGBA", + "NewRGBA64", + "NewUniform", + "NewYCbCr", + "Opaque", + "Paletted", + "PalettedImage", + "Point", + "Pt", + "RGBA", + "RGBA64", + "RGBA64Image", + "Rect", + "Rectangle", + "RegisterFormat", + "Transparent", + "Uniform", + "White", + "YCbCr", + "YCbCrSubsampleRatio", + "YCbCrSubsampleRatio410", + "YCbCrSubsampleRatio411", + "YCbCrSubsampleRatio420", + "YCbCrSubsampleRatio422", + "YCbCrSubsampleRatio440", + "YCbCrSubsampleRatio444", + "ZP", + "ZR", + }, + "image/color": { + "Alpha", + "Alpha16", + "Alpha16Model", + "AlphaModel", + "Black", + "CMYK", + "CMYKModel", + "CMYKToRGB", + "Color", + "Gray", + "Gray16", + "Gray16Model", + "GrayModel", + "Model", + "ModelFunc", + "NRGBA", + "NRGBA64", + "NRGBA64Model", + "NRGBAModel", + "NYCbCrA", + "NYCbCrAModel", + "Opaque", + "Palette", + "RGBA", + "RGBA64", + "RGBA64Model", + "RGBAModel", + "RGBToCMYK", + "RGBToYCbCr", + "Transparent", + "White", + "YCbCr", + "YCbCrModel", + "YCbCrToRGB", + }, + "image/color/palette": { + "Plan9", + "WebSafe", + }, + "image/draw": { + "Draw", + "DrawMask", + "Drawer", 
+ "FloydSteinberg", + "Image", + "Op", + "Over", + "Quantizer", + "RGBA64Image", + "Src", + }, + "image/gif": { + "Decode", + "DecodeAll", + "DecodeConfig", + "DisposalBackground", + "DisposalNone", + "DisposalPrevious", + "Encode", + "EncodeAll", + "GIF", + "Options", + }, + "image/jpeg": { + "Decode", + "DecodeConfig", + "DefaultQuality", + "Encode", + "FormatError", + "Options", + "Reader", + "UnsupportedError", + }, + "image/png": { + "BestCompression", + "BestSpeed", + "CompressionLevel", + "Decode", + "DecodeConfig", + "DefaultCompression", + "Encode", + "Encoder", + "EncoderBuffer", + "EncoderBufferPool", + "FormatError", + "NoCompression", + "UnsupportedError", + }, + "index/suffixarray": { + "Index", + "New", + }, + "io": { + "ByteReader", + "ByteScanner", + "ByteWriter", + "Closer", + "Copy", + "CopyBuffer", + "CopyN", + "Discard", + "EOF", + "ErrClosedPipe", + "ErrNoProgress", + "ErrShortBuffer", + "ErrShortWrite", + "ErrUnexpectedEOF", + "LimitReader", + "LimitedReader", + "MultiReader", + "MultiWriter", + "NewOffsetWriter", + "NewSectionReader", + "NopCloser", + "OffsetWriter", + "Pipe", + "PipeReader", + "PipeWriter", + "ReadAll", + "ReadAtLeast", + "ReadCloser", + "ReadFull", + "ReadSeekCloser", + "ReadSeeker", + "ReadWriteCloser", + "ReadWriteSeeker", + "ReadWriter", + "Reader", + "ReaderAt", + "ReaderFrom", + "RuneReader", + "RuneScanner", + "SectionReader", + "SeekCurrent", + "SeekEnd", + "SeekStart", + "Seeker", + "StringWriter", + "TeeReader", + "WriteCloser", + "WriteSeeker", + "WriteString", + "Writer", + "WriterAt", + "WriterTo", + }, + "io/fs": { + "DirEntry", + "ErrClosed", + "ErrExist", + "ErrInvalid", + "ErrNotExist", + "ErrPermission", + "FS", + "File", + "FileInfo", + "FileInfoToDirEntry", + "FileMode", + "Glob", + "GlobFS", + "ModeAppend", + "ModeCharDevice", + "ModeDevice", + "ModeDir", + "ModeExclusive", + "ModeIrregular", + "ModeNamedPipe", + "ModePerm", + "ModeSetgid", + "ModeSetuid", + "ModeSocket", + "ModeSticky", + "ModeSymlink", + "ModeTemporary", + "ModeType", + "PathError", + "ReadDir", + "ReadDirFS", + "ReadDirFile", + "ReadFile", + "ReadFileFS", + "SkipAll", + "SkipDir", + "Stat", + "StatFS", + "Sub", + "SubFS", + "ValidPath", + "WalkDir", + "WalkDirFunc", + }, + "io/ioutil": { + "Discard", + "NopCloser", + "ReadAll", + "ReadDir", + "ReadFile", + "TempDir", + "TempFile", + "WriteFile", + }, + "log": { + "Default", + "Fatal", + "Fatalf", + "Fatalln", + "Flags", + "LUTC", + "Ldate", + "Llongfile", + "Lmicroseconds", + "Lmsgprefix", + "Logger", + "Lshortfile", + "LstdFlags", + "Ltime", + "New", + "Output", + "Panic", + "Panicf", + "Panicln", + "Prefix", + "Print", + "Printf", + "Println", + "SetFlags", + "SetOutput", + "SetPrefix", + "Writer", + }, + "log/syslog": { + "Dial", + "LOG_ALERT", + "LOG_AUTH", + "LOG_AUTHPRIV", + "LOG_CRIT", + "LOG_CRON", + "LOG_DAEMON", + "LOG_DEBUG", + "LOG_EMERG", + "LOG_ERR", + "LOG_FTP", + "LOG_INFO", + "LOG_KERN", + "LOG_LOCAL0", + "LOG_LOCAL1", + "LOG_LOCAL2", + "LOG_LOCAL3", + "LOG_LOCAL4", + "LOG_LOCAL5", + "LOG_LOCAL6", + "LOG_LOCAL7", + "LOG_LPR", + "LOG_MAIL", + "LOG_NEWS", + "LOG_NOTICE", + "LOG_SYSLOG", + "LOG_USER", + "LOG_UUCP", + "LOG_WARNING", + "New", + "NewLogger", + "Priority", + "Writer", + }, + "math": { + "Abs", + "Acos", + "Acosh", + "Asin", + "Asinh", + "Atan", + "Atan2", + "Atanh", + "Cbrt", + "Ceil", + "Copysign", + "Cos", + "Cosh", + "Dim", + "E", + "Erf", + "Erfc", + "Erfcinv", + "Erfinv", + "Exp", + "Exp2", + "Expm1", + "FMA", + "Float32bits", + "Float32frombits", + "Float64bits", + 
"Float64frombits", + "Floor", + "Frexp", + "Gamma", + "Hypot", + "Ilogb", + "Inf", + "IsInf", + "IsNaN", + "J0", + "J1", + "Jn", + "Ldexp", + "Lgamma", + "Ln10", + "Ln2", + "Log", + "Log10", + "Log10E", + "Log1p", + "Log2", + "Log2E", + "Logb", + "Max", + "MaxFloat32", + "MaxFloat64", + "MaxInt", + "MaxInt16", + "MaxInt32", + "MaxInt64", + "MaxInt8", + "MaxUint", + "MaxUint16", + "MaxUint32", + "MaxUint64", + "MaxUint8", + "Min", + "MinInt", + "MinInt16", + "MinInt32", + "MinInt64", + "MinInt8", + "Mod", + "Modf", + "NaN", + "Nextafter", + "Nextafter32", + "Phi", + "Pi", + "Pow", + "Pow10", + "Remainder", + "Round", + "RoundToEven", + "Signbit", + "Sin", + "Sincos", + "Sinh", + "SmallestNonzeroFloat32", + "SmallestNonzeroFloat64", + "Sqrt", + "Sqrt2", + "SqrtE", + "SqrtPhi", + "SqrtPi", + "Tan", + "Tanh", + "Trunc", + "Y0", + "Y1", + "Yn", + }, + "math/big": { + "Above", + "Accuracy", + "AwayFromZero", + "Below", + "ErrNaN", + "Exact", + "Float", + "Int", + "Jacobi", + "MaxBase", + "MaxExp", + "MaxPrec", + "MinExp", + "NewFloat", + "NewInt", + "NewRat", + "ParseFloat", + "Rat", + "RoundingMode", + "ToNearestAway", + "ToNearestEven", + "ToNegativeInf", + "ToPositiveInf", + "ToZero", + "Word", + }, + "math/bits": { + "Add", + "Add32", + "Add64", + "Div", + "Div32", + "Div64", + "LeadingZeros", + "LeadingZeros16", + "LeadingZeros32", + "LeadingZeros64", + "LeadingZeros8", + "Len", + "Len16", + "Len32", + "Len64", + "Len8", + "Mul", + "Mul32", + "Mul64", + "OnesCount", + "OnesCount16", + "OnesCount32", + "OnesCount64", + "OnesCount8", + "Rem", + "Rem32", + "Rem64", + "Reverse", + "Reverse16", + "Reverse32", + "Reverse64", + "Reverse8", + "ReverseBytes", + "ReverseBytes16", + "ReverseBytes32", + "ReverseBytes64", + "RotateLeft", + "RotateLeft16", + "RotateLeft32", + "RotateLeft64", + "RotateLeft8", + "Sub", + "Sub32", + "Sub64", + "TrailingZeros", + "TrailingZeros16", + "TrailingZeros32", + "TrailingZeros64", + "TrailingZeros8", + "UintSize", + }, + "math/cmplx": { + "Abs", + "Acos", + "Acosh", + "Asin", + "Asinh", + "Atan", + "Atanh", + "Conj", + "Cos", + "Cosh", + "Cot", + "Exp", + "Inf", + "IsInf", + "IsNaN", + "Log", + "Log10", + "NaN", + "Phase", + "Polar", + "Pow", + "Rect", + "Sin", + "Sinh", + "Sqrt", + "Tan", + "Tanh", + }, + "math/rand": { + "ExpFloat64", + "Float32", + "Float64", + "Int", + "Int31", + "Int31n", + "Int63", + "Int63n", + "Intn", + "New", + "NewSource", + "NewZipf", + "NormFloat64", + "Perm", + "Rand", + "Read", + "Seed", + "Shuffle", + "Source", + "Source64", + "Uint32", + "Uint64", + "Zipf", + }, + "mime": { + "AddExtensionType", + "BEncoding", + "ErrInvalidMediaParameter", + "ExtensionsByType", + "FormatMediaType", + "ParseMediaType", + "QEncoding", + "TypeByExtension", + "WordDecoder", + "WordEncoder", + }, + "mime/multipart": { + "ErrMessageTooLarge", + "File", + "FileHeader", + "Form", + "NewReader", + "NewWriter", + "Part", + "Reader", + "Writer", + }, + "mime/quotedprintable": { + "NewReader", + "NewWriter", + "Reader", + "Writer", + }, + "net": { + "Addr", + "AddrError", + "Buffers", + "CIDRMask", + "Conn", + "DNSConfigError", + "DNSError", + "DefaultResolver", + "Dial", + "DialIP", + "DialTCP", + "DialTimeout", + "DialUDP", + "DialUnix", + "Dialer", + "ErrClosed", + "ErrWriteToConnected", + "Error", + "FileConn", + "FileListener", + "FilePacketConn", + "FlagBroadcast", + "FlagLoopback", + "FlagMulticast", + "FlagPointToPoint", + "FlagRunning", + "FlagUp", + "Flags", + "HardwareAddr", + "IP", + "IPAddr", + "IPConn", + "IPMask", + "IPNet", + "IPv4", + 
"IPv4Mask", + "IPv4allrouter", + "IPv4allsys", + "IPv4bcast", + "IPv4len", + "IPv4zero", + "IPv6interfacelocalallnodes", + "IPv6len", + "IPv6linklocalallnodes", + "IPv6linklocalallrouters", + "IPv6loopback", + "IPv6unspecified", + "IPv6zero", + "Interface", + "InterfaceAddrs", + "InterfaceByIndex", + "InterfaceByName", + "Interfaces", + "InvalidAddrError", + "JoinHostPort", + "Listen", + "ListenConfig", + "ListenIP", + "ListenMulticastUDP", + "ListenPacket", + "ListenTCP", + "ListenUDP", + "ListenUnix", + "ListenUnixgram", + "Listener", + "LookupAddr", + "LookupCNAME", + "LookupHost", + "LookupIP", + "LookupMX", + "LookupNS", + "LookupPort", + "LookupSRV", + "LookupTXT", + "MX", + "NS", + "OpError", + "PacketConn", + "ParseCIDR", + "ParseError", + "ParseIP", + "ParseMAC", + "Pipe", + "ResolveIPAddr", + "ResolveTCPAddr", + "ResolveUDPAddr", + "ResolveUnixAddr", + "Resolver", + "SRV", + "SplitHostPort", + "TCPAddr", + "TCPAddrFromAddrPort", + "TCPConn", + "TCPListener", + "UDPAddr", + "UDPAddrFromAddrPort", + "UDPConn", + "UnixAddr", + "UnixConn", + "UnixListener", + "UnknownNetworkError", + }, + "net/http": { + "AllowQuerySemicolons", + "CanonicalHeaderKey", + "Client", + "CloseNotifier", + "ConnState", + "Cookie", + "CookieJar", + "DefaultClient", + "DefaultMaxHeaderBytes", + "DefaultMaxIdleConnsPerHost", + "DefaultServeMux", + "DefaultTransport", + "DetectContentType", + "Dir", + "ErrAbortHandler", + "ErrBodyNotAllowed", + "ErrBodyReadAfterClose", + "ErrContentLength", + "ErrHandlerTimeout", + "ErrHeaderTooLong", + "ErrHijacked", + "ErrLineTooLong", + "ErrMissingBoundary", + "ErrMissingContentLength", + "ErrMissingFile", + "ErrNoCookie", + "ErrNoLocation", + "ErrNotMultipart", + "ErrNotSupported", + "ErrServerClosed", + "ErrShortBody", + "ErrSkipAltProtocol", + "ErrUnexpectedTrailer", + "ErrUseLastResponse", + "ErrWriteAfterFlush", + "Error", + "FS", + "File", + "FileServer", + "FileSystem", + "Flusher", + "Get", + "Handle", + "HandleFunc", + "Handler", + "HandlerFunc", + "Head", + "Header", + "Hijacker", + "ListenAndServe", + "ListenAndServeTLS", + "LocalAddrContextKey", + "MaxBytesError", + "MaxBytesHandler", + "MaxBytesReader", + "MethodConnect", + "MethodDelete", + "MethodGet", + "MethodHead", + "MethodOptions", + "MethodPatch", + "MethodPost", + "MethodPut", + "MethodTrace", + "NewFileTransport", + "NewRequest", + "NewRequestWithContext", + "NewResponseController", + "NewServeMux", + "NoBody", + "NotFound", + "NotFoundHandler", + "ParseHTTPVersion", + "ParseTime", + "Post", + "PostForm", + "ProtocolError", + "ProxyFromEnvironment", + "ProxyURL", + "PushOptions", + "Pusher", + "ReadRequest", + "ReadResponse", + "Redirect", + "RedirectHandler", + "Request", + "Response", + "ResponseController", + "ResponseWriter", + "RoundTripper", + "SameSite", + "SameSiteDefaultMode", + "SameSiteLaxMode", + "SameSiteNoneMode", + "SameSiteStrictMode", + "Serve", + "ServeContent", + "ServeFile", + "ServeMux", + "ServeTLS", + "Server", + "ServerContextKey", + "SetCookie", + "StateActive", + "StateClosed", + "StateHijacked", + "StateIdle", + "StateNew", + "StatusAccepted", + "StatusAlreadyReported", + "StatusBadGateway", + "StatusBadRequest", + "StatusConflict", + "StatusContinue", + "StatusCreated", + "StatusEarlyHints", + "StatusExpectationFailed", + "StatusFailedDependency", + "StatusForbidden", + "StatusFound", + "StatusGatewayTimeout", + "StatusGone", + "StatusHTTPVersionNotSupported", + "StatusIMUsed", + "StatusInsufficientStorage", + "StatusInternalServerError", + "StatusLengthRequired", + 
"StatusLocked", + "StatusLoopDetected", + "StatusMethodNotAllowed", + "StatusMisdirectedRequest", + "StatusMovedPermanently", + "StatusMultiStatus", + "StatusMultipleChoices", + "StatusNetworkAuthenticationRequired", + "StatusNoContent", + "StatusNonAuthoritativeInfo", + "StatusNotAcceptable", + "StatusNotExtended", + "StatusNotFound", + "StatusNotImplemented", + "StatusNotModified", + "StatusOK", + "StatusPartialContent", + "StatusPaymentRequired", + "StatusPermanentRedirect", + "StatusPreconditionFailed", + "StatusPreconditionRequired", + "StatusProcessing", + "StatusProxyAuthRequired", + "StatusRequestEntityTooLarge", + "StatusRequestHeaderFieldsTooLarge", + "StatusRequestTimeout", + "StatusRequestURITooLong", + "StatusRequestedRangeNotSatisfiable", + "StatusResetContent", + "StatusSeeOther", + "StatusServiceUnavailable", + "StatusSwitchingProtocols", + "StatusTeapot", + "StatusTemporaryRedirect", + "StatusText", + "StatusTooEarly", + "StatusTooManyRequests", + "StatusUnauthorized", + "StatusUnavailableForLegalReasons", + "StatusUnprocessableEntity", + "StatusUnsupportedMediaType", + "StatusUpgradeRequired", + "StatusUseProxy", + "StatusVariantAlsoNegotiates", + "StripPrefix", + "TimeFormat", + "TimeoutHandler", + "TrailerPrefix", + "Transport", + }, + "net/http/cgi": { + "Handler", + "Request", + "RequestFromMap", + "Serve", + }, + "net/http/cookiejar": { + "Jar", + "New", + "Options", + "PublicSuffixList", + }, + "net/http/fcgi": { + "ErrConnClosed", + "ErrRequestAborted", + "ProcessEnv", + "Serve", + }, + "net/http/httptest": { + "DefaultRemoteAddr", + "NewRecorder", + "NewRequest", + "NewServer", + "NewTLSServer", + "NewUnstartedServer", + "ResponseRecorder", + "Server", + }, + "net/http/httptrace": { + "ClientTrace", + "ContextClientTrace", + "DNSDoneInfo", + "DNSStartInfo", + "GotConnInfo", + "WithClientTrace", + "WroteRequestInfo", + }, + "net/http/httputil": { + "BufferPool", + "ClientConn", + "DumpRequest", + "DumpRequestOut", + "DumpResponse", + "ErrClosed", + "ErrLineTooLong", + "ErrPersistEOF", + "ErrPipeline", + "NewChunkedReader", + "NewChunkedWriter", + "NewClientConn", + "NewProxyClientConn", + "NewServerConn", + "NewSingleHostReverseProxy", + "ProxyRequest", + "ReverseProxy", + "ServerConn", + }, + "net/http/pprof": { + "Cmdline", + "Handler", + "Index", + "Profile", + "Symbol", + "Trace", + }, + "net/mail": { + "Address", + "AddressParser", + "ErrHeaderNotPresent", + "Header", + "Message", + "ParseAddress", + "ParseAddressList", + "ParseDate", + "ReadMessage", + }, + "net/netip": { + "Addr", + "AddrFrom16", + "AddrFrom4", + "AddrFromSlice", + "AddrPort", + "AddrPortFrom", + "IPv4Unspecified", + "IPv6LinkLocalAllNodes", + "IPv6LinkLocalAllRouters", + "IPv6Loopback", + "IPv6Unspecified", + "MustParseAddr", + "MustParseAddrPort", + "MustParsePrefix", + "ParseAddr", + "ParseAddrPort", + "ParsePrefix", + "Prefix", + "PrefixFrom", + }, + "net/rpc": { + "Accept", + "Call", + "Client", + "ClientCodec", + "DefaultDebugPath", + "DefaultRPCPath", + "DefaultServer", + "Dial", + "DialHTTP", + "DialHTTPPath", + "ErrShutdown", + "HandleHTTP", + "NewClient", + "NewClientWithCodec", + "NewServer", + "Register", + "RegisterName", + "Request", + "Response", + "ServeCodec", + "ServeConn", + "ServeRequest", + "Server", + "ServerCodec", + "ServerError", + }, + "net/rpc/jsonrpc": { + "Dial", + "NewClient", + "NewClientCodec", + "NewServerCodec", + "ServeConn", + }, + "net/smtp": { + "Auth", + "CRAMMD5Auth", + "Client", + "Dial", + "NewClient", + "PlainAuth", + "SendMail", + "ServerInfo", + 
}, + "net/textproto": { + "CanonicalMIMEHeaderKey", + "Conn", + "Dial", + "Error", + "MIMEHeader", + "NewConn", + "NewReader", + "NewWriter", + "Pipeline", + "ProtocolError", + "Reader", + "TrimBytes", + "TrimString", + "Writer", + }, + "net/url": { + "Error", + "EscapeError", + "InvalidHostError", + "JoinPath", + "Parse", + "ParseQuery", + "ParseRequestURI", + "PathEscape", + "PathUnescape", + "QueryEscape", + "QueryUnescape", + "URL", + "User", + "UserPassword", + "Userinfo", + "Values", + }, + "os": { + "Args", + "Chdir", + "Chmod", + "Chown", + "Chtimes", + "Clearenv", + "Create", + "CreateTemp", + "DevNull", + "DirEntry", + "DirFS", + "Environ", + "ErrClosed", + "ErrDeadlineExceeded", + "ErrExist", + "ErrInvalid", + "ErrNoDeadline", + "ErrNotExist", + "ErrPermission", + "ErrProcessDone", + "Executable", + "Exit", + "Expand", + "ExpandEnv", + "File", + "FileInfo", + "FileMode", + "FindProcess", + "Getegid", + "Getenv", + "Geteuid", + "Getgid", + "Getgroups", + "Getpagesize", + "Getpid", + "Getppid", + "Getuid", + "Getwd", + "Hostname", + "Interrupt", + "IsExist", + "IsNotExist", + "IsPathSeparator", + "IsPermission", + "IsTimeout", + "Kill", + "Lchown", + "Link", + "LinkError", + "LookupEnv", + "Lstat", + "Mkdir", + "MkdirAll", + "MkdirTemp", + "ModeAppend", + "ModeCharDevice", + "ModeDevice", + "ModeDir", + "ModeExclusive", + "ModeIrregular", + "ModeNamedPipe", + "ModePerm", + "ModeSetgid", + "ModeSetuid", + "ModeSocket", + "ModeSticky", + "ModeSymlink", + "ModeTemporary", + "ModeType", + "NewFile", + "NewSyscallError", + "O_APPEND", + "O_CREATE", + "O_EXCL", + "O_RDONLY", + "O_RDWR", + "O_SYNC", + "O_TRUNC", + "O_WRONLY", + "Open", + "OpenFile", + "PathError", + "PathListSeparator", + "PathSeparator", + "Pipe", + "ProcAttr", + "Process", + "ProcessState", + "ReadDir", + "ReadFile", + "Readlink", + "Remove", + "RemoveAll", + "Rename", + "SEEK_CUR", + "SEEK_END", + "SEEK_SET", + "SameFile", + "Setenv", + "Signal", + "StartProcess", + "Stat", + "Stderr", + "Stdin", + "Stdout", + "Symlink", + "SyscallError", + "TempDir", + "Truncate", + "Unsetenv", + "UserCacheDir", + "UserConfigDir", + "UserHomeDir", + "WriteFile", + }, + "os/exec": { + "Cmd", + "Command", + "CommandContext", + "ErrDot", + "ErrNotFound", + "ErrWaitDelay", + "Error", + "ExitError", + "LookPath", + }, + "os/signal": { + "Ignore", + "Ignored", + "Notify", + "NotifyContext", + "Reset", + "Stop", + }, + "os/user": { + "Current", + "Group", + "Lookup", + "LookupGroup", + "LookupGroupId", + "LookupId", + "UnknownGroupError", + "UnknownGroupIdError", + "UnknownUserError", + "UnknownUserIdError", + "User", + }, + "path": { + "Base", + "Clean", + "Dir", + "ErrBadPattern", + "Ext", + "IsAbs", + "Join", + "Match", + "Split", + }, + "path/filepath": { + "Abs", + "Base", + "Clean", + "Dir", + "ErrBadPattern", + "EvalSymlinks", + "Ext", + "FromSlash", + "Glob", + "HasPrefix", + "IsAbs", + "IsLocal", + "Join", + "ListSeparator", + "Match", + "Rel", + "Separator", + "SkipAll", + "SkipDir", + "Split", + "SplitList", + "ToSlash", + "VolumeName", + "Walk", + "WalkDir", + "WalkFunc", + }, + "plugin": { + "Open", + "Plugin", + "Symbol", + }, + "reflect": { + "Append", + "AppendSlice", + "Array", + "ArrayOf", + "Bool", + "BothDir", + "Chan", + "ChanDir", + "ChanOf", + "Complex128", + "Complex64", + "Copy", + "DeepEqual", + "Float32", + "Float64", + "Func", + "FuncOf", + "Indirect", + "Int", + "Int16", + "Int32", + "Int64", + "Int8", + "Interface", + "Invalid", + "Kind", + "MakeChan", + "MakeFunc", + "MakeMap", + "MakeMapWithSize", + 
"MakeSlice", + "Map", + "MapIter", + "MapOf", + "Method", + "New", + "NewAt", + "Pointer", + "PointerTo", + "Ptr", + "PtrTo", + "RecvDir", + "Select", + "SelectCase", + "SelectDefault", + "SelectDir", + "SelectRecv", + "SelectSend", + "SendDir", + "Slice", + "SliceHeader", + "SliceOf", + "String", + "StringHeader", + "Struct", + "StructField", + "StructOf", + "StructTag", + "Swapper", + "Type", + "TypeOf", + "Uint", + "Uint16", + "Uint32", + "Uint64", + "Uint8", + "Uintptr", + "UnsafePointer", + "Value", + "ValueError", + "ValueOf", + "VisibleFields", + "Zero", + }, + "regexp": { + "Compile", + "CompilePOSIX", + "Match", + "MatchReader", + "MatchString", + "MustCompile", + "MustCompilePOSIX", + "QuoteMeta", + "Regexp", + }, + "regexp/syntax": { + "ClassNL", + "Compile", + "DotNL", + "EmptyBeginLine", + "EmptyBeginText", + "EmptyEndLine", + "EmptyEndText", + "EmptyNoWordBoundary", + "EmptyOp", + "EmptyOpContext", + "EmptyWordBoundary", + "ErrInternalError", + "ErrInvalidCharClass", + "ErrInvalidCharRange", + "ErrInvalidEscape", + "ErrInvalidNamedCapture", + "ErrInvalidPerlOp", + "ErrInvalidRepeatOp", + "ErrInvalidRepeatSize", + "ErrInvalidUTF8", + "ErrLarge", + "ErrMissingBracket", + "ErrMissingParen", + "ErrMissingRepeatArgument", + "ErrNestingDepth", + "ErrTrailingBackslash", + "ErrUnexpectedParen", + "Error", + "ErrorCode", + "Flags", + "FoldCase", + "Inst", + "InstAlt", + "InstAltMatch", + "InstCapture", + "InstEmptyWidth", + "InstFail", + "InstMatch", + "InstNop", + "InstOp", + "InstRune", + "InstRune1", + "InstRuneAny", + "InstRuneAnyNotNL", + "IsWordChar", + "Literal", + "MatchNL", + "NonGreedy", + "OneLine", + "Op", + "OpAlternate", + "OpAnyChar", + "OpAnyCharNotNL", + "OpBeginLine", + "OpBeginText", + "OpCapture", + "OpCharClass", + "OpConcat", + "OpEmptyMatch", + "OpEndLine", + "OpEndText", + "OpLiteral", + "OpNoMatch", + "OpNoWordBoundary", + "OpPlus", + "OpQuest", + "OpRepeat", + "OpStar", + "OpWordBoundary", + "POSIX", + "Parse", + "Perl", + "PerlX", + "Prog", + "Regexp", + "Simple", + "UnicodeGroups", + "WasDollar", + }, + "runtime": { + "BlockProfile", + "BlockProfileRecord", + "Breakpoint", + "CPUProfile", + "Caller", + "Callers", + "CallersFrames", + "Compiler", + "Error", + "Frame", + "Frames", + "Func", + "FuncForPC", + "GC", + "GOARCH", + "GOMAXPROCS", + "GOOS", + "GOROOT", + "Goexit", + "GoroutineProfile", + "Gosched", + "KeepAlive", + "LockOSThread", + "MemProfile", + "MemProfileRate", + "MemProfileRecord", + "MemStats", + "MutexProfile", + "NumCPU", + "NumCgoCall", + "NumGoroutine", + "ReadMemStats", + "ReadTrace", + "SetBlockProfileRate", + "SetCPUProfileRate", + "SetCgoTraceback", + "SetFinalizer", + "SetMutexProfileFraction", + "Stack", + "StackRecord", + "StartTrace", + "StopTrace", + "ThreadCreateProfile", + "TypeAssertionError", + "UnlockOSThread", + "Version", + }, + "runtime/cgo": { + "Handle", + "Incomplete", + "NewHandle", + }, + "runtime/coverage": { + "ClearCounters", + "WriteCounters", + "WriteCountersDir", + "WriteMeta", + "WriteMetaDir", + }, + "runtime/debug": { + "BuildInfo", + "BuildSetting", + "FreeOSMemory", + "GCStats", + "Module", + "ParseBuildInfo", + "PrintStack", + "ReadBuildInfo", + "ReadGCStats", + "SetGCPercent", + "SetMaxStack", + "SetMaxThreads", + "SetMemoryLimit", + "SetPanicOnFault", + "SetTraceback", + "Stack", + "WriteHeapDump", + }, + "runtime/metrics": { + "All", + "Description", + "Float64Histogram", + "KindBad", + "KindFloat64", + "KindFloat64Histogram", + "KindUint64", + "Read", + "Sample", + "Value", + "ValueKind", + }, + 
"runtime/pprof": { + "Do", + "ForLabels", + "Label", + "LabelSet", + "Labels", + "Lookup", + "NewProfile", + "Profile", + "Profiles", + "SetGoroutineLabels", + "StartCPUProfile", + "StopCPUProfile", + "WithLabels", + "WriteHeapProfile", + }, + "runtime/trace": { + "IsEnabled", + "Log", + "Logf", + "NewTask", + "Region", + "Start", + "StartRegion", + "Stop", + "Task", + "WithRegion", + }, + "sort": { + "Find", + "Float64Slice", + "Float64s", + "Float64sAreSorted", + "IntSlice", + "Interface", + "Ints", + "IntsAreSorted", + "IsSorted", + "Reverse", + "Search", + "SearchFloat64s", + "SearchInts", + "SearchStrings", + "Slice", + "SliceIsSorted", + "SliceStable", + "Sort", + "Stable", + "StringSlice", + "Strings", + "StringsAreSorted", + }, + "strconv": { + "AppendBool", + "AppendFloat", + "AppendInt", + "AppendQuote", + "AppendQuoteRune", + "AppendQuoteRuneToASCII", + "AppendQuoteRuneToGraphic", + "AppendQuoteToASCII", + "AppendQuoteToGraphic", + "AppendUint", + "Atoi", + "CanBackquote", + "ErrRange", + "ErrSyntax", + "FormatBool", + "FormatComplex", + "FormatFloat", + "FormatInt", + "FormatUint", + "IntSize", + "IsGraphic", + "IsPrint", + "Itoa", + "NumError", + "ParseBool", + "ParseComplex", + "ParseFloat", + "ParseInt", + "ParseUint", + "Quote", + "QuoteRune", + "QuoteRuneToASCII", + "QuoteRuneToGraphic", + "QuoteToASCII", + "QuoteToGraphic", + "QuotedPrefix", + "Unquote", + "UnquoteChar", + }, + "strings": { + "Builder", + "Clone", + "Compare", + "Contains", + "ContainsAny", + "ContainsRune", + "Count", + "Cut", + "CutPrefix", + "CutSuffix", + "EqualFold", + "Fields", + "FieldsFunc", + "HasPrefix", + "HasSuffix", + "Index", + "IndexAny", + "IndexByte", + "IndexFunc", + "IndexRune", + "Join", + "LastIndex", + "LastIndexAny", + "LastIndexByte", + "LastIndexFunc", + "Map", + "NewReader", + "NewReplacer", + "Reader", + "Repeat", + "Replace", + "ReplaceAll", + "Replacer", + "Split", + "SplitAfter", + "SplitAfterN", + "SplitN", + "Title", + "ToLower", + "ToLowerSpecial", + "ToTitle", + "ToTitleSpecial", + "ToUpper", + "ToUpperSpecial", + "ToValidUTF8", + "Trim", + "TrimFunc", + "TrimLeft", + "TrimLeftFunc", + "TrimPrefix", + "TrimRight", + "TrimRightFunc", + "TrimSpace", + "TrimSuffix", + }, + "sync": { + "Cond", + "Locker", + "Map", + "Mutex", + "NewCond", + "Once", + "Pool", + "RWMutex", + "WaitGroup", + }, + "sync/atomic": { + "AddInt32", + "AddInt64", + "AddUint32", + "AddUint64", + "AddUintptr", + "Bool", + "CompareAndSwapInt32", + "CompareAndSwapInt64", + "CompareAndSwapPointer", + "CompareAndSwapUint32", + "CompareAndSwapUint64", + "CompareAndSwapUintptr", + "Int32", + "Int64", + "LoadInt32", + "LoadInt64", + "LoadPointer", + "LoadUint32", + "LoadUint64", + "LoadUintptr", + "Pointer", + "StoreInt32", + "StoreInt64", + "StorePointer", + "StoreUint32", + "StoreUint64", + "StoreUintptr", + "SwapInt32", + "SwapInt64", + "SwapPointer", + "SwapUint32", + "SwapUint64", + "SwapUintptr", + "Uint32", + "Uint64", + "Uintptr", + "Value", + }, + "syscall": { + "AF_ALG", + "AF_APPLETALK", + "AF_ARP", + "AF_ASH", + "AF_ATM", + "AF_ATMPVC", + "AF_ATMSVC", + "AF_AX25", + "AF_BLUETOOTH", + "AF_BRIDGE", + "AF_CAIF", + "AF_CAN", + "AF_CCITT", + "AF_CHAOS", + "AF_CNT", + "AF_COIP", + "AF_DATAKIT", + "AF_DECnet", + "AF_DLI", + "AF_E164", + "AF_ECMA", + "AF_ECONET", + "AF_ENCAP", + "AF_FILE", + "AF_HYLINK", + "AF_IEEE80211", + "AF_IEEE802154", + "AF_IMPLINK", + "AF_INET", + "AF_INET6", + "AF_INET6_SDP", + "AF_INET_SDP", + "AF_IPX", + "AF_IRDA", + "AF_ISDN", + "AF_ISO", + "AF_IUCV", + "AF_KEY", + "AF_LAT", + 
"AF_LINK", + "AF_LLC", + "AF_LOCAL", + "AF_MAX", + "AF_MPLS", + "AF_NATM", + "AF_NDRV", + "AF_NETBEUI", + "AF_NETBIOS", + "AF_NETGRAPH", + "AF_NETLINK", + "AF_NETROM", + "AF_NS", + "AF_OROUTE", + "AF_OSI", + "AF_PACKET", + "AF_PHONET", + "AF_PPP", + "AF_PPPOX", + "AF_PUP", + "AF_RDS", + "AF_RESERVED_36", + "AF_ROSE", + "AF_ROUTE", + "AF_RXRPC", + "AF_SCLUSTER", + "AF_SECURITY", + "AF_SIP", + "AF_SLOW", + "AF_SNA", + "AF_SYSTEM", + "AF_TIPC", + "AF_UNIX", + "AF_UNSPEC", + "AF_UTUN", + "AF_VENDOR00", + "AF_VENDOR01", + "AF_VENDOR02", + "AF_VENDOR03", + "AF_VENDOR04", + "AF_VENDOR05", + "AF_VENDOR06", + "AF_VENDOR07", + "AF_VENDOR08", + "AF_VENDOR09", + "AF_VENDOR10", + "AF_VENDOR11", + "AF_VENDOR12", + "AF_VENDOR13", + "AF_VENDOR14", + "AF_VENDOR15", + "AF_VENDOR16", + "AF_VENDOR17", + "AF_VENDOR18", + "AF_VENDOR19", + "AF_VENDOR20", + "AF_VENDOR21", + "AF_VENDOR22", + "AF_VENDOR23", + "AF_VENDOR24", + "AF_VENDOR25", + "AF_VENDOR26", + "AF_VENDOR27", + "AF_VENDOR28", + "AF_VENDOR29", + "AF_VENDOR30", + "AF_VENDOR31", + "AF_VENDOR32", + "AF_VENDOR33", + "AF_VENDOR34", + "AF_VENDOR35", + "AF_VENDOR36", + "AF_VENDOR37", + "AF_VENDOR38", + "AF_VENDOR39", + "AF_VENDOR40", + "AF_VENDOR41", + "AF_VENDOR42", + "AF_VENDOR43", + "AF_VENDOR44", + "AF_VENDOR45", + "AF_VENDOR46", + "AF_VENDOR47", + "AF_WANPIPE", + "AF_X25", + "AI_CANONNAME", + "AI_NUMERICHOST", + "AI_PASSIVE", + "APPLICATION_ERROR", + "ARPHRD_ADAPT", + "ARPHRD_APPLETLK", + "ARPHRD_ARCNET", + "ARPHRD_ASH", + "ARPHRD_ATM", + "ARPHRD_AX25", + "ARPHRD_BIF", + "ARPHRD_CHAOS", + "ARPHRD_CISCO", + "ARPHRD_CSLIP", + "ARPHRD_CSLIP6", + "ARPHRD_DDCMP", + "ARPHRD_DLCI", + "ARPHRD_ECONET", + "ARPHRD_EETHER", + "ARPHRD_ETHER", + "ARPHRD_EUI64", + "ARPHRD_FCAL", + "ARPHRD_FCFABRIC", + "ARPHRD_FCPL", + "ARPHRD_FCPP", + "ARPHRD_FDDI", + "ARPHRD_FRAD", + "ARPHRD_FRELAY", + "ARPHRD_HDLC", + "ARPHRD_HIPPI", + "ARPHRD_HWX25", + "ARPHRD_IEEE1394", + "ARPHRD_IEEE802", + "ARPHRD_IEEE80211", + "ARPHRD_IEEE80211_PRISM", + "ARPHRD_IEEE80211_RADIOTAP", + "ARPHRD_IEEE802154", + "ARPHRD_IEEE802154_PHY", + "ARPHRD_IEEE802_TR", + "ARPHRD_INFINIBAND", + "ARPHRD_IPDDP", + "ARPHRD_IPGRE", + "ARPHRD_IRDA", + "ARPHRD_LAPB", + "ARPHRD_LOCALTLK", + "ARPHRD_LOOPBACK", + "ARPHRD_METRICOM", + "ARPHRD_NETROM", + "ARPHRD_NONE", + "ARPHRD_PIMREG", + "ARPHRD_PPP", + "ARPHRD_PRONET", + "ARPHRD_RAWHDLC", + "ARPHRD_ROSE", + "ARPHRD_RSRVD", + "ARPHRD_SIT", + "ARPHRD_SKIP", + "ARPHRD_SLIP", + "ARPHRD_SLIP6", + "ARPHRD_STRIP", + "ARPHRD_TUNNEL", + "ARPHRD_TUNNEL6", + "ARPHRD_VOID", + "ARPHRD_X25", + "AUTHTYPE_CLIENT", + "AUTHTYPE_SERVER", + "Accept", + "Accept4", + "AcceptEx", + "Access", + "Acct", + "AddrinfoW", + "Adjtime", + "Adjtimex", + "AllThreadsSyscall", + "AllThreadsSyscall6", + "AttachLsf", + "B0", + "B1000000", + "B110", + "B115200", + "B1152000", + "B1200", + "B134", + "B14400", + "B150", + "B1500000", + "B1800", + "B19200", + "B200", + "B2000000", + "B230400", + "B2400", + "B2500000", + "B28800", + "B300", + "B3000000", + "B3500000", + "B38400", + "B4000000", + "B460800", + "B4800", + "B50", + "B500000", + "B57600", + "B576000", + "B600", + "B7200", + "B75", + "B76800", + "B921600", + "B9600", + "BASE_PROTOCOL", + "BIOCFEEDBACK", + "BIOCFLUSH", + "BIOCGBLEN", + "BIOCGDIRECTION", + "BIOCGDIRFILT", + "BIOCGDLT", + "BIOCGDLTLIST", + "BIOCGETBUFMODE", + "BIOCGETIF", + "BIOCGETZMAX", + "BIOCGFEEDBACK", + "BIOCGFILDROP", + "BIOCGHDRCMPLT", + "BIOCGRSIG", + "BIOCGRTIMEOUT", + "BIOCGSEESENT", + "BIOCGSTATS", + "BIOCGSTATSOLD", + "BIOCGTSTAMP", + "BIOCIMMEDIATE", + "BIOCLOCK", + 
"BIOCPROMISC", + "BIOCROTZBUF", + "BIOCSBLEN", + "BIOCSDIRECTION", + "BIOCSDIRFILT", + "BIOCSDLT", + "BIOCSETBUFMODE", + "BIOCSETF", + "BIOCSETFNR", + "BIOCSETIF", + "BIOCSETWF", + "BIOCSETZBUF", + "BIOCSFEEDBACK", + "BIOCSFILDROP", + "BIOCSHDRCMPLT", + "BIOCSRSIG", + "BIOCSRTIMEOUT", + "BIOCSSEESENT", + "BIOCSTCPF", + "BIOCSTSTAMP", + "BIOCSUDPF", + "BIOCVERSION", + "BPF_A", + "BPF_ABS", + "BPF_ADD", + "BPF_ALIGNMENT", + "BPF_ALIGNMENT32", + "BPF_ALU", + "BPF_AND", + "BPF_B", + "BPF_BUFMODE_BUFFER", + "BPF_BUFMODE_ZBUF", + "BPF_DFLTBUFSIZE", + "BPF_DIRECTION_IN", + "BPF_DIRECTION_OUT", + "BPF_DIV", + "BPF_H", + "BPF_IMM", + "BPF_IND", + "BPF_JA", + "BPF_JEQ", + "BPF_JGE", + "BPF_JGT", + "BPF_JMP", + "BPF_JSET", + "BPF_K", + "BPF_LD", + "BPF_LDX", + "BPF_LEN", + "BPF_LSH", + "BPF_MAJOR_VERSION", + "BPF_MAXBUFSIZE", + "BPF_MAXINSNS", + "BPF_MEM", + "BPF_MEMWORDS", + "BPF_MINBUFSIZE", + "BPF_MINOR_VERSION", + "BPF_MISC", + "BPF_MSH", + "BPF_MUL", + "BPF_NEG", + "BPF_OR", + "BPF_RELEASE", + "BPF_RET", + "BPF_RSH", + "BPF_ST", + "BPF_STX", + "BPF_SUB", + "BPF_TAX", + "BPF_TXA", + "BPF_T_BINTIME", + "BPF_T_BINTIME_FAST", + "BPF_T_BINTIME_MONOTONIC", + "BPF_T_BINTIME_MONOTONIC_FAST", + "BPF_T_FAST", + "BPF_T_FLAG_MASK", + "BPF_T_FORMAT_MASK", + "BPF_T_MICROTIME", + "BPF_T_MICROTIME_FAST", + "BPF_T_MICROTIME_MONOTONIC", + "BPF_T_MICROTIME_MONOTONIC_FAST", + "BPF_T_MONOTONIC", + "BPF_T_MONOTONIC_FAST", + "BPF_T_NANOTIME", + "BPF_T_NANOTIME_FAST", + "BPF_T_NANOTIME_MONOTONIC", + "BPF_T_NANOTIME_MONOTONIC_FAST", + "BPF_T_NONE", + "BPF_T_NORMAL", + "BPF_W", + "BPF_X", + "BRKINT", + "Bind", + "BindToDevice", + "BpfBuflen", + "BpfDatalink", + "BpfHdr", + "BpfHeadercmpl", + "BpfInsn", + "BpfInterface", + "BpfJump", + "BpfProgram", + "BpfStat", + "BpfStats", + "BpfStmt", + "BpfTimeout", + "BpfTimeval", + "BpfVersion", + "BpfZbuf", + "BpfZbufHeader", + "ByHandleFileInformation", + "BytePtrFromString", + "ByteSliceFromString", + "CCR0_FLUSH", + "CERT_CHAIN_POLICY_AUTHENTICODE", + "CERT_CHAIN_POLICY_AUTHENTICODE_TS", + "CERT_CHAIN_POLICY_BASE", + "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", + "CERT_CHAIN_POLICY_EV", + "CERT_CHAIN_POLICY_MICROSOFT_ROOT", + "CERT_CHAIN_POLICY_NT_AUTH", + "CERT_CHAIN_POLICY_SSL", + "CERT_E_CN_NO_MATCH", + "CERT_E_EXPIRED", + "CERT_E_PURPOSE", + "CERT_E_ROLE", + "CERT_E_UNTRUSTEDROOT", + "CERT_STORE_ADD_ALWAYS", + "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", + "CERT_STORE_PROV_MEMORY", + "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", + "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", + "CERT_TRUST_INVALID_BASIC_CONSTRAINTS", + "CERT_TRUST_INVALID_EXTENSION", + "CERT_TRUST_INVALID_NAME_CONSTRAINTS", + "CERT_TRUST_INVALID_POLICY_CONSTRAINTS", + "CERT_TRUST_IS_CYCLIC", + "CERT_TRUST_IS_EXPLICIT_DISTRUST", + "CERT_TRUST_IS_NOT_SIGNATURE_VALID", + "CERT_TRUST_IS_NOT_TIME_VALID", + "CERT_TRUST_IS_NOT_VALID_FOR_USAGE", + "CERT_TRUST_IS_OFFLINE_REVOCATION", + "CERT_TRUST_IS_REVOKED", + "CERT_TRUST_IS_UNTRUSTED_ROOT", + "CERT_TRUST_NO_ERROR", + "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", + "CERT_TRUST_REVOCATION_STATUS_UNKNOWN", + "CFLUSH", + "CLOCAL", + "CLONE_CHILD_CLEARTID", + "CLONE_CHILD_SETTID", + "CLONE_CLEAR_SIGHAND", + "CLONE_CSIGNAL", + "CLONE_DETACHED", + "CLONE_FILES", + "CLONE_FS", + "CLONE_INTO_CGROUP", + "CLONE_IO", + "CLONE_NEWCGROUP", + "CLONE_NEWIPC", + "CLONE_NEWNET", + "CLONE_NEWNS", + "CLONE_NEWPID", + "CLONE_NEWTIME", + "CLONE_NEWUSER", 
+ "CLONE_NEWUTS", + "CLONE_PARENT", + "CLONE_PARENT_SETTID", + "CLONE_PID", + "CLONE_PIDFD", + "CLONE_PTRACE", + "CLONE_SETTLS", + "CLONE_SIGHAND", + "CLONE_SYSVSEM", + "CLONE_THREAD", + "CLONE_UNTRACED", + "CLONE_VFORK", + "CLONE_VM", + "CPUID_CFLUSH", + "CREAD", + "CREATE_ALWAYS", + "CREATE_NEW", + "CREATE_NEW_PROCESS_GROUP", + "CREATE_UNICODE_ENVIRONMENT", + "CRYPT_DEFAULT_CONTAINER_OPTIONAL", + "CRYPT_DELETEKEYSET", + "CRYPT_MACHINE_KEYSET", + "CRYPT_NEWKEYSET", + "CRYPT_SILENT", + "CRYPT_VERIFYCONTEXT", + "CS5", + "CS6", + "CS7", + "CS8", + "CSIZE", + "CSTART", + "CSTATUS", + "CSTOP", + "CSTOPB", + "CSUSP", + "CTL_MAXNAME", + "CTL_NET", + "CTL_QUERY", + "CTRL_BREAK_EVENT", + "CTRL_CLOSE_EVENT", + "CTRL_C_EVENT", + "CTRL_LOGOFF_EVENT", + "CTRL_SHUTDOWN_EVENT", + "CancelIo", + "CancelIoEx", + "CertAddCertificateContextToStore", + "CertChainContext", + "CertChainElement", + "CertChainPara", + "CertChainPolicyPara", + "CertChainPolicyStatus", + "CertCloseStore", + "CertContext", + "CertCreateCertificateContext", + "CertEnhKeyUsage", + "CertEnumCertificatesInStore", + "CertFreeCertificateChain", + "CertFreeCertificateContext", + "CertGetCertificateChain", + "CertInfo", + "CertOpenStore", + "CertOpenSystemStore", + "CertRevocationCrlInfo", + "CertRevocationInfo", + "CertSimpleChain", + "CertTrustListInfo", + "CertTrustStatus", + "CertUsageMatch", + "CertVerifyCertificateChainPolicy", + "Chdir", + "CheckBpfVersion", + "Chflags", + "Chmod", + "Chown", + "Chroot", + "Clearenv", + "Close", + "CloseHandle", + "CloseOnExec", + "Closesocket", + "CmsgLen", + "CmsgSpace", + "Cmsghdr", + "CommandLineToArgv", + "ComputerName", + "Conn", + "Connect", + "ConnectEx", + "ConvertSidToStringSid", + "ConvertStringSidToSid", + "CopySid", + "Creat", + "CreateDirectory", + "CreateFile", + "CreateFileMapping", + "CreateHardLink", + "CreateIoCompletionPort", + "CreatePipe", + "CreateProcess", + "CreateProcessAsUser", + "CreateSymbolicLink", + "CreateToolhelp32Snapshot", + "Credential", + "CryptAcquireContext", + "CryptGenRandom", + "CryptReleaseContext", + "DIOCBSFLUSH", + "DIOCOSFPFLUSH", + "DLL", + "DLLError", + "DLT_A429", + "DLT_A653_ICM", + "DLT_AIRONET_HEADER", + "DLT_AOS", + "DLT_APPLE_IP_OVER_IEEE1394", + "DLT_ARCNET", + "DLT_ARCNET_LINUX", + "DLT_ATM_CLIP", + "DLT_ATM_RFC1483", + "DLT_AURORA", + "DLT_AX25", + "DLT_AX25_KISS", + "DLT_BACNET_MS_TP", + "DLT_BLUETOOTH_HCI_H4", + "DLT_BLUETOOTH_HCI_H4_WITH_PHDR", + "DLT_CAN20B", + "DLT_CAN_SOCKETCAN", + "DLT_CHAOS", + "DLT_CHDLC", + "DLT_CISCO_IOS", + "DLT_C_HDLC", + "DLT_C_HDLC_WITH_DIR", + "DLT_DBUS", + "DLT_DECT", + "DLT_DOCSIS", + "DLT_DVB_CI", + "DLT_ECONET", + "DLT_EN10MB", + "DLT_EN3MB", + "DLT_ENC", + "DLT_ERF", + "DLT_ERF_ETH", + "DLT_ERF_POS", + "DLT_FC_2", + "DLT_FC_2_WITH_FRAME_DELIMS", + "DLT_FDDI", + "DLT_FLEXRAY", + "DLT_FRELAY", + "DLT_FRELAY_WITH_DIR", + "DLT_GCOM_SERIAL", + "DLT_GCOM_T1E1", + "DLT_GPF_F", + "DLT_GPF_T", + "DLT_GPRS_LLC", + "DLT_GSMTAP_ABIS", + "DLT_GSMTAP_UM", + "DLT_HDLC", + "DLT_HHDLC", + "DLT_HIPPI", + "DLT_IBM_SN", + "DLT_IBM_SP", + "DLT_IEEE802", + "DLT_IEEE802_11", + "DLT_IEEE802_11_RADIO", + "DLT_IEEE802_11_RADIO_AVS", + "DLT_IEEE802_15_4", + "DLT_IEEE802_15_4_LINUX", + "DLT_IEEE802_15_4_NOFCS", + "DLT_IEEE802_15_4_NONASK_PHY", + "DLT_IEEE802_16_MAC_CPS", + "DLT_IEEE802_16_MAC_CPS_RADIO", + "DLT_IPFILTER", + "DLT_IPMB", + "DLT_IPMB_LINUX", + "DLT_IPNET", + "DLT_IPOIB", + "DLT_IPV4", + "DLT_IPV6", + "DLT_IP_OVER_FC", + "DLT_JUNIPER_ATM1", + "DLT_JUNIPER_ATM2", + "DLT_JUNIPER_ATM_CEMIC", + "DLT_JUNIPER_CHDLC", + 
"DLT_JUNIPER_ES", + "DLT_JUNIPER_ETHER", + "DLT_JUNIPER_FIBRECHANNEL", + "DLT_JUNIPER_FRELAY", + "DLT_JUNIPER_GGSN", + "DLT_JUNIPER_ISM", + "DLT_JUNIPER_MFR", + "DLT_JUNIPER_MLFR", + "DLT_JUNIPER_MLPPP", + "DLT_JUNIPER_MONITOR", + "DLT_JUNIPER_PIC_PEER", + "DLT_JUNIPER_PPP", + "DLT_JUNIPER_PPPOE", + "DLT_JUNIPER_PPPOE_ATM", + "DLT_JUNIPER_SERVICES", + "DLT_JUNIPER_SRX_E2E", + "DLT_JUNIPER_ST", + "DLT_JUNIPER_VP", + "DLT_JUNIPER_VS", + "DLT_LAPB_WITH_DIR", + "DLT_LAPD", + "DLT_LIN", + "DLT_LINUX_EVDEV", + "DLT_LINUX_IRDA", + "DLT_LINUX_LAPD", + "DLT_LINUX_PPP_WITHDIRECTION", + "DLT_LINUX_SLL", + "DLT_LOOP", + "DLT_LTALK", + "DLT_MATCHING_MAX", + "DLT_MATCHING_MIN", + "DLT_MFR", + "DLT_MOST", + "DLT_MPEG_2_TS", + "DLT_MPLS", + "DLT_MTP2", + "DLT_MTP2_WITH_PHDR", + "DLT_MTP3", + "DLT_MUX27010", + "DLT_NETANALYZER", + "DLT_NETANALYZER_TRANSPARENT", + "DLT_NFC_LLCP", + "DLT_NFLOG", + "DLT_NG40", + "DLT_NULL", + "DLT_PCI_EXP", + "DLT_PFLOG", + "DLT_PFSYNC", + "DLT_PPI", + "DLT_PPP", + "DLT_PPP_BSDOS", + "DLT_PPP_ETHER", + "DLT_PPP_PPPD", + "DLT_PPP_SERIAL", + "DLT_PPP_WITH_DIR", + "DLT_PPP_WITH_DIRECTION", + "DLT_PRISM_HEADER", + "DLT_PRONET", + "DLT_RAIF1", + "DLT_RAW", + "DLT_RAWAF_MASK", + "DLT_RIO", + "DLT_SCCP", + "DLT_SITA", + "DLT_SLIP", + "DLT_SLIP_BSDOS", + "DLT_STANAG_5066_D_PDU", + "DLT_SUNATM", + "DLT_SYMANTEC_FIREWALL", + "DLT_TZSP", + "DLT_USB", + "DLT_USB_LINUX", + "DLT_USB_LINUX_MMAPPED", + "DLT_USER0", + "DLT_USER1", + "DLT_USER10", + "DLT_USER11", + "DLT_USER12", + "DLT_USER13", + "DLT_USER14", + "DLT_USER15", + "DLT_USER2", + "DLT_USER3", + "DLT_USER4", + "DLT_USER5", + "DLT_USER6", + "DLT_USER7", + "DLT_USER8", + "DLT_USER9", + "DLT_WIHART", + "DLT_X2E_SERIAL", + "DLT_X2E_XORAYA", + "DNSMXData", + "DNSPTRData", + "DNSRecord", + "DNSSRVData", + "DNSTXTData", + "DNS_INFO_NO_RECORDS", + "DNS_TYPE_A", + "DNS_TYPE_A6", + "DNS_TYPE_AAAA", + "DNS_TYPE_ADDRS", + "DNS_TYPE_AFSDB", + "DNS_TYPE_ALL", + "DNS_TYPE_ANY", + "DNS_TYPE_ATMA", + "DNS_TYPE_AXFR", + "DNS_TYPE_CERT", + "DNS_TYPE_CNAME", + "DNS_TYPE_DHCID", + "DNS_TYPE_DNAME", + "DNS_TYPE_DNSKEY", + "DNS_TYPE_DS", + "DNS_TYPE_EID", + "DNS_TYPE_GID", + "DNS_TYPE_GPOS", + "DNS_TYPE_HINFO", + "DNS_TYPE_ISDN", + "DNS_TYPE_IXFR", + "DNS_TYPE_KEY", + "DNS_TYPE_KX", + "DNS_TYPE_LOC", + "DNS_TYPE_MAILA", + "DNS_TYPE_MAILB", + "DNS_TYPE_MB", + "DNS_TYPE_MD", + "DNS_TYPE_MF", + "DNS_TYPE_MG", + "DNS_TYPE_MINFO", + "DNS_TYPE_MR", + "DNS_TYPE_MX", + "DNS_TYPE_NAPTR", + "DNS_TYPE_NBSTAT", + "DNS_TYPE_NIMLOC", + "DNS_TYPE_NS", + "DNS_TYPE_NSAP", + "DNS_TYPE_NSAPPTR", + "DNS_TYPE_NSEC", + "DNS_TYPE_NULL", + "DNS_TYPE_NXT", + "DNS_TYPE_OPT", + "DNS_TYPE_PTR", + "DNS_TYPE_PX", + "DNS_TYPE_RP", + "DNS_TYPE_RRSIG", + "DNS_TYPE_RT", + "DNS_TYPE_SIG", + "DNS_TYPE_SINK", + "DNS_TYPE_SOA", + "DNS_TYPE_SRV", + "DNS_TYPE_TEXT", + "DNS_TYPE_TKEY", + "DNS_TYPE_TSIG", + "DNS_TYPE_UID", + "DNS_TYPE_UINFO", + "DNS_TYPE_UNSPEC", + "DNS_TYPE_WINS", + "DNS_TYPE_WINSR", + "DNS_TYPE_WKS", + "DNS_TYPE_X25", + "DT_BLK", + "DT_CHR", + "DT_DIR", + "DT_FIFO", + "DT_LNK", + "DT_REG", + "DT_SOCK", + "DT_UNKNOWN", + "DT_WHT", + "DUPLICATE_CLOSE_SOURCE", + "DUPLICATE_SAME_ACCESS", + "DeleteFile", + "DetachLsf", + "DeviceIoControl", + "Dirent", + "DnsNameCompare", + "DnsQuery", + "DnsRecordListFree", + "DnsSectionAdditional", + "DnsSectionAnswer", + "DnsSectionAuthority", + "DnsSectionQuestion", + "Dup", + "Dup2", + "Dup3", + "DuplicateHandle", + "E2BIG", + "EACCES", + "EADDRINUSE", + "EADDRNOTAVAIL", + "EADV", + "EAFNOSUPPORT", + "EAGAIN", + "EALREADY", + "EAUTH", + 
"EBADARCH", + "EBADE", + "EBADEXEC", + "EBADF", + "EBADFD", + "EBADMACHO", + "EBADMSG", + "EBADR", + "EBADRPC", + "EBADRQC", + "EBADSLT", + "EBFONT", + "EBUSY", + "ECANCELED", + "ECAPMODE", + "ECHILD", + "ECHO", + "ECHOCTL", + "ECHOE", + "ECHOK", + "ECHOKE", + "ECHONL", + "ECHOPRT", + "ECHRNG", + "ECOMM", + "ECONNABORTED", + "ECONNREFUSED", + "ECONNRESET", + "EDEADLK", + "EDEADLOCK", + "EDESTADDRREQ", + "EDEVERR", + "EDOM", + "EDOOFUS", + "EDOTDOT", + "EDQUOT", + "EEXIST", + "EFAULT", + "EFBIG", + "EFER_LMA", + "EFER_LME", + "EFER_NXE", + "EFER_SCE", + "EFTYPE", + "EHOSTDOWN", + "EHOSTUNREACH", + "EHWPOISON", + "EIDRM", + "EILSEQ", + "EINPROGRESS", + "EINTR", + "EINVAL", + "EIO", + "EIPSEC", + "EISCONN", + "EISDIR", + "EISNAM", + "EKEYEXPIRED", + "EKEYREJECTED", + "EKEYREVOKED", + "EL2HLT", + "EL2NSYNC", + "EL3HLT", + "EL3RST", + "ELAST", + "ELF_NGREG", + "ELF_PRARGSZ", + "ELIBACC", + "ELIBBAD", + "ELIBEXEC", + "ELIBMAX", + "ELIBSCN", + "ELNRNG", + "ELOOP", + "EMEDIUMTYPE", + "EMFILE", + "EMLINK", + "EMSGSIZE", + "EMT_TAGOVF", + "EMULTIHOP", + "EMUL_ENABLED", + "EMUL_LINUX", + "EMUL_LINUX32", + "EMUL_MAXID", + "EMUL_NATIVE", + "ENAMETOOLONG", + "ENAVAIL", + "ENDRUNDISC", + "ENEEDAUTH", + "ENETDOWN", + "ENETRESET", + "ENETUNREACH", + "ENFILE", + "ENOANO", + "ENOATTR", + "ENOBUFS", + "ENOCSI", + "ENODATA", + "ENODEV", + "ENOENT", + "ENOEXEC", + "ENOKEY", + "ENOLCK", + "ENOLINK", + "ENOMEDIUM", + "ENOMEM", + "ENOMSG", + "ENONET", + "ENOPKG", + "ENOPOLICY", + "ENOPROTOOPT", + "ENOSPC", + "ENOSR", + "ENOSTR", + "ENOSYS", + "ENOTBLK", + "ENOTCAPABLE", + "ENOTCONN", + "ENOTDIR", + "ENOTEMPTY", + "ENOTNAM", + "ENOTRECOVERABLE", + "ENOTSOCK", + "ENOTSUP", + "ENOTTY", + "ENOTUNIQ", + "ENXIO", + "EN_SW_CTL_INF", + "EN_SW_CTL_PREC", + "EN_SW_CTL_ROUND", + "EN_SW_DATACHAIN", + "EN_SW_DENORM", + "EN_SW_INVOP", + "EN_SW_OVERFLOW", + "EN_SW_PRECLOSS", + "EN_SW_UNDERFLOW", + "EN_SW_ZERODIV", + "EOPNOTSUPP", + "EOVERFLOW", + "EOWNERDEAD", + "EPERM", + "EPFNOSUPPORT", + "EPIPE", + "EPOLLERR", + "EPOLLET", + "EPOLLHUP", + "EPOLLIN", + "EPOLLMSG", + "EPOLLONESHOT", + "EPOLLOUT", + "EPOLLPRI", + "EPOLLRDBAND", + "EPOLLRDHUP", + "EPOLLRDNORM", + "EPOLLWRBAND", + "EPOLLWRNORM", + "EPOLL_CLOEXEC", + "EPOLL_CTL_ADD", + "EPOLL_CTL_DEL", + "EPOLL_CTL_MOD", + "EPOLL_NONBLOCK", + "EPROCLIM", + "EPROCUNAVAIL", + "EPROGMISMATCH", + "EPROGUNAVAIL", + "EPROTO", + "EPROTONOSUPPORT", + "EPROTOTYPE", + "EPWROFF", + "EQFULL", + "ERANGE", + "EREMCHG", + "EREMOTE", + "EREMOTEIO", + "ERESTART", + "ERFKILL", + "EROFS", + "ERPCMISMATCH", + "ERROR_ACCESS_DENIED", + "ERROR_ALREADY_EXISTS", + "ERROR_BROKEN_PIPE", + "ERROR_BUFFER_OVERFLOW", + "ERROR_DIR_NOT_EMPTY", + "ERROR_ENVVAR_NOT_FOUND", + "ERROR_FILE_EXISTS", + "ERROR_FILE_NOT_FOUND", + "ERROR_HANDLE_EOF", + "ERROR_INSUFFICIENT_BUFFER", + "ERROR_IO_PENDING", + "ERROR_MOD_NOT_FOUND", + "ERROR_MORE_DATA", + "ERROR_NETNAME_DELETED", + "ERROR_NOT_FOUND", + "ERROR_NO_MORE_FILES", + "ERROR_OPERATION_ABORTED", + "ERROR_PATH_NOT_FOUND", + "ERROR_PRIVILEGE_NOT_HELD", + "ERROR_PROC_NOT_FOUND", + "ESHLIBVERS", + "ESHUTDOWN", + "ESOCKTNOSUPPORT", + "ESPIPE", + "ESRCH", + "ESRMNT", + "ESTALE", + "ESTRPIPE", + "ETHERCAP_JUMBO_MTU", + "ETHERCAP_VLAN_HWTAGGING", + "ETHERCAP_VLAN_MTU", + "ETHERMIN", + "ETHERMTU", + "ETHERMTU_JUMBO", + "ETHERTYPE_8023", + "ETHERTYPE_AARP", + "ETHERTYPE_ACCTON", + "ETHERTYPE_AEONIC", + "ETHERTYPE_ALPHA", + "ETHERTYPE_AMBER", + "ETHERTYPE_AMOEBA", + "ETHERTYPE_AOE", + "ETHERTYPE_APOLLO", + "ETHERTYPE_APOLLODOMAIN", + "ETHERTYPE_APPLETALK", + "ETHERTYPE_APPLITEK", + 
"ETHERTYPE_ARGONAUT", + "ETHERTYPE_ARP", + "ETHERTYPE_AT", + "ETHERTYPE_ATALK", + "ETHERTYPE_ATOMIC", + "ETHERTYPE_ATT", + "ETHERTYPE_ATTSTANFORD", + "ETHERTYPE_AUTOPHON", + "ETHERTYPE_AXIS", + "ETHERTYPE_BCLOOP", + "ETHERTYPE_BOFL", + "ETHERTYPE_CABLETRON", + "ETHERTYPE_CHAOS", + "ETHERTYPE_COMDESIGN", + "ETHERTYPE_COMPUGRAPHIC", + "ETHERTYPE_COUNTERPOINT", + "ETHERTYPE_CRONUS", + "ETHERTYPE_CRONUSVLN", + "ETHERTYPE_DCA", + "ETHERTYPE_DDE", + "ETHERTYPE_DEBNI", + "ETHERTYPE_DECAM", + "ETHERTYPE_DECCUST", + "ETHERTYPE_DECDIAG", + "ETHERTYPE_DECDNS", + "ETHERTYPE_DECDTS", + "ETHERTYPE_DECEXPER", + "ETHERTYPE_DECLAST", + "ETHERTYPE_DECLTM", + "ETHERTYPE_DECMUMPS", + "ETHERTYPE_DECNETBIOS", + "ETHERTYPE_DELTACON", + "ETHERTYPE_DIDDLE", + "ETHERTYPE_DLOG1", + "ETHERTYPE_DLOG2", + "ETHERTYPE_DN", + "ETHERTYPE_DOGFIGHT", + "ETHERTYPE_DSMD", + "ETHERTYPE_ECMA", + "ETHERTYPE_ENCRYPT", + "ETHERTYPE_ES", + "ETHERTYPE_EXCELAN", + "ETHERTYPE_EXPERDATA", + "ETHERTYPE_FLIP", + "ETHERTYPE_FLOWCONTROL", + "ETHERTYPE_FRARP", + "ETHERTYPE_GENDYN", + "ETHERTYPE_HAYES", + "ETHERTYPE_HIPPI_FP", + "ETHERTYPE_HITACHI", + "ETHERTYPE_HP", + "ETHERTYPE_IEEEPUP", + "ETHERTYPE_IEEEPUPAT", + "ETHERTYPE_IMLBL", + "ETHERTYPE_IMLBLDIAG", + "ETHERTYPE_IP", + "ETHERTYPE_IPAS", + "ETHERTYPE_IPV6", + "ETHERTYPE_IPX", + "ETHERTYPE_IPXNEW", + "ETHERTYPE_KALPANA", + "ETHERTYPE_LANBRIDGE", + "ETHERTYPE_LANPROBE", + "ETHERTYPE_LAT", + "ETHERTYPE_LBACK", + "ETHERTYPE_LITTLE", + "ETHERTYPE_LLDP", + "ETHERTYPE_LOGICRAFT", + "ETHERTYPE_LOOPBACK", + "ETHERTYPE_MATRA", + "ETHERTYPE_MAX", + "ETHERTYPE_MERIT", + "ETHERTYPE_MICP", + "ETHERTYPE_MOPDL", + "ETHERTYPE_MOPRC", + "ETHERTYPE_MOTOROLA", + "ETHERTYPE_MPLS", + "ETHERTYPE_MPLS_MCAST", + "ETHERTYPE_MUMPS", + "ETHERTYPE_NBPCC", + "ETHERTYPE_NBPCLAIM", + "ETHERTYPE_NBPCLREQ", + "ETHERTYPE_NBPCLRSP", + "ETHERTYPE_NBPCREQ", + "ETHERTYPE_NBPCRSP", + "ETHERTYPE_NBPDG", + "ETHERTYPE_NBPDGB", + "ETHERTYPE_NBPDLTE", + "ETHERTYPE_NBPRAR", + "ETHERTYPE_NBPRAS", + "ETHERTYPE_NBPRST", + "ETHERTYPE_NBPSCD", + "ETHERTYPE_NBPVCD", + "ETHERTYPE_NBS", + "ETHERTYPE_NCD", + "ETHERTYPE_NESTAR", + "ETHERTYPE_NETBEUI", + "ETHERTYPE_NOVELL", + "ETHERTYPE_NS", + "ETHERTYPE_NSAT", + "ETHERTYPE_NSCOMPAT", + "ETHERTYPE_NTRAILER", + "ETHERTYPE_OS9", + "ETHERTYPE_OS9NET", + "ETHERTYPE_PACER", + "ETHERTYPE_PAE", + "ETHERTYPE_PCS", + "ETHERTYPE_PLANNING", + "ETHERTYPE_PPP", + "ETHERTYPE_PPPOE", + "ETHERTYPE_PPPOEDISC", + "ETHERTYPE_PRIMENTS", + "ETHERTYPE_PUP", + "ETHERTYPE_PUPAT", + "ETHERTYPE_QINQ", + "ETHERTYPE_RACAL", + "ETHERTYPE_RATIONAL", + "ETHERTYPE_RAWFR", + "ETHERTYPE_RCL", + "ETHERTYPE_RDP", + "ETHERTYPE_RETIX", + "ETHERTYPE_REVARP", + "ETHERTYPE_SCA", + "ETHERTYPE_SECTRA", + "ETHERTYPE_SECUREDATA", + "ETHERTYPE_SGITW", + "ETHERTYPE_SG_BOUNCE", + "ETHERTYPE_SG_DIAG", + "ETHERTYPE_SG_NETGAMES", + "ETHERTYPE_SG_RESV", + "ETHERTYPE_SIMNET", + "ETHERTYPE_SLOW", + "ETHERTYPE_SLOWPROTOCOLS", + "ETHERTYPE_SNA", + "ETHERTYPE_SNMP", + "ETHERTYPE_SONIX", + "ETHERTYPE_SPIDER", + "ETHERTYPE_SPRITE", + "ETHERTYPE_STP", + "ETHERTYPE_TALARIS", + "ETHERTYPE_TALARISMC", + "ETHERTYPE_TCPCOMP", + "ETHERTYPE_TCPSM", + "ETHERTYPE_TEC", + "ETHERTYPE_TIGAN", + "ETHERTYPE_TRAIL", + "ETHERTYPE_TRANSETHER", + "ETHERTYPE_TYMSHARE", + "ETHERTYPE_UBBST", + "ETHERTYPE_UBDEBUG", + "ETHERTYPE_UBDIAGLOOP", + "ETHERTYPE_UBDL", + "ETHERTYPE_UBNIU", + "ETHERTYPE_UBNMC", + "ETHERTYPE_VALID", + "ETHERTYPE_VARIAN", + "ETHERTYPE_VAXELN", + "ETHERTYPE_VEECO", + "ETHERTYPE_VEXP", + "ETHERTYPE_VGLAB", + "ETHERTYPE_VINES", + 
"ETHERTYPE_VINESECHO", + "ETHERTYPE_VINESLOOP", + "ETHERTYPE_VITAL", + "ETHERTYPE_VLAN", + "ETHERTYPE_VLTLMAN", + "ETHERTYPE_VPROD", + "ETHERTYPE_VURESERVED", + "ETHERTYPE_WATERLOO", + "ETHERTYPE_WELLFLEET", + "ETHERTYPE_X25", + "ETHERTYPE_X75", + "ETHERTYPE_XNSSM", + "ETHERTYPE_XTP", + "ETHER_ADDR_LEN", + "ETHER_ALIGN", + "ETHER_CRC_LEN", + "ETHER_CRC_POLY_BE", + "ETHER_CRC_POLY_LE", + "ETHER_HDR_LEN", + "ETHER_MAX_DIX_LEN", + "ETHER_MAX_LEN", + "ETHER_MAX_LEN_JUMBO", + "ETHER_MIN_LEN", + "ETHER_PPPOE_ENCAP_LEN", + "ETHER_TYPE_LEN", + "ETHER_VLAN_ENCAP_LEN", + "ETH_P_1588", + "ETH_P_8021Q", + "ETH_P_802_2", + "ETH_P_802_3", + "ETH_P_AARP", + "ETH_P_ALL", + "ETH_P_AOE", + "ETH_P_ARCNET", + "ETH_P_ARP", + "ETH_P_ATALK", + "ETH_P_ATMFATE", + "ETH_P_ATMMPOA", + "ETH_P_AX25", + "ETH_P_BPQ", + "ETH_P_CAIF", + "ETH_P_CAN", + "ETH_P_CONTROL", + "ETH_P_CUST", + "ETH_P_DDCMP", + "ETH_P_DEC", + "ETH_P_DIAG", + "ETH_P_DNA_DL", + "ETH_P_DNA_RC", + "ETH_P_DNA_RT", + "ETH_P_DSA", + "ETH_P_ECONET", + "ETH_P_EDSA", + "ETH_P_FCOE", + "ETH_P_FIP", + "ETH_P_HDLC", + "ETH_P_IEEE802154", + "ETH_P_IEEEPUP", + "ETH_P_IEEEPUPAT", + "ETH_P_IP", + "ETH_P_IPV6", + "ETH_P_IPX", + "ETH_P_IRDA", + "ETH_P_LAT", + "ETH_P_LINK_CTL", + "ETH_P_LOCALTALK", + "ETH_P_LOOP", + "ETH_P_MOBITEX", + "ETH_P_MPLS_MC", + "ETH_P_MPLS_UC", + "ETH_P_PAE", + "ETH_P_PAUSE", + "ETH_P_PHONET", + "ETH_P_PPPTALK", + "ETH_P_PPP_DISC", + "ETH_P_PPP_MP", + "ETH_P_PPP_SES", + "ETH_P_PUP", + "ETH_P_PUPAT", + "ETH_P_RARP", + "ETH_P_SCA", + "ETH_P_SLOW", + "ETH_P_SNAP", + "ETH_P_TEB", + "ETH_P_TIPC", + "ETH_P_TRAILER", + "ETH_P_TR_802_2", + "ETH_P_WAN_PPP", + "ETH_P_WCCP", + "ETH_P_X25", + "ETIME", + "ETIMEDOUT", + "ETOOMANYREFS", + "ETXTBSY", + "EUCLEAN", + "EUNATCH", + "EUSERS", + "EVFILT_AIO", + "EVFILT_FS", + "EVFILT_LIO", + "EVFILT_MACHPORT", + "EVFILT_PROC", + "EVFILT_READ", + "EVFILT_SIGNAL", + "EVFILT_SYSCOUNT", + "EVFILT_THREADMARKER", + "EVFILT_TIMER", + "EVFILT_USER", + "EVFILT_VM", + "EVFILT_VNODE", + "EVFILT_WRITE", + "EV_ADD", + "EV_CLEAR", + "EV_DELETE", + "EV_DISABLE", + "EV_DISPATCH", + "EV_DROP", + "EV_ENABLE", + "EV_EOF", + "EV_ERROR", + "EV_FLAG0", + "EV_FLAG1", + "EV_ONESHOT", + "EV_OOBAND", + "EV_POLL", + "EV_RECEIPT", + "EV_SYSFLAGS", + "EWINDOWS", + "EWOULDBLOCK", + "EXDEV", + "EXFULL", + "EXTA", + "EXTB", + "EXTPROC", + "Environ", + "EpollCreate", + "EpollCreate1", + "EpollCtl", + "EpollEvent", + "EpollWait", + "Errno", + "EscapeArg", + "Exchangedata", + "Exec", + "Exit", + "ExitProcess", + "FD_CLOEXEC", + "FD_SETSIZE", + "FILE_ACTION_ADDED", + "FILE_ACTION_MODIFIED", + "FILE_ACTION_REMOVED", + "FILE_ACTION_RENAMED_NEW_NAME", + "FILE_ACTION_RENAMED_OLD_NAME", + "FILE_APPEND_DATA", + "FILE_ATTRIBUTE_ARCHIVE", + "FILE_ATTRIBUTE_DIRECTORY", + "FILE_ATTRIBUTE_HIDDEN", + "FILE_ATTRIBUTE_NORMAL", + "FILE_ATTRIBUTE_READONLY", + "FILE_ATTRIBUTE_REPARSE_POINT", + "FILE_ATTRIBUTE_SYSTEM", + "FILE_BEGIN", + "FILE_CURRENT", + "FILE_END", + "FILE_FLAG_BACKUP_SEMANTICS", + "FILE_FLAG_OPEN_REPARSE_POINT", + "FILE_FLAG_OVERLAPPED", + "FILE_LIST_DIRECTORY", + "FILE_MAP_COPY", + "FILE_MAP_EXECUTE", + "FILE_MAP_READ", + "FILE_MAP_WRITE", + "FILE_NOTIFY_CHANGE_ATTRIBUTES", + "FILE_NOTIFY_CHANGE_CREATION", + "FILE_NOTIFY_CHANGE_DIR_NAME", + "FILE_NOTIFY_CHANGE_FILE_NAME", + "FILE_NOTIFY_CHANGE_LAST_ACCESS", + "FILE_NOTIFY_CHANGE_LAST_WRITE", + "FILE_NOTIFY_CHANGE_SIZE", + "FILE_SHARE_DELETE", + "FILE_SHARE_READ", + "FILE_SHARE_WRITE", + "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", + "FILE_SKIP_SET_EVENT_ON_HANDLE", + "FILE_TYPE_CHAR", + 
"FILE_TYPE_DISK", + "FILE_TYPE_PIPE", + "FILE_TYPE_REMOTE", + "FILE_TYPE_UNKNOWN", + "FILE_WRITE_ATTRIBUTES", + "FLUSHO", + "FORMAT_MESSAGE_ALLOCATE_BUFFER", + "FORMAT_MESSAGE_ARGUMENT_ARRAY", + "FORMAT_MESSAGE_FROM_HMODULE", + "FORMAT_MESSAGE_FROM_STRING", + "FORMAT_MESSAGE_FROM_SYSTEM", + "FORMAT_MESSAGE_IGNORE_INSERTS", + "FORMAT_MESSAGE_MAX_WIDTH_MASK", + "FSCTL_GET_REPARSE_POINT", + "F_ADDFILESIGS", + "F_ADDSIGS", + "F_ALLOCATEALL", + "F_ALLOCATECONTIG", + "F_CANCEL", + "F_CHKCLEAN", + "F_CLOSEM", + "F_DUP2FD", + "F_DUP2FD_CLOEXEC", + "F_DUPFD", + "F_DUPFD_CLOEXEC", + "F_EXLCK", + "F_FINDSIGS", + "F_FLUSH_DATA", + "F_FREEZE_FS", + "F_FSCTL", + "F_FSDIRMASK", + "F_FSIN", + "F_FSINOUT", + "F_FSOUT", + "F_FSPRIV", + "F_FSVOID", + "F_FULLFSYNC", + "F_GETCODEDIR", + "F_GETFD", + "F_GETFL", + "F_GETLEASE", + "F_GETLK", + "F_GETLK64", + "F_GETLKPID", + "F_GETNOSIGPIPE", + "F_GETOWN", + "F_GETOWN_EX", + "F_GETPATH", + "F_GETPATH_MTMINFO", + "F_GETPIPE_SZ", + "F_GETPROTECTIONCLASS", + "F_GETPROTECTIONLEVEL", + "F_GETSIG", + "F_GLOBAL_NOCACHE", + "F_LOCK", + "F_LOG2PHYS", + "F_LOG2PHYS_EXT", + "F_MARKDEPENDENCY", + "F_MAXFD", + "F_NOCACHE", + "F_NODIRECT", + "F_NOTIFY", + "F_OGETLK", + "F_OK", + "F_OSETLK", + "F_OSETLKW", + "F_PARAM_MASK", + "F_PARAM_MAX", + "F_PATHPKG_CHECK", + "F_PEOFPOSMODE", + "F_PREALLOCATE", + "F_RDADVISE", + "F_RDAHEAD", + "F_RDLCK", + "F_READAHEAD", + "F_READBOOTSTRAP", + "F_SETBACKINGSTORE", + "F_SETFD", + "F_SETFL", + "F_SETLEASE", + "F_SETLK", + "F_SETLK64", + "F_SETLKW", + "F_SETLKW64", + "F_SETLKWTIMEOUT", + "F_SETLK_REMOTE", + "F_SETNOSIGPIPE", + "F_SETOWN", + "F_SETOWN_EX", + "F_SETPIPE_SZ", + "F_SETPROTECTIONCLASS", + "F_SETSIG", + "F_SETSIZE", + "F_SHLCK", + "F_SINGLE_WRITER", + "F_TEST", + "F_THAW_FS", + "F_TLOCK", + "F_TRANSCODEKEY", + "F_ULOCK", + "F_UNLCK", + "F_UNLCKSYS", + "F_VOLPOSMODE", + "F_WRITEBOOTSTRAP", + "F_WRLCK", + "Faccessat", + "Fallocate", + "Fbootstraptransfer_t", + "Fchdir", + "Fchflags", + "Fchmod", + "Fchmodat", + "Fchown", + "Fchownat", + "FcntlFlock", + "FdSet", + "Fdatasync", + "FileNotifyInformation", + "Filetime", + "FindClose", + "FindFirstFile", + "FindNextFile", + "Flock", + "Flock_t", + "FlushBpf", + "FlushFileBuffers", + "FlushViewOfFile", + "ForkExec", + "ForkLock", + "FormatMessage", + "Fpathconf", + "FreeAddrInfoW", + "FreeEnvironmentStrings", + "FreeLibrary", + "Fsid", + "Fstat", + "Fstatat", + "Fstatfs", + "Fstore_t", + "Fsync", + "Ftruncate", + "FullPath", + "Futimes", + "Futimesat", + "GENERIC_ALL", + "GENERIC_EXECUTE", + "GENERIC_READ", + "GENERIC_WRITE", + "GUID", + "GetAcceptExSockaddrs", + "GetAdaptersInfo", + "GetAddrInfoW", + "GetCommandLine", + "GetComputerName", + "GetConsoleMode", + "GetCurrentDirectory", + "GetCurrentProcess", + "GetEnvironmentStrings", + "GetEnvironmentVariable", + "GetExitCodeProcess", + "GetFileAttributes", + "GetFileAttributesEx", + "GetFileExInfoStandard", + "GetFileExMaxInfoLevel", + "GetFileInformationByHandle", + "GetFileType", + "GetFullPathName", + "GetHostByName", + "GetIfEntry", + "GetLastError", + "GetLengthSid", + "GetLongPathName", + "GetProcAddress", + "GetProcessTimes", + "GetProtoByName", + "GetQueuedCompletionStatus", + "GetServByName", + "GetShortPathName", + "GetStartupInfo", + "GetStdHandle", + "GetSystemTimeAsFileTime", + "GetTempPath", + "GetTimeZoneInformation", + "GetTokenInformation", + "GetUserNameEx", + "GetUserProfileDirectory", + "GetVersion", + "Getcwd", + "Getdents", + "Getdirentries", + "Getdtablesize", + "Getegid", + "Getenv", + "Geteuid", + "Getfsstat", + 
"Getgid", + "Getgroups", + "Getpagesize", + "Getpeername", + "Getpgid", + "Getpgrp", + "Getpid", + "Getppid", + "Getpriority", + "Getrlimit", + "Getrusage", + "Getsid", + "Getsockname", + "Getsockopt", + "GetsockoptByte", + "GetsockoptICMPv6Filter", + "GetsockoptIPMreq", + "GetsockoptIPMreqn", + "GetsockoptIPv6MTUInfo", + "GetsockoptIPv6Mreq", + "GetsockoptInet4Addr", + "GetsockoptInt", + "GetsockoptUcred", + "Gettid", + "Gettimeofday", + "Getuid", + "Getwd", + "Getxattr", + "HANDLE_FLAG_INHERIT", + "HKEY_CLASSES_ROOT", + "HKEY_CURRENT_CONFIG", + "HKEY_CURRENT_USER", + "HKEY_DYN_DATA", + "HKEY_LOCAL_MACHINE", + "HKEY_PERFORMANCE_DATA", + "HKEY_USERS", + "HUPCL", + "Handle", + "Hostent", + "ICANON", + "ICMP6_FILTER", + "ICMPV6_FILTER", + "ICMPv6Filter", + "ICRNL", + "IEXTEN", + "IFAN_ARRIVAL", + "IFAN_DEPARTURE", + "IFA_ADDRESS", + "IFA_ANYCAST", + "IFA_BROADCAST", + "IFA_CACHEINFO", + "IFA_F_DADFAILED", + "IFA_F_DEPRECATED", + "IFA_F_HOMEADDRESS", + "IFA_F_NODAD", + "IFA_F_OPTIMISTIC", + "IFA_F_PERMANENT", + "IFA_F_SECONDARY", + "IFA_F_TEMPORARY", + "IFA_F_TENTATIVE", + "IFA_LABEL", + "IFA_LOCAL", + "IFA_MAX", + "IFA_MULTICAST", + "IFA_ROUTE", + "IFA_UNSPEC", + "IFF_ALLMULTI", + "IFF_ALTPHYS", + "IFF_AUTOMEDIA", + "IFF_BROADCAST", + "IFF_CANTCHANGE", + "IFF_CANTCONFIG", + "IFF_DEBUG", + "IFF_DRV_OACTIVE", + "IFF_DRV_RUNNING", + "IFF_DYING", + "IFF_DYNAMIC", + "IFF_LINK0", + "IFF_LINK1", + "IFF_LINK2", + "IFF_LOOPBACK", + "IFF_MASTER", + "IFF_MONITOR", + "IFF_MULTICAST", + "IFF_NOARP", + "IFF_NOTRAILERS", + "IFF_NO_PI", + "IFF_OACTIVE", + "IFF_ONE_QUEUE", + "IFF_POINTOPOINT", + "IFF_POINTTOPOINT", + "IFF_PORTSEL", + "IFF_PPROMISC", + "IFF_PROMISC", + "IFF_RENAMING", + "IFF_RUNNING", + "IFF_SIMPLEX", + "IFF_SLAVE", + "IFF_SMART", + "IFF_STATICARP", + "IFF_TAP", + "IFF_TUN", + "IFF_TUN_EXCL", + "IFF_UP", + "IFF_VNET_HDR", + "IFLA_ADDRESS", + "IFLA_BROADCAST", + "IFLA_COST", + "IFLA_IFALIAS", + "IFLA_IFNAME", + "IFLA_LINK", + "IFLA_LINKINFO", + "IFLA_LINKMODE", + "IFLA_MAP", + "IFLA_MASTER", + "IFLA_MAX", + "IFLA_MTU", + "IFLA_NET_NS_PID", + "IFLA_OPERSTATE", + "IFLA_PRIORITY", + "IFLA_PROTINFO", + "IFLA_QDISC", + "IFLA_STATS", + "IFLA_TXQLEN", + "IFLA_UNSPEC", + "IFLA_WEIGHT", + "IFLA_WIRELESS", + "IFNAMSIZ", + "IFT_1822", + "IFT_A12MPPSWITCH", + "IFT_AAL2", + "IFT_AAL5", + "IFT_ADSL", + "IFT_AFLANE8023", + "IFT_AFLANE8025", + "IFT_ARAP", + "IFT_ARCNET", + "IFT_ARCNETPLUS", + "IFT_ASYNC", + "IFT_ATM", + "IFT_ATMDXI", + "IFT_ATMFUNI", + "IFT_ATMIMA", + "IFT_ATMLOGICAL", + "IFT_ATMRADIO", + "IFT_ATMSUBINTERFACE", + "IFT_ATMVCIENDPT", + "IFT_ATMVIRTUAL", + "IFT_BGPPOLICYACCOUNTING", + "IFT_BLUETOOTH", + "IFT_BRIDGE", + "IFT_BSC", + "IFT_CARP", + "IFT_CCTEMUL", + "IFT_CELLULAR", + "IFT_CEPT", + "IFT_CES", + "IFT_CHANNEL", + "IFT_CNR", + "IFT_COFFEE", + "IFT_COMPOSITELINK", + "IFT_DCN", + "IFT_DIGITALPOWERLINE", + "IFT_DIGITALWRAPPEROVERHEADCHANNEL", + "IFT_DLSW", + "IFT_DOCSCABLEDOWNSTREAM", + "IFT_DOCSCABLEMACLAYER", + "IFT_DOCSCABLEUPSTREAM", + "IFT_DOCSCABLEUPSTREAMCHANNEL", + "IFT_DS0", + "IFT_DS0BUNDLE", + "IFT_DS1FDL", + "IFT_DS3", + "IFT_DTM", + "IFT_DUMMY", + "IFT_DVBASILN", + "IFT_DVBASIOUT", + "IFT_DVBRCCDOWNSTREAM", + "IFT_DVBRCCMACLAYER", + "IFT_DVBRCCUPSTREAM", + "IFT_ECONET", + "IFT_ENC", + "IFT_EON", + "IFT_EPLRS", + "IFT_ESCON", + "IFT_ETHER", + "IFT_FAITH", + "IFT_FAST", + "IFT_FASTETHER", + "IFT_FASTETHERFX", + "IFT_FDDI", + "IFT_FIBRECHANNEL", + "IFT_FRAMERELAYINTERCONNECT", + "IFT_FRAMERELAYMPI", + "IFT_FRDLCIENDPT", + "IFT_FRELAY", + "IFT_FRELAYDCE", + "IFT_FRF16MFRBUNDLE", + 
"IFT_FRFORWARD", + "IFT_G703AT2MB", + "IFT_G703AT64K", + "IFT_GIF", + "IFT_GIGABITETHERNET", + "IFT_GR303IDT", + "IFT_GR303RDT", + "IFT_H323GATEKEEPER", + "IFT_H323PROXY", + "IFT_HDH1822", + "IFT_HDLC", + "IFT_HDSL2", + "IFT_HIPERLAN2", + "IFT_HIPPI", + "IFT_HIPPIINTERFACE", + "IFT_HOSTPAD", + "IFT_HSSI", + "IFT_HY", + "IFT_IBM370PARCHAN", + "IFT_IDSL", + "IFT_IEEE1394", + "IFT_IEEE80211", + "IFT_IEEE80212", + "IFT_IEEE8023ADLAG", + "IFT_IFGSN", + "IFT_IMT", + "IFT_INFINIBAND", + "IFT_INTERLEAVE", + "IFT_IP", + "IFT_IPFORWARD", + "IFT_IPOVERATM", + "IFT_IPOVERCDLC", + "IFT_IPOVERCLAW", + "IFT_IPSWITCH", + "IFT_IPXIP", + "IFT_ISDN", + "IFT_ISDNBASIC", + "IFT_ISDNPRIMARY", + "IFT_ISDNS", + "IFT_ISDNU", + "IFT_ISO88022LLC", + "IFT_ISO88023", + "IFT_ISO88024", + "IFT_ISO88025", + "IFT_ISO88025CRFPINT", + "IFT_ISO88025DTR", + "IFT_ISO88025FIBER", + "IFT_ISO88026", + "IFT_ISUP", + "IFT_L2VLAN", + "IFT_L3IPVLAN", + "IFT_L3IPXVLAN", + "IFT_LAPB", + "IFT_LAPD", + "IFT_LAPF", + "IFT_LINEGROUP", + "IFT_LOCALTALK", + "IFT_LOOP", + "IFT_MEDIAMAILOVERIP", + "IFT_MFSIGLINK", + "IFT_MIOX25", + "IFT_MODEM", + "IFT_MPC", + "IFT_MPLS", + "IFT_MPLSTUNNEL", + "IFT_MSDSL", + "IFT_MVL", + "IFT_MYRINET", + "IFT_NFAS", + "IFT_NSIP", + "IFT_OPTICALCHANNEL", + "IFT_OPTICALTRANSPORT", + "IFT_OTHER", + "IFT_P10", + "IFT_P80", + "IFT_PARA", + "IFT_PDP", + "IFT_PFLOG", + "IFT_PFLOW", + "IFT_PFSYNC", + "IFT_PLC", + "IFT_PON155", + "IFT_PON622", + "IFT_POS", + "IFT_PPP", + "IFT_PPPMULTILINKBUNDLE", + "IFT_PROPATM", + "IFT_PROPBWAP2MP", + "IFT_PROPCNLS", + "IFT_PROPDOCSWIRELESSDOWNSTREAM", + "IFT_PROPDOCSWIRELESSMACLAYER", + "IFT_PROPDOCSWIRELESSUPSTREAM", + "IFT_PROPMUX", + "IFT_PROPVIRTUAL", + "IFT_PROPWIRELESSP2P", + "IFT_PTPSERIAL", + "IFT_PVC", + "IFT_Q2931", + "IFT_QLLC", + "IFT_RADIOMAC", + "IFT_RADSL", + "IFT_REACHDSL", + "IFT_RFC1483", + "IFT_RS232", + "IFT_RSRB", + "IFT_SDLC", + "IFT_SDSL", + "IFT_SHDSL", + "IFT_SIP", + "IFT_SIPSIG", + "IFT_SIPTG", + "IFT_SLIP", + "IFT_SMDSDXI", + "IFT_SMDSICIP", + "IFT_SONET", + "IFT_SONETOVERHEADCHANNEL", + "IFT_SONETPATH", + "IFT_SONETVT", + "IFT_SRP", + "IFT_SS7SIGLINK", + "IFT_STACKTOSTACK", + "IFT_STARLAN", + "IFT_STF", + "IFT_T1", + "IFT_TDLC", + "IFT_TELINK", + "IFT_TERMPAD", + "IFT_TR008", + "IFT_TRANSPHDLC", + "IFT_TUNNEL", + "IFT_ULTRA", + "IFT_USB", + "IFT_V11", + "IFT_V35", + "IFT_V36", + "IFT_V37", + "IFT_VDSL", + "IFT_VIRTUALIPADDRESS", + "IFT_VIRTUALTG", + "IFT_VOICEDID", + "IFT_VOICEEM", + "IFT_VOICEEMFGD", + "IFT_VOICEENCAP", + "IFT_VOICEFGDEANA", + "IFT_VOICEFXO", + "IFT_VOICEFXS", + "IFT_VOICEOVERATM", + "IFT_VOICEOVERCABLE", + "IFT_VOICEOVERFRAMERELAY", + "IFT_VOICEOVERIP", + "IFT_X213", + "IFT_X25", + "IFT_X25DDN", + "IFT_X25HUNTGROUP", + "IFT_X25MLP", + "IFT_X25PLE", + "IFT_XETHER", + "IGNBRK", + "IGNCR", + "IGNORE", + "IGNPAR", + "IMAXBEL", + "INFINITE", + "INLCR", + "INPCK", + "INVALID_FILE_ATTRIBUTES", + "IN_ACCESS", + "IN_ALL_EVENTS", + "IN_ATTRIB", + "IN_CLASSA_HOST", + "IN_CLASSA_MAX", + "IN_CLASSA_NET", + "IN_CLASSA_NSHIFT", + "IN_CLASSB_HOST", + "IN_CLASSB_MAX", + "IN_CLASSB_NET", + "IN_CLASSB_NSHIFT", + "IN_CLASSC_HOST", + "IN_CLASSC_NET", + "IN_CLASSC_NSHIFT", + "IN_CLASSD_HOST", + "IN_CLASSD_NET", + "IN_CLASSD_NSHIFT", + "IN_CLOEXEC", + "IN_CLOSE", + "IN_CLOSE_NOWRITE", + "IN_CLOSE_WRITE", + "IN_CREATE", + "IN_DELETE", + "IN_DELETE_SELF", + "IN_DONT_FOLLOW", + "IN_EXCL_UNLINK", + "IN_IGNORED", + "IN_ISDIR", + "IN_LINKLOCALNETNUM", + "IN_LOOPBACKNET", + "IN_MASK_ADD", + "IN_MODIFY", + "IN_MOVE", + "IN_MOVED_FROM", + "IN_MOVED_TO", + 
"IN_MOVE_SELF", + "IN_NONBLOCK", + "IN_ONESHOT", + "IN_ONLYDIR", + "IN_OPEN", + "IN_Q_OVERFLOW", + "IN_RFC3021_HOST", + "IN_RFC3021_MASK", + "IN_RFC3021_NET", + "IN_RFC3021_NSHIFT", + "IN_UNMOUNT", + "IOC_IN", + "IOC_INOUT", + "IOC_OUT", + "IOC_VENDOR", + "IOC_WS2", + "IO_REPARSE_TAG_SYMLINK", + "IPMreq", + "IPMreqn", + "IPPROTO_3PC", + "IPPROTO_ADFS", + "IPPROTO_AH", + "IPPROTO_AHIP", + "IPPROTO_APES", + "IPPROTO_ARGUS", + "IPPROTO_AX25", + "IPPROTO_BHA", + "IPPROTO_BLT", + "IPPROTO_BRSATMON", + "IPPROTO_CARP", + "IPPROTO_CFTP", + "IPPROTO_CHAOS", + "IPPROTO_CMTP", + "IPPROTO_COMP", + "IPPROTO_CPHB", + "IPPROTO_CPNX", + "IPPROTO_DCCP", + "IPPROTO_DDP", + "IPPROTO_DGP", + "IPPROTO_DIVERT", + "IPPROTO_DIVERT_INIT", + "IPPROTO_DIVERT_RESP", + "IPPROTO_DONE", + "IPPROTO_DSTOPTS", + "IPPROTO_EGP", + "IPPROTO_EMCON", + "IPPROTO_ENCAP", + "IPPROTO_EON", + "IPPROTO_ESP", + "IPPROTO_ETHERIP", + "IPPROTO_FRAGMENT", + "IPPROTO_GGP", + "IPPROTO_GMTP", + "IPPROTO_GRE", + "IPPROTO_HELLO", + "IPPROTO_HMP", + "IPPROTO_HOPOPTS", + "IPPROTO_ICMP", + "IPPROTO_ICMPV6", + "IPPROTO_IDP", + "IPPROTO_IDPR", + "IPPROTO_IDRP", + "IPPROTO_IGMP", + "IPPROTO_IGP", + "IPPROTO_IGRP", + "IPPROTO_IL", + "IPPROTO_INLSP", + "IPPROTO_INP", + "IPPROTO_IP", + "IPPROTO_IPCOMP", + "IPPROTO_IPCV", + "IPPROTO_IPEIP", + "IPPROTO_IPIP", + "IPPROTO_IPPC", + "IPPROTO_IPV4", + "IPPROTO_IPV6", + "IPPROTO_IPV6_ICMP", + "IPPROTO_IRTP", + "IPPROTO_KRYPTOLAN", + "IPPROTO_LARP", + "IPPROTO_LEAF1", + "IPPROTO_LEAF2", + "IPPROTO_MAX", + "IPPROTO_MAXID", + "IPPROTO_MEAS", + "IPPROTO_MH", + "IPPROTO_MHRP", + "IPPROTO_MICP", + "IPPROTO_MOBILE", + "IPPROTO_MPLS", + "IPPROTO_MTP", + "IPPROTO_MUX", + "IPPROTO_ND", + "IPPROTO_NHRP", + "IPPROTO_NONE", + "IPPROTO_NSP", + "IPPROTO_NVPII", + "IPPROTO_OLD_DIVERT", + "IPPROTO_OSPFIGP", + "IPPROTO_PFSYNC", + "IPPROTO_PGM", + "IPPROTO_PIGP", + "IPPROTO_PIM", + "IPPROTO_PRM", + "IPPROTO_PUP", + "IPPROTO_PVP", + "IPPROTO_RAW", + "IPPROTO_RCCMON", + "IPPROTO_RDP", + "IPPROTO_ROUTING", + "IPPROTO_RSVP", + "IPPROTO_RVD", + "IPPROTO_SATEXPAK", + "IPPROTO_SATMON", + "IPPROTO_SCCSP", + "IPPROTO_SCTP", + "IPPROTO_SDRP", + "IPPROTO_SEND", + "IPPROTO_SEP", + "IPPROTO_SKIP", + "IPPROTO_SPACER", + "IPPROTO_SRPC", + "IPPROTO_ST", + "IPPROTO_SVMTP", + "IPPROTO_SWIPE", + "IPPROTO_TCF", + "IPPROTO_TCP", + "IPPROTO_TLSP", + "IPPROTO_TP", + "IPPROTO_TPXX", + "IPPROTO_TRUNK1", + "IPPROTO_TRUNK2", + "IPPROTO_TTP", + "IPPROTO_UDP", + "IPPROTO_UDPLITE", + "IPPROTO_VINES", + "IPPROTO_VISA", + "IPPROTO_VMTP", + "IPPROTO_VRRP", + "IPPROTO_WBEXPAK", + "IPPROTO_WBMON", + "IPPROTO_WSN", + "IPPROTO_XNET", + "IPPROTO_XTP", + "IPV6_2292DSTOPTS", + "IPV6_2292HOPLIMIT", + "IPV6_2292HOPOPTS", + "IPV6_2292NEXTHOP", + "IPV6_2292PKTINFO", + "IPV6_2292PKTOPTIONS", + "IPV6_2292RTHDR", + "IPV6_ADDRFORM", + "IPV6_ADD_MEMBERSHIP", + "IPV6_AUTHHDR", + "IPV6_AUTH_LEVEL", + "IPV6_AUTOFLOWLABEL", + "IPV6_BINDANY", + "IPV6_BINDV6ONLY", + "IPV6_BOUND_IF", + "IPV6_CHECKSUM", + "IPV6_DEFAULT_MULTICAST_HOPS", + "IPV6_DEFAULT_MULTICAST_LOOP", + "IPV6_DEFHLIM", + "IPV6_DONTFRAG", + "IPV6_DROP_MEMBERSHIP", + "IPV6_DSTOPTS", + "IPV6_ESP_NETWORK_LEVEL", + "IPV6_ESP_TRANS_LEVEL", + "IPV6_FAITH", + "IPV6_FLOWINFO_MASK", + "IPV6_FLOWLABEL_MASK", + "IPV6_FRAGTTL", + "IPV6_FW_ADD", + "IPV6_FW_DEL", + "IPV6_FW_FLUSH", + "IPV6_FW_GET", + "IPV6_FW_ZERO", + "IPV6_HLIMDEC", + "IPV6_HOPLIMIT", + "IPV6_HOPOPTS", + "IPV6_IPCOMP_LEVEL", + "IPV6_IPSEC_POLICY", + "IPV6_JOIN_ANYCAST", + "IPV6_JOIN_GROUP", + "IPV6_LEAVE_ANYCAST", + "IPV6_LEAVE_GROUP", + "IPV6_MAXHLIM", + 
"IPV6_MAXOPTHDR", + "IPV6_MAXPACKET", + "IPV6_MAX_GROUP_SRC_FILTER", + "IPV6_MAX_MEMBERSHIPS", + "IPV6_MAX_SOCK_SRC_FILTER", + "IPV6_MIN_MEMBERSHIPS", + "IPV6_MMTU", + "IPV6_MSFILTER", + "IPV6_MTU", + "IPV6_MTU_DISCOVER", + "IPV6_MULTICAST_HOPS", + "IPV6_MULTICAST_IF", + "IPV6_MULTICAST_LOOP", + "IPV6_NEXTHOP", + "IPV6_OPTIONS", + "IPV6_PATHMTU", + "IPV6_PIPEX", + "IPV6_PKTINFO", + "IPV6_PMTUDISC_DO", + "IPV6_PMTUDISC_DONT", + "IPV6_PMTUDISC_PROBE", + "IPV6_PMTUDISC_WANT", + "IPV6_PORTRANGE", + "IPV6_PORTRANGE_DEFAULT", + "IPV6_PORTRANGE_HIGH", + "IPV6_PORTRANGE_LOW", + "IPV6_PREFER_TEMPADDR", + "IPV6_RECVDSTOPTS", + "IPV6_RECVDSTPORT", + "IPV6_RECVERR", + "IPV6_RECVHOPLIMIT", + "IPV6_RECVHOPOPTS", + "IPV6_RECVPATHMTU", + "IPV6_RECVPKTINFO", + "IPV6_RECVRTHDR", + "IPV6_RECVTCLASS", + "IPV6_ROUTER_ALERT", + "IPV6_RTABLE", + "IPV6_RTHDR", + "IPV6_RTHDRDSTOPTS", + "IPV6_RTHDR_LOOSE", + "IPV6_RTHDR_STRICT", + "IPV6_RTHDR_TYPE_0", + "IPV6_RXDSTOPTS", + "IPV6_RXHOPOPTS", + "IPV6_SOCKOPT_RESERVED1", + "IPV6_TCLASS", + "IPV6_UNICAST_HOPS", + "IPV6_USE_MIN_MTU", + "IPV6_V6ONLY", + "IPV6_VERSION", + "IPV6_VERSION_MASK", + "IPV6_XFRM_POLICY", + "IP_ADD_MEMBERSHIP", + "IP_ADD_SOURCE_MEMBERSHIP", + "IP_AUTH_LEVEL", + "IP_BINDANY", + "IP_BLOCK_SOURCE", + "IP_BOUND_IF", + "IP_DEFAULT_MULTICAST_LOOP", + "IP_DEFAULT_MULTICAST_TTL", + "IP_DF", + "IP_DIVERTFL", + "IP_DONTFRAG", + "IP_DROP_MEMBERSHIP", + "IP_DROP_SOURCE_MEMBERSHIP", + "IP_DUMMYNET3", + "IP_DUMMYNET_CONFIGURE", + "IP_DUMMYNET_DEL", + "IP_DUMMYNET_FLUSH", + "IP_DUMMYNET_GET", + "IP_EF", + "IP_ERRORMTU", + "IP_ESP_NETWORK_LEVEL", + "IP_ESP_TRANS_LEVEL", + "IP_FAITH", + "IP_FREEBIND", + "IP_FW3", + "IP_FW_ADD", + "IP_FW_DEL", + "IP_FW_FLUSH", + "IP_FW_GET", + "IP_FW_NAT_CFG", + "IP_FW_NAT_DEL", + "IP_FW_NAT_GET_CONFIG", + "IP_FW_NAT_GET_LOG", + "IP_FW_RESETLOG", + "IP_FW_TABLE_ADD", + "IP_FW_TABLE_DEL", + "IP_FW_TABLE_FLUSH", + "IP_FW_TABLE_GETSIZE", + "IP_FW_TABLE_LIST", + "IP_FW_ZERO", + "IP_HDRINCL", + "IP_IPCOMP_LEVEL", + "IP_IPSECFLOWINFO", + "IP_IPSEC_LOCAL_AUTH", + "IP_IPSEC_LOCAL_CRED", + "IP_IPSEC_LOCAL_ID", + "IP_IPSEC_POLICY", + "IP_IPSEC_REMOTE_AUTH", + "IP_IPSEC_REMOTE_CRED", + "IP_IPSEC_REMOTE_ID", + "IP_MAXPACKET", + "IP_MAX_GROUP_SRC_FILTER", + "IP_MAX_MEMBERSHIPS", + "IP_MAX_SOCK_MUTE_FILTER", + "IP_MAX_SOCK_SRC_FILTER", + "IP_MAX_SOURCE_FILTER", + "IP_MF", + "IP_MINFRAGSIZE", + "IP_MINTTL", + "IP_MIN_MEMBERSHIPS", + "IP_MSFILTER", + "IP_MSS", + "IP_MTU", + "IP_MTU_DISCOVER", + "IP_MULTICAST_IF", + "IP_MULTICAST_IFINDEX", + "IP_MULTICAST_LOOP", + "IP_MULTICAST_TTL", + "IP_MULTICAST_VIF", + "IP_NAT__XXX", + "IP_OFFMASK", + "IP_OLD_FW_ADD", + "IP_OLD_FW_DEL", + "IP_OLD_FW_FLUSH", + "IP_OLD_FW_GET", + "IP_OLD_FW_RESETLOG", + "IP_OLD_FW_ZERO", + "IP_ONESBCAST", + "IP_OPTIONS", + "IP_ORIGDSTADDR", + "IP_PASSSEC", + "IP_PIPEX", + "IP_PKTINFO", + "IP_PKTOPTIONS", + "IP_PMTUDISC", + "IP_PMTUDISC_DO", + "IP_PMTUDISC_DONT", + "IP_PMTUDISC_PROBE", + "IP_PMTUDISC_WANT", + "IP_PORTRANGE", + "IP_PORTRANGE_DEFAULT", + "IP_PORTRANGE_HIGH", + "IP_PORTRANGE_LOW", + "IP_RECVDSTADDR", + "IP_RECVDSTPORT", + "IP_RECVERR", + "IP_RECVIF", + "IP_RECVOPTS", + "IP_RECVORIGDSTADDR", + "IP_RECVPKTINFO", + "IP_RECVRETOPTS", + "IP_RECVRTABLE", + "IP_RECVTOS", + "IP_RECVTTL", + "IP_RETOPTS", + "IP_RF", + "IP_ROUTER_ALERT", + "IP_RSVP_OFF", + "IP_RSVP_ON", + "IP_RSVP_VIF_OFF", + "IP_RSVP_VIF_ON", + "IP_RTABLE", + "IP_SENDSRCADDR", + "IP_STRIPHDR", + "IP_TOS", + "IP_TRAFFIC_MGT_BACKGROUND", + "IP_TRANSPARENT", + "IP_TTL", + "IP_UNBLOCK_SOURCE", + 
"IP_XFRM_POLICY", + "IPv6MTUInfo", + "IPv6Mreq", + "ISIG", + "ISTRIP", + "IUCLC", + "IUTF8", + "IXANY", + "IXOFF", + "IXON", + "IfAddrmsg", + "IfAnnounceMsghdr", + "IfData", + "IfInfomsg", + "IfMsghdr", + "IfaMsghdr", + "IfmaMsghdr", + "IfmaMsghdr2", + "ImplementsGetwd", + "Inet4Pktinfo", + "Inet6Pktinfo", + "InotifyAddWatch", + "InotifyEvent", + "InotifyInit", + "InotifyInit1", + "InotifyRmWatch", + "InterfaceAddrMessage", + "InterfaceAnnounceMessage", + "InterfaceInfo", + "InterfaceMessage", + "InterfaceMulticastAddrMessage", + "InvalidHandle", + "Ioperm", + "Iopl", + "Iovec", + "IpAdapterInfo", + "IpAddrString", + "IpAddressString", + "IpMaskString", + "Issetugid", + "KEY_ALL_ACCESS", + "KEY_CREATE_LINK", + "KEY_CREATE_SUB_KEY", + "KEY_ENUMERATE_SUB_KEYS", + "KEY_EXECUTE", + "KEY_NOTIFY", + "KEY_QUERY_VALUE", + "KEY_READ", + "KEY_SET_VALUE", + "KEY_WOW64_32KEY", + "KEY_WOW64_64KEY", + "KEY_WRITE", + "Kevent", + "Kevent_t", + "Kill", + "Klogctl", + "Kqueue", + "LANG_ENGLISH", + "LAYERED_PROTOCOL", + "LCNT_OVERLOAD_FLUSH", + "LINUX_REBOOT_CMD_CAD_OFF", + "LINUX_REBOOT_CMD_CAD_ON", + "LINUX_REBOOT_CMD_HALT", + "LINUX_REBOOT_CMD_KEXEC", + "LINUX_REBOOT_CMD_POWER_OFF", + "LINUX_REBOOT_CMD_RESTART", + "LINUX_REBOOT_CMD_RESTART2", + "LINUX_REBOOT_CMD_SW_SUSPEND", + "LINUX_REBOOT_MAGIC1", + "LINUX_REBOOT_MAGIC2", + "LOCK_EX", + "LOCK_NB", + "LOCK_SH", + "LOCK_UN", + "LazyDLL", + "LazyProc", + "Lchown", + "Linger", + "Link", + "Listen", + "Listxattr", + "LoadCancelIoEx", + "LoadConnectEx", + "LoadCreateSymbolicLink", + "LoadDLL", + "LoadGetAddrInfo", + "LoadLibrary", + "LoadSetFileCompletionNotificationModes", + "LocalFree", + "Log2phys_t", + "LookupAccountName", + "LookupAccountSid", + "LookupSID", + "LsfJump", + "LsfSocket", + "LsfStmt", + "Lstat", + "MADV_AUTOSYNC", + "MADV_CAN_REUSE", + "MADV_CORE", + "MADV_DOFORK", + "MADV_DONTFORK", + "MADV_DONTNEED", + "MADV_FREE", + "MADV_FREE_REUSABLE", + "MADV_FREE_REUSE", + "MADV_HUGEPAGE", + "MADV_HWPOISON", + "MADV_MERGEABLE", + "MADV_NOCORE", + "MADV_NOHUGEPAGE", + "MADV_NORMAL", + "MADV_NOSYNC", + "MADV_PROTECT", + "MADV_RANDOM", + "MADV_REMOVE", + "MADV_SEQUENTIAL", + "MADV_SPACEAVAIL", + "MADV_UNMERGEABLE", + "MADV_WILLNEED", + "MADV_ZERO_WIRED_PAGES", + "MAP_32BIT", + "MAP_ALIGNED_SUPER", + "MAP_ALIGNMENT_16MB", + "MAP_ALIGNMENT_1TB", + "MAP_ALIGNMENT_256TB", + "MAP_ALIGNMENT_4GB", + "MAP_ALIGNMENT_64KB", + "MAP_ALIGNMENT_64PB", + "MAP_ALIGNMENT_MASK", + "MAP_ALIGNMENT_SHIFT", + "MAP_ANON", + "MAP_ANONYMOUS", + "MAP_COPY", + "MAP_DENYWRITE", + "MAP_EXECUTABLE", + "MAP_FILE", + "MAP_FIXED", + "MAP_FLAGMASK", + "MAP_GROWSDOWN", + "MAP_HASSEMAPHORE", + "MAP_HUGETLB", + "MAP_INHERIT", + "MAP_INHERIT_COPY", + "MAP_INHERIT_DEFAULT", + "MAP_INHERIT_DONATE_COPY", + "MAP_INHERIT_NONE", + "MAP_INHERIT_SHARE", + "MAP_JIT", + "MAP_LOCKED", + "MAP_NOCACHE", + "MAP_NOCORE", + "MAP_NOEXTEND", + "MAP_NONBLOCK", + "MAP_NORESERVE", + "MAP_NOSYNC", + "MAP_POPULATE", + "MAP_PREFAULT_READ", + "MAP_PRIVATE", + "MAP_RENAME", + "MAP_RESERVED0080", + "MAP_RESERVED0100", + "MAP_SHARED", + "MAP_STACK", + "MAP_TRYFIXED", + "MAP_TYPE", + "MAP_WIRED", + "MAXIMUM_REPARSE_DATA_BUFFER_SIZE", + "MAXLEN_IFDESCR", + "MAXLEN_PHYSADDR", + "MAX_ADAPTER_ADDRESS_LENGTH", + "MAX_ADAPTER_DESCRIPTION_LENGTH", + "MAX_ADAPTER_NAME_LENGTH", + "MAX_COMPUTERNAME_LENGTH", + "MAX_INTERFACE_NAME_LEN", + "MAX_LONG_PATH", + "MAX_PATH", + "MAX_PROTOCOL_CHAIN", + "MCL_CURRENT", + "MCL_FUTURE", + "MNT_DETACH", + "MNT_EXPIRE", + "MNT_FORCE", + "MSG_BCAST", + "MSG_CMSG_CLOEXEC", + "MSG_COMPAT", + 
"MSG_CONFIRM", + "MSG_CONTROLMBUF", + "MSG_CTRUNC", + "MSG_DONTROUTE", + "MSG_DONTWAIT", + "MSG_EOF", + "MSG_EOR", + "MSG_ERRQUEUE", + "MSG_FASTOPEN", + "MSG_FIN", + "MSG_FLUSH", + "MSG_HAVEMORE", + "MSG_HOLD", + "MSG_IOVUSRSPACE", + "MSG_LENUSRSPACE", + "MSG_MCAST", + "MSG_MORE", + "MSG_NAMEMBUF", + "MSG_NBIO", + "MSG_NEEDSA", + "MSG_NOSIGNAL", + "MSG_NOTIFICATION", + "MSG_OOB", + "MSG_PEEK", + "MSG_PROXY", + "MSG_RCVMORE", + "MSG_RST", + "MSG_SEND", + "MSG_SYN", + "MSG_TRUNC", + "MSG_TRYHARD", + "MSG_USERFLAGS", + "MSG_WAITALL", + "MSG_WAITFORONE", + "MSG_WAITSTREAM", + "MS_ACTIVE", + "MS_ASYNC", + "MS_BIND", + "MS_DEACTIVATE", + "MS_DIRSYNC", + "MS_INVALIDATE", + "MS_I_VERSION", + "MS_KERNMOUNT", + "MS_KILLPAGES", + "MS_MANDLOCK", + "MS_MGC_MSK", + "MS_MGC_VAL", + "MS_MOVE", + "MS_NOATIME", + "MS_NODEV", + "MS_NODIRATIME", + "MS_NOEXEC", + "MS_NOSUID", + "MS_NOUSER", + "MS_POSIXACL", + "MS_PRIVATE", + "MS_RDONLY", + "MS_REC", + "MS_RELATIME", + "MS_REMOUNT", + "MS_RMT_MASK", + "MS_SHARED", + "MS_SILENT", + "MS_SLAVE", + "MS_STRICTATIME", + "MS_SYNC", + "MS_SYNCHRONOUS", + "MS_UNBINDABLE", + "Madvise", + "MapViewOfFile", + "MaxTokenInfoClass", + "Mclpool", + "MibIfRow", + "Mkdir", + "Mkdirat", + "Mkfifo", + "Mknod", + "Mknodat", + "Mlock", + "Mlockall", + "Mmap", + "Mount", + "MoveFile", + "Mprotect", + "Msghdr", + "Munlock", + "Munlockall", + "Munmap", + "MustLoadDLL", + "NAME_MAX", + "NETLINK_ADD_MEMBERSHIP", + "NETLINK_AUDIT", + "NETLINK_BROADCAST_ERROR", + "NETLINK_CONNECTOR", + "NETLINK_DNRTMSG", + "NETLINK_DROP_MEMBERSHIP", + "NETLINK_ECRYPTFS", + "NETLINK_FIB_LOOKUP", + "NETLINK_FIREWALL", + "NETLINK_GENERIC", + "NETLINK_INET_DIAG", + "NETLINK_IP6_FW", + "NETLINK_ISCSI", + "NETLINK_KOBJECT_UEVENT", + "NETLINK_NETFILTER", + "NETLINK_NFLOG", + "NETLINK_NO_ENOBUFS", + "NETLINK_PKTINFO", + "NETLINK_RDMA", + "NETLINK_ROUTE", + "NETLINK_SCSITRANSPORT", + "NETLINK_SELINUX", + "NETLINK_UNUSED", + "NETLINK_USERSOCK", + "NETLINK_XFRM", + "NET_RT_DUMP", + "NET_RT_DUMP2", + "NET_RT_FLAGS", + "NET_RT_IFLIST", + "NET_RT_IFLIST2", + "NET_RT_IFLISTL", + "NET_RT_IFMALIST", + "NET_RT_MAXID", + "NET_RT_OIFLIST", + "NET_RT_OOIFLIST", + "NET_RT_STAT", + "NET_RT_STATS", + "NET_RT_TABLE", + "NET_RT_TRASH", + "NLA_ALIGNTO", + "NLA_F_NESTED", + "NLA_F_NET_BYTEORDER", + "NLA_HDRLEN", + "NLMSG_ALIGNTO", + "NLMSG_DONE", + "NLMSG_ERROR", + "NLMSG_HDRLEN", + "NLMSG_MIN_TYPE", + "NLMSG_NOOP", + "NLMSG_OVERRUN", + "NLM_F_ACK", + "NLM_F_APPEND", + "NLM_F_ATOMIC", + "NLM_F_CREATE", + "NLM_F_DUMP", + "NLM_F_ECHO", + "NLM_F_EXCL", + "NLM_F_MATCH", + "NLM_F_MULTI", + "NLM_F_REPLACE", + "NLM_F_REQUEST", + "NLM_F_ROOT", + "NOFLSH", + "NOTE_ABSOLUTE", + "NOTE_ATTRIB", + "NOTE_BACKGROUND", + "NOTE_CHILD", + "NOTE_CRITICAL", + "NOTE_DELETE", + "NOTE_EOF", + "NOTE_EXEC", + "NOTE_EXIT", + "NOTE_EXITSTATUS", + "NOTE_EXIT_CSERROR", + "NOTE_EXIT_DECRYPTFAIL", + "NOTE_EXIT_DETAIL", + "NOTE_EXIT_DETAIL_MASK", + "NOTE_EXIT_MEMORY", + "NOTE_EXIT_REPARENTED", + "NOTE_EXTEND", + "NOTE_FFAND", + "NOTE_FFCOPY", + "NOTE_FFCTRLMASK", + "NOTE_FFLAGSMASK", + "NOTE_FFNOP", + "NOTE_FFOR", + "NOTE_FORK", + "NOTE_LEEWAY", + "NOTE_LINK", + "NOTE_LOWAT", + "NOTE_NONE", + "NOTE_NSECONDS", + "NOTE_PCTRLMASK", + "NOTE_PDATAMASK", + "NOTE_REAP", + "NOTE_RENAME", + "NOTE_RESOURCEEND", + "NOTE_REVOKE", + "NOTE_SECONDS", + "NOTE_SIGNAL", + "NOTE_TRACK", + "NOTE_TRACKERR", + "NOTE_TRIGGER", + "NOTE_TRUNCATE", + "NOTE_USECONDS", + "NOTE_VM_ERROR", + "NOTE_VM_PRESSURE", + "NOTE_VM_PRESSURE_SUDDEN_TERMINATE", + "NOTE_VM_PRESSURE_TERMINATE", + 
"NOTE_WRITE", + "NameCanonical", + "NameCanonicalEx", + "NameDisplay", + "NameDnsDomain", + "NameFullyQualifiedDN", + "NameSamCompatible", + "NameServicePrincipal", + "NameUniqueId", + "NameUnknown", + "NameUserPrincipal", + "Nanosleep", + "NetApiBufferFree", + "NetGetJoinInformation", + "NetSetupDomainName", + "NetSetupUnjoined", + "NetSetupUnknownStatus", + "NetSetupWorkgroupName", + "NetUserGetInfo", + "NetlinkMessage", + "NetlinkRIB", + "NetlinkRouteAttr", + "NetlinkRouteRequest", + "NewCallback", + "NewCallbackCDecl", + "NewLazyDLL", + "NlAttr", + "NlMsgerr", + "NlMsghdr", + "NsecToFiletime", + "NsecToTimespec", + "NsecToTimeval", + "Ntohs", + "OCRNL", + "OFDEL", + "OFILL", + "OFIOGETBMAP", + "OID_PKIX_KP_SERVER_AUTH", + "OID_SERVER_GATED_CRYPTO", + "OID_SGC_NETSCAPE", + "OLCUC", + "ONLCR", + "ONLRET", + "ONOCR", + "ONOEOT", + "OPEN_ALWAYS", + "OPEN_EXISTING", + "OPOST", + "O_ACCMODE", + "O_ALERT", + "O_ALT_IO", + "O_APPEND", + "O_ASYNC", + "O_CLOEXEC", + "O_CREAT", + "O_DIRECT", + "O_DIRECTORY", + "O_DP_GETRAWENCRYPTED", + "O_DSYNC", + "O_EVTONLY", + "O_EXCL", + "O_EXEC", + "O_EXLOCK", + "O_FSYNC", + "O_LARGEFILE", + "O_NDELAY", + "O_NOATIME", + "O_NOCTTY", + "O_NOFOLLOW", + "O_NONBLOCK", + "O_NOSIGPIPE", + "O_POPUP", + "O_RDONLY", + "O_RDWR", + "O_RSYNC", + "O_SHLOCK", + "O_SYMLINK", + "O_SYNC", + "O_TRUNC", + "O_TTY_INIT", + "O_WRONLY", + "Open", + "OpenCurrentProcessToken", + "OpenProcess", + "OpenProcessToken", + "Openat", + "Overlapped", + "PACKET_ADD_MEMBERSHIP", + "PACKET_BROADCAST", + "PACKET_DROP_MEMBERSHIP", + "PACKET_FASTROUTE", + "PACKET_HOST", + "PACKET_LOOPBACK", + "PACKET_MR_ALLMULTI", + "PACKET_MR_MULTICAST", + "PACKET_MR_PROMISC", + "PACKET_MULTICAST", + "PACKET_OTHERHOST", + "PACKET_OUTGOING", + "PACKET_RECV_OUTPUT", + "PACKET_RX_RING", + "PACKET_STATISTICS", + "PAGE_EXECUTE_READ", + "PAGE_EXECUTE_READWRITE", + "PAGE_EXECUTE_WRITECOPY", + "PAGE_READONLY", + "PAGE_READWRITE", + "PAGE_WRITECOPY", + "PARENB", + "PARMRK", + "PARODD", + "PENDIN", + "PFL_HIDDEN", + "PFL_MATCHES_PROTOCOL_ZERO", + "PFL_MULTIPLE_PROTO_ENTRIES", + "PFL_NETWORKDIRECT_PROVIDER", + "PFL_RECOMMENDED_PROTO_ENTRY", + "PF_FLUSH", + "PKCS_7_ASN_ENCODING", + "PMC5_PIPELINE_FLUSH", + "PRIO_PGRP", + "PRIO_PROCESS", + "PRIO_USER", + "PRI_IOFLUSH", + "PROCESS_QUERY_INFORMATION", + "PROCESS_TERMINATE", + "PROT_EXEC", + "PROT_GROWSDOWN", + "PROT_GROWSUP", + "PROT_NONE", + "PROT_READ", + "PROT_WRITE", + "PROV_DH_SCHANNEL", + "PROV_DSS", + "PROV_DSS_DH", + "PROV_EC_ECDSA_FULL", + "PROV_EC_ECDSA_SIG", + "PROV_EC_ECNRA_FULL", + "PROV_EC_ECNRA_SIG", + "PROV_FORTEZZA", + "PROV_INTEL_SEC", + "PROV_MS_EXCHANGE", + "PROV_REPLACE_OWF", + "PROV_RNG", + "PROV_RSA_AES", + "PROV_RSA_FULL", + "PROV_RSA_SCHANNEL", + "PROV_RSA_SIG", + "PROV_SPYRUS_LYNKS", + "PROV_SSL", + "PR_CAPBSET_DROP", + "PR_CAPBSET_READ", + "PR_CLEAR_SECCOMP_FILTER", + "PR_ENDIAN_BIG", + "PR_ENDIAN_LITTLE", + "PR_ENDIAN_PPC_LITTLE", + "PR_FPEMU_NOPRINT", + "PR_FPEMU_SIGFPE", + "PR_FP_EXC_ASYNC", + "PR_FP_EXC_DISABLED", + "PR_FP_EXC_DIV", + "PR_FP_EXC_INV", + "PR_FP_EXC_NONRECOV", + "PR_FP_EXC_OVF", + "PR_FP_EXC_PRECISE", + "PR_FP_EXC_RES", + "PR_FP_EXC_SW_ENABLE", + "PR_FP_EXC_UND", + "PR_GET_DUMPABLE", + "PR_GET_ENDIAN", + "PR_GET_FPEMU", + "PR_GET_FPEXC", + "PR_GET_KEEPCAPS", + "PR_GET_NAME", + "PR_GET_PDEATHSIG", + "PR_GET_SECCOMP", + "PR_GET_SECCOMP_FILTER", + "PR_GET_SECUREBITS", + "PR_GET_TIMERSLACK", + "PR_GET_TIMING", + "PR_GET_TSC", + "PR_GET_UNALIGN", + "PR_MCE_KILL", + "PR_MCE_KILL_CLEAR", + "PR_MCE_KILL_DEFAULT", + "PR_MCE_KILL_EARLY", + 
"PR_MCE_KILL_GET", + "PR_MCE_KILL_LATE", + "PR_MCE_KILL_SET", + "PR_SECCOMP_FILTER_EVENT", + "PR_SECCOMP_FILTER_SYSCALL", + "PR_SET_DUMPABLE", + "PR_SET_ENDIAN", + "PR_SET_FPEMU", + "PR_SET_FPEXC", + "PR_SET_KEEPCAPS", + "PR_SET_NAME", + "PR_SET_PDEATHSIG", + "PR_SET_PTRACER", + "PR_SET_SECCOMP", + "PR_SET_SECCOMP_FILTER", + "PR_SET_SECUREBITS", + "PR_SET_TIMERSLACK", + "PR_SET_TIMING", + "PR_SET_TSC", + "PR_SET_UNALIGN", + "PR_TASK_PERF_EVENTS_DISABLE", + "PR_TASK_PERF_EVENTS_ENABLE", + "PR_TIMING_STATISTICAL", + "PR_TIMING_TIMESTAMP", + "PR_TSC_ENABLE", + "PR_TSC_SIGSEGV", + "PR_UNALIGN_NOPRINT", + "PR_UNALIGN_SIGBUS", + "PTRACE_ARCH_PRCTL", + "PTRACE_ATTACH", + "PTRACE_CONT", + "PTRACE_DETACH", + "PTRACE_EVENT_CLONE", + "PTRACE_EVENT_EXEC", + "PTRACE_EVENT_EXIT", + "PTRACE_EVENT_FORK", + "PTRACE_EVENT_VFORK", + "PTRACE_EVENT_VFORK_DONE", + "PTRACE_GETCRUNCHREGS", + "PTRACE_GETEVENTMSG", + "PTRACE_GETFPREGS", + "PTRACE_GETFPXREGS", + "PTRACE_GETHBPREGS", + "PTRACE_GETREGS", + "PTRACE_GETREGSET", + "PTRACE_GETSIGINFO", + "PTRACE_GETVFPREGS", + "PTRACE_GETWMMXREGS", + "PTRACE_GET_THREAD_AREA", + "PTRACE_KILL", + "PTRACE_OLDSETOPTIONS", + "PTRACE_O_MASK", + "PTRACE_O_TRACECLONE", + "PTRACE_O_TRACEEXEC", + "PTRACE_O_TRACEEXIT", + "PTRACE_O_TRACEFORK", + "PTRACE_O_TRACESYSGOOD", + "PTRACE_O_TRACEVFORK", + "PTRACE_O_TRACEVFORKDONE", + "PTRACE_PEEKDATA", + "PTRACE_PEEKTEXT", + "PTRACE_PEEKUSR", + "PTRACE_POKEDATA", + "PTRACE_POKETEXT", + "PTRACE_POKEUSR", + "PTRACE_SETCRUNCHREGS", + "PTRACE_SETFPREGS", + "PTRACE_SETFPXREGS", + "PTRACE_SETHBPREGS", + "PTRACE_SETOPTIONS", + "PTRACE_SETREGS", + "PTRACE_SETREGSET", + "PTRACE_SETSIGINFO", + "PTRACE_SETVFPREGS", + "PTRACE_SETWMMXREGS", + "PTRACE_SET_SYSCALL", + "PTRACE_SET_THREAD_AREA", + "PTRACE_SINGLEBLOCK", + "PTRACE_SINGLESTEP", + "PTRACE_SYSCALL", + "PTRACE_SYSEMU", + "PTRACE_SYSEMU_SINGLESTEP", + "PTRACE_TRACEME", + "PT_ATTACH", + "PT_ATTACHEXC", + "PT_CONTINUE", + "PT_DATA_ADDR", + "PT_DENY_ATTACH", + "PT_DETACH", + "PT_FIRSTMACH", + "PT_FORCEQUOTA", + "PT_KILL", + "PT_MASK", + "PT_READ_D", + "PT_READ_I", + "PT_READ_U", + "PT_SIGEXC", + "PT_STEP", + "PT_TEXT_ADDR", + "PT_TEXT_END_ADDR", + "PT_THUPDATE", + "PT_TRACE_ME", + "PT_WRITE_D", + "PT_WRITE_I", + "PT_WRITE_U", + "ParseDirent", + "ParseNetlinkMessage", + "ParseNetlinkRouteAttr", + "ParseRoutingMessage", + "ParseRoutingSockaddr", + "ParseSocketControlMessage", + "ParseUnixCredentials", + "ParseUnixRights", + "PathMax", + "Pathconf", + "Pause", + "Pipe", + "Pipe2", + "PivotRoot", + "Pointer", + "PostQueuedCompletionStatus", + "Pread", + "Proc", + "ProcAttr", + "Process32First", + "Process32Next", + "ProcessEntry32", + "ProcessInformation", + "Protoent", + "PtraceAttach", + "PtraceCont", + "PtraceDetach", + "PtraceGetEventMsg", + "PtraceGetRegs", + "PtracePeekData", + "PtracePeekText", + "PtracePokeData", + "PtracePokeText", + "PtraceRegs", + "PtraceSetOptions", + "PtraceSetRegs", + "PtraceSingleStep", + "PtraceSyscall", + "Pwrite", + "REG_BINARY", + "REG_DWORD", + "REG_DWORD_BIG_ENDIAN", + "REG_DWORD_LITTLE_ENDIAN", + "REG_EXPAND_SZ", + "REG_FULL_RESOURCE_DESCRIPTOR", + "REG_LINK", + "REG_MULTI_SZ", + "REG_NONE", + "REG_QWORD", + "REG_QWORD_LITTLE_ENDIAN", + "REG_RESOURCE_LIST", + "REG_RESOURCE_REQUIREMENTS_LIST", + "REG_SZ", + "RLIMIT_AS", + "RLIMIT_CORE", + "RLIMIT_CPU", + "RLIMIT_CPU_USAGE_MONITOR", + "RLIMIT_DATA", + "RLIMIT_FSIZE", + "RLIMIT_NOFILE", + "RLIMIT_STACK", + "RLIM_INFINITY", + "RTAX_ADVMSS", + "RTAX_AUTHOR", + "RTAX_BRD", + "RTAX_CWND", + "RTAX_DST", + "RTAX_FEATURES", + 
"RTAX_FEATURE_ALLFRAG", + "RTAX_FEATURE_ECN", + "RTAX_FEATURE_SACK", + "RTAX_FEATURE_TIMESTAMP", + "RTAX_GATEWAY", + "RTAX_GENMASK", + "RTAX_HOPLIMIT", + "RTAX_IFA", + "RTAX_IFP", + "RTAX_INITCWND", + "RTAX_INITRWND", + "RTAX_LABEL", + "RTAX_LOCK", + "RTAX_MAX", + "RTAX_MTU", + "RTAX_NETMASK", + "RTAX_REORDERING", + "RTAX_RTO_MIN", + "RTAX_RTT", + "RTAX_RTTVAR", + "RTAX_SRC", + "RTAX_SRCMASK", + "RTAX_SSTHRESH", + "RTAX_TAG", + "RTAX_UNSPEC", + "RTAX_WINDOW", + "RTA_ALIGNTO", + "RTA_AUTHOR", + "RTA_BRD", + "RTA_CACHEINFO", + "RTA_DST", + "RTA_FLOW", + "RTA_GATEWAY", + "RTA_GENMASK", + "RTA_IFA", + "RTA_IFP", + "RTA_IIF", + "RTA_LABEL", + "RTA_MAX", + "RTA_METRICS", + "RTA_MULTIPATH", + "RTA_NETMASK", + "RTA_OIF", + "RTA_PREFSRC", + "RTA_PRIORITY", + "RTA_SRC", + "RTA_SRCMASK", + "RTA_TABLE", + "RTA_TAG", + "RTA_UNSPEC", + "RTCF_DIRECTSRC", + "RTCF_DOREDIRECT", + "RTCF_LOG", + "RTCF_MASQ", + "RTCF_NAT", + "RTCF_VALVE", + "RTF_ADDRCLASSMASK", + "RTF_ADDRCONF", + "RTF_ALLONLINK", + "RTF_ANNOUNCE", + "RTF_BLACKHOLE", + "RTF_BROADCAST", + "RTF_CACHE", + "RTF_CLONED", + "RTF_CLONING", + "RTF_CONDEMNED", + "RTF_DEFAULT", + "RTF_DELCLONE", + "RTF_DONE", + "RTF_DYNAMIC", + "RTF_FLOW", + "RTF_FMASK", + "RTF_GATEWAY", + "RTF_GWFLAG_COMPAT", + "RTF_HOST", + "RTF_IFREF", + "RTF_IFSCOPE", + "RTF_INTERFACE", + "RTF_IRTT", + "RTF_LINKRT", + "RTF_LLDATA", + "RTF_LLINFO", + "RTF_LOCAL", + "RTF_MASK", + "RTF_MODIFIED", + "RTF_MPATH", + "RTF_MPLS", + "RTF_MSS", + "RTF_MTU", + "RTF_MULTICAST", + "RTF_NAT", + "RTF_NOFORWARD", + "RTF_NONEXTHOP", + "RTF_NOPMTUDISC", + "RTF_PERMANENT_ARP", + "RTF_PINNED", + "RTF_POLICY", + "RTF_PRCLONING", + "RTF_PROTO1", + "RTF_PROTO2", + "RTF_PROTO3", + "RTF_PROXY", + "RTF_REINSTATE", + "RTF_REJECT", + "RTF_RNH_LOCKED", + "RTF_ROUTER", + "RTF_SOURCE", + "RTF_SRC", + "RTF_STATIC", + "RTF_STICKY", + "RTF_THROW", + "RTF_TUNNEL", + "RTF_UP", + "RTF_USETRAILERS", + "RTF_WASCLONED", + "RTF_WINDOW", + "RTF_XRESOLVE", + "RTM_ADD", + "RTM_BASE", + "RTM_CHANGE", + "RTM_CHGADDR", + "RTM_DELACTION", + "RTM_DELADDR", + "RTM_DELADDRLABEL", + "RTM_DELETE", + "RTM_DELLINK", + "RTM_DELMADDR", + "RTM_DELNEIGH", + "RTM_DELQDISC", + "RTM_DELROUTE", + "RTM_DELRULE", + "RTM_DELTCLASS", + "RTM_DELTFILTER", + "RTM_DESYNC", + "RTM_F_CLONED", + "RTM_F_EQUALIZE", + "RTM_F_NOTIFY", + "RTM_F_PREFIX", + "RTM_GET", + "RTM_GET2", + "RTM_GETACTION", + "RTM_GETADDR", + "RTM_GETADDRLABEL", + "RTM_GETANYCAST", + "RTM_GETDCB", + "RTM_GETLINK", + "RTM_GETMULTICAST", + "RTM_GETNEIGH", + "RTM_GETNEIGHTBL", + "RTM_GETQDISC", + "RTM_GETROUTE", + "RTM_GETRULE", + "RTM_GETTCLASS", + "RTM_GETTFILTER", + "RTM_IEEE80211", + "RTM_IFANNOUNCE", + "RTM_IFINFO", + "RTM_IFINFO2", + "RTM_LLINFO_UPD", + "RTM_LOCK", + "RTM_LOSING", + "RTM_MAX", + "RTM_MAXSIZE", + "RTM_MISS", + "RTM_NEWACTION", + "RTM_NEWADDR", + "RTM_NEWADDRLABEL", + "RTM_NEWLINK", + "RTM_NEWMADDR", + "RTM_NEWMADDR2", + "RTM_NEWNDUSEROPT", + "RTM_NEWNEIGH", + "RTM_NEWNEIGHTBL", + "RTM_NEWPREFIX", + "RTM_NEWQDISC", + "RTM_NEWROUTE", + "RTM_NEWRULE", + "RTM_NEWTCLASS", + "RTM_NEWTFILTER", + "RTM_NR_FAMILIES", + "RTM_NR_MSGTYPES", + "RTM_OIFINFO", + "RTM_OLDADD", + "RTM_OLDDEL", + "RTM_OOIFINFO", + "RTM_REDIRECT", + "RTM_RESOLVE", + "RTM_RTTUNIT", + "RTM_SETDCB", + "RTM_SETGATE", + "RTM_SETLINK", + "RTM_SETNEIGHTBL", + "RTM_VERSION", + "RTNH_ALIGNTO", + "RTNH_F_DEAD", + "RTNH_F_ONLINK", + "RTNH_F_PERVASIVE", + "RTNLGRP_IPV4_IFADDR", + "RTNLGRP_IPV4_MROUTE", + "RTNLGRP_IPV4_ROUTE", + "RTNLGRP_IPV4_RULE", + "RTNLGRP_IPV6_IFADDR", + "RTNLGRP_IPV6_IFINFO", + 
"RTNLGRP_IPV6_MROUTE", + "RTNLGRP_IPV6_PREFIX", + "RTNLGRP_IPV6_ROUTE", + "RTNLGRP_IPV6_RULE", + "RTNLGRP_LINK", + "RTNLGRP_ND_USEROPT", + "RTNLGRP_NEIGH", + "RTNLGRP_NONE", + "RTNLGRP_NOTIFY", + "RTNLGRP_TC", + "RTN_ANYCAST", + "RTN_BLACKHOLE", + "RTN_BROADCAST", + "RTN_LOCAL", + "RTN_MAX", + "RTN_MULTICAST", + "RTN_NAT", + "RTN_PROHIBIT", + "RTN_THROW", + "RTN_UNICAST", + "RTN_UNREACHABLE", + "RTN_UNSPEC", + "RTN_XRESOLVE", + "RTPROT_BIRD", + "RTPROT_BOOT", + "RTPROT_DHCP", + "RTPROT_DNROUTED", + "RTPROT_GATED", + "RTPROT_KERNEL", + "RTPROT_MRT", + "RTPROT_NTK", + "RTPROT_RA", + "RTPROT_REDIRECT", + "RTPROT_STATIC", + "RTPROT_UNSPEC", + "RTPROT_XORP", + "RTPROT_ZEBRA", + "RTV_EXPIRE", + "RTV_HOPCOUNT", + "RTV_MTU", + "RTV_RPIPE", + "RTV_RTT", + "RTV_RTTVAR", + "RTV_SPIPE", + "RTV_SSTHRESH", + "RTV_WEIGHT", + "RT_CACHING_CONTEXT", + "RT_CLASS_DEFAULT", + "RT_CLASS_LOCAL", + "RT_CLASS_MAIN", + "RT_CLASS_MAX", + "RT_CLASS_UNSPEC", + "RT_DEFAULT_FIB", + "RT_NORTREF", + "RT_SCOPE_HOST", + "RT_SCOPE_LINK", + "RT_SCOPE_NOWHERE", + "RT_SCOPE_SITE", + "RT_SCOPE_UNIVERSE", + "RT_TABLEID_MAX", + "RT_TABLE_COMPAT", + "RT_TABLE_DEFAULT", + "RT_TABLE_LOCAL", + "RT_TABLE_MAIN", + "RT_TABLE_MAX", + "RT_TABLE_UNSPEC", + "RUSAGE_CHILDREN", + "RUSAGE_SELF", + "RUSAGE_THREAD", + "Radvisory_t", + "RawConn", + "RawSockaddr", + "RawSockaddrAny", + "RawSockaddrDatalink", + "RawSockaddrInet4", + "RawSockaddrInet6", + "RawSockaddrLinklayer", + "RawSockaddrNetlink", + "RawSockaddrUnix", + "RawSyscall", + "RawSyscall6", + "Read", + "ReadConsole", + "ReadDirectoryChanges", + "ReadDirent", + "ReadFile", + "Readlink", + "Reboot", + "Recvfrom", + "Recvmsg", + "RegCloseKey", + "RegEnumKeyEx", + "RegOpenKeyEx", + "RegQueryInfoKey", + "RegQueryValueEx", + "RemoveDirectory", + "Removexattr", + "Rename", + "Renameat", + "Revoke", + "Rlimit", + "Rmdir", + "RouteMessage", + "RouteRIB", + "RoutingMessage", + "RtAttr", + "RtGenmsg", + "RtMetrics", + "RtMsg", + "RtMsghdr", + "RtNexthop", + "Rusage", + "SCM_BINTIME", + "SCM_CREDENTIALS", + "SCM_CREDS", + "SCM_RIGHTS", + "SCM_TIMESTAMP", + "SCM_TIMESTAMPING", + "SCM_TIMESTAMPNS", + "SCM_TIMESTAMP_MONOTONIC", + "SHUT_RD", + "SHUT_RDWR", + "SHUT_WR", + "SID", + "SIDAndAttributes", + "SIGABRT", + "SIGALRM", + "SIGBUS", + "SIGCHLD", + "SIGCLD", + "SIGCONT", + "SIGEMT", + "SIGFPE", + "SIGHUP", + "SIGILL", + "SIGINFO", + "SIGINT", + "SIGIO", + "SIGIOT", + "SIGKILL", + "SIGLIBRT", + "SIGLWP", + "SIGPIPE", + "SIGPOLL", + "SIGPROF", + "SIGPWR", + "SIGQUIT", + "SIGSEGV", + "SIGSTKFLT", + "SIGSTOP", + "SIGSYS", + "SIGTERM", + "SIGTHR", + "SIGTRAP", + "SIGTSTP", + "SIGTTIN", + "SIGTTOU", + "SIGUNUSED", + "SIGURG", + "SIGUSR1", + "SIGUSR2", + "SIGVTALRM", + "SIGWINCH", + "SIGXCPU", + "SIGXFSZ", + "SIOCADDDLCI", + "SIOCADDMULTI", + "SIOCADDRT", + "SIOCAIFADDR", + "SIOCAIFGROUP", + "SIOCALIFADDR", + "SIOCARPIPLL", + "SIOCATMARK", + "SIOCAUTOADDR", + "SIOCAUTONETMASK", + "SIOCBRDGADD", + "SIOCBRDGADDS", + "SIOCBRDGARL", + "SIOCBRDGDADDR", + "SIOCBRDGDEL", + "SIOCBRDGDELS", + "SIOCBRDGFLUSH", + "SIOCBRDGFRL", + "SIOCBRDGGCACHE", + "SIOCBRDGGFD", + "SIOCBRDGGHT", + "SIOCBRDGGIFFLGS", + "SIOCBRDGGMA", + "SIOCBRDGGPARAM", + "SIOCBRDGGPRI", + "SIOCBRDGGRL", + "SIOCBRDGGSIFS", + "SIOCBRDGGTO", + "SIOCBRDGIFS", + "SIOCBRDGRTS", + "SIOCBRDGSADDR", + "SIOCBRDGSCACHE", + "SIOCBRDGSFD", + "SIOCBRDGSHT", + "SIOCBRDGSIFCOST", + "SIOCBRDGSIFFLGS", + "SIOCBRDGSIFPRIO", + "SIOCBRDGSMA", + "SIOCBRDGSPRI", + "SIOCBRDGSPROTO", + "SIOCBRDGSTO", + "SIOCBRDGSTXHC", + "SIOCDARP", + "SIOCDELDLCI", + "SIOCDELMULTI", + 
"SIOCDELRT", + "SIOCDEVPRIVATE", + "SIOCDIFADDR", + "SIOCDIFGROUP", + "SIOCDIFPHYADDR", + "SIOCDLIFADDR", + "SIOCDRARP", + "SIOCGARP", + "SIOCGDRVSPEC", + "SIOCGETKALIVE", + "SIOCGETLABEL", + "SIOCGETPFLOW", + "SIOCGETPFSYNC", + "SIOCGETSGCNT", + "SIOCGETVIFCNT", + "SIOCGETVLAN", + "SIOCGHIWAT", + "SIOCGIFADDR", + "SIOCGIFADDRPREF", + "SIOCGIFALIAS", + "SIOCGIFALTMTU", + "SIOCGIFASYNCMAP", + "SIOCGIFBOND", + "SIOCGIFBR", + "SIOCGIFBRDADDR", + "SIOCGIFCAP", + "SIOCGIFCONF", + "SIOCGIFCOUNT", + "SIOCGIFDATA", + "SIOCGIFDESCR", + "SIOCGIFDEVMTU", + "SIOCGIFDLT", + "SIOCGIFDSTADDR", + "SIOCGIFENCAP", + "SIOCGIFFIB", + "SIOCGIFFLAGS", + "SIOCGIFGATTR", + "SIOCGIFGENERIC", + "SIOCGIFGMEMB", + "SIOCGIFGROUP", + "SIOCGIFHARDMTU", + "SIOCGIFHWADDR", + "SIOCGIFINDEX", + "SIOCGIFKPI", + "SIOCGIFMAC", + "SIOCGIFMAP", + "SIOCGIFMEDIA", + "SIOCGIFMEM", + "SIOCGIFMETRIC", + "SIOCGIFMTU", + "SIOCGIFNAME", + "SIOCGIFNETMASK", + "SIOCGIFPDSTADDR", + "SIOCGIFPFLAGS", + "SIOCGIFPHYS", + "SIOCGIFPRIORITY", + "SIOCGIFPSRCADDR", + "SIOCGIFRDOMAIN", + "SIOCGIFRTLABEL", + "SIOCGIFSLAVE", + "SIOCGIFSTATUS", + "SIOCGIFTIMESLOT", + "SIOCGIFTXQLEN", + "SIOCGIFVLAN", + "SIOCGIFWAKEFLAGS", + "SIOCGIFXFLAGS", + "SIOCGLIFADDR", + "SIOCGLIFPHYADDR", + "SIOCGLIFPHYRTABLE", + "SIOCGLIFPHYTTL", + "SIOCGLINKSTR", + "SIOCGLOWAT", + "SIOCGPGRP", + "SIOCGPRIVATE_0", + "SIOCGPRIVATE_1", + "SIOCGRARP", + "SIOCGSPPPPARAMS", + "SIOCGSTAMP", + "SIOCGSTAMPNS", + "SIOCGVH", + "SIOCGVNETID", + "SIOCIFCREATE", + "SIOCIFCREATE2", + "SIOCIFDESTROY", + "SIOCIFGCLONERS", + "SIOCINITIFADDR", + "SIOCPROTOPRIVATE", + "SIOCRSLVMULTI", + "SIOCRTMSG", + "SIOCSARP", + "SIOCSDRVSPEC", + "SIOCSETKALIVE", + "SIOCSETLABEL", + "SIOCSETPFLOW", + "SIOCSETPFSYNC", + "SIOCSETVLAN", + "SIOCSHIWAT", + "SIOCSIFADDR", + "SIOCSIFADDRPREF", + "SIOCSIFALTMTU", + "SIOCSIFASYNCMAP", + "SIOCSIFBOND", + "SIOCSIFBR", + "SIOCSIFBRDADDR", + "SIOCSIFCAP", + "SIOCSIFDESCR", + "SIOCSIFDSTADDR", + "SIOCSIFENCAP", + "SIOCSIFFIB", + "SIOCSIFFLAGS", + "SIOCSIFGATTR", + "SIOCSIFGENERIC", + "SIOCSIFHWADDR", + "SIOCSIFHWBROADCAST", + "SIOCSIFKPI", + "SIOCSIFLINK", + "SIOCSIFLLADDR", + "SIOCSIFMAC", + "SIOCSIFMAP", + "SIOCSIFMEDIA", + "SIOCSIFMEM", + "SIOCSIFMETRIC", + "SIOCSIFMTU", + "SIOCSIFNAME", + "SIOCSIFNETMASK", + "SIOCSIFPFLAGS", + "SIOCSIFPHYADDR", + "SIOCSIFPHYS", + "SIOCSIFPRIORITY", + "SIOCSIFRDOMAIN", + "SIOCSIFRTLABEL", + "SIOCSIFRVNET", + "SIOCSIFSLAVE", + "SIOCSIFTIMESLOT", + "SIOCSIFTXQLEN", + "SIOCSIFVLAN", + "SIOCSIFVNET", + "SIOCSIFXFLAGS", + "SIOCSLIFPHYADDR", + "SIOCSLIFPHYRTABLE", + "SIOCSLIFPHYTTL", + "SIOCSLINKSTR", + "SIOCSLOWAT", + "SIOCSPGRP", + "SIOCSRARP", + "SIOCSSPPPPARAMS", + "SIOCSVH", + "SIOCSVNETID", + "SIOCZIFDATA", + "SIO_GET_EXTENSION_FUNCTION_POINTER", + "SIO_GET_INTERFACE_LIST", + "SIO_KEEPALIVE_VALS", + "SIO_UDP_CONNRESET", + "SOCK_CLOEXEC", + "SOCK_DCCP", + "SOCK_DGRAM", + "SOCK_FLAGS_MASK", + "SOCK_MAXADDRLEN", + "SOCK_NONBLOCK", + "SOCK_NOSIGPIPE", + "SOCK_PACKET", + "SOCK_RAW", + "SOCK_RDM", + "SOCK_SEQPACKET", + "SOCK_STREAM", + "SOL_AAL", + "SOL_ATM", + "SOL_DECNET", + "SOL_ICMPV6", + "SOL_IP", + "SOL_IPV6", + "SOL_IRDA", + "SOL_PACKET", + "SOL_RAW", + "SOL_SOCKET", + "SOL_TCP", + "SOL_X25", + "SOMAXCONN", + "SO_ACCEPTCONN", + "SO_ACCEPTFILTER", + "SO_ATTACH_FILTER", + "SO_BINDANY", + "SO_BINDTODEVICE", + "SO_BINTIME", + "SO_BROADCAST", + "SO_BSDCOMPAT", + "SO_DEBUG", + "SO_DETACH_FILTER", + "SO_DOMAIN", + "SO_DONTROUTE", + "SO_DONTTRUNC", + "SO_ERROR", + "SO_KEEPALIVE", + "SO_LABEL", + "SO_LINGER", + "SO_LINGER_SEC", + 
"SO_LISTENINCQLEN", + "SO_LISTENQLEN", + "SO_LISTENQLIMIT", + "SO_MARK", + "SO_NETPROC", + "SO_NKE", + "SO_NOADDRERR", + "SO_NOHEADER", + "SO_NOSIGPIPE", + "SO_NOTIFYCONFLICT", + "SO_NO_CHECK", + "SO_NO_DDP", + "SO_NO_OFFLOAD", + "SO_NP_EXTENSIONS", + "SO_NREAD", + "SO_NUMRCVPKT", + "SO_NWRITE", + "SO_OOBINLINE", + "SO_OVERFLOWED", + "SO_PASSCRED", + "SO_PASSSEC", + "SO_PEERCRED", + "SO_PEERLABEL", + "SO_PEERNAME", + "SO_PEERSEC", + "SO_PRIORITY", + "SO_PROTOCOL", + "SO_PROTOTYPE", + "SO_RANDOMPORT", + "SO_RCVBUF", + "SO_RCVBUFFORCE", + "SO_RCVLOWAT", + "SO_RCVTIMEO", + "SO_RESTRICTIONS", + "SO_RESTRICT_DENYIN", + "SO_RESTRICT_DENYOUT", + "SO_RESTRICT_DENYSET", + "SO_REUSEADDR", + "SO_REUSEPORT", + "SO_REUSESHAREUID", + "SO_RTABLE", + "SO_RXQ_OVFL", + "SO_SECURITY_AUTHENTICATION", + "SO_SECURITY_ENCRYPTION_NETWORK", + "SO_SECURITY_ENCRYPTION_TRANSPORT", + "SO_SETFIB", + "SO_SNDBUF", + "SO_SNDBUFFORCE", + "SO_SNDLOWAT", + "SO_SNDTIMEO", + "SO_SPLICE", + "SO_TIMESTAMP", + "SO_TIMESTAMPING", + "SO_TIMESTAMPNS", + "SO_TIMESTAMP_MONOTONIC", + "SO_TYPE", + "SO_UPCALLCLOSEWAIT", + "SO_UPDATE_ACCEPT_CONTEXT", + "SO_UPDATE_CONNECT_CONTEXT", + "SO_USELOOPBACK", + "SO_USER_COOKIE", + "SO_VENDOR", + "SO_WANTMORE", + "SO_WANTOOBFLAG", + "SSLExtraCertChainPolicyPara", + "STANDARD_RIGHTS_ALL", + "STANDARD_RIGHTS_EXECUTE", + "STANDARD_RIGHTS_READ", + "STANDARD_RIGHTS_REQUIRED", + "STANDARD_RIGHTS_WRITE", + "STARTF_USESHOWWINDOW", + "STARTF_USESTDHANDLES", + "STD_ERROR_HANDLE", + "STD_INPUT_HANDLE", + "STD_OUTPUT_HANDLE", + "SUBLANG_ENGLISH_US", + "SW_FORCEMINIMIZE", + "SW_HIDE", + "SW_MAXIMIZE", + "SW_MINIMIZE", + "SW_NORMAL", + "SW_RESTORE", + "SW_SHOW", + "SW_SHOWDEFAULT", + "SW_SHOWMAXIMIZED", + "SW_SHOWMINIMIZED", + "SW_SHOWMINNOACTIVE", + "SW_SHOWNA", + "SW_SHOWNOACTIVATE", + "SW_SHOWNORMAL", + "SYMBOLIC_LINK_FLAG_DIRECTORY", + "SYNCHRONIZE", + "SYSCTL_VERSION", + "SYSCTL_VERS_0", + "SYSCTL_VERS_1", + "SYSCTL_VERS_MASK", + "SYS_ABORT2", + "SYS_ACCEPT", + "SYS_ACCEPT4", + "SYS_ACCEPT_NOCANCEL", + "SYS_ACCESS", + "SYS_ACCESS_EXTENDED", + "SYS_ACCT", + "SYS_ADD_KEY", + "SYS_ADD_PROFIL", + "SYS_ADJFREQ", + "SYS_ADJTIME", + "SYS_ADJTIMEX", + "SYS_AFS_SYSCALL", + "SYS_AIO_CANCEL", + "SYS_AIO_ERROR", + "SYS_AIO_FSYNC", + "SYS_AIO_READ", + "SYS_AIO_RETURN", + "SYS_AIO_SUSPEND", + "SYS_AIO_SUSPEND_NOCANCEL", + "SYS_AIO_WRITE", + "SYS_ALARM", + "SYS_ARCH_PRCTL", + "SYS_ARM_FADVISE64_64", + "SYS_ARM_SYNC_FILE_RANGE", + "SYS_ATGETMSG", + "SYS_ATPGETREQ", + "SYS_ATPGETRSP", + "SYS_ATPSNDREQ", + "SYS_ATPSNDRSP", + "SYS_ATPUTMSG", + "SYS_ATSOCKET", + "SYS_AUDIT", + "SYS_AUDITCTL", + "SYS_AUDITON", + "SYS_AUDIT_SESSION_JOIN", + "SYS_AUDIT_SESSION_PORT", + "SYS_AUDIT_SESSION_SELF", + "SYS_BDFLUSH", + "SYS_BIND", + "SYS_BINDAT", + "SYS_BREAK", + "SYS_BRK", + "SYS_BSDTHREAD_CREATE", + "SYS_BSDTHREAD_REGISTER", + "SYS_BSDTHREAD_TERMINATE", + "SYS_CAPGET", + "SYS_CAPSET", + "SYS_CAP_ENTER", + "SYS_CAP_FCNTLS_GET", + "SYS_CAP_FCNTLS_LIMIT", + "SYS_CAP_GETMODE", + "SYS_CAP_GETRIGHTS", + "SYS_CAP_IOCTLS_GET", + "SYS_CAP_IOCTLS_LIMIT", + "SYS_CAP_NEW", + "SYS_CAP_RIGHTS_GET", + "SYS_CAP_RIGHTS_LIMIT", + "SYS_CHDIR", + "SYS_CHFLAGS", + "SYS_CHFLAGSAT", + "SYS_CHMOD", + "SYS_CHMOD_EXTENDED", + "SYS_CHOWN", + "SYS_CHOWN32", + "SYS_CHROOT", + "SYS_CHUD", + "SYS_CLOCK_ADJTIME", + "SYS_CLOCK_GETCPUCLOCKID2", + "SYS_CLOCK_GETRES", + "SYS_CLOCK_GETTIME", + "SYS_CLOCK_NANOSLEEP", + "SYS_CLOCK_SETTIME", + "SYS_CLONE", + "SYS_CLOSE", + "SYS_CLOSEFROM", + "SYS_CLOSE_NOCANCEL", + "SYS_CONNECT", + "SYS_CONNECTAT", + 
"SYS_CONNECT_NOCANCEL", + "SYS_COPYFILE", + "SYS_CPUSET", + "SYS_CPUSET_GETAFFINITY", + "SYS_CPUSET_GETID", + "SYS_CPUSET_SETAFFINITY", + "SYS_CPUSET_SETID", + "SYS_CREAT", + "SYS_CREATE_MODULE", + "SYS_CSOPS", + "SYS_CSOPS_AUDITTOKEN", + "SYS_DELETE", + "SYS_DELETE_MODULE", + "SYS_DUP", + "SYS_DUP2", + "SYS_DUP3", + "SYS_EACCESS", + "SYS_EPOLL_CREATE", + "SYS_EPOLL_CREATE1", + "SYS_EPOLL_CTL", + "SYS_EPOLL_CTL_OLD", + "SYS_EPOLL_PWAIT", + "SYS_EPOLL_WAIT", + "SYS_EPOLL_WAIT_OLD", + "SYS_EVENTFD", + "SYS_EVENTFD2", + "SYS_EXCHANGEDATA", + "SYS_EXECVE", + "SYS_EXIT", + "SYS_EXIT_GROUP", + "SYS_EXTATTRCTL", + "SYS_EXTATTR_DELETE_FD", + "SYS_EXTATTR_DELETE_FILE", + "SYS_EXTATTR_DELETE_LINK", + "SYS_EXTATTR_GET_FD", + "SYS_EXTATTR_GET_FILE", + "SYS_EXTATTR_GET_LINK", + "SYS_EXTATTR_LIST_FD", + "SYS_EXTATTR_LIST_FILE", + "SYS_EXTATTR_LIST_LINK", + "SYS_EXTATTR_SET_FD", + "SYS_EXTATTR_SET_FILE", + "SYS_EXTATTR_SET_LINK", + "SYS_FACCESSAT", + "SYS_FADVISE64", + "SYS_FADVISE64_64", + "SYS_FALLOCATE", + "SYS_FANOTIFY_INIT", + "SYS_FANOTIFY_MARK", + "SYS_FCHDIR", + "SYS_FCHFLAGS", + "SYS_FCHMOD", + "SYS_FCHMODAT", + "SYS_FCHMOD_EXTENDED", + "SYS_FCHOWN", + "SYS_FCHOWN32", + "SYS_FCHOWNAT", + "SYS_FCHROOT", + "SYS_FCNTL", + "SYS_FCNTL64", + "SYS_FCNTL_NOCANCEL", + "SYS_FDATASYNC", + "SYS_FEXECVE", + "SYS_FFCLOCK_GETCOUNTER", + "SYS_FFCLOCK_GETESTIMATE", + "SYS_FFCLOCK_SETESTIMATE", + "SYS_FFSCTL", + "SYS_FGETATTRLIST", + "SYS_FGETXATTR", + "SYS_FHOPEN", + "SYS_FHSTAT", + "SYS_FHSTATFS", + "SYS_FILEPORT_MAKEFD", + "SYS_FILEPORT_MAKEPORT", + "SYS_FKTRACE", + "SYS_FLISTXATTR", + "SYS_FLOCK", + "SYS_FORK", + "SYS_FPATHCONF", + "SYS_FREEBSD6_FTRUNCATE", + "SYS_FREEBSD6_LSEEK", + "SYS_FREEBSD6_MMAP", + "SYS_FREEBSD6_PREAD", + "SYS_FREEBSD6_PWRITE", + "SYS_FREEBSD6_TRUNCATE", + "SYS_FREMOVEXATTR", + "SYS_FSCTL", + "SYS_FSETATTRLIST", + "SYS_FSETXATTR", + "SYS_FSGETPATH", + "SYS_FSTAT", + "SYS_FSTAT64", + "SYS_FSTAT64_EXTENDED", + "SYS_FSTATAT", + "SYS_FSTATAT64", + "SYS_FSTATFS", + "SYS_FSTATFS64", + "SYS_FSTATV", + "SYS_FSTATVFS1", + "SYS_FSTAT_EXTENDED", + "SYS_FSYNC", + "SYS_FSYNC_NOCANCEL", + "SYS_FSYNC_RANGE", + "SYS_FTIME", + "SYS_FTRUNCATE", + "SYS_FTRUNCATE64", + "SYS_FUTEX", + "SYS_FUTIMENS", + "SYS_FUTIMES", + "SYS_FUTIMESAT", + "SYS_GETATTRLIST", + "SYS_GETAUDIT", + "SYS_GETAUDIT_ADDR", + "SYS_GETAUID", + "SYS_GETCONTEXT", + "SYS_GETCPU", + "SYS_GETCWD", + "SYS_GETDENTS", + "SYS_GETDENTS64", + "SYS_GETDIRENTRIES", + "SYS_GETDIRENTRIES64", + "SYS_GETDIRENTRIESATTR", + "SYS_GETDTABLECOUNT", + "SYS_GETDTABLESIZE", + "SYS_GETEGID", + "SYS_GETEGID32", + "SYS_GETEUID", + "SYS_GETEUID32", + "SYS_GETFH", + "SYS_GETFSSTAT", + "SYS_GETFSSTAT64", + "SYS_GETGID", + "SYS_GETGID32", + "SYS_GETGROUPS", + "SYS_GETGROUPS32", + "SYS_GETHOSTUUID", + "SYS_GETITIMER", + "SYS_GETLCID", + "SYS_GETLOGIN", + "SYS_GETLOGINCLASS", + "SYS_GETPEERNAME", + "SYS_GETPGID", + "SYS_GETPGRP", + "SYS_GETPID", + "SYS_GETPMSG", + "SYS_GETPPID", + "SYS_GETPRIORITY", + "SYS_GETRESGID", + "SYS_GETRESGID32", + "SYS_GETRESUID", + "SYS_GETRESUID32", + "SYS_GETRLIMIT", + "SYS_GETRTABLE", + "SYS_GETRUSAGE", + "SYS_GETSGROUPS", + "SYS_GETSID", + "SYS_GETSOCKNAME", + "SYS_GETSOCKOPT", + "SYS_GETTHRID", + "SYS_GETTID", + "SYS_GETTIMEOFDAY", + "SYS_GETUID", + "SYS_GETUID32", + "SYS_GETVFSSTAT", + "SYS_GETWGROUPS", + "SYS_GETXATTR", + "SYS_GET_KERNEL_SYMS", + "SYS_GET_MEMPOLICY", + "SYS_GET_ROBUST_LIST", + "SYS_GET_THREAD_AREA", + "SYS_GTTY", + "SYS_IDENTITYSVC", + "SYS_IDLE", + "SYS_INITGROUPS", + "SYS_INIT_MODULE", + "SYS_INOTIFY_ADD_WATCH", + 
"SYS_INOTIFY_INIT", + "SYS_INOTIFY_INIT1", + "SYS_INOTIFY_RM_WATCH", + "SYS_IOCTL", + "SYS_IOPERM", + "SYS_IOPL", + "SYS_IOPOLICYSYS", + "SYS_IOPRIO_GET", + "SYS_IOPRIO_SET", + "SYS_IO_CANCEL", + "SYS_IO_DESTROY", + "SYS_IO_GETEVENTS", + "SYS_IO_SETUP", + "SYS_IO_SUBMIT", + "SYS_IPC", + "SYS_ISSETUGID", + "SYS_JAIL", + "SYS_JAIL_ATTACH", + "SYS_JAIL_GET", + "SYS_JAIL_REMOVE", + "SYS_JAIL_SET", + "SYS_KAS_INFO", + "SYS_KDEBUG_TRACE", + "SYS_KENV", + "SYS_KEVENT", + "SYS_KEVENT64", + "SYS_KEXEC_LOAD", + "SYS_KEYCTL", + "SYS_KILL", + "SYS_KLDFIND", + "SYS_KLDFIRSTMOD", + "SYS_KLDLOAD", + "SYS_KLDNEXT", + "SYS_KLDSTAT", + "SYS_KLDSYM", + "SYS_KLDUNLOAD", + "SYS_KLDUNLOADF", + "SYS_KQUEUE", + "SYS_KQUEUE1", + "SYS_KTIMER_CREATE", + "SYS_KTIMER_DELETE", + "SYS_KTIMER_GETOVERRUN", + "SYS_KTIMER_GETTIME", + "SYS_KTIMER_SETTIME", + "SYS_KTRACE", + "SYS_LCHFLAGS", + "SYS_LCHMOD", + "SYS_LCHOWN", + "SYS_LCHOWN32", + "SYS_LEDGER", + "SYS_LGETFH", + "SYS_LGETXATTR", + "SYS_LINK", + "SYS_LINKAT", + "SYS_LIO_LISTIO", + "SYS_LISTEN", + "SYS_LISTXATTR", + "SYS_LLISTXATTR", + "SYS_LOCK", + "SYS_LOOKUP_DCOOKIE", + "SYS_LPATHCONF", + "SYS_LREMOVEXATTR", + "SYS_LSEEK", + "SYS_LSETXATTR", + "SYS_LSTAT", + "SYS_LSTAT64", + "SYS_LSTAT64_EXTENDED", + "SYS_LSTATV", + "SYS_LSTAT_EXTENDED", + "SYS_LUTIMES", + "SYS_MAC_SYSCALL", + "SYS_MADVISE", + "SYS_MADVISE1", + "SYS_MAXSYSCALL", + "SYS_MBIND", + "SYS_MIGRATE_PAGES", + "SYS_MINCORE", + "SYS_MINHERIT", + "SYS_MKCOMPLEX", + "SYS_MKDIR", + "SYS_MKDIRAT", + "SYS_MKDIR_EXTENDED", + "SYS_MKFIFO", + "SYS_MKFIFOAT", + "SYS_MKFIFO_EXTENDED", + "SYS_MKNOD", + "SYS_MKNODAT", + "SYS_MLOCK", + "SYS_MLOCKALL", + "SYS_MMAP", + "SYS_MMAP2", + "SYS_MODCTL", + "SYS_MODFIND", + "SYS_MODFNEXT", + "SYS_MODIFY_LDT", + "SYS_MODNEXT", + "SYS_MODSTAT", + "SYS_MODWATCH", + "SYS_MOUNT", + "SYS_MOVE_PAGES", + "SYS_MPROTECT", + "SYS_MPX", + "SYS_MQUERY", + "SYS_MQ_GETSETATTR", + "SYS_MQ_NOTIFY", + "SYS_MQ_OPEN", + "SYS_MQ_TIMEDRECEIVE", + "SYS_MQ_TIMEDSEND", + "SYS_MQ_UNLINK", + "SYS_MREMAP", + "SYS_MSGCTL", + "SYS_MSGGET", + "SYS_MSGRCV", + "SYS_MSGRCV_NOCANCEL", + "SYS_MSGSND", + "SYS_MSGSND_NOCANCEL", + "SYS_MSGSYS", + "SYS_MSYNC", + "SYS_MSYNC_NOCANCEL", + "SYS_MUNLOCK", + "SYS_MUNLOCKALL", + "SYS_MUNMAP", + "SYS_NAME_TO_HANDLE_AT", + "SYS_NANOSLEEP", + "SYS_NEWFSTATAT", + "SYS_NFSCLNT", + "SYS_NFSSERVCTL", + "SYS_NFSSVC", + "SYS_NFSTAT", + "SYS_NICE", + "SYS_NLSTAT", + "SYS_NMOUNT", + "SYS_NSTAT", + "SYS_NTP_ADJTIME", + "SYS_NTP_GETTIME", + "SYS_OABI_SYSCALL_BASE", + "SYS_OBREAK", + "SYS_OLDFSTAT", + "SYS_OLDLSTAT", + "SYS_OLDOLDUNAME", + "SYS_OLDSTAT", + "SYS_OLDUNAME", + "SYS_OPEN", + "SYS_OPENAT", + "SYS_OPENBSD_POLL", + "SYS_OPEN_BY_HANDLE_AT", + "SYS_OPEN_DPROTECTED_NP", + "SYS_OPEN_EXTENDED", + "SYS_OPEN_NOCANCEL", + "SYS_OVADVISE", + "SYS_PACCEPT", + "SYS_PATHCONF", + "SYS_PAUSE", + "SYS_PCICONFIG_IOBASE", + "SYS_PCICONFIG_READ", + "SYS_PCICONFIG_WRITE", + "SYS_PDFORK", + "SYS_PDGETPID", + "SYS_PDKILL", + "SYS_PERF_EVENT_OPEN", + "SYS_PERSONALITY", + "SYS_PID_HIBERNATE", + "SYS_PID_RESUME", + "SYS_PID_SHUTDOWN_SOCKETS", + "SYS_PID_SUSPEND", + "SYS_PIPE", + "SYS_PIPE2", + "SYS_PIVOT_ROOT", + "SYS_PMC_CONTROL", + "SYS_PMC_GET_INFO", + "SYS_POLL", + "SYS_POLLTS", + "SYS_POLL_NOCANCEL", + "SYS_POSIX_FADVISE", + "SYS_POSIX_FALLOCATE", + "SYS_POSIX_OPENPT", + "SYS_POSIX_SPAWN", + "SYS_PPOLL", + "SYS_PRCTL", + "SYS_PREAD", + "SYS_PREAD64", + "SYS_PREADV", + "SYS_PREAD_NOCANCEL", + "SYS_PRLIMIT64", + "SYS_PROCCTL", + "SYS_PROCESS_POLICY", + "SYS_PROCESS_VM_READV", + 
"SYS_PROCESS_VM_WRITEV", + "SYS_PROC_INFO", + "SYS_PROF", + "SYS_PROFIL", + "SYS_PSELECT", + "SYS_PSELECT6", + "SYS_PSET_ASSIGN", + "SYS_PSET_CREATE", + "SYS_PSET_DESTROY", + "SYS_PSYNCH_CVBROAD", + "SYS_PSYNCH_CVCLRPREPOST", + "SYS_PSYNCH_CVSIGNAL", + "SYS_PSYNCH_CVWAIT", + "SYS_PSYNCH_MUTEXDROP", + "SYS_PSYNCH_MUTEXWAIT", + "SYS_PSYNCH_RW_DOWNGRADE", + "SYS_PSYNCH_RW_LONGRDLOCK", + "SYS_PSYNCH_RW_RDLOCK", + "SYS_PSYNCH_RW_UNLOCK", + "SYS_PSYNCH_RW_UNLOCK2", + "SYS_PSYNCH_RW_UPGRADE", + "SYS_PSYNCH_RW_WRLOCK", + "SYS_PSYNCH_RW_YIELDWRLOCK", + "SYS_PTRACE", + "SYS_PUTPMSG", + "SYS_PWRITE", + "SYS_PWRITE64", + "SYS_PWRITEV", + "SYS_PWRITE_NOCANCEL", + "SYS_QUERY_MODULE", + "SYS_QUOTACTL", + "SYS_RASCTL", + "SYS_RCTL_ADD_RULE", + "SYS_RCTL_GET_LIMITS", + "SYS_RCTL_GET_RACCT", + "SYS_RCTL_GET_RULES", + "SYS_RCTL_REMOVE_RULE", + "SYS_READ", + "SYS_READAHEAD", + "SYS_READDIR", + "SYS_READLINK", + "SYS_READLINKAT", + "SYS_READV", + "SYS_READV_NOCANCEL", + "SYS_READ_NOCANCEL", + "SYS_REBOOT", + "SYS_RECV", + "SYS_RECVFROM", + "SYS_RECVFROM_NOCANCEL", + "SYS_RECVMMSG", + "SYS_RECVMSG", + "SYS_RECVMSG_NOCANCEL", + "SYS_REMAP_FILE_PAGES", + "SYS_REMOVEXATTR", + "SYS_RENAME", + "SYS_RENAMEAT", + "SYS_REQUEST_KEY", + "SYS_RESTART_SYSCALL", + "SYS_REVOKE", + "SYS_RFORK", + "SYS_RMDIR", + "SYS_RTPRIO", + "SYS_RTPRIO_THREAD", + "SYS_RT_SIGACTION", + "SYS_RT_SIGPENDING", + "SYS_RT_SIGPROCMASK", + "SYS_RT_SIGQUEUEINFO", + "SYS_RT_SIGRETURN", + "SYS_RT_SIGSUSPEND", + "SYS_RT_SIGTIMEDWAIT", + "SYS_RT_TGSIGQUEUEINFO", + "SYS_SBRK", + "SYS_SCHED_GETAFFINITY", + "SYS_SCHED_GETPARAM", + "SYS_SCHED_GETSCHEDULER", + "SYS_SCHED_GET_PRIORITY_MAX", + "SYS_SCHED_GET_PRIORITY_MIN", + "SYS_SCHED_RR_GET_INTERVAL", + "SYS_SCHED_SETAFFINITY", + "SYS_SCHED_SETPARAM", + "SYS_SCHED_SETSCHEDULER", + "SYS_SCHED_YIELD", + "SYS_SCTP_GENERIC_RECVMSG", + "SYS_SCTP_GENERIC_SENDMSG", + "SYS_SCTP_GENERIC_SENDMSG_IOV", + "SYS_SCTP_PEELOFF", + "SYS_SEARCHFS", + "SYS_SECURITY", + "SYS_SELECT", + "SYS_SELECT_NOCANCEL", + "SYS_SEMCONFIG", + "SYS_SEMCTL", + "SYS_SEMGET", + "SYS_SEMOP", + "SYS_SEMSYS", + "SYS_SEMTIMEDOP", + "SYS_SEM_CLOSE", + "SYS_SEM_DESTROY", + "SYS_SEM_GETVALUE", + "SYS_SEM_INIT", + "SYS_SEM_OPEN", + "SYS_SEM_POST", + "SYS_SEM_TRYWAIT", + "SYS_SEM_UNLINK", + "SYS_SEM_WAIT", + "SYS_SEM_WAIT_NOCANCEL", + "SYS_SEND", + "SYS_SENDFILE", + "SYS_SENDFILE64", + "SYS_SENDMMSG", + "SYS_SENDMSG", + "SYS_SENDMSG_NOCANCEL", + "SYS_SENDTO", + "SYS_SENDTO_NOCANCEL", + "SYS_SETATTRLIST", + "SYS_SETAUDIT", + "SYS_SETAUDIT_ADDR", + "SYS_SETAUID", + "SYS_SETCONTEXT", + "SYS_SETDOMAINNAME", + "SYS_SETEGID", + "SYS_SETEUID", + "SYS_SETFIB", + "SYS_SETFSGID", + "SYS_SETFSGID32", + "SYS_SETFSUID", + "SYS_SETFSUID32", + "SYS_SETGID", + "SYS_SETGID32", + "SYS_SETGROUPS", + "SYS_SETGROUPS32", + "SYS_SETHOSTNAME", + "SYS_SETITIMER", + "SYS_SETLCID", + "SYS_SETLOGIN", + "SYS_SETLOGINCLASS", + "SYS_SETNS", + "SYS_SETPGID", + "SYS_SETPRIORITY", + "SYS_SETPRIVEXEC", + "SYS_SETREGID", + "SYS_SETREGID32", + "SYS_SETRESGID", + "SYS_SETRESGID32", + "SYS_SETRESUID", + "SYS_SETRESUID32", + "SYS_SETREUID", + "SYS_SETREUID32", + "SYS_SETRLIMIT", + "SYS_SETRTABLE", + "SYS_SETSGROUPS", + "SYS_SETSID", + "SYS_SETSOCKOPT", + "SYS_SETTID", + "SYS_SETTID_WITH_PID", + "SYS_SETTIMEOFDAY", + "SYS_SETUID", + "SYS_SETUID32", + "SYS_SETWGROUPS", + "SYS_SETXATTR", + "SYS_SET_MEMPOLICY", + "SYS_SET_ROBUST_LIST", + "SYS_SET_THREAD_AREA", + "SYS_SET_TID_ADDRESS", + "SYS_SGETMASK", + "SYS_SHARED_REGION_CHECK_NP", + "SYS_SHARED_REGION_MAP_AND_SLIDE_NP", + "SYS_SHMAT", + 
"SYS_SHMCTL", + "SYS_SHMDT", + "SYS_SHMGET", + "SYS_SHMSYS", + "SYS_SHM_OPEN", + "SYS_SHM_UNLINK", + "SYS_SHUTDOWN", + "SYS_SIGACTION", + "SYS_SIGALTSTACK", + "SYS_SIGNAL", + "SYS_SIGNALFD", + "SYS_SIGNALFD4", + "SYS_SIGPENDING", + "SYS_SIGPROCMASK", + "SYS_SIGQUEUE", + "SYS_SIGQUEUEINFO", + "SYS_SIGRETURN", + "SYS_SIGSUSPEND", + "SYS_SIGSUSPEND_NOCANCEL", + "SYS_SIGTIMEDWAIT", + "SYS_SIGWAIT", + "SYS_SIGWAITINFO", + "SYS_SOCKET", + "SYS_SOCKETCALL", + "SYS_SOCKETPAIR", + "SYS_SPLICE", + "SYS_SSETMASK", + "SYS_SSTK", + "SYS_STACK_SNAPSHOT", + "SYS_STAT", + "SYS_STAT64", + "SYS_STAT64_EXTENDED", + "SYS_STATFS", + "SYS_STATFS64", + "SYS_STATV", + "SYS_STATVFS1", + "SYS_STAT_EXTENDED", + "SYS_STIME", + "SYS_STTY", + "SYS_SWAPCONTEXT", + "SYS_SWAPCTL", + "SYS_SWAPOFF", + "SYS_SWAPON", + "SYS_SYMLINK", + "SYS_SYMLINKAT", + "SYS_SYNC", + "SYS_SYNCFS", + "SYS_SYNC_FILE_RANGE", + "SYS_SYSARCH", + "SYS_SYSCALL", + "SYS_SYSCALL_BASE", + "SYS_SYSFS", + "SYS_SYSINFO", + "SYS_SYSLOG", + "SYS_TEE", + "SYS_TGKILL", + "SYS_THREAD_SELFID", + "SYS_THR_CREATE", + "SYS_THR_EXIT", + "SYS_THR_KILL", + "SYS_THR_KILL2", + "SYS_THR_NEW", + "SYS_THR_SELF", + "SYS_THR_SET_NAME", + "SYS_THR_SUSPEND", + "SYS_THR_WAKE", + "SYS_TIME", + "SYS_TIMERFD_CREATE", + "SYS_TIMERFD_GETTIME", + "SYS_TIMERFD_SETTIME", + "SYS_TIMER_CREATE", + "SYS_TIMER_DELETE", + "SYS_TIMER_GETOVERRUN", + "SYS_TIMER_GETTIME", + "SYS_TIMER_SETTIME", + "SYS_TIMES", + "SYS_TKILL", + "SYS_TRUNCATE", + "SYS_TRUNCATE64", + "SYS_TUXCALL", + "SYS_UGETRLIMIT", + "SYS_ULIMIT", + "SYS_UMASK", + "SYS_UMASK_EXTENDED", + "SYS_UMOUNT", + "SYS_UMOUNT2", + "SYS_UNAME", + "SYS_UNDELETE", + "SYS_UNLINK", + "SYS_UNLINKAT", + "SYS_UNMOUNT", + "SYS_UNSHARE", + "SYS_USELIB", + "SYS_USTAT", + "SYS_UTIME", + "SYS_UTIMENSAT", + "SYS_UTIMES", + "SYS_UTRACE", + "SYS_UUIDGEN", + "SYS_VADVISE", + "SYS_VFORK", + "SYS_VHANGUP", + "SYS_VM86", + "SYS_VM86OLD", + "SYS_VMSPLICE", + "SYS_VM_PRESSURE_MONITOR", + "SYS_VSERVER", + "SYS_WAIT4", + "SYS_WAIT4_NOCANCEL", + "SYS_WAIT6", + "SYS_WAITEVENT", + "SYS_WAITID", + "SYS_WAITID_NOCANCEL", + "SYS_WAITPID", + "SYS_WATCHEVENT", + "SYS_WORKQ_KERNRETURN", + "SYS_WORKQ_OPEN", + "SYS_WRITE", + "SYS_WRITEV", + "SYS_WRITEV_NOCANCEL", + "SYS_WRITE_NOCANCEL", + "SYS_YIELD", + "SYS__LLSEEK", + "SYS__LWP_CONTINUE", + "SYS__LWP_CREATE", + "SYS__LWP_CTL", + "SYS__LWP_DETACH", + "SYS__LWP_EXIT", + "SYS__LWP_GETNAME", + "SYS__LWP_GETPRIVATE", + "SYS__LWP_KILL", + "SYS__LWP_PARK", + "SYS__LWP_SELF", + "SYS__LWP_SETNAME", + "SYS__LWP_SETPRIVATE", + "SYS__LWP_SUSPEND", + "SYS__LWP_UNPARK", + "SYS__LWP_UNPARK_ALL", + "SYS__LWP_WAIT", + "SYS__LWP_WAKEUP", + "SYS__NEWSELECT", + "SYS__PSET_BIND", + "SYS__SCHED_GETAFFINITY", + "SYS__SCHED_GETPARAM", + "SYS__SCHED_SETAFFINITY", + "SYS__SCHED_SETPARAM", + "SYS__SYSCTL", + "SYS__UMTX_LOCK", + "SYS__UMTX_OP", + "SYS__UMTX_UNLOCK", + "SYS___ACL_ACLCHECK_FD", + "SYS___ACL_ACLCHECK_FILE", + "SYS___ACL_ACLCHECK_LINK", + "SYS___ACL_DELETE_FD", + "SYS___ACL_DELETE_FILE", + "SYS___ACL_DELETE_LINK", + "SYS___ACL_GET_FD", + "SYS___ACL_GET_FILE", + "SYS___ACL_GET_LINK", + "SYS___ACL_SET_FD", + "SYS___ACL_SET_FILE", + "SYS___ACL_SET_LINK", + "SYS___CLONE", + "SYS___DISABLE_THREADSIGNAL", + "SYS___GETCWD", + "SYS___GETLOGIN", + "SYS___GET_TCB", + "SYS___MAC_EXECVE", + "SYS___MAC_GETFSSTAT", + "SYS___MAC_GET_FD", + "SYS___MAC_GET_FILE", + "SYS___MAC_GET_LCID", + "SYS___MAC_GET_LCTX", + "SYS___MAC_GET_LINK", + "SYS___MAC_GET_MOUNT", + "SYS___MAC_GET_PID", + "SYS___MAC_GET_PROC", + "SYS___MAC_MOUNT", + "SYS___MAC_SET_FD", + 
"SYS___MAC_SET_FILE", + "SYS___MAC_SET_LCTX", + "SYS___MAC_SET_LINK", + "SYS___MAC_SET_PROC", + "SYS___MAC_SYSCALL", + "SYS___OLD_SEMWAIT_SIGNAL", + "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", + "SYS___POSIX_CHOWN", + "SYS___POSIX_FCHOWN", + "SYS___POSIX_LCHOWN", + "SYS___POSIX_RENAME", + "SYS___PTHREAD_CANCELED", + "SYS___PTHREAD_CHDIR", + "SYS___PTHREAD_FCHDIR", + "SYS___PTHREAD_KILL", + "SYS___PTHREAD_MARKCANCEL", + "SYS___PTHREAD_SIGMASK", + "SYS___QUOTACTL", + "SYS___SEMCTL", + "SYS___SEMWAIT_SIGNAL", + "SYS___SEMWAIT_SIGNAL_NOCANCEL", + "SYS___SETLOGIN", + "SYS___SETUGID", + "SYS___SET_TCB", + "SYS___SIGACTION_SIGTRAMP", + "SYS___SIGTIMEDWAIT", + "SYS___SIGWAIT", + "SYS___SIGWAIT_NOCANCEL", + "SYS___SYSCTL", + "SYS___TFORK", + "SYS___THREXIT", + "SYS___THRSIGDIVERT", + "SYS___THRSLEEP", + "SYS___THRWAKEUP", + "S_ARCH1", + "S_ARCH2", + "S_BLKSIZE", + "S_IEXEC", + "S_IFBLK", + "S_IFCHR", + "S_IFDIR", + "S_IFIFO", + "S_IFLNK", + "S_IFMT", + "S_IFREG", + "S_IFSOCK", + "S_IFWHT", + "S_IREAD", + "S_IRGRP", + "S_IROTH", + "S_IRUSR", + "S_IRWXG", + "S_IRWXO", + "S_IRWXU", + "S_ISGID", + "S_ISTXT", + "S_ISUID", + "S_ISVTX", + "S_IWGRP", + "S_IWOTH", + "S_IWRITE", + "S_IWUSR", + "S_IXGRP", + "S_IXOTH", + "S_IXUSR", + "S_LOGIN_SET", + "SecurityAttributes", + "Seek", + "Select", + "Sendfile", + "Sendmsg", + "SendmsgN", + "Sendto", + "Servent", + "SetBpf", + "SetBpfBuflen", + "SetBpfDatalink", + "SetBpfHeadercmpl", + "SetBpfImmediate", + "SetBpfInterface", + "SetBpfPromisc", + "SetBpfTimeout", + "SetCurrentDirectory", + "SetEndOfFile", + "SetEnvironmentVariable", + "SetFileAttributes", + "SetFileCompletionNotificationModes", + "SetFilePointer", + "SetFileTime", + "SetHandleInformation", + "SetKevent", + "SetLsfPromisc", + "SetNonblock", + "Setdomainname", + "Setegid", + "Setenv", + "Seteuid", + "Setfsgid", + "Setfsuid", + "Setgid", + "Setgroups", + "Sethostname", + "Setlogin", + "Setpgid", + "Setpriority", + "Setprivexec", + "Setregid", + "Setresgid", + "Setresuid", + "Setreuid", + "Setrlimit", + "Setsid", + "Setsockopt", + "SetsockoptByte", + "SetsockoptICMPv6Filter", + "SetsockoptIPMreq", + "SetsockoptIPMreqn", + "SetsockoptIPv6Mreq", + "SetsockoptInet4Addr", + "SetsockoptInt", + "SetsockoptLinger", + "SetsockoptString", + "SetsockoptTimeval", + "Settimeofday", + "Setuid", + "Setxattr", + "Shutdown", + "SidTypeAlias", + "SidTypeComputer", + "SidTypeDeletedAccount", + "SidTypeDomain", + "SidTypeGroup", + "SidTypeInvalid", + "SidTypeLabel", + "SidTypeUnknown", + "SidTypeUser", + "SidTypeWellKnownGroup", + "Signal", + "SizeofBpfHdr", + "SizeofBpfInsn", + "SizeofBpfProgram", + "SizeofBpfStat", + "SizeofBpfVersion", + "SizeofBpfZbuf", + "SizeofBpfZbufHeader", + "SizeofCmsghdr", + "SizeofICMPv6Filter", + "SizeofIPMreq", + "SizeofIPMreqn", + "SizeofIPv6MTUInfo", + "SizeofIPv6Mreq", + "SizeofIfAddrmsg", + "SizeofIfAnnounceMsghdr", + "SizeofIfData", + "SizeofIfInfomsg", + "SizeofIfMsghdr", + "SizeofIfaMsghdr", + "SizeofIfmaMsghdr", + "SizeofIfmaMsghdr2", + "SizeofInet4Pktinfo", + "SizeofInet6Pktinfo", + "SizeofInotifyEvent", + "SizeofLinger", + "SizeofMsghdr", + "SizeofNlAttr", + "SizeofNlMsgerr", + "SizeofNlMsghdr", + "SizeofRtAttr", + "SizeofRtGenmsg", + "SizeofRtMetrics", + "SizeofRtMsg", + "SizeofRtMsghdr", + "SizeofRtNexthop", + "SizeofSockFilter", + "SizeofSockFprog", + "SizeofSockaddrAny", + "SizeofSockaddrDatalink", + "SizeofSockaddrInet4", + "SizeofSockaddrInet6", + "SizeofSockaddrLinklayer", + "SizeofSockaddrNetlink", + "SizeofSockaddrUnix", + "SizeofTCPInfo", + "SizeofUcred", + 
"SlicePtrFromStrings", + "SockFilter", + "SockFprog", + "Sockaddr", + "SockaddrDatalink", + "SockaddrGen", + "SockaddrInet4", + "SockaddrInet6", + "SockaddrLinklayer", + "SockaddrNetlink", + "SockaddrUnix", + "Socket", + "SocketControlMessage", + "SocketDisableIPv6", + "Socketpair", + "Splice", + "StartProcess", + "StartupInfo", + "Stat", + "Stat_t", + "Statfs", + "Statfs_t", + "Stderr", + "Stdin", + "Stdout", + "StringBytePtr", + "StringByteSlice", + "StringSlicePtr", + "StringToSid", + "StringToUTF16", + "StringToUTF16Ptr", + "Symlink", + "Sync", + "SyncFileRange", + "SysProcAttr", + "SysProcIDMap", + "Syscall", + "Syscall12", + "Syscall15", + "Syscall18", + "Syscall6", + "Syscall9", + "SyscallN", + "Sysctl", + "SysctlUint32", + "Sysctlnode", + "Sysinfo", + "Sysinfo_t", + "Systemtime", + "TCGETS", + "TCIFLUSH", + "TCIOFLUSH", + "TCOFLUSH", + "TCPInfo", + "TCPKeepalive", + "TCP_CA_NAME_MAX", + "TCP_CONGCTL", + "TCP_CONGESTION", + "TCP_CONNECTIONTIMEOUT", + "TCP_CORK", + "TCP_DEFER_ACCEPT", + "TCP_ENABLE_ECN", + "TCP_INFO", + "TCP_KEEPALIVE", + "TCP_KEEPCNT", + "TCP_KEEPIDLE", + "TCP_KEEPINIT", + "TCP_KEEPINTVL", + "TCP_LINGER2", + "TCP_MAXBURST", + "TCP_MAXHLEN", + "TCP_MAXOLEN", + "TCP_MAXSEG", + "TCP_MAXWIN", + "TCP_MAX_SACK", + "TCP_MAX_WINSHIFT", + "TCP_MD5SIG", + "TCP_MD5SIG_MAXKEYLEN", + "TCP_MINMSS", + "TCP_MINMSSOVERLOAD", + "TCP_MSS", + "TCP_NODELAY", + "TCP_NOOPT", + "TCP_NOPUSH", + "TCP_NOTSENT_LOWAT", + "TCP_NSTATES", + "TCP_QUICKACK", + "TCP_RXT_CONNDROPTIME", + "TCP_RXT_FINDROP", + "TCP_SACK_ENABLE", + "TCP_SENDMOREACKS", + "TCP_SYNCNT", + "TCP_VENDOR", + "TCP_WINDOW_CLAMP", + "TCSAFLUSH", + "TCSETS", + "TF_DISCONNECT", + "TF_REUSE_SOCKET", + "TF_USE_DEFAULT_WORKER", + "TF_USE_KERNEL_APC", + "TF_USE_SYSTEM_THREAD", + "TF_WRITE_BEHIND", + "TH32CS_INHERIT", + "TH32CS_SNAPALL", + "TH32CS_SNAPHEAPLIST", + "TH32CS_SNAPMODULE", + "TH32CS_SNAPMODULE32", + "TH32CS_SNAPPROCESS", + "TH32CS_SNAPTHREAD", + "TIME_ZONE_ID_DAYLIGHT", + "TIME_ZONE_ID_STANDARD", + "TIME_ZONE_ID_UNKNOWN", + "TIOCCBRK", + "TIOCCDTR", + "TIOCCONS", + "TIOCDCDTIMESTAMP", + "TIOCDRAIN", + "TIOCDSIMICROCODE", + "TIOCEXCL", + "TIOCEXT", + "TIOCFLAG_CDTRCTS", + "TIOCFLAG_CLOCAL", + "TIOCFLAG_CRTSCTS", + "TIOCFLAG_MDMBUF", + "TIOCFLAG_PPS", + "TIOCFLAG_SOFTCAR", + "TIOCFLUSH", + "TIOCGDEV", + "TIOCGDRAINWAIT", + "TIOCGETA", + "TIOCGETD", + "TIOCGFLAGS", + "TIOCGICOUNT", + "TIOCGLCKTRMIOS", + "TIOCGLINED", + "TIOCGPGRP", + "TIOCGPTN", + "TIOCGQSIZE", + "TIOCGRANTPT", + "TIOCGRS485", + "TIOCGSERIAL", + "TIOCGSID", + "TIOCGSIZE", + "TIOCGSOFTCAR", + "TIOCGTSTAMP", + "TIOCGWINSZ", + "TIOCINQ", + "TIOCIXOFF", + "TIOCIXON", + "TIOCLINUX", + "TIOCMBIC", + "TIOCMBIS", + "TIOCMGDTRWAIT", + "TIOCMGET", + "TIOCMIWAIT", + "TIOCMODG", + "TIOCMODS", + "TIOCMSDTRWAIT", + "TIOCMSET", + "TIOCM_CAR", + "TIOCM_CD", + "TIOCM_CTS", + "TIOCM_DCD", + "TIOCM_DSR", + "TIOCM_DTR", + "TIOCM_LE", + "TIOCM_RI", + "TIOCM_RNG", + "TIOCM_RTS", + "TIOCM_SR", + "TIOCM_ST", + "TIOCNOTTY", + "TIOCNXCL", + "TIOCOUTQ", + "TIOCPKT", + "TIOCPKT_DATA", + "TIOCPKT_DOSTOP", + "TIOCPKT_FLUSHREAD", + "TIOCPKT_FLUSHWRITE", + "TIOCPKT_IOCTL", + "TIOCPKT_NOSTOP", + "TIOCPKT_START", + "TIOCPKT_STOP", + "TIOCPTMASTER", + "TIOCPTMGET", + "TIOCPTSNAME", + "TIOCPTYGNAME", + "TIOCPTYGRANT", + "TIOCPTYUNLK", + "TIOCRCVFRAME", + "TIOCREMOTE", + "TIOCSBRK", + "TIOCSCONS", + "TIOCSCTTY", + "TIOCSDRAINWAIT", + "TIOCSDTR", + "TIOCSERCONFIG", + "TIOCSERGETLSR", + "TIOCSERGETMULTI", + "TIOCSERGSTRUCT", + "TIOCSERGWILD", + "TIOCSERSETMULTI", + "TIOCSERSWILD", + "TIOCSER_TEMT", + 
"TIOCSETA", + "TIOCSETAF", + "TIOCSETAW", + "TIOCSETD", + "TIOCSFLAGS", + "TIOCSIG", + "TIOCSLCKTRMIOS", + "TIOCSLINED", + "TIOCSPGRP", + "TIOCSPTLCK", + "TIOCSQSIZE", + "TIOCSRS485", + "TIOCSSERIAL", + "TIOCSSIZE", + "TIOCSSOFTCAR", + "TIOCSTART", + "TIOCSTAT", + "TIOCSTI", + "TIOCSTOP", + "TIOCSTSTAMP", + "TIOCSWINSZ", + "TIOCTIMESTAMP", + "TIOCUCNTL", + "TIOCVHANGUP", + "TIOCXMTFRAME", + "TOKEN_ADJUST_DEFAULT", + "TOKEN_ADJUST_GROUPS", + "TOKEN_ADJUST_PRIVILEGES", + "TOKEN_ADJUST_SESSIONID", + "TOKEN_ALL_ACCESS", + "TOKEN_ASSIGN_PRIMARY", + "TOKEN_DUPLICATE", + "TOKEN_EXECUTE", + "TOKEN_IMPERSONATE", + "TOKEN_QUERY", + "TOKEN_QUERY_SOURCE", + "TOKEN_READ", + "TOKEN_WRITE", + "TOSTOP", + "TRUNCATE_EXISTING", + "TUNATTACHFILTER", + "TUNDETACHFILTER", + "TUNGETFEATURES", + "TUNGETIFF", + "TUNGETSNDBUF", + "TUNGETVNETHDRSZ", + "TUNSETDEBUG", + "TUNSETGROUP", + "TUNSETIFF", + "TUNSETLINK", + "TUNSETNOCSUM", + "TUNSETOFFLOAD", + "TUNSETOWNER", + "TUNSETPERSIST", + "TUNSETSNDBUF", + "TUNSETTXFILTER", + "TUNSETVNETHDRSZ", + "Tee", + "TerminateProcess", + "Termios", + "Tgkill", + "Time", + "Time_t", + "Times", + "Timespec", + "TimespecToNsec", + "Timeval", + "Timeval32", + "TimevalToNsec", + "Timex", + "Timezoneinformation", + "Tms", + "Token", + "TokenAccessInformation", + "TokenAuditPolicy", + "TokenDefaultDacl", + "TokenElevation", + "TokenElevationType", + "TokenGroups", + "TokenGroupsAndPrivileges", + "TokenHasRestrictions", + "TokenImpersonationLevel", + "TokenIntegrityLevel", + "TokenLinkedToken", + "TokenLogonSid", + "TokenMandatoryPolicy", + "TokenOrigin", + "TokenOwner", + "TokenPrimaryGroup", + "TokenPrivileges", + "TokenRestrictedSids", + "TokenSandBoxInert", + "TokenSessionId", + "TokenSessionReference", + "TokenSource", + "TokenStatistics", + "TokenType", + "TokenUIAccess", + "TokenUser", + "TokenVirtualizationAllowed", + "TokenVirtualizationEnabled", + "Tokenprimarygroup", + "Tokenuser", + "TranslateAccountName", + "TranslateName", + "TransmitFile", + "TransmitFileBuffers", + "Truncate", + "UNIX_PATH_MAX", + "USAGE_MATCH_TYPE_AND", + "USAGE_MATCH_TYPE_OR", + "UTF16FromString", + "UTF16PtrFromString", + "UTF16ToString", + "Ucred", + "Umask", + "Uname", + "Undelete", + "UnixCredentials", + "UnixRights", + "Unlink", + "Unlinkat", + "UnmapViewOfFile", + "Unmount", + "Unsetenv", + "Unshare", + "UserInfo10", + "Ustat", + "Ustat_t", + "Utimbuf", + "Utime", + "Utimes", + "UtimesNano", + "Utsname", + "VDISCARD", + "VDSUSP", + "VEOF", + "VEOL", + "VEOL2", + "VERASE", + "VERASE2", + "VINTR", + "VKILL", + "VLNEXT", + "VMIN", + "VQUIT", + "VREPRINT", + "VSTART", + "VSTATUS", + "VSTOP", + "VSUSP", + "VSWTC", + "VT0", + "VT1", + "VTDLY", + "VTIME", + "VWERASE", + "VirtualLock", + "VirtualUnlock", + "WAIT_ABANDONED", + "WAIT_FAILED", + "WAIT_OBJECT_0", + "WAIT_TIMEOUT", + "WALL", + "WALLSIG", + "WALTSIG", + "WCLONE", + "WCONTINUED", + "WCOREFLAG", + "WEXITED", + "WLINUXCLONE", + "WNOHANG", + "WNOTHREAD", + "WNOWAIT", + "WNOZOMBIE", + "WOPTSCHECKED", + "WORDSIZE", + "WSABuf", + "WSACleanup", + "WSADESCRIPTION_LEN", + "WSAData", + "WSAEACCES", + "WSAECONNABORTED", + "WSAECONNRESET", + "WSAEnumProtocols", + "WSAID_CONNECTEX", + "WSAIoctl", + "WSAPROTOCOL_LEN", + "WSAProtocolChain", + "WSAProtocolInfo", + "WSARecv", + "WSARecvFrom", + "WSASYS_STATUS_LEN", + "WSASend", + "WSASendTo", + "WSASendto", + "WSAStartup", + "WSTOPPED", + "WTRAPPED", + "WUNTRACED", + "Wait4", + "WaitForSingleObject", + "WaitStatus", + "Win32FileAttributeData", + "Win32finddata", + "Write", + "WriteConsole", + "WriteFile", + 
"X509_ASN_ENCODING", + "XCASE", + "XP1_CONNECTIONLESS", + "XP1_CONNECT_DATA", + "XP1_DISCONNECT_DATA", + "XP1_EXPEDITED_DATA", + "XP1_GRACEFUL_CLOSE", + "XP1_GUARANTEED_DELIVERY", + "XP1_GUARANTEED_ORDER", + "XP1_IFS_HANDLES", + "XP1_MESSAGE_ORIENTED", + "XP1_MULTIPOINT_CONTROL_PLANE", + "XP1_MULTIPOINT_DATA_PLANE", + "XP1_PARTIAL_MESSAGE", + "XP1_PSEUDO_STREAM", + "XP1_QOS_SUPPORTED", + "XP1_SAN_SUPPORT_SDP", + "XP1_SUPPORT_BROADCAST", + "XP1_SUPPORT_MULTIPOINT", + "XP1_UNI_RECV", + "XP1_UNI_SEND", + }, + "syscall/js": { + "CopyBytesToGo", + "CopyBytesToJS", + "Error", + "Func", + "FuncOf", + "Global", + "Null", + "Type", + "TypeBoolean", + "TypeFunction", + "TypeNull", + "TypeNumber", + "TypeObject", + "TypeString", + "TypeSymbol", + "TypeUndefined", + "Undefined", + "Value", + "ValueError", + "ValueOf", + }, + "testing": { + "AllocsPerRun", + "B", + "Benchmark", + "BenchmarkResult", + "Cover", + "CoverBlock", + "CoverMode", + "Coverage", + "F", + "Init", + "InternalBenchmark", + "InternalExample", + "InternalFuzzTarget", + "InternalTest", + "M", + "Main", + "MainStart", + "PB", + "RegisterCover", + "RunBenchmarks", + "RunExamples", + "RunTests", + "Short", + "T", + "TB", + "Verbose", + }, + "testing/fstest": { + "MapFS", + "MapFile", + "TestFS", + }, + "testing/iotest": { + "DataErrReader", + "ErrReader", + "ErrTimeout", + "HalfReader", + "NewReadLogger", + "NewWriteLogger", + "OneByteReader", + "TestReader", + "TimeoutReader", + "TruncateWriter", + }, + "testing/quick": { + "Check", + "CheckEqual", + "CheckEqualError", + "CheckError", + "Config", + "Generator", + "SetupError", + "Value", + }, + "text/scanner": { + "Char", + "Comment", + "EOF", + "Float", + "GoTokens", + "GoWhitespace", + "Ident", + "Int", + "Position", + "RawString", + "ScanChars", + "ScanComments", + "ScanFloats", + "ScanIdents", + "ScanInts", + "ScanRawStrings", + "ScanStrings", + "Scanner", + "SkipComments", + "String", + "TokenString", + }, + "text/tabwriter": { + "AlignRight", + "Debug", + "DiscardEmptyColumns", + "Escape", + "FilterHTML", + "NewWriter", + "StripEscape", + "TabIndent", + "Writer", + }, + "text/template": { + "ExecError", + "FuncMap", + "HTMLEscape", + "HTMLEscapeString", + "HTMLEscaper", + "IsTrue", + "JSEscape", + "JSEscapeString", + "JSEscaper", + "Must", + "New", + "ParseFS", + "ParseFiles", + "ParseGlob", + "Template", + "URLQueryEscaper", + }, + "text/template/parse": { + "ActionNode", + "BoolNode", + "BranchNode", + "BreakNode", + "ChainNode", + "CommandNode", + "CommentNode", + "ContinueNode", + "DotNode", + "FieldNode", + "IdentifierNode", + "IfNode", + "IsEmptyTree", + "ListNode", + "Mode", + "New", + "NewIdentifier", + "NilNode", + "Node", + "NodeAction", + "NodeBool", + "NodeBreak", + "NodeChain", + "NodeCommand", + "NodeComment", + "NodeContinue", + "NodeDot", + "NodeField", + "NodeIdentifier", + "NodeIf", + "NodeList", + "NodeNil", + "NodeNumber", + "NodePipe", + "NodeRange", + "NodeString", + "NodeTemplate", + "NodeText", + "NodeType", + "NodeVariable", + "NodeWith", + "NumberNode", + "Parse", + "ParseComments", + "PipeNode", + "Pos", + "RangeNode", + "SkipFuncCheck", + "StringNode", + "TemplateNode", + "TextNode", + "Tree", + "VariableNode", + "WithNode", + }, + "time": { + "ANSIC", + "After", + "AfterFunc", + "April", + "August", + "Date", + "DateOnly", + "DateTime", + "December", + "Duration", + "February", + "FixedZone", + "Friday", + "Hour", + "January", + "July", + "June", + "Kitchen", + "Layout", + "LoadLocation", + "LoadLocationFromTZData", + "Local", + "Location", + "March", 
+ "May", + "Microsecond", + "Millisecond", + "Minute", + "Monday", + "Month", + "Nanosecond", + "NewTicker", + "NewTimer", + "November", + "Now", + "October", + "Parse", + "ParseDuration", + "ParseError", + "ParseInLocation", + "RFC1123", + "RFC1123Z", + "RFC3339", + "RFC3339Nano", + "RFC822", + "RFC822Z", + "RFC850", + "RubyDate", + "Saturday", + "Second", + "September", + "Since", + "Sleep", + "Stamp", + "StampMicro", + "StampMilli", + "StampNano", + "Sunday", + "Thursday", + "Tick", + "Ticker", + "Time", + "TimeOnly", + "Timer", + "Tuesday", + "UTC", + "Unix", + "UnixDate", + "UnixMicro", + "UnixMilli", + "Until", + "Wednesday", + "Weekday", + }, + "unicode": { + "ASCII_Hex_Digit", + "Adlam", + "Ahom", + "Anatolian_Hieroglyphs", + "Arabic", + "Armenian", + "Avestan", + "AzeriCase", + "Balinese", + "Bamum", + "Bassa_Vah", + "Batak", + "Bengali", + "Bhaiksuki", + "Bidi_Control", + "Bopomofo", + "Brahmi", + "Braille", + "Buginese", + "Buhid", + "C", + "Canadian_Aboriginal", + "Carian", + "CaseRange", + "CaseRanges", + "Categories", + "Caucasian_Albanian", + "Cc", + "Cf", + "Chakma", + "Cham", + "Cherokee", + "Chorasmian", + "Co", + "Common", + "Coptic", + "Cs", + "Cuneiform", + "Cypriot", + "Cyrillic", + "Dash", + "Deprecated", + "Deseret", + "Devanagari", + "Diacritic", + "Digit", + "Dives_Akuru", + "Dogra", + "Duployan", + "Egyptian_Hieroglyphs", + "Elbasan", + "Elymaic", + "Ethiopic", + "Extender", + "FoldCategory", + "FoldScript", + "Georgian", + "Glagolitic", + "Gothic", + "Grantha", + "GraphicRanges", + "Greek", + "Gujarati", + "Gunjala_Gondi", + "Gurmukhi", + "Han", + "Hangul", + "Hanifi_Rohingya", + "Hanunoo", + "Hatran", + "Hebrew", + "Hex_Digit", + "Hiragana", + "Hyphen", + "IDS_Binary_Operator", + "IDS_Trinary_Operator", + "Ideographic", + "Imperial_Aramaic", + "In", + "Inherited", + "Inscriptional_Pahlavi", + "Inscriptional_Parthian", + "Is", + "IsControl", + "IsDigit", + "IsGraphic", + "IsLetter", + "IsLower", + "IsMark", + "IsNumber", + "IsOneOf", + "IsPrint", + "IsPunct", + "IsSpace", + "IsSymbol", + "IsTitle", + "IsUpper", + "Javanese", + "Join_Control", + "Kaithi", + "Kannada", + "Katakana", + "Kayah_Li", + "Kharoshthi", + "Khitan_Small_Script", + "Khmer", + "Khojki", + "Khudawadi", + "L", + "Lao", + "Latin", + "Lepcha", + "Letter", + "Limbu", + "Linear_A", + "Linear_B", + "Lisu", + "Ll", + "Lm", + "Lo", + "Logical_Order_Exception", + "Lower", + "LowerCase", + "Lt", + "Lu", + "Lycian", + "Lydian", + "M", + "Mahajani", + "Makasar", + "Malayalam", + "Mandaic", + "Manichaean", + "Marchen", + "Mark", + "Masaram_Gondi", + "MaxASCII", + "MaxCase", + "MaxLatin1", + "MaxRune", + "Mc", + "Me", + "Medefaidrin", + "Meetei_Mayek", + "Mende_Kikakui", + "Meroitic_Cursive", + "Meroitic_Hieroglyphs", + "Miao", + "Mn", + "Modi", + "Mongolian", + "Mro", + "Multani", + "Myanmar", + "N", + "Nabataean", + "Nandinagari", + "Nd", + "New_Tai_Lue", + "Newa", + "Nko", + "Nl", + "No", + "Noncharacter_Code_Point", + "Number", + "Nushu", + "Nyiakeng_Puachue_Hmong", + "Ogham", + "Ol_Chiki", + "Old_Hungarian", + "Old_Italic", + "Old_North_Arabian", + "Old_Permic", + "Old_Persian", + "Old_Sogdian", + "Old_South_Arabian", + "Old_Turkic", + "Oriya", + "Osage", + "Osmanya", + "Other", + "Other_Alphabetic", + "Other_Default_Ignorable_Code_Point", + "Other_Grapheme_Extend", + "Other_ID_Continue", + "Other_ID_Start", + "Other_Lowercase", + "Other_Math", + "Other_Uppercase", + "P", + "Pahawh_Hmong", + "Palmyrene", + "Pattern_Syntax", + "Pattern_White_Space", + "Pau_Cin_Hau", + "Pc", + "Pd", + "Pe", + "Pf", + 
"Phags_Pa", + "Phoenician", + "Pi", + "Po", + "Prepended_Concatenation_Mark", + "PrintRanges", + "Properties", + "Ps", + "Psalter_Pahlavi", + "Punct", + "Quotation_Mark", + "Radical", + "Range16", + "Range32", + "RangeTable", + "Regional_Indicator", + "Rejang", + "ReplacementChar", + "Runic", + "S", + "STerm", + "Samaritan", + "Saurashtra", + "Sc", + "Scripts", + "Sentence_Terminal", + "Sharada", + "Shavian", + "Siddham", + "SignWriting", + "SimpleFold", + "Sinhala", + "Sk", + "Sm", + "So", + "Soft_Dotted", + "Sogdian", + "Sora_Sompeng", + "Soyombo", + "Space", + "SpecialCase", + "Sundanese", + "Syloti_Nagri", + "Symbol", + "Syriac", + "Tagalog", + "Tagbanwa", + "Tai_Le", + "Tai_Tham", + "Tai_Viet", + "Takri", + "Tamil", + "Tangut", + "Telugu", + "Terminal_Punctuation", + "Thaana", + "Thai", + "Tibetan", + "Tifinagh", + "Tirhuta", + "Title", + "TitleCase", + "To", + "ToLower", + "ToTitle", + "ToUpper", + "TurkishCase", + "Ugaritic", + "Unified_Ideograph", + "Upper", + "UpperCase", + "UpperLower", + "Vai", + "Variation_Selector", + "Version", + "Wancho", + "Warang_Citi", + "White_Space", + "Yezidi", + "Yi", + "Z", + "Zanabazar_Square", + "Zl", + "Zp", + "Zs", + }, + "unicode/utf16": { + "AppendRune", + "Decode", + "DecodeRune", + "Encode", + "EncodeRune", + "IsSurrogate", + }, + "unicode/utf8": { + "AppendRune", + "DecodeLastRune", + "DecodeLastRuneInString", + "DecodeRune", + "DecodeRuneInString", + "EncodeRune", + "FullRune", + "FullRuneInString", + "MaxRune", + "RuneCount", + "RuneCountInString", + "RuneError", + "RuneLen", + "RuneSelf", + "RuneStart", + "UTFMax", + "Valid", + "ValidRune", + "ValidString", + }, + "unsafe": { + "Add", + "Alignof", + "Offsetof", + "Pointer", + "Sizeof", + "Slice", + "SliceData", + "String", + "StringData", + }, +} diff --git a/vendor/modules.txt b/vendor/modules.txt index f3738e1c..b0f4ef91 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,5 +1,7 @@ # github.com/Khan/genqlient v0.6.0 ## explicit; go 1.18 +github.com/Khan/genqlient +github.com/Khan/genqlient/generate github.com/Khan/genqlient/graphql # github.com/Masterminds/goutils v1.1.1 ## explicit @@ -33,6 +35,15 @@ github.com/ProtonMail/go-crypto/openpgp/s2k # github.com/agext/levenshtein v1.2.3 ## explicit github.com/agext/levenshtein +# github.com/agnivade/levenshtein v1.1.1 +## explicit; go 1.13 +github.com/agnivade/levenshtein +# github.com/alexflint/go-arg v1.4.2 +## explicit; go 1.13 +github.com/alexflint/go-arg +# github.com/alexflint/go-scalar v1.0.0 +## explicit +github.com/alexflint/go-scalar # github.com/apparentlymart/go-textseg/v13 v13.0.0 ## explicit; go 1.16 github.com/apparentlymart/go-textseg/v13/textseg @@ -269,7 +280,12 @@ github.com/spf13/cast # github.com/vektah/gqlparser/v2 v2.5.1 ## explicit; go 1.16 github.com/vektah/gqlparser/v2/ast +github.com/vektah/gqlparser/v2/formatter github.com/vektah/gqlparser/v2/gqlerror +github.com/vektah/gqlparser/v2/lexer +github.com/vektah/gqlparser/v2/parser +github.com/vektah/gqlparser/v2/validator +github.com/vektah/gqlparser/v2/validator/rules # github.com/vmihailenco/msgpack v4.0.4+incompatible ## explicit github.com/vmihailenco/msgpack @@ -354,16 +370,21 @@ golang.org/x/text/unicode/bidi golang.org/x/text/unicode/norm # golang.org/x/tools v0.8.0 ## explicit; go 1.18 +golang.org/x/tools/go/ast/astutil golang.org/x/tools/go/gcexportdata golang.org/x/tools/go/internal/packagesdriver golang.org/x/tools/go/packages golang.org/x/tools/go/types/objectpath +golang.org/x/tools/imports golang.org/x/tools/internal/event 
golang.org/x/tools/internal/event/core golang.org/x/tools/internal/event/keys golang.org/x/tools/internal/event/label +golang.org/x/tools/internal/fastwalk golang.org/x/tools/internal/gcimporter golang.org/x/tools/internal/gocommand +golang.org/x/tools/internal/gopathwalk +golang.org/x/tools/internal/imports golang.org/x/tools/internal/packagesinternal golang.org/x/tools/internal/pkgbits golang.org/x/tools/internal/tokeninternal
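
With the genqlient CLI and its transitive dependencies vendored as above (pinned at v0.6.0 per modules.txt), code generation no longer depends on a separately installed binary. A minimal sketch of the usual invocation, assuming a go:generate directive next to the generated client code and a genqlient.yaml config file; the exact directive and config path used in this repository are assumptions, not confirmed by this diff:

    // Hypothetical directive; the actual location and config path may differ.
    //go:generate go run github.com/Khan/genqlient genqlient.yaml

Because Go defaults to -mod=vendor when a vendor directory is present, `go generate ./...` resolves github.com/Khan/genqlient from the vendor tree, so generation is reproducible offline and always runs the pinned version.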