From 4445b056a482dfe89e0fccb7a7deb757da3f77b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20R=C3=BCger?= Date: Mon, 6 Jun 2022 21:44:04 +0200 Subject: [PATCH] go.mod: Bump to opa v0.41 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Manuel Rüger --- constraint/go.mod | 16 +- constraint/go.sum | 51 +- .../agnivade/levenshtein/.gitignore | 5 + .../agnivade/levenshtein/.travis.yml | 7 + .../agnivade/levenshtein/License.txt | 21 + .../github.com/agnivade/levenshtein/Makefile | 13 + .../github.com/agnivade/levenshtein/README.md | 57 + .../agnivade/levenshtein/levenshtein.go | 75 + .../open-policy-agent/opa/ast/builtins.go | 1969 ++++---- .../open-policy-agent/opa/ast/capabilities.go | 3 +- .../open-policy-agent/opa/ast/check.go | 10 +- .../open-policy-agent/opa/ast/compile.go | 2 +- .../opa/ast/location/location.go | 2 +- .../open-policy-agent/opa/ast/parser.go | 3 +- .../open-policy-agent/opa/ast/parser_ext.go | 9 +- .../open-policy-agent/opa/bundle/bundle.go | 55 +- .../open-policy-agent/opa/bundle/file.go | 11 +- .../open-policy-agent/opa/bundle/verify.go | 6 +- .../opa/capabilities/v0.41.0.json | 4007 +++++++++++++++++ .../open-policy-agent/opa/format/format.go | 4 +- .../opa/internal/compiler/wasm/wasm.go | 10 +- .../opa/internal/jwx/buffer/buffer.go | 11 +- .../opa/internal/jwx/jwa/key_type.go | 12 +- .../opa/internal/jwx/jwa/signature.go | 14 +- .../opa/internal/jwx/jwk/ecdsa.go | 18 +- .../opa/internal/jwx/jwk/headers.go | 20 +- .../opa/internal/jwx/jwk/jwk.go | 27 +- .../opa/internal/jwx/jwk/rsa.go | 18 +- .../opa/internal/jwx/jwk/symmetric.go | 4 +- .../opa/internal/jwx/jws/headers.go | 20 +- .../opa/internal/jwx/jws/jws.go | 36 +- .../opa/internal/jwx/jws/sign/ecdsa.go | 12 +- .../opa/internal/jwx/jws/verify/hmac.go | 10 +- .../opa/internal/wasm/encoding/reader.go | 36 +- .../open-policy-agent/opa/topdown/graphql.go | 378 ++ .../opa/topdown/parse_bytes.go | 16 +- .../opa/topdown/parse_units.go | 115 + .../open-policy-agent/opa/topdown/tokens.go | 126 +- .../open-policy-agent/opa/types/types.go | 117 +- .../open-policy-agent/opa/util/json.go | 3 + .../open-policy-agent/opa/version/version.go | 2 +- .../github.com/vektah/gqlparser/v2/.gitignore | 5 + .../github.com/vektah/gqlparser/v2/LICENSE | 19 + .../vektah/gqlparser/v2/ast/argmap.go | 37 + .../vektah/gqlparser/v2/ast/collections.go | 148 + .../vektah/gqlparser/v2/ast/definition.go | 94 + .../vektah/gqlparser/v2/ast/directive.go | 43 + .../vektah/gqlparser/v2/ast/document.go | 79 + .../vektah/gqlparser/v2/ast/dumper.go | 159 + .../vektah/gqlparser/v2/ast/fragment.go | 38 + .../vektah/gqlparser/v2/ast/operation.go | 30 + .../vektah/gqlparser/v2/ast/path.go | 67 + .../vektah/gqlparser/v2/ast/selection.go | 39 + .../vektah/gqlparser/v2/ast/source.go | 19 + .../vektah/gqlparser/v2/ast/type.go | 68 + .../vektah/gqlparser/v2/ast/value.go | 120 + .../vektah/gqlparser/v2/gqlerror/error.go | 145 + .../vektah/gqlparser/v2/gqlparser.go | 42 + .../vektah/gqlparser/v2/lexer/blockstring.go | 58 + .../vektah/gqlparser/v2/lexer/lexer.go | 510 +++ .../vektah/gqlparser/v2/lexer/lexer_test.yml | 672 +++ .../vektah/gqlparser/v2/lexer/token.go | 148 + .../vektah/gqlparser/v2/parser/parser.go | 136 + .../vektah/gqlparser/v2/parser/query.go | 350 ++ .../vektah/gqlparser/v2/parser/query_test.yml | 544 +++ .../vektah/gqlparser/v2/parser/schema.go | 535 +++ .../gqlparser/v2/parser/schema_test.yml | 646 +++ .../github.com/vektah/gqlparser/v2/readme.md | 17 + 
.../vektah/gqlparser/v2/validator/error.go | 55 + .../gqlparser/v2/validator/messaging.go | 39 + .../vektah/gqlparser/v2/validator/prelude.go | 15 + .../gqlparser/v2/validator/prelude.graphql | 121 + .../validator/rules/fields_on_correct_type.go | 94 + .../rules/fragments_on_composite_types.go | 39 + .../validator/rules/known_argument_names.go | 57 + .../v2/validator/rules/known_directives.go | 47 + .../validator/rules/known_fragment_names.go | 19 + .../v2/validator/rules/known_type_names.go | 59 + .../rules/lone_anonymous_operation.go | 19 + .../v2/validator/rules/no_fragment_cycles.go | 93 + .../validator/rules/no_undefined_variables.go | 28 + .../v2/validator/rules/no_unused_fragments.go | 30 + .../v2/validator/rules/no_unused_variables.go | 30 + .../rules/overlapping_fields_can_be_merged.go | 560 +++ .../rules/possible_fragment_spreads.go | 68 + .../rules/provided_required_arguments.go | 62 + .../v2/validator/rules/scalar_leafs.go | 36 + .../rules/single_field_subscriptions.go | 86 + .../validator/rules/unique_argument_names.go | 33 + .../rules/unique_directives_per_location.go | 24 + .../validator/rules/unique_fragment_names.go | 22 + .../rules/unique_input_field_names.go | 27 + .../validator/rules/unique_operation_names.go | 22 + .../validator/rules/unique_variable_names.go | 24 + .../validator/rules/values_of_correct_type.go | 168 + .../rules/variables_are_input_types.go | 28 + .../rules/variables_in_allowed_position.go | 38 + .../vektah/gqlparser/v2/validator/schema.go | 490 ++ .../gqlparser/v2/validator/schema_test.yml | 665 +++ .../gqlparser/v2/validator/suggestionList.go | 69 + .../gqlparser/v2/validator/validator.go | 44 + .../vektah/gqlparser/v2/validator/vars.go | 258 ++ .../vektah/gqlparser/v2/validator/walk.go | 292 ++ .../google.golang.org/grpc/clientconn.go | 34 +- .../grpc/encoding/encoding.go | 2 +- .../grpc/internal/transport/controlbuf.go | 6 + .../grpc/internal/transport/http2_client.go | 20 +- .../grpc/internal/transport/http2_server.go | 75 +- .../google.golang.org/grpc/picker_wrapper.go | 8 +- .../vendor/google.golang.org/grpc/server.go | 40 +- .../vendor/google.golang.org/grpc/stream.go | 201 +- .../vendor/google.golang.org/grpc/version.go | 2 +- constraint/vendor/modules.txt | 26 +- 113 files changed, 15048 insertions(+), 1226 deletions(-) create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/.gitignore create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/.travis.yml create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/License.txt create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/Makefile create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/README.md create mode 100644 constraint/vendor/github.com/agnivade/levenshtein/levenshtein.go create mode 100644 constraint/vendor/github.com/open-policy-agent/opa/capabilities/v0.41.0.json create mode 100644 constraint/vendor/github.com/open-policy-agent/opa/topdown/graphql.go create mode 100644 constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_units.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/.gitignore create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/LICENSE create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/argmap.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/collections.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/definition.go create mode 100644 
constraint/vendor/github.com/vektah/gqlparser/v2/ast/directive.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/document.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/operation.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/path.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/selection.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/source.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/type.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/ast/value.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/gqlparser.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/lexer/token.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/parser/parser.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/parser/query.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/readme.md create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/error.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go create mode 100644 
constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/validator.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/vars.go create mode 100644 constraint/vendor/github.com/vektah/gqlparser/v2/validator/walk.go diff --git a/constraint/go.mod b/constraint/go.mod index ce50c28d5..bf921c118 100644 --- a/constraint/go.mod +++ b/constraint/go.mod @@ -18,7 +18,7 @@ require ( github.com/davecgh/go-spew v1.1.1 github.com/golang/glog v1.0.0 github.com/google/go-cmp v0.5.8 - github.com/open-policy-agent/opa v0.40.0 + github.com/open-policy-agent/opa v0.41.0 github.com/spf13/cobra v1.4.0 github.com/spf13/pflag v1.0.5 k8s.io/apiextensions-apiserver v0.24.1 @@ -33,6 +33,7 @@ require ( github.com/OneOfOne/xxhash v1.2.8 // indirect github.com/PuerkitoBio/purell v1.1.1 // indirect github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/agnivade/levenshtein v1.0.1 // indirect github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e // indirect github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect github.com/beorn7/perks v1.0.1 // indirect @@ -73,19 +74,20 @@ require ( github.com/prometheus/procfs v0.7.3 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/stoewer/go-strcase v1.2.0 // indirect + github.com/vektah/gqlparser/v2 v2.4.4 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/yashtewari/glob-intersection v0.1.0 // indirect go.opentelemetry.io/contrib v0.20.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0 // indirect - go.opentelemetry.io/otel v1.6.3 // indirect + 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.32.0 // indirect + go.opentelemetry.io/otel v1.7.0 // indirect go.opentelemetry.io/otel/exporters/otlp v0.20.0 // indirect go.opentelemetry.io/otel/metric v0.28.0 // indirect - go.opentelemetry.io/otel/sdk v1.6.3 // indirect + go.opentelemetry.io/otel/sdk v1.7.0 // indirect go.opentelemetry.io/otel/sdk/export/metric v0.20.0 // indirect go.opentelemetry.io/otel/sdk/metric v0.20.0 // indirect - go.opentelemetry.io/otel/trace v1.6.3 // indirect - go.opentelemetry.io/proto/otlp v0.15.0 // indirect + go.opentelemetry.io/otel/trace v1.7.0 // indirect + go.opentelemetry.io/proto/otlp v0.16.0 // indirect golang.org/x/net v0.0.0-20220401154927-543a649e0bdd // indirect golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a // indirect golang.org/x/sys v0.0.0-20220412211240-33da011f77ad // indirect @@ -94,7 +96,7 @@ require ( golang.org/x/time v0.0.0-20220224211638-0e9765cccd65 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220401154344-7eda02b0f484 // indirect - google.golang.org/grpc v1.46.0 // indirect + google.golang.org/grpc v1.47.0 // indirect google.golang.org/protobuf v1.28.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/constraint/go.sum b/constraint/go.sum index a2f274a26..6ab104e30 100644 --- a/constraint/go.sum +++ b/constraint/go.sum @@ -82,7 +82,6 @@ github.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+V github.com/Microsoft/hcsshim v0.8.20/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= github.com/Microsoft/hcsshim v0.8.21/go.mod h1:+w2gRZ5ReXQhFOrvSQeNfhrYB/dg3oDwTOcER2fw4I4= github.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg= -github.com/Microsoft/hcsshim v0.9.1/go.mod h1:Y/0uV2jUab5kBI7SQgl62at0AVX7uaruzADAVmxm3eM= github.com/Microsoft/hcsshim v0.9.2/go.mod h1:7pLA8lDk46WKDWlVsENo92gC0XFa8rbKfyFRBqxEbCc= github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU= github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= @@ -98,6 +97,8 @@ github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdko github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= +github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ= +github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= @@ -105,6 +106,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod 
h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= github.com/alexflint/go-filemutex v1.1.0/go.mod h1:7P4iRhttt/nUvUOrYIhcpMzv2G6CY9UnI16Z+UJqRyk= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e h1:GCzyKMDDjSGnlpl3clrdAK7I1AaVoaiKDOYkUzChZzg= github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= @@ -144,10 +147,11 @@ github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx2 github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= -github.com/bytecodealliance/wasmtime-go v0.35.0 h1:VZjaZ0XOY0qp9TQfh0CQj9zl/AbdeXePVTALy8V1sKs= -github.com/bytecodealliance/wasmtime-go v0.35.0/go.mod h1:q320gUxqyI8yB+ZqRuaJOEnGkAnHh6WtJjMaT2CW4wI= +github.com/bytecodealliance/wasmtime-go v0.36.0 h1:B6thr7RMM9xQmouBtUqm1RpkJjuLS37m6nxX+iwsQSc= +github.com/bytecodealliance/wasmtime-go v0.36.0/go.mod h1:q320gUxqyI8yB+ZqRuaJOEnGkAnHh6WtJjMaT2CW4wI= github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/certifi/gocertifi v0.0.0-20191021191039-0944d244cd40/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= github.com/certifi/gocertifi v0.0.0-20200922220541-2c3bb06c6054/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= @@ -196,7 +200,6 @@ github.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4S github.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo= github.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= github.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU= -github.com/containerd/cgroups v1.0.2/go.mod h1:qpbpJ1jmlqsR9f2IyaLPsdkCdnt0rbDVqIDlhuu5tRY= github.com/containerd/cgroups v1.0.3/go.mod h1:/ofk34relqNjSGyqPrmEULrO4Sc8LJhvJmWbUCUKqj8= github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= github.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= @@ -220,8 +223,8 @@ github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoT github.com/containerd/containerd v1.5.1/go.mod h1:0DOxVqwDy2iZvrZp2JUx/E+hS0UNTVn7dJnIOwtYR4g= github.com/containerd/containerd v1.5.7/go.mod h1:gyvv6+ugqY25TiXxcZC3L5yOeYgEw0QMhscqVp1AR9c= github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s= -github.com/containerd/containerd v1.5.9/go.mod h1:fvQqCfadDGga5HZyn3j4+dx56qj2I9YwBrlSdalvJYQ= 
-github.com/containerd/containerd v1.6.2/go.mod h1:sidY30/InSE1j2vdD1ihtKoJz+lWdaXMdiAeIupaf+s= +github.com/containerd/containerd v1.6.1/go.mod h1:1nJz5xCZPusx6jJU8Frfct988y0NpumIq9ODB0kLtoE= +github.com/containerd/containerd v1.6.4/go.mod h1:oWOqbuJUZmOVafhA0lj2NAXbiO1u7F0K5l1bUgdyo94= github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= @@ -240,6 +243,7 @@ github.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZH github.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk= github.com/containerd/go-cni v1.1.0/go.mod h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= github.com/containerd/go-cni v1.1.3/go.mod h1:Rflh2EJ/++BA2/vY5ao3K6WJRR/bZKsX123aPk+kUtA= +github.com/containerd/go-cni v1.1.5/go.mod h1:Rf2ZrMycr1El589IyuRzn7RkfdRZVKaFGaxSDHVAjj0= github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g= @@ -250,6 +254,7 @@ github.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6T github.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow= github.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms= github.com/containerd/imgcrypt v1.1.3/go.mod h1:/TPA1GIDXMzbj01yd8pIbQiLdQxed5ue1wb8bP7PQu4= +github.com/containerd/imgcrypt v1.1.4/go.mod h1:LorQnPtzL/T0IyCeftcsMEO7AqxUDbdO8j/tSUpgxvo= github.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c= github.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= github.com/containerd/nri v0.1.0/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY= @@ -273,13 +278,16 @@ github.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ github.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY= github.com/containernetworking/cni v1.0.1/go.mod h1:AKuhXbN5EzmD4yTNtfSsX3tPcmtrBI6QcRV0NiNt15Y= +github.com/containernetworking/cni v1.1.0/go.mod h1:sDpYKmGVENF3s6uvMvGgldDWeG8dMxakj/u+i9ht9vw= github.com/containernetworking/plugins v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM= github.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8= github.com/containernetworking/plugins v1.0.1/go.mod h1:QHCfGpaTwYTbbH+nZXKVTxNBDZcxSOplJT5ico8/FLE= +github.com/containernetworking/plugins v1.1.1/go.mod h1:Sr5TH/eBsGLXK/h71HeLfX19sZPp3ry5uHSkI4LPxV8= github.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc= github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4= github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= github.com/containers/ocicrypt v1.1.2/go.mod 
h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= +github.com/containers/ocicrypt v1.1.3/go.mod h1:xpdkbVAuaH3WzbEabUd5yDsl9SwJA5pABH85425Es2g= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= @@ -394,7 +402,7 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-ini/ini v1.66.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= +github.com/go-ini/ini v1.66.6/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= @@ -682,6 +690,7 @@ github.com/miekg/dns v1.1.25/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKju github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg= github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -722,6 +731,7 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= +github.com/networkplumbing/go-nft v0.2.0/go.mod h1:HnnM+tYvlGAsMU7yoYwXEVLLiDW9gdMmb5HoGcwpuQs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= @@ -744,6 +754,7 @@ github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vv github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.5.0/go.mod 
h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= @@ -756,8 +767,8 @@ github.com/onsi/gomega v1.15.0/go.mod h1:cIuvLEne0aoVhAgh/O6ac0Op8WWw9H6eYCriF+t github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= -github.com/open-policy-agent/opa v0.40.0 h1:z/eg0ff3O1y6ovxpbL7xv+NHSwi8rVA7993sLv5Owac= -github.com/open-policy-agent/opa v0.40.0/go.mod h1:UQqv8nJ1njs2+Od1lrPFzUAApdj22ABxTO35+Vpsjz4= +github.com/open-policy-agent/opa v0.41.0 h1:XDTkP8bcUVuY8WOVbRY4e/KZW31+f+/cxisPc0TPe5E= +github.com/open-policy-agent/opa v0.41.0/go.mod h1:+kB8/8/4meTlq6ZmYRnvrL5nNrykd2eckDx4O6rk/dA= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -767,6 +778,7 @@ github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zM github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.2-0.20211117181255-693428a734f5/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= @@ -774,6 +786,7 @@ github.com/opencontainers/runc v1.0.0-rc9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rm github.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0= github.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0= github.com/opencontainers/runc v1.1.0/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= +github.com/opencontainers/runc v1.1.1/go.mod h1:Tj1hFw6eFWp/o33uxGf5yF2BX5yz2Z6iptFpuvbbKqc= github.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= @@ -785,6 +798,7 @@ github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqi github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= github.com/opencontainers/selinux v1.10.0/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= +github.com/opencontainers/selinux v1.10.1/go.mod h1:2i0OySw99QjzBBQByd1Gr9gSjvuho1lHsJxIJ3gGbJI= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod 
h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= @@ -811,6 +825,7 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= @@ -862,6 +877,8 @@ github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24 github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= +github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= @@ -936,6 +953,8 @@ github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/vektah/gqlparser/v2 v2.4.4 h1:rh9hwZ5Jx9cCq88zXz2YHKmuQBuwY1JErHU8GywFdwE= +github.com/vektah/gqlparser/v2 v2.4.4/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= @@ -1000,11 +1019,11 @@ go.opentelemetry.io/otel v0.20.0/go.mod h1:Y3ugLH2oa81t5QO+Lty+zXf8zC9L26ax4Nzox go.opentelemetry.io/otel/exporters/otlp v0.20.0 h1:PTNgq9MRmQqqJY0REVbZFvwkYOA85vbdQU/nVfxDyqg= go.opentelemetry.io/otel/exporters/otlp v0.20.0/go.mod h1:YIieizyaN77rtLJra0buKiNBOm9XQfkPEKBeuhoMwAM= go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.3.0/go.mod h1:VpP4/RMn8bv8gNo9uK7/IMY4mtWLELsS+JIP0inH0h4= -go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.6.3/go.mod h1:NEu79Xo32iVb+0gVNV8PMd7GoWqnyDXRlj04yFjqz40= +go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.7.0/go.mod h1:M1hVZHNxcbkAlcvrOMlpQ4YOO3Awf+4N2dxkZL3xm04= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.3.0/go.mod h1:hO1KLR7jcKaDDKDkvI9dP/FIhpmna5lkqPUQdEjFAM8= 
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.6.3/go.mod h1:UJmXdiVVBaZ63umRUTwJuCMAV//GCMvDiQwn703/GoY= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.7.0/go.mod h1:ceUgdyfNv4h4gLxHR0WNfDiiVmZFodZhZSbOLhpxqXE= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.3.0/go.mod h1:keUU7UfnwWTWpJ+FWnyqmogPa82nuU5VUANFq49hlMY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.6.3/go.mod h1:ycItY/esVj8c0dKgYTOztTERXtPzcfDU/0o8EdwCjoA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.7.0/go.mod h1:E+/KKhwOSw8yoPxSSuUHG6vKppkvhN+S1Jc7Nib3k3o= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.3.0/go.mod h1:QNX1aly8ehqqX1LEa6YniTU7VY9I6R3X/oPxhGdTceE= go.opentelemetry.io/otel/metric v0.20.0 h1:4kzhXFP+btKm4jwxpjIqjs41A7MakRFUS86bqLHTIw8= go.opentelemetry.io/otel/metric v0.20.0/go.mod h1:598I5tYlH1vzBjn+BTuhzTCSb/9debfNp6R3s7Pr1eU= @@ -1094,6 +1113,7 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1386,7 +1406,9 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10-0.20220218145154-897bd77cd717/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1509,8 +1531,9 @@ google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0 h1:9n77onPX5F3qfFCqjy9dhn8PbNQsIKeVU04J9G7umt8= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf 
v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1647,7 +1670,7 @@ k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a/go.mod h1:jPW/WVKK9YHAvNhRxK0md/ k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 h1:HNSDgDCrr/6Ly3WEGKZftiE7IY19Vz2GdbOCyI4qqhc= k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= -oras.land/oras-go v1.1.0/go.mod h1:1A7vR/0KknT2UkJVWh+xMi95I/AhK8ZrxrnUSmXN0bQ= +oras.land/oras-go v1.1.1/go.mod h1:n2TE1ummt9MUyprGhT+Q7kGZUF4kVUpYysPFxeV2IpQ= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/constraint/vendor/github.com/agnivade/levenshtein/.gitignore b/constraint/vendor/github.com/agnivade/levenshtein/.gitignore new file mode 100644 index 000000000..345780a44 --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/.gitignore @@ -0,0 +1,5 @@ +coverage.txt +fuzz/fuzz-fuzz.zip +fuzz/corpus/corpus/* +fuzz/corpus/suppressions/* +fuzz/corpus/crashes/* diff --git a/constraint/vendor/github.com/agnivade/levenshtein/.travis.yml b/constraint/vendor/github.com/agnivade/levenshtein/.travis.yml new file mode 100644 index 000000000..f830ec4ec --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/.travis.yml @@ -0,0 +1,7 @@ +language: go + +go: +- 1.9.x +- 1.10.x +- 1.11.x +- tip diff --git a/constraint/vendor/github.com/agnivade/levenshtein/License.txt b/constraint/vendor/github.com/agnivade/levenshtein/License.txt new file mode 100644 index 000000000..54b51f499 --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/License.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Agniva De Sarker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/constraint/vendor/github.com/agnivade/levenshtein/Makefile b/constraint/vendor/github.com/agnivade/levenshtein/Makefile new file mode 100644 index 000000000..4bef27dd1 --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/Makefile @@ -0,0 +1,13 @@ +all: test install + +install: + go install + +lint: + gofmt -l -s -w . && go tool vet -all . && golint + +test: + go test -race -v -coverprofile=coverage.txt -covermode=atomic + +bench: + go test -run=XXX -bench=. 
-benchmem diff --git a/constraint/vendor/github.com/agnivade/levenshtein/README.md b/constraint/vendor/github.com/agnivade/levenshtein/README.md new file mode 100644 index 000000000..b0fd81df7 --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/README.md @@ -0,0 +1,57 @@ +levenshtein [![Build Status](https://travis-ci.org/agnivade/levenshtein.svg?branch=master)](https://travis-ci.org/agnivade/levenshtein) [![Go Report Card](https://goreportcard.com/badge/github.com/agnivade/levenshtein)](https://goreportcard.com/report/github.com/agnivade/levenshtein) [![GoDoc](https://godoc.org/github.com/agnivade/levenshtein?status.svg)](https://godoc.org/github.com/agnivade/levenshtein) +=========== + +[Go](http://golang.org) package to calculate the [Levenshtein Distance](http://en.wikipedia.org/wiki/Levenshtein_distance) + +The library is fully capable of working with non-ASCII strings. But the strings are not normalized. That is left as a user-dependent use case. Please normalize the strings before passing them to the library if you have such a requirement. +- https://blog.golang.org/normalization + +Install +------- + + go get github.com/agnivade/levenshtein + +Example +------- + +```go +package main + +import ( + "fmt" + "github.com/agnivade/levenshtein" +) + +func main() { + s1 := "kitten" + s2 := "sitting" + distance := levenshtein.ComputeDistance(s1, s2) + fmt.Printf("The distance between %s and %s is %d.\n", s1, s2, distance) + // Output: + // The distance between kitten and sitting is 3. +} + +``` + +Benchmarks +---------- + +``` +name time/op +Simple/ASCII-4 537ns ± 2% +Simple/French-4 956ns ± 0% +Simple/Nordic-4 1.95µs ± 1% +Simple/Tibetan-4 1.53µs ± 2% + +name alloc/op +Simple/ASCII-4 96.0B ± 0% +Simple/French-4 128B ± 0% +Simple/Nordic-4 192B ± 0% +Simple/Tibetan-4 144B ± 0% + +name allocs/op +Simple/ASCII-4 1.00 ± 0% +Simple/French-4 1.00 ± 0% +Simple/Nordic-4 1.00 ± 0% +Simple/Tibetan-4 1.00 ± 0% +``` diff --git a/constraint/vendor/github.com/agnivade/levenshtein/levenshtein.go b/constraint/vendor/github.com/agnivade/levenshtein/levenshtein.go new file mode 100644 index 000000000..6b08acade --- /dev/null +++ b/constraint/vendor/github.com/agnivade/levenshtein/levenshtein.go @@ -0,0 +1,75 @@ +// Package levenshtein is a Go implementation to calculate Levenshtein Distance. +// +// Implementation taken from +// https://gist.github.com/andrei-m/982927#gistcomment-1931258 +package levenshtein + +import "unicode/utf8" + +// ComputeDistance computes the Levenshtein distance between the two +// strings passed as arguments. The return value is the Levenshtein distance. +// +// Works on runes (Unicode code points) but does not normalize +// the input strings. See https://blog.golang.org/normalization +// and the golang.org/x/text/unicode/norm package. +func ComputeDistance(a, b string) int { + if len(a) == 0 { + return utf8.RuneCountInString(b) + } + + if len(b) == 0 { + return utf8.RuneCountInString(a) + } + + if a == b { + return 0 + } + + // We need to convert to []rune if the strings are non-ASCII. + // This could be avoided by using utf8.RuneCountInString + // and then doing some juggling with rune indices. + // The primary challenge is keeping track of the previous rune. + // With a range loop, it's not that easy.
And with a for-loop + // we need to keep track of the inter-rune width using utf8.DecodeRuneInString + s1 := []rune(a) + s2 := []rune(b) + + // swap to save some memory O(min(a,b)) instead of O(a) + if len(s1) > len(s2) { + s1, s2 = s2, s1 + } + lenS1 := len(s1) + lenS2 := len(s2) + + // init the row + x := make([]int, lenS1+1) + for i := 0; i <= lenS1; i++ { + x[i] = i + } + + // fill in the rest + for i := 1; i <= lenS2; i++ { + prev := i + var current int + + for j := 1; j <= lenS1; j++ { + + if s2[i-1] == s1[j-1] { + current = x[j-1] // match + } else { + current = min(min(x[j-1]+1, prev+1), x[j]+1) + } + x[j-1] = prev + prev = current + } + x[lenS1] = prev + } + return x[lenS1] +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/builtins.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/builtins.go index 4ce91b097..3bdf93e5f 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/builtins.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/builtins.go @@ -230,6 +230,13 @@ var DefaultBuiltins = [...]*Builtin{ // HTTP HTTPSend, + // GraphQL + GraphQLParse, + GraphQLParseAndVerify, + GraphQLParseQuery, + GraphQLParseSchema, + GraphQLIsValid, + // Rego RegoParseModule, RegoMetadataChain, @@ -255,6 +262,7 @@ var DefaultBuiltins = [...]*Builtin{ GlobQuoteMeta, // Units + UnitsParse, UnitsParseBytes, // UUIDs @@ -343,219 +351,283 @@ var MemberWithKey = &Builtin{ /** * Comparisons */ +var comparison = category("comparison") -// GreaterThan represents the ">" comparison operator. var GreaterThan = &Builtin{ - Name: "gt", - Infix: ">", + Name: "gt", + Infix: ">", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is greater than `y`; false otherwise"), ), } -// GreaterThanEq represents the ">=" comparison operator. var GreaterThanEq = &Builtin{ - Name: "gte", - Infix: ">=", + Name: "gte", + Infix: ">=", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is greater or equal to `y`; false otherwise"), ), } // LessThan represents the "<" comparison operator. var LessThan = &Builtin{ - Name: "lt", - Infix: "<", + Name: "lt", + Infix: "<", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is less than `y`; false otherwise"), ), } -// LessThanEq represents the "<=" comparison operator. var LessThanEq = &Builtin{ - Name: "lte", - Infix: "<=", + Name: "lte", + Infix: "<=", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is less than or equal to `y`; false otherwise"), ), } -// NotEqual represents the "!=" comparison operator. 
var NotEqual = &Builtin{ - Name: "neq", - Infix: "!=", + Name: "neq", + Infix: "!=", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is not equal to `y`; false otherwise"), ), } // Equal represents the "==" comparison operator. var Equal = &Builtin{ - Name: "equal", - Infix: "==", + Name: "equal", + Infix: "==", + Categories: comparison, Decl: types.NewFunction( - types.Args(types.A, types.A), - types.B, + types.Args( + types.Named("x", types.A), + types.Named("y", types.A), + ), + types.Named("result", types.B).Description("true if `x` is equal to `y`; false otherwise"), ), } /** * Arithmetic */ +var number = category("numbers") -// Plus adds two numbers together. var Plus = &Builtin{ - Name: "plus", - Infix: "+", + Name: "plus", + Infix: "+", + Description: "Plus adds two numbers together.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N).Description("the sum of `x` and `y`"), ), + Categories: number, } -// Minus subtracts the second number from the first number or computes the diff -// between two sets. var Minus = &Builtin{ - Name: "minus", - Infix: "-", + Name: "minus", + Infix: "-", + Description: "Minus subtracts the second number from the first number or computes the difference between two sets.", Decl: types.NewFunction( types.Args( - types.NewAny(types.N, types.NewSet(types.A)), - types.NewAny(types.N, types.NewSet(types.A)), + types.Named("x", types.NewAny(types.N, types.NewSet(types.A))), + types.Named("y", types.NewAny(types.N, types.NewSet(types.A))), ), - types.NewAny(types.N, types.NewSet(types.A)), + types.Named("z", types.NewAny(types.N, types.NewSet(types.A))).Description("the difference of `x` and `y`"), ), + Categories: category("sets", "numbers"), } -// Multiply multiplies two numbers together. var Multiply = &Builtin{ - Name: "mul", - Infix: "*", + Name: "mul", + Infix: "*", + Description: "Multiplies two numbers.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N).Description("the product of `x` and `y`"), ), + Categories: number, } -// Divide divides the first number by the second number. var Divide = &Builtin{ - Name: "div", - Infix: "/", + Name: "div", + Infix: "/", + Description: "Divides the first number by the second number.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N).Description("the dividend"), + types.Named("y", types.N).Description("the divisor"), + ), + types.Named("z", types.N).Description("the result of `x` divided by `y`"), ), + Categories: number, } -// Round rounds the number to the nearest integer. var Round = &Builtin{ - Name: "round", + Name: "round", + Description: "Rounds the number to the nearest integer.", Decl: types.NewFunction( - types.Args(types.N), - types.N, + types.Args( + types.Named("x", types.N).Description("the number to round"), + ), + types.Named("y", types.N).Description("the result of rounding `x`"), ), + Categories: number, } -// Ceil rounds the number up to the nearest integer. 
var Ceil = &Builtin{ - Name: "ceil", + Name: "ceil", + Description: "Rounds the number _up_ to the nearest integer.", Decl: types.NewFunction( - types.Args(types.N), - types.N, + types.Args( + types.Named("x", types.N).Description("the number to round"), + ), + types.Named("y", types.N).Description("the result of rounding `x` _up_"), ), + Categories: number, } -// Floor rounds the number down to the nearest integer. var Floor = &Builtin{ - Name: "floor", + Name: "floor", + Description: "Rounds the number _down_ to the nearest integer.", Decl: types.NewFunction( - types.Args(types.N), - types.N, + types.Args( + types.Named("x", types.N).Description("the number to round"), + ), + types.Named("y", types.N).Description("the result of rounding `x` _down_"), ), + Categories: number, } -// Abs returns the number without its sign. var Abs = &Builtin{ - Name: "abs", + Name: "abs", + Description: "Returns the number without its sign.", Decl: types.NewFunction( - types.Args(types.N), - types.N, + types.Args( + types.Named("x", types.N), + ), + types.Named("y", types.N).Description("the absolute value of `x`"), ), + Categories: number, } -// Rem returns the remainder for x%y for y != 0. var Rem = &Builtin{ - Name: "rem", - Infix: "%", + Name: "rem", + Infix: "%", + Description: "Returns the remainder of `x` divided by `y`, for `y != 0`.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N).Description("the remainder"), ), + Categories: number, } /** * Bitwise */ -// BitsOr returns the bitwise "or" of two integers. var BitsOr = &Builtin{ - Name: "bits.or", + Name: "bits.or", + Description: "Returns the bitwise \"OR\" of two integers.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N), ), } -// BitsAnd returns the bitwise "and" of two integers. var BitsAnd = &Builtin{ - Name: "bits.and", + Name: "bits.and", + Description: "Returns the bitwise \"AND\" of two integers.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N), ), } -// BitsNegate returns the bitwise "negation" of an integer (i.e. flips each -// bit). var BitsNegate = &Builtin{ - Name: "bits.negate", + Name: "bits.negate", + Description: "Returns the bitwise negation (flip) of an integer.", Decl: types.NewFunction( - types.Args(types.N), - types.N, + types.Args( + types.Named("x", types.N), + ), + types.Named("z", types.N), ), } -// BitsXOr returns the bitwise "exclusive-or" of two integers. var BitsXOr = &Builtin{ - Name: "bits.xor", + Name: "bits.xor", + Description: "Returns the bitwise \"XOR\" (exclusive-or) of two integers.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("y", types.N), + ), + types.Named("z", types.N), ), } -// BitsShiftLeft returns a new integer with its bits shifted some value to the -// left.
var BitsShiftLeft = &Builtin{ - Name: "bits.lsh", + Name: "bits.lsh", + Description: "Returns a new integer with its bits shifted `s` bits to the left.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("s", types.N), + ), + types.Named("z", types.N), ), } -// BitsShiftRight returns a new integer with its bits shifted some value to the -// right. var BitsShiftRight = &Builtin{ - Name: "bits.rsh", + Name: "bits.rsh", + Description: "Returns a new integer with its bits shifted `s` bits to the right.", Decl: types.NewFunction( - types.Args(types.N, types.N), - types.N, + types.Args( + types.Named("x", types.N), + types.Named("s", types.N), + ), + types.Named("z", types.N), ), } @@ -563,251 +635,303 @@ var BitsShiftRight = &Builtin{ * Sets */ -// And performs an intersection operation on sets. +var sets = category("sets") + var And = &Builtin{ - Name: "and", - Infix: "&", + Name: "and", + Infix: "&", + Description: "Returns the intersection of two sets.", Decl: types.NewFunction( types.Args( - types.NewSet(types.A), - types.NewSet(types.A), + types.Named("x", types.NewSet(types.A)), + types.Named("y", types.NewSet(types.A)), ), - types.NewSet(types.A), + types.Named("z", types.NewSet(types.A)).Description("the intersection of `x` and `y`"), ), + Categories: sets, } // Or performs a union operation on sets. var Or = &Builtin{ - Name: "or", - Infix: "|", + Name: "or", + Infix: "|", + Description: "Returns the union of two sets.", Decl: types.NewFunction( types.Args( - types.NewSet(types.A), - types.NewSet(types.A), + types.Named("x", types.NewSet(types.A)), + types.Named("y", types.NewSet(types.A)), ), - types.NewSet(types.A), + types.Named("z", types.NewSet(types.A)).Description("the union of `x` and `y`"), + ), + Categories: sets, +} + +var Intersection = &Builtin{ + Name: "intersection", + Description: "Returns the intersection of the given input sets.", + Decl: types.NewFunction( + types.Args( + types.Named("xs", types.NewSet(types.NewSet(types.A))).Description("set of sets to intersect"), + ), + types.Named("y", types.NewSet(types.A)).Description("the intersection of all `xs` sets"), ), + Categories: sets, +} + +var Union = &Builtin{ + Name: "union", + Description: "Returns the union of the given input sets.", + Decl: types.NewFunction( + types.Args( + types.Named("xs", types.NewSet(types.NewSet(types.A))).Description("set of sets to merge"), + ), + types.Named("y", types.NewSet(types.A)).Description("the union of all `xs` sets"), + ), + Categories: sets, } /** * Aggregates */ -// Count takes a collection or string and counts the number of elements in it. +var aggregates = category("aggregates") + var Count = &Builtin{ - Name: "count", + Name: "count", + Description: "Count takes a collection or string and returns the number of elements (or characters) in it.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("collection", types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.S, - ), + )).Description("the set/array/object/string to be counted"), ), - types.N, + types.Named("n", types.N).Description("the count of elements, key/val pairs, or characters, respectively."), ), + Categories: aggregates, } -// Sum takes an array or set of numbers and sums them.
-// Sum takes an array or set of numbers and sums them. var Sum = &Builtin{ - Name: "sum", + Name: "sum", + Description: "Sums elements of an array or set of numbers.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("collection", types.NewAny( types.NewSet(types.N), types.NewArray(nil, types.N), - ), + )), ), - types.N, + types.Named("n", types.N).Description("the sum of all elements"), ), + Categories: aggregates, } -// Product takes an array or set of numbers and multiplies them. var Product = &Builtin{ - Name: "product", + Name: "product", + Description: "Multiplies elements of an array or set of numbers.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("collection", types.NewAny( types.NewSet(types.N), types.NewArray(nil, types.N), - ), + )), ), - types.N, + types.Named("n", types.N).Description("the product of all elements"), ), + Categories: aggregates, } -// Max returns the maximum value in a collection. var Max = &Builtin{ - Name: "max", + Name: "max", + Description: "Returns the maximum value in a collection.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("collection", types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), - ), + )), ), - types.A, + types.Named("n", types.A).Description("the maximum of all elements"), ), + Categories: aggregates, } -// Min returns the minimum value in a collection. var Min = &Builtin{ - Name: "min", + Name: "min", + Description: "Returns the minimum value in a collection.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("collection", types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), - ), + )), + ), + types.Named("n", types.A).Description("the minimum of all elements"), + ), + Categories: aggregates, +} + +/** + * Sorting + */ + +var Sort = &Builtin{ + Name: "sort", + Description: "Returns a sorted array.", + Decl: types.NewFunction( + types.Args( + types.Named("collection", types.NewAny( + types.NewArray(nil, types.A), + types.NewSet(types.A), + )).Description("the array or set to be sorted"), ), - types.A, + types.Named("n", types.NewArray(nil, types.A)).Description("the sorted array"), ), + Categories: aggregates, } /** * Arrays */ -// ArrayConcat returns the result of concatenating two arrays together. var ArrayConcat = &Builtin{ - Name: "array.concat", + Name: "array.concat", + Description: "Concatenates two arrays.", Decl: types.NewFunction( types.Args( - types.NewArray(nil, types.A), - types.NewArray(nil, types.A), + types.Named("x", types.NewArray(nil, types.A)), + types.Named("y", types.NewArray(nil, types.A)), ), - types.NewArray(nil, types.A), + types.Named("z", types.NewArray(nil, types.A)).Description("the concatenation of `x` and `y`"), ), } -// ArraySlice returns a slice of a given array var ArraySlice = &Builtin{ - Name: "array.slice", + Name: "array.slice", + Description: "Returns a slice of a given array. 
If `start` is greater than or equal to `stop`, `slice` is `[]`.", Decl: types.NewFunction( types.Args( - types.NewArray(nil, types.A), - types.NewNumber(), - types.NewNumber(), + types.Named("arr", types.NewArray(nil, types.A)).Description("the array to be sliced"), + types.Named("start", types.NewNumber()).Description("the start index of the returned slice; if less than zero, it's clamped to 0"), + types.Named("stop", types.NewNumber()).Description("the stop index of the returned slice; if larger than `count(arr)`, it's clamped to `count(arr)`"), ), - types.NewArray(nil, types.A), + types.Named("slice", types.NewArray(nil, types.A)).Description("the subslice of `arr`, from `start` to `stop`, including `arr[start]`, but excluding `arr[stop]`"), ), -} +} // NOTE(sr): this function really needs examples -// ArrayReverse returns a given array, reversed var ArrayReverse = &Builtin{ - Name: "array.reverse", + Name: "array.reverse", + Description: "Returns the reverse of a given array.", Decl: types.NewFunction( types.Args( - types.NewArray(nil, types.A), + types.Named("arr", types.NewArray(nil, types.A)).Description("the array to be reversed"), ), - types.NewArray(nil, types.A), + types.Named("rev", types.NewArray(nil, types.A)).Description("an array containing the elements of `arr` in reverse order"), ), } /** * Conversions */ +var conversions = category("conversions") -// ToNumber takes a string, bool, or number value and converts it to a number. -// Strings are converted to numbers using strconv.Atoi. -// Boolean false is converted to 0 and boolean true is converted to 1. var ToNumber = &Builtin{ - Name: "to_number", + Name: "to_number", + Description: "Converts a string, bool, or number value to a number: Strings are converted to numbers using `strconv.Atoi`, Boolean `false` is converted to 0 and `true` is converted to 1.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("x", types.NewAny( types.N, types.S, types.B, types.NewNull(), - ), + )), ), - types.N, + types.Named("num", types.N), ), + Categories: conversions, } /** * Regular Expressions */ -// RegexMatch takes two strings and evaluates to true if the string in the second -// position matches the pattern in the first position. var RegexMatch = &Builtin{ - Name: "regex.match", + Name: "regex.match", + Description: "Matches a string against a regular expression.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("pattern", types.S).Description("regular expression"), + types.Named("value", types.S).Description("value to match against `pattern`"), ), - types.B, + types.Named("result", types.B), ), } -// RegexIsValid returns true if the regex pattern string is valid, otherwise false. var RegexIsValid = &Builtin{ - Name: "regex.is_valid", + Name: "regex.is_valid", + Description: "Checks if a string is a valid regular expression: the detailed syntax for patterns is defined by https://github.com/google/re2/wiki/Syntax.", Decl: types.NewFunction( types.Args( - types.S, + types.Named("pattern", types.S).Description("regular expression"), ), - types.B, + types.Named("result", types.B), ), }
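The `NOTE(sr)` above asks for `array.slice` examples; here is a Rego sketch of the aggregate, array, conversion, and regex builtins documented above (illustrative only, not part of the patch):

```rego
package builtins_demo

aggregates_ok {
	sum([1, 2, 3]) == 6
	product({2, 3}) == 6
	max([1, 3, 2]) == 3
	sort({3, 1, 2}) == [1, 2, 3]
}

arrays_ok {
	array.concat([1, 2], [3]) == [1, 2, 3]
	array.slice([1, 2, 3, 4], 1, 3) == [2, 3]   # includes arr[1], excludes arr[3]
	array.slice([1, 2, 3], -1, 10) == [1, 2, 3] # start/stop are clamped
	array.reverse([1, 2, 3]) == [3, 2, 1]
	to_number("3.14") == 3.14
	regex.match("^ab", "abc")
	regex.is_valid("[a-z]+")
}
```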
-// RegexFindAllStringSubmatch returns an array of all successive matches of the expression. -// It takes two strings and a number, the pattern, the value and number of matches to -// return, -1 means all matches. var RegexFindAllStringSubmatch = &Builtin{ - Name: "regex.find_all_string_submatch_n", + Name: "regex.find_all_string_submatch_n", + Description: "Returns all successive matches of the expression.", Decl: types.NewFunction( types.Args( - types.S, - types.S, - types.N, + types.Named("pattern", types.S).Description("regular expression"), + types.Named("value", types.S).Description("string to match"), + types.Named("number", types.N).Description("number of matches to return; `-1` means all matches"), ), - types.NewArray(nil, types.NewArray(nil, types.S)), + types.Named("output", types.NewArray(nil, types.NewArray(nil, types.S))), ), } -// RegexTemplateMatch takes two strings and evaluates to true if the string in the second -// position matches the pattern in the first position. var RegexTemplateMatch = &Builtin{ - Name: "regex.template_match", + Name: "regex.template_match", + Description: "Matches a string against a pattern, where the pattern may be glob-like.", Decl: types.NewFunction( types.Args( - types.S, - types.S, - types.S, - types.S, + types.Named("template", types.S).Description("template expression containing `0..n` regular expressions"), + types.Named("value", types.S).Description("string to match"), + types.Named("delimiter_start", types.S).Description("start delimiter of the regular expression in `template`"), + types.Named("delimiter_end", types.S).Description("end delimiter of the regular expression in `template`"), ), - types.B, + types.Named("result", types.B), ), -} +} // TODO(sr): example: `regex.template_match("urn:foo:{.*}", "urn:foo:bar:baz", "{", "}")` returns `true`. -// RegexSplit splits the input string by the occurrences of the given pattern. var RegexSplit = &Builtin{ - Name: "regex.split", + Name: "regex.split", + Description: "Splits the input string by the occurrences of the given pattern.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("pattern", types.S).Description("regular expression"), + types.Named("value", types.S).Description("string to match"), ), - types.NewArray(nil, types.S), + types.Named("output", types.NewArray(nil, types.S)).Description("the parts obtained by splitting `value`"), ), } // RegexFind takes two strings and a number, the pattern, the value and number of match values to // return, -1 means all match values. var RegexFind = &Builtin{ - Name: "regex.find_n", + Name: "regex.find_n", + Description: "Returns the specified number of matches when matching the input against the pattern.", Decl: types.NewFunction( types.Args( - types.S, - types.S, - types.N, + types.Named("pattern", types.S).Description("regular expression"), + types.Named("value", types.S).Description("string to match"), + types.Named("number", types.N).Description("number of matches to return; `-1` means all matches"), ), - types.NewArray(nil, types.S), + types.Named("output", types.NewArray(nil, types.S)).Description("collected matches"), ), } @@ -818,277 +942,300 @@ var RegexFind = &Builtin{ // - "[a-z]*" and [0-9]+" -> not true. var GlobsMatch = &Builtin{ Name: "regex.globs_match", + Description: `Checks if the intersection of two glob-style regular expressions matches a non-empty set of non-empty strings. 
+The set of regex symbols is limited for this builtin: only ` + "`.`, `*`, `+`, `[`, `-`, `]` and `\\` are treated as special symbols.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("glob1", types.S), + types.Named("glob2", types.S), ), - types.B, + types.Named("result", types.B), ), } /** * Strings */ +var stringsCat = category("strings") -// Concat joins an array of strings with an input string. var Concat = &Builtin{ - Name: "concat", + Name: "concat", + Description: "Joins a set or array of strings with a delimiter.", Decl: types.NewFunction( types.Args( - types.S, - types.NewAny( + types.Named("delimiter", types.S), + types.Named("collection", types.NewAny( types.NewSet(types.S), types.NewArray(nil, types.S), - ), + )).Description("strings to join"), ), - types.S, + types.Named("output", types.S), ), + Categories: stringsCat, } -// FormatInt returns the string representation of the number in the given base after converting it to an integer value. var FormatInt = &Builtin{ - Name: "format_int", + Name: "format_int", + Description: "Returns the string representation of the number in the given base after converting it to an integer value.", Decl: types.NewFunction( types.Args( - types.N, - types.N, + types.Named("number", types.N).Description("number to format"), + types.Named("base", types.N).Description("base of number representation to use"), ), - types.S, + types.Named("output", types.S).Description("formatted number"), ), + Categories: stringsCat, } -// IndexOf returns the index of a substring contained inside a string var IndexOf = &Builtin{ - Name: "indexof", + Name: "indexof", + Description: "Returns the index of a substring contained inside a string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("haystack", types.S).Description("string to search in"), + types.Named("needle", types.S).Description("substring to look for"), ), - types.N, + types.Named("output", types.N).Description("index of first occurrence, `-1` if not found"), ), + Categories: stringsCat, } -// IndexOfN returns a list of all the indexes of a substring contained inside a string var IndexOfN = &Builtin{ - Name: "indexof_n", + Name: "indexof_n", + Description: "Returns a list of all the indexes of a substring contained inside a string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("haystack", types.S).Description("string to search in"), + types.Named("needle", types.S).Description("substring to look for"), ), - types.NewArray(nil, types.N), + types.Named("output", types.NewArray(nil, types.N)).Description("all indices at which `needle` occurs in `haystack`, may be empty"), ), + Categories: stringsCat, } -// Substring returns the portion of a string for a given start index and a length. -// If the length is less than zero, then substring returns the remainder of the string. var Substring = &Builtin{ - Name: "substring", + Name: "substring", + Description: "Returns the portion of a string for a given `offset` and a `length`. 
If `length < 0`, `output` is the remainder of the string.", Decl: types.NewFunction( types.Args( - types.S, - types.N, - types.N, + types.Named("value", types.S), + types.Named("offset", types.N).Description("offset, must be positive"), + types.Named("length", types.N).Description("length of the substring starting from `offset`"), ), - types.S, + types.Named("output", types.S).Description("substring of `value` from `offset`, of length `length`"), ), + Categories: stringsCat, } -// Contains returns true if the search string is included in the base string var Contains = &Builtin{ - Name: "contains", + Name: "contains", + Description: "Returns `true` if the search string is included in the base string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("haystack", types.S).Description("string to search in"), + types.Named("needle", types.S).Description("substring to look for"), ), - types.B, + types.Named("result", types.B).Description("result of the containment check"), ), + Categories: stringsCat, } -// StartsWith returns true if the search string begins with the base string var StartsWith = &Builtin{ - Name: "startswith", + Name: "startswith", + Description: "Returns true if the search string begins with the base string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("base", types.S).Description("base string"), + types.Named("search", types.S).Description("search string"), ), - types.B, + types.Named("result", types.B).Description("result of the prefix check"), ), + Categories: stringsCat, } -// EndsWith returns true if the search string begins with the base string var EndsWith = &Builtin{ - Name: "endswith", + Name: "endswith", + Description: "Returns true if the search string ends with the base string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("base", types.S).Description("base string"), + types.Named("search", types.S).Description("search string"), ), - types.B, + types.Named("result", types.B).Description("result of the suffix check"), ), + Categories: stringsCat, } -// Lower returns the input string but with all characters in lower-case var Lower = &Builtin{ - Name: "lower", + Name: "lower", + Description: "Returns the input string but with all characters in lower-case.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S).Description("string that is converted to lower-case"), + ), + types.Named("y", types.S).Description("lower-case of `x`"), ), + Categories: stringsCat, } -// Upper returns the input string but with all characters in upper-case var Upper = &Builtin{ - Name: "upper", + Name: "upper", + Description: "Returns the input string but with all characters in upper-case.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S).Description("string that is converted to upper-case"), + ), + types.Named("y", types.S).Description("upper-case of `x`"), ), + Categories: stringsCat, }
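A Rego sketch of the string builtins above (illustrative only, not part of the patch):

```rego
package builtins_demo

strings_ok {
	concat(", ", ["a", "b"]) == "a, b"
	format_int(255, 16) == "ff"
	indexof("abcabc", "b") == 1
	indexof_n("abcabc", "b") == [1, 4]
	substring("hello", 1, 3) == "ell"
	startswith("hello", "he")
	endswith("hello", "lo")
	contains("hello", "ell")
	upper("abc") == "ABC"
}
```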
-// Split returns an array containing elements of the input string split on a delimiter. var Split = &Builtin{ - Name: "split", + Name: "split", + Description: "Split returns an array containing elements of the input string split on a delimiter.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("x", types.S).Description("string that is split"), + types.Named("delimiter", types.S).Description("delimiter used for splitting"), ), - types.NewArray(nil, types.S), + types.Named("ys", types.NewArray(nil, types.S)).Description("split parts"), ), + Categories: stringsCat, } -// Replace returns the given string with all instances of the second argument replaced -// by the third. var Replace = &Builtin{ - Name: "replace", + Name: "replace", + Description: "Replace replaces all instances of a sub-string.", Decl: types.NewFunction( types.Args( - types.S, - types.S, - types.S, + types.Named("x", types.S).Description("string being processed"), + types.Named("old", types.S).Description("substring to replace"), + types.Named("new", types.S).Description("string to replace `old` with"), ), - types.S, + types.Named("y", types.S).Description("string with replaced substrings"), ), + Categories: stringsCat, } -// ReplaceN replaces a string from a list of old, new string pairs. -// Replacements are performed in the order they appear in the target string, without overlapping matches. -// The old string comparisons are done in argument order. var ReplaceN = &Builtin{ Name: "strings.replace_n", + Description: `Replaces a string from a list of old, new string pairs. +Replacements are performed in the order they appear in the target string, without overlapping matches. +The old string comparisons are done in argument order.`, Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("patterns", types.NewObject( nil, types.NewDynamicProperty( types.S, types.S)), - types.S, + ).Description("replacement pairs"), + types.Named("value", types.S).Description("string to replace substring matches in"), ), - types.S, + types.Named("output", types.S), ), } -// Trim returns the given string with all leading or trailing instances of the second -// argument removed. var Trim = &Builtin{ - Name: "trim", + Name: "trim", + Description: "Returns `value` with all leading or trailing instances of the `cutset` characters removed.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("value", types.S).Description("string to trim"), + types.Named("cutset", types.S).Description("string of characters that are cut off"), ), - types.S, + types.Named("output", types.S).Description("string trimmed of `cutset` characters"), ), + Categories: stringsCat, } -// TrimLeft returns the given string with all leading instances of second argument removed. var TrimLeft = &Builtin{ - Name: "trim_left", + Name: "trim_left", + Description: "Returns `value` with all leading instances of the `cutset` characters removed.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("value", types.S).Description("string to trim"), + types.Named("cutset", types.S).Description("string of characters that are cut off on the left"), ), - types.S, + types.Named("output", types.S).Description("string left-trimmed of `cutset` characters"), ), + Categories: stringsCat, } -// TrimPrefix returns the given string without the second argument prefix string. -// If the given string doesn't start with prefix, it is returned unchanged. var TrimPrefix = &Builtin{ - Name: "trim_prefix", + Name: "trim_prefix", + Description: "Returns `value` without the prefix. 
If `value` doesn't start with `prefix`, it is returned unchanged.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("value", types.S).Description("string to trim"), + types.Named("prefix", types.S).Description("prefix to cut off"), ), - types.S, + types.Named("output", types.S).Description("string with `prefix` cut off"), ), + Categories: stringsCat, } -// TrimRight returns the given string with all trailing instances of second argument removed. var TrimRight = &Builtin{ - Name: "trim_right", + Name: "trim_right", + Description: "Returns `value` with all trailing instances of the `cutset` characters removed.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("value", types.S).Description("string to trim"), + types.Named("cutset", types.S).Description("string of characters that are cut off on the right"), ), - types.S, + types.Named("output", types.S).Description("string right-trimmed of `cutset` characters"), ), + Categories: stringsCat, } -// TrimSuffix returns the given string without the second argument suffix string. -// If the given string doesn't end with suffix, it is returned unchanged. var TrimSuffix = &Builtin{ - Name: "trim_suffix", + Name: "trim_suffix", + Description: "Returns `value` without the suffix. If `value` doesn't end with `suffix`, it is returned unchanged.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("value", types.S).Description("string to trim"), + types.Named("suffix", types.S).Description("suffix to cut off"), ), - types.S, + types.Named("output", types.S).Description("string with `suffix` cut off"), ), + Categories: stringsCat, } -// TrimSpace return the given string with all leading and trailing white space removed. var TrimSpace = &Builtin{ - Name: "trim_space", + Name: "trim_space", + Description: "Returns the given string with all leading and trailing white space removed.", Decl: types.NewFunction( types.Args( - types.S, + types.Named("value", types.S).Description("string to trim"), ), - types.S, + types.Named("output", types.S).Description("string with leading and trailing white space cut off"), ), + Categories: stringsCat, } -// Sprintf returns the given string, formatted. var Sprintf = &Builtin{ - Name: "sprintf", + Name: "sprintf", + Description: "Returns the given string, formatted.", Decl: types.NewFunction( types.Args( - types.S, - types.NewArray(nil, types.A), + types.Named("format", types.S).Description("string with formatting verbs"), + types.Named("values", types.NewArray(nil, types.A)).Description("arguments to format into formatting verbs"), ), - types.S, + types.Named("output", types.S).Description("`format` formatted by the values in `values`"), ), + Categories: stringsCat, } -// StringReverse returns the given string, reversed. var StringReverse = &Builtin{ - Name: "strings.reverse", + Name: "strings.reverse", + Description: "Reverses a given string.", Decl: types.NewFunction( types.Args( - types.S, + types.Named("x", types.S), ), - types.S, + types.Named("y", types.S), ), + Categories: stringsCat, } /** @@ -1097,25 +1244,27 @@ var StringReverse = &Builtin{ // RandIntn returns a random number 0 - n var RandIntn = &Builtin{ - Name: "rand.intn", + Name: "rand.intn", + Description: "Returns a random integer between `0` and `n` (`n` exclusive). If `n` is `0`, then `y` is always `0`. 
For any given argument pair (`str`, `n`), the output will be consistent throughout a query evaluation.", Decl: types.NewFunction( types.Args( - types.S, - types.N, + types.Named("str", types.S), + types.Named("n", types.N), ), - types.N, + types.Named("y", types.N).Description("random integer in the range `[0, abs(n))`"), ), + Categories: number, } -// NumbersRange returns an array of numbers in the given inclusive range. var NumbersRange = &Builtin{ - Name: "numbers.range", + Name: "numbers.range", + Description: "Returns an array of numbers in the given (inclusive) range. If `a==b`, then `range == [a]`; if `a > b`, then `range` is in descending order.", Decl: types.NewFunction( types.Args( - types.N, - types.N, + types.Named("a", types.N), + types.Named("b", types.N), ), - types.NewArray(nil, types.N), + types.Named("range", types.NewArray(nil, types.N)).Description("the range between `a` and `b`"), ), } @@ -1123,15 +1272,34 @@ var NumbersRange = &Builtin{ * Units */ -// UnitsParseBytes converts strings like 10GB, 5K, 4mb, and the like into an -// integer number of bytes. +// UnitsParse +var UnitsParse = &Builtin{ + Name: "units.parse", + Description: `Converts strings like "10G", "5K", "4M", "1500m" and the like into a number. +This number can be a non-integer, such as 1.5, 0.22, etc. Supports standard metric decimal and +binary SI units (e.g., K, Ki, M, Mi, G, Gi etc.) m, K, M, G, T, P, and E are treated as decimal +units and Ki, Mi, Gi, Ti, Pi, and Ei are treated as binary units. + +Note that 'm' and 'M' are case-sensitive, to allow distinguishing between "milli" and "mega" units respectively. Other units are case-insensitive.`, + Decl: types.NewFunction( + types.Args( + types.Named("x", types.S).Description("the unit to parse"), + ), + types.Named("y", types.N).Description("the parsed number"), + ), +} + var UnitsParseBytes = &Builtin{ Name: "units.parse_bytes", + Description: `Converts strings like "10GB", "5K", "4mb" into an integer number of bytes. +Supports standard byte units (e.g., KB, KiB, etc.) KB, MB, GB, and TB are treated as decimal +units and KiB, MiB, GiB, and TiB are treated as binary units. The bytes symbol (b/B) in the +unit is optional and omitting it will give the same result (e.g. Mi and MiB).`, Decl: types.NewFunction( types.Args( - types.S, + types.Named("x", types.S).Description("the byte unit to parse"), ), - types.N, + types.Named("y", types.N).Description("the parsed number"), ), } @@ -1142,10 +1310,13 @@ var UnitsParseBytes = &Builtin{ // UUIDRFC4122 returns a version 4 UUID string. var UUIDRFC4122 = &Builtin{ - Name: "uuid.rfc4122", + Name: "uuid.rfc4122", + Description: "Returns a new UUIDv4.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("k", types.S), + ), + types.Named("output", types.S).Description("a version 4 UUID; for any given `k`, the output will be consistent throughout a query evaluation"), ), } @@ -1153,43 +1324,20 @@ var UUIDRFC4122 = &Builtin{ * JSON */ -// JSONMarshal serializes the input term. -var JSONMarshal = &Builtin{ - Name: "json.marshal", - Decl: types.NewFunction( - types.Args(types.A), - types.S, - ), -} - -// JSONUnmarshal deserializes the input string. -var JSONUnmarshal = &Builtin{ - Name: "json.unmarshal", - Decl: types.NewFunction( - types.Args(types.S), - types.A, - ), -} +var objectCat = category("object")
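`units.parse` is one of the builtins newly vendored by this bump; a Rego sketch of both unit parsers, following the descriptions above (illustrative only):

```rego
package builtins_demo

units_ok {
	units.parse("1.5K") == 1500
	units.parse("1500m") == 1.5 # lower-case m is "milli"
	units.parse("1Ki") == 1024
	units.parse_bytes("1KB") == 1000
	units.parse_bytes("1KiB") == 1024
	units.parse_bytes("1Mi") == 1048576 # the bytes suffix is optional
}
```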
-// JSONIsValid verifies the input string is a valid JSON document. -var JSONIsValid = &Builtin{ - Name: "json.is_valid", - Decl: types.NewFunction( - types.Args(types.S), - types.B, - ), -} - -// JSONFilter filters the JSON object var JSONFilter = &Builtin{ Name: "json.filter", + Description: "Filters the object. " + + "For example: `json.filter({\"a\": {\"b\": \"x\", \"c\": \"y\"}}, [\"a/b\"])` will result in `{\"a\": {\"b\": \"x\"}}`. " + + "Paths are not filtered in-order and are deduplicated before being evaluated.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("object", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), - types.NewAny( + )), + types.Named("paths", types.NewAny( types.NewArray( nil, types.NewAny( @@ -1209,22 +1357,25 @@ var JSONFilter = &Builtin{ ), ), ), - ), + )).Description("JSON string paths"), ), - types.A, + types.Named("filtered", types.A).Description("remaining data from `object` with only keys specified in `paths`"), ), + Categories: objectCat, } -// JSONRemove removes paths in the JSON object var JSONRemove = &Builtin{ Name: "json.remove", + Description: "Removes paths from an object. " + + "For example: `json.remove({\"a\": {\"b\": \"x\", \"c\": \"y\"}}, [\"a/b\"])` will result in `{\"a\": {\"c\": \"y\"}}`. " + + "Paths are not removed in-order and are deduplicated before being evaluated.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("object", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), - types.NewAny( + )), + types.Named("paths", types.NewAny( types.NewArray( nil, types.NewAny( @@ -1244,19 +1395,21 @@ var JSONRemove = &Builtin{ ), ), ), - ), + )).Description("JSON string paths"), ), - types.A, + types.Named("output", types.A).Description("result of removing all keys specified in `paths`"), ), + Categories: objectCat, } -// JSONPatch patches a JSON object according to RFC6902 var JSONPatch = &Builtin{ Name: "json.patch", + Description: "Patches an object according to RFC6902. " + + "For example: `json.patch({\"a\": {\"foo\": 1}}, [{\"op\": \"add\", \"path\": \"/a/bar\", \"value\": 2}])` results in `{\"a\": {\"foo\": 1, \"bar\": 2}}`. The patches are applied atomically: if any of them fails, the result will be undefined.", Decl: types.NewFunction( types.Args( - types.A, - types.NewArray( + types.Named("object", types.A), // TODO(sr): types.A? + types.Named("patches", types.NewArray( nil, types.NewObject( []*types.StaticProperty{ @@ -1265,561 +1418,675 @@ var JSONPatch = &Builtin{ }, types.NewDynamicProperty(types.A, types.A), ), - ), + )), ), - types.A, + types.Named("output", types.A).Description("result obtained after consecutively applying all patch operations in `patches`"), ), + Categories: objectCat, } -// ObjectGet returns takes an object and returns a value under its key if -// present, otherwise it returns the default. var ObjectGet = &Builtin{ Name: "object.get", + Description: "Returns value of an object's key if present, otherwise a default. " + + "If the supplied `key` is an `array`, then `object.get` will search through a nested object or array using each key in turn. 
" + + "For example: `object.get({\"a\": [{ \"b\": true }]}, [\"a\", 0, \"b\"], false)` results in `true`.", Decl: types.NewFunction( types.Args( - types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), - types.A, - types.A, + types.Named("object", types.NewObject(nil, types.NewDynamicProperty(types.A, types.A))).Description("object to get `key` from"), + types.Named("key", types.A).Description("key to lookup in `object`"), + types.Named("default", types.A).Description("default to use when lookup fails"), ), - types.A, + types.Named("value", types.A).Description("`object[key]` if present, otherwise `default`"), ), } -// ObjectUnion creates a new object that is the asymmetric union of two objects var ObjectUnion = &Builtin{ Name: "object.union", + Description: "Creates a new object of the asymmetric union of two objects. " + + "For example: `object.union({\"a\": 1, \"b\": 2, \"c\": {\"d\": 3}}, {\"a\": 7, \"c\": {\"d\": 4, \"e\": 5}})` will result in `{\"a\": 7, \"b\": 2, \"c\": {\"d\": 4, \"e\": 5}}`.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("a", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), - types.NewObject( + )), + types.Named("b", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), + )), ), - types.A, - ), + types.Named("output", types.A).Description("a new object which is the result of an asymmetric recursive union of two objects where conflicts are resolved by choosing the key from the right-hand object `b`"), + ), // TODO(sr): types.A? ^^^^^^^ (also below) } -// ObjectUnionN creates a new object that is the asymmetric union of all objects merged from left to right var ObjectUnionN = &Builtin{ Name: "object.union_n", + Description: "Creates a new object that is the asymmetric union of all objects merged from left to right. " + + "For example: `object.union_n([{\"a\": 1}, {\"b\": 2}, {\"a\": 3}])` will result in `{\"b\": 2, \"a\": 3}`.", Decl: types.NewFunction( types.Args( - types.NewArray( + types.Named("objects", types.NewArray( nil, types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), - ), + )), ), - types.A, + types.Named("output", types.A).Description("asymmetric recursive union of all objects in `objects`, merged from left to right, where conflicts are resolved by choosing the key from the right-hand object"), ), } -// ObjectRemove Removes specified keys from an object var ObjectRemove = &Builtin{ - Name: "object.remove", + Name: "object.remove", + Description: "Removes specified keys from an object.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("object", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), - types.NewAny( + )).Description("object to remove keys from"), + types.Named("keys", types.NewAny( types.NewArray(nil, types.A), types.NewSet(types.A), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), - ), + )).Description("keys to remove from x"), ), - types.A, + types.Named("output", types.A).Description("result of removing the specified `keys` from `object`"), ), } -// ObjectFilter filters the object by keeping only specified keys var ObjectFilter = &Builtin{ Name: "object.filter", + Description: "Filters the object by keeping only specified keys. 
" + + "For example: `object.filter({\"a\": {\"b\": \"x\", \"c\": \"y\"}, \"d\": \"z\"}, [\"a\"])` will result in `{\"a\": {\"b\": \"x\", \"c\": \"y\"}}`).", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("object", types.NewObject( nil, types.NewDynamicProperty(types.A, types.A), - ), - types.NewAny( + )).Description("object to filter keys"), + types.Named("keys", types.NewAny( types.NewArray(nil, types.A), types.NewSet(types.A), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), - ), + )), + ), + types.Named("filtered", types.A).Description("remaining data from `object` with only keys specified in `keys`"), + ), +} + +/* + * Encoding + */ +var encoding = category("encoding") + +var JSONMarshal = &Builtin{ + Name: "json.marshal", + Description: "Serializes the input term to JSON.", + Decl: types.NewFunction( + types.Args( + types.Named("x", types.A).Description("the term to serialize"), + ), + types.Named("y", types.S).Description("the JSON string representation of `x`"), + ), + Categories: encoding, +} + +var JSONUnmarshal = &Builtin{ + Name: "json.unmarshal", + Description: "Deserializes the input string.", + Decl: types.NewFunction( + types.Args( + types.Named("x", types.S).Description("a JSON string"), + ), + types.Named("y", types.A).Description("the term deseralized from `x`"), + ), + Categories: encoding, +} + +var JSONIsValid = &Builtin{ + Name: "json.is_valid", + Description: "Verifies the input string is a valid JSON document.", + Decl: types.NewFunction( + types.Args( + types.Named("x", types.S).Description("a JSON string"), ), - types.A, + types.Named("result", types.B).Description("`true` if `x` is valid JSON, `false` otherwise"), ), + Categories: encoding, } -// Base64Encode serializes the input string into base64 encoding. var Base64Encode = &Builtin{ - Name: "base64.encode", + Name: "base64.encode", + Description: "Serializes the input string into base64 encoding.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("base64 serialization of `x`"), ), + Categories: encoding, } -// Base64Decode deserializes the base64 encoded input string. var Base64Decode = &Builtin{ - Name: "base64.decode", + Name: "base64.decode", + Description: "Deserializes the base64 encoded input string.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("base64 deserialization of `x`"), ), + Categories: encoding, } -// Base64IsValid verifies the input string is base64 encoded. var Base64IsValid = &Builtin{ - Name: "base64.is_valid", + Name: "base64.is_valid", + Description: "Verifies the input string is base64 encoded.", Decl: types.NewFunction( - types.Args(types.S), - types.B, + types.Args( + types.Named("x", types.S), + ), + types.Named("result", types.B).Description("`true` if `x` is valid base64 encoded value, `false` otherwise"), ), + Categories: encoding, } -// Base64UrlEncode serializes the input string into base64url encoding. 
var Base64UrlEncode = &Builtin{ - Name: "base64url.encode", + Name: "base64url.encode", + Description: "Serializes the input string into base64url encoding.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("base64url serialization of `x`"), ), + Categories: encoding, } -// Base64UrlEncodeNoPad serializes the input string into base64url encoding without padding. var Base64UrlEncodeNoPad = &Builtin{ - Name: "base64url.encode_no_pad", + Name: "base64url.encode_no_pad", + Description: "Serializes the input string into base64url encoding without padding.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("base64url serialization of `x`"), ), + Categories: encoding, } -// Base64UrlDecode deserializes the base64url encoded input string. var Base64UrlDecode = &Builtin{ - Name: "base64url.decode", + Name: "base64url.decode", + Description: "Deserializes the base64url encoded input string.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("base64url deserialization of `x`"), ), + Categories: encoding, } -// URLQueryDecode decodes a URL encoded input string. var URLQueryDecode = &Builtin{ - Name: "urlquery.decode", + Name: "urlquery.decode", + Description: "Decodes a URL-encoded input string.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("URL-encoding deserialization of `x`"), ), + Categories: encoding, } -// URLQueryEncode encodes the input string into a URL encoded string. var URLQueryEncode = &Builtin{ - Name: "urlquery.encode", + Name: "urlquery.encode", + Description: "Encodes the input string into a URL-encoded string.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("URL-encoding serialization of `x`"), ), + Categories: encoding, } -// URLQueryEncodeObject encodes the given JSON into a URL encoded query string. var URLQueryEncodeObject = &Builtin{ - Name: "urlquery.encode_object", + Name: "urlquery.encode_object", + Description: "Encodes the given object into a URL encoded query string.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("object", types.NewObject( nil, types.NewDynamicProperty( types.S, types.NewAny( types.S, types.NewArray(nil, types.S), - types.NewSet(types.S))))), - types.S, + types.NewSet(types.S)))))), + types.Named("y", types.S).Description("the URL-encoded serialization of `object`"), ), + Categories: encoding, } -// URLQueryDecodeObject decodes the given URL query string into an object. var URLQueryDecodeObject = &Builtin{ - Name: "urlquery.decode_object", + Name: "urlquery.decode_object", + Description: "Decodes the given URL query string into an object.", Decl: types.NewFunction( - types.Args(types.S), - types.NewObject(nil, types.NewDynamicProperty( + types.Args( + types.Named("x", types.S).Description("the query string"), + ), + types.Named("object", types.NewObject(nil, types.NewDynamicProperty( types.S, - types.NewArray(nil, types.S))), + types.NewArray(nil, types.S)))).Description("the resulting object"), ), + Categories: encoding, } -// YAMLMarshal serializes the input term. 
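Likewise, a sketch of the URL-query and base64url helpers just above (illustrative only; the `encode_object` expectation assumes OPA's deterministic sorted key order for objects):

```rego
package builtins_demo

urlquery_ok {
	urlquery.encode("a b") == "a+b"
	urlquery.encode_object({"a": "1", "b": ["2", "3"]}) == "a=1&b=2&b=3"
	urlquery.decode_object("a=1&b=2&b=3") == {"a": ["1"], "b": ["2", "3"]}
	base64url.encode("OPA") == "T1BB"
}
```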
var YAMLMarshal = &Builtin{ - Name: "yaml.marshal", + Name: "yaml.marshal", + Description: "Serializes the input term to YAML.", Decl: types.NewFunction( - types.Args(types.A), - types.S, + types.Args( + types.Named("x", types.A).Description("the term to serialize"), + ), + types.Named("y", types.S).Description("the YAML string representation of `x`"), ), + Categories: encoding, } -// YAMLUnmarshal deserializes the input string. var YAMLUnmarshal = &Builtin{ - Name: "yaml.unmarshal", + Name: "yaml.unmarshal", + Description: "Deserializes the input string.", Decl: types.NewFunction( - types.Args(types.S), - types.A, + types.Args( + types.Named("x", types.S).Description("a YAML string"), + ), + types.Named("y", types.A).Description("the term deserialized from `x`"), ), + Categories: encoding, } // YAMLIsValid verifies the input string is a valid YAML document. var YAMLIsValid = &Builtin{ - Name: "yaml.is_valid", + Name: "yaml.is_valid", + Description: "Verifies the input string is a valid YAML document.", Decl: types.NewFunction( - types.Args(types.S), - types.B, + types.Args( + types.Named("x", types.S).Description("a YAML string"), + ), + types.Named("result", types.B).Description("`true` if `x` is valid YAML, `false` otherwise"), ), + Categories: encoding, } -// HexEncode serializes the input string into hex encoding. var HexEncode = &Builtin{ - Name: "hex.encode", + Name: "hex.encode", + Description: "Serializes the input string using hex-encoding.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("serialization of `x` using hex-encoding"), ), + Categories: encoding, } -// HexDecode deserializes the hex encoded input string. var HexDecode = &Builtin{ - Name: "hex.decode", + Name: "hex.decode", + Description: "Deserializes the hex-encoded input string.", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S).Description("a hex-encoded string"), + ), + types.Named("y", types.S).Description("deserialized from `x`"), ), + Categories: encoding, } /** * Tokens */ +var tokensCat = category("tokens") -// JWTDecode decodes a JSON Web Token and outputs it as an Object. var JWTDecode = &Builtin{ - Name: "io.jwt.decode", + Name: "io.jwt.decode", + Description: "Decodes a JSON Web Token and outputs it as an object.", Decl: types.NewFunction( - types.Args(types.S), - types.NewArray([]types.Type{ + types.Args( + types.Named("jwt", types.S).Description("JWT token to decode"), + ), + types.Named("output", types.NewArray([]types.Type{ types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.S, - }, nil), + }, nil)).Description("`[header, payload, sig]`, where `header` and `payload` are objects; `sig` is the hexadecimal representation of the signature on the token."), ), + Categories: tokensCat, }
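A sketch of `io.jwt.decode` usage (the token location `input.token` and the package name are assumptions for illustration):

```rego
package builtins_demo

# parts == [header, payload, sig]
parts := io.jwt.decode(input.token)

alg := parts[0].alg   # JOSE header field, e.g. "HS256"
claims := parts[1]    # the JWT claim set
signature := parts[2] # hex-encoded signature
```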
-// JWTVerifyRS256 verifies if a RS256 JWT signature is valid or not. var JWTVerifyRS256 = &Builtin{ - Name: "io.jwt.verify_rs256", + Name: "io.jwt.verify_rs256", + Description: "Verifies if an RS256 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyRS384 verifies if a RS384 JWT signature is valid or not. var JWTVerifyRS384 = &Builtin{ - Name: "io.jwt.verify_rs384", + Name: "io.jwt.verify_rs384", + Description: "Verifies if an RS384 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyRS512 verifies if a RS512 JWT signature is valid or not. var JWTVerifyRS512 = &Builtin{ - Name: "io.jwt.verify_rs512", + Name: "io.jwt.verify_rs512", + Description: "Verifies if an RS512 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyPS256 verifies if a PS256 JWT signature is valid or not. var JWTVerifyPS256 = &Builtin{ - Name: "io.jwt.verify_ps256", + Name: "io.jwt.verify_ps256", + Description: "Verifies if a PS256 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyPS384 verifies if a PS384 JWT signature is valid or not. var JWTVerifyPS384 = &Builtin{ - Name: "io.jwt.verify_ps384", + Name: "io.jwt.verify_ps384", + Description: "Verifies if a PS384 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, }
-// JWTVerifyPS512 verifies if a PS512 JWT signature is valid or not. var JWTVerifyPS512 = &Builtin{ - Name: "io.jwt.verify_ps512", + Name: "io.jwt.verify_ps512", + Description: "Verifies if a PS512 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyES256 verifies if a ES256 JWT signature is valid or not. var JWTVerifyES256 = &Builtin{ - Name: "io.jwt.verify_es256", + Name: "io.jwt.verify_es256", + Description: "Verifies if an ES256 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyES384 verifies if a ES384 JWT signature is valid or not. var JWTVerifyES384 = &Builtin{ - Name: "io.jwt.verify_es384", + Name: "io.jwt.verify_es384", + Description: "Verifies if an ES384 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyES512 verifies if a ES512 JWT signature is valid or not. var JWTVerifyES512 = &Builtin{ - Name: "io.jwt.verify_es512", + Name: "io.jwt.verify_es512", + Description: "Verifies if an ES512 JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("certificate", types.S).Description("PEM encoded certificate, PEM encoded public key, or the JWK key (set) used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyHS256 verifies if a HS256 (secret) JWT signature is valid or not. var JWTVerifyHS256 = &Builtin{ - Name: "io.jwt.verify_hs256", + Name: "io.jwt.verify_hs256", + Description: "Verifies if an HS256 (secret) JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("secret", types.S).Description("plain text secret used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, }
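A sketch of the HMAC verifier above together with the one-step `io.jwt.decode_verify` defined further below (token, secret, and issuer are assumptions for illustration):

```rego
package builtins_demo

token_valid {
	io.jwt.verify_hs256(input.token, "my-secret")
}

claims := payload {
	[valid, _, payload] := io.jwt.decode_verify(input.token, {
		"secret": "my-secret",
		"iss": "my-issuer", # also constrain the issuer claim
	})
	valid
}
```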
-// JWTVerifyHS384 verifies if a HS384 (secret) JWT signature is valid or not. var JWTVerifyHS384 = &Builtin{ - Name: "io.jwt.verify_hs384", + Name: "io.jwt.verify_hs384", + Description: "Verifies if an HS384 (secret) JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("secret", types.S).Description("plain text secret used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTVerifyHS512 verifies if a HS512 (secret) JWT signature is valid or not. var JWTVerifyHS512 = &Builtin{ - Name: "io.jwt.verify_hs512", + Name: "io.jwt.verify_hs512", + Description: "Verifies if an HS512 (secret) JWT signature is valid.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified"), + types.Named("secret", types.S).Description("plain text secret used to verify the signature"), ), - types.B, + types.Named("result", types.B).Description("`true` if the signature is valid, `false` otherwise"), ), + Categories: tokensCat, } -// JWTDecodeVerify verifies a JWT signature under parameterized constraints and decodes the claims if it is valid. var JWTDecodeVerify = &Builtin{ Name: "io.jwt.decode_verify", + Description: `Verifies a JWT signature under parameterized constraints and decodes the claims if it is valid. +Supports the following algorithms: HS256, HS384, HS512, RS256, RS384, RS512, ES256, ES384, ES512, PS256, PS384 and PS512.`, Decl: types.NewFunction( types.Args( - types.S, - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Named("jwt", types.S).Description("JWT token whose signature is to be verified and whose claims are to be checked"), + types.Named("constraints", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("claim verification constraints"), ), - types.NewArray([]types.Type{ + types.Named("output", types.NewArray([]types.Type{ types.B, types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), - }, nil), + }, nil)).Description("`[valid, header, payload]`: if the input token is verified and meets the requirements of `constraints` then `valid` is `true`; `header` and `payload` are objects containing the JOSE header and the JWT claim set; otherwise, `valid` is `false`, `header` and `payload` are `{}`"), ), + Categories: tokensCat, } -// JWTEncodeSignRaw encodes and optionally sign a JSON Web Token. -// Inputs are protected headers, payload, secret +var tokenSign = category("tokensign") + var JWTEncodeSignRaw = &Builtin{ - Name: "io.jwt.encode_sign_raw", + Name: "io.jwt.encode_sign_raw", + Description: "Encodes and optionally signs a JSON Web Token.", Decl: types.NewFunction( types.Args( - types.S, - types.S, - types.S, + types.Named("headers", types.S).Description("JWS Protected Header"), + types.Named("payload", types.S).Description("JWS Payload"), + types.Named("key", types.S).Description("JSON Web Key (RFC7517)"), ), - types.S, + types.Named("output", types.S).Description("signed JWT"), ), + Categories: tokenSign, } -// JWTEncodeSign encodes and optionally sign a JSON Web Token. -// Inputs are protected headers, payload, secret var JWTEncodeSign = &Builtin{ - Name: "io.jwt.encode_sign", + Name: "io.jwt.encode_sign", + Description: "Encodes and optionally signs a JSON Web Token. 
Inputs are taken as objects, not encoded strings (see `io.jwt.encode_sign_raw`).", Decl: types.NewFunction( types.Args( - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Named("headers", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("JWS Protected Header"), + types.Named("payload", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("JWS Payload"), + types.Named("key", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("JSON Web Key (RFC7517)"), ), - types.S, + types.Named("output", types.S).Description("signed JWT"), ), + Categories: tokenSign, } /** * Time */ -// NowNanos returns the current time since epoch in nanoseconds. var NowNanos = &Builtin{ - Name: "time.now_ns", + Name: "time.now_ns", + Description: "Returns the current time since epoch in nanoseconds.", Decl: types.NewFunction( nil, - types.N, + types.Named("now", types.N).Description("nanoseconds since epoch"), ), } -// ParseNanos returns the time in nanoseconds parsed from the string in the given format. var ParseNanos = &Builtin{ - Name: "time.parse_ns", + Name: "time.parse_ns", + Description: "Returns the time in nanoseconds parsed from the string in the given format. `undefined` if the result would be outside the valid time range that can fit within an `int64`.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("layout", types.S).Description("format used for parsing, see the [Go `time` package documentation](https://golang.org/pkg/time/#Parse) for more details"), + types.Named("value", types.S).Description("input to parse according to `layout`"), ), - types.N, + types.Named("ns", types.N).Description("`value` in nanoseconds since epoch"), ), } -// ParseRFC3339Nanos returns the time in nanoseconds parsed from the string in RFC3339 format. var ParseRFC3339Nanos = &Builtin{ - Name: "time.parse_rfc3339_ns", + Name: "time.parse_rfc3339_ns", + Description: "Returns the time in nanoseconds parsed from the string in RFC3339 format. `undefined` if the result would be outside the valid time range that can fit within an `int64`.", Decl: types.NewFunction( - types.Args(types.S), - types.N, + types.Args( + types.Named("value", types.S), + ), + types.Named("ns", types.N).Description("`value` in nanoseconds since epoch"), ), } -// ParseDurationNanos returns the duration in nanoseconds represented by a duration string. -// Duration string is similar to the Go time.ParseDuration string var ParseDurationNanos = &Builtin{ - Name: "time.parse_duration_ns", + Name: "time.parse_duration_ns", + Description: "Returns the duration in nanoseconds represented by a string.", Decl: types.NewFunction( - types.Args(types.S), - types.N, + types.Args( + types.Named("duration", types.S).Description("a duration like \"3m\"; see the [Go `time` package documentation](https://golang.org/pkg/time/#ParseDuration) for more details"), + ), + types.Named("ns", types.N).Description("the `duration` in nanoseconds"), ), }
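A sketch of the time parsers above (illustrative only; the layout string is Go's reference time, and the timestamp is arbitrary):

```rego
package builtins_demo

time_ok {
	ns := time.parse_rfc3339_ns("2022-06-06T21:44:04Z")
	ns == time.parse_ns("2006-01-02T15:04:05Z07:00", "2022-06-06T21:44:04Z")
	time.parse_duration_ns("3m") == 180000000000
	ns < time.now_ns() # assuming the wall clock is past mid-2022
}
```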
-// Date returns the [year, month, day] for the nanoseconds since epoch. var Date = &Builtin{ - Name: "time.date", + Name: "time.date", + Description: "Returns the `[year, month, day]` for the nanoseconds since epoch.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("x", types.NewAny( types.N, types.NewArray([]types.Type{types.N, types.S}, nil), - ), + )).Description("a number representing the nanoseconds since the epoch (UTC); or a two-element array of the nanoseconds, and a timezone string"), ), - types.NewArray([]types.Type{types.N, types.N, types.N}, nil), + types.Named("date", types.NewArray([]types.Type{types.N, types.N, types.N}, nil)).Description("an array of `year`, `month` (1-12), and `day` (1-31)"), ), } -// Clock returns the [hour, minute, second] of the day for the nanoseconds since epoch. var Clock = &Builtin{ - Name: "time.clock", + Name: "time.clock", + Description: "Returns the `[hour, minute, second]` of the day for the nanoseconds since epoch.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("x", types.NewAny( types.N, types.NewArray([]types.Type{types.N, types.S}, nil), - ), + )).Description("a number representing the nanoseconds since the epoch (UTC); or a two-element array of the nanoseconds, and a timezone string"), ), - types.NewArray([]types.Type{types.N, types.N, types.N}, nil), + types.Named("output", types.NewArray([]types.Type{types.N, types.N, types.N}, nil)). + Description("the `hour`, `minute` (0-59), and `second` (0-59) representing the time of day for the nanoseconds since epoch in the supplied timezone (or UTC)"), ), } -// Weekday returns the day of the week (Monday, Tuesday, ...) for the nanoseconds since epoch. var Weekday = &Builtin{ - Name: "time.weekday", + Name: "time.weekday", + Description: "Returns the day of the week (Monday, Tuesday, ...) for the nanoseconds since epoch.", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("x", types.NewAny( types.N, types.NewArray([]types.Type{types.N, types.S}, nil), - ), + )).Description("a number representing the nanoseconds since the epoch (UTC); or a two-element array of the nanoseconds, and a timezone string"), ), - types.S, + types.Named("day", types.S).Description("the weekday represented by `x` nanoseconds since the epoch in the supplied timezone (or UTC)"), ), } -// AddDate returns the nanoseconds since epoch after adding years, months and days to nanoseconds. var AddDate = &Builtin{ - Name: "time.add_date", + Name: "time.add_date", + Description: "Returns the nanoseconds since epoch after adding years, months and days to nanoseconds. 
`undefined` if the result would be outside the valid time range that can fit within an `int64`.", Decl: types.NewFunction( types.Args( - types.N, - types.N, - types.N, - types.N, + types.Named("ns", types.N).Description("nanoseconds since the epoch"), + types.Named("years", types.N), + types.Named("months", types.N), + types.Named("days", types.N), ), - types.N, + types.Named("output", types.N).Description("nanoseconds since the epoch representing the input time, with years, months and days added"), ), } -// Diff returns the difference [years, months, days, hours, minutes, seconds] between two unix timestamps in nanoseconds var Diff = &Builtin{ - Name: "time.diff", + Name: "time.diff", + Description: "Returns the difference between two unix timestamps in nanoseconds (with optional timezone strings).", Decl: types.NewFunction( types.Args( - types.NewAny( + types.Named("ns1", types.NewAny( types.N, types.NewArray([]types.Type{types.N, types.S}, nil), - ), - types.NewAny( + )), + types.Named("ns2", types.NewAny( types.N, types.NewArray([]types.Type{types.N, types.S}, nil), - ), + )), ), - types.NewArray([]types.Type{types.N, types.N, types.N, types.N, types.N, types.N}, nil), + types.Named("output", types.NewArray([]types.Type{types.N, types.N, types.N, types.N, types.N, types.N}, nil)).Description("difference between `ns1` and `ns2` (in their supplied timezones, if supplied, or UTC) as array of numbers: `[years, months, days, hours, minutes, seconds]`"), ), } @@ -1827,161 +2094,169 @@ var Diff = &Builtin{ * Crypto. */ -// CryptoX509ParseCertificates returns one or more certificates from the given -// base64 encoded string containing DER encoded certificates that have been -// concatenated. var CryptoX509ParseCertificates = &Builtin{ - Name: "crypto.x509.parse_certificates", + Name: "crypto.x509.parse_certificates", + Description: "Returns one or more certificates from the given base64 encoded string containing DER encoded certificates that have been concatenated.", Decl: types.NewFunction( - types.Args(types.S), - types.NewArray(nil, types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))), + types.Args( + types.Named("certs", types.S).Description("base64 encoded DER or PEM data containing one or more certificates or a PEM string of one or more certificates"), + ), + types.Named("output", types.NewArray(nil, types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)))).Description("parsed X.509 certificates represented as objects"), ), } -// CryptoX509ParseAndVerifyCertificates returns one or more certificates from the given -// string containing PEM or base64 encoded DER certificates after verifying the supplied -// certificates form a complete certificate chain back to a trusted root. -// -// The first certificate is treated as the root and the last is treated as the leaf, -// with all others being treated as intermediates var CryptoX509ParseAndVerifyCertificates = &Builtin{ Name: "crypto.x509.parse_and_verify_certificates", + Description: `Returns one or more certificates from the given string containing PEM +or base64 encoded DER certificates after verifying the supplied certificates form a complete +certificate chain back to a trusted root. 
+ +The first certificate is treated as the root and the last is treated as the leaf, +with all others being treated as intermediates.`, Decl: types.NewFunction( - types.Args(types.S), - types.NewArray([]types.Type{ + types.Args( + types.Named("certs", types.S).Description("base64 encoded DER or PEM data containing two or more certificates where the first is a root CA, the last is a leaf certificate, and all others are intermediate CAs"), + ), + types.Named("output", types.NewArray([]types.Type{ types.B, types.NewArray(nil, types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))), - }, nil), + }, nil)).Description("array of `[valid, certs]`: if the input certificate chain could be verified then `valid` is `true` and `certs` is an array of X.509 certificates represented as objects; if the input certificate chain could not be verified then `valid` is `false` and `certs` is `[]`"), ), } -// CryptoX509ParseCertificateRequest returns a PKCS #10 certificate signing -// request from the given PEM-encoded PKCS#10 certificate signing request. var CryptoX509ParseCertificateRequest = &Builtin{ - Name: "crypto.x509.parse_certificate_request", + Name: "crypto.x509.parse_certificate_request", + Description: "Returns a PKCS #10 certificate signing request from the given PEM-encoded PKCS#10 certificate signing request.", Decl: types.NewFunction( - types.Args(types.S), - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Args( + types.Named("csr", types.S).Description("base64 string containing either a PEM encoded or DER CSR or a string containing a PEM CSR"), + ), + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("X.509 CSR represented as an object"), ), } -// CryptoX509ParseRSAPrivateKey returns a JWK for signing a JWT from the given -// PEM-encoded RSA private key. 
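The base64+DER path that crypto.x509.parse_certificates describes maps onto the Go standard library directly. A hedged sketch that mirrors (not reproduces) the builtin's decoding step; the input string is a placeholder, not a real certificate:

package main

import (
    "crypto/x509"
    "encoding/base64"
    "fmt"
)

// parseConcatenatedDER decodes a base64 string and parses the concatenated
// DER certificates inside it, as the builtin's description outlines.
func parseConcatenatedDER(b64 string) ([]*x509.Certificate, error) {
    der, err := base64.StdEncoding.DecodeString(b64)
    if err != nil {
        return nil, err
    }
    return x509.ParseCertificates(der)
}

func main() {
    // "MIIB..." is a placeholder; a real input would be base64-encoded DER.
    certs, err := parseConcatenatedDER("MIIB...")
    fmt.Println(len(certs), err)
}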
var CryptoX509ParseRSAPrivateKey = &Builtin{ - Name: "crypto.x509.parse_rsa_private_key", + Name: "crypto.x509.parse_rsa_private_key", + Description: "Returns a JWK for signing a JWT from the given PEM-encoded RSA private key.", Decl: types.NewFunction( - types.Args(types.S), - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Args( + types.Named("pem", types.S).Description("base64 string containing a PEM encoded RSA private key"), + ), + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))).Description("JWK as an object"), ), } -// CryptoMd5 returns a string representing the input string hashed with the md5 function var CryptoMd5 = &Builtin{ - Name: "crypto.md5", + Name: "crypto.md5", + Description: "Returns a string representing the input string hashed with the MD5 function", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("MD5-hash of `x`"), ), } -// CryptoSha1 returns a string representing the input string hashed with the sha1 function var CryptoSha1 = &Builtin{ - Name: "crypto.sha1", + Name: "crypto.sha1", + Description: "Returns a string representing the input string hashed with the SHA1 function", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("SHA1-hash of `x`"), ), } -// CryptoSha256 returns a string representing the input string hashed with the sha256 function var CryptoSha256 = &Builtin{ - Name: "crypto.sha256", + Name: "crypto.sha256", + Description: "Returns a string representing the input string hashed with the SHA256 function", Decl: types.NewFunction( - types.Args(types.S), - types.S, + types.Args( + types.Named("x", types.S), + ), + types.Named("y", types.S).Description("SHA256-hash of `x`"), ), } -// CryptoHmacMd5 returns a string representing the MD-5 HMAC of the input message using the input key -// Inputs are message, key var CryptoHmacMd5 = &Builtin{ - Name: "crypto.hmac.md5", + Name: "crypto.hmac.md5", + Description: "Returns a string representing the MD5 HMAC of the input message using the input key.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("x", types.S).Description("input string"), + types.Named("key", types.S).Description("key to use"), ), - types.S, + types.Named("y", types.S).Description("MD5-HMAC of `x`"), ), } -// CryptoHmacSha1 returns a string representing the SHA-1 HMAC of the input message using the input key -// Inputs are message, key var CryptoHmacSha1 = &Builtin{ - Name: "crypto.hmac.sha1", + Name: "crypto.hmac.sha1", + Description: "Returns a string representing the SHA1 HMAC of the input message using the input key.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("x", types.S).Description("input string"), + types.Named("key", types.S).Description("key to use"), ), - types.S, + types.Named("y", types.S).Description("SHA1-HMAC of `x`"), ), } -// CryptoHmacSha256 returns a string representing the SHA-256 HMAC of the input message using the input key -// Inputs are message, key var CryptoHmacSha256 = &Builtin{ - Name: "crypto.hmac.sha256", + Name: "crypto.hmac.sha256", + Description: "Returns a string representing the SHA256 HMAC of the input message using the input key.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("x", types.S).Description("input string"), + types.Named("key", types.S).Description("key to use"), 
), - types.S, + types.Named("y", types.S).Description("SHA256-HMAC of `x`"), ), } -// CryptoHmacSha512 returns a string representing the SHA-512 HMAC of the input message using the input key -// Inputs are message, key var CryptoHmacSha512 = &Builtin{ - Name: "crypto.hmac.sha512", + Name: "crypto.hmac.sha512", + Description: "Returns a string representing the SHA512 HMAC of the input message using the input key.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("x", types.S).Description("input string"), + types.Named("key", types.S).Description("key to use"), ), - types.S, + types.Named("y", types.S).Description("SHA512-HMAC of `x`"), ), } /** * Graphs. */ +var graphs = category("graph") -// WalkBuiltin generates [path, value] tuples for all nested documents -// (recursively). var WalkBuiltin = &Builtin{ - Name: "walk", - Relation: true, + Name: "walk", + Relation: true, + Description: "Generates `[path, value]` tuples for all nested documents of `x` (recursively). Queries can use `walk` to traverse documents nested under `x`.", Decl: types.NewFunction( - types.Args(types.A), - types.NewArray( + types.Args( + types.Named("x", types.A), + ), + types.Named("output", types.NewArray( []types.Type{ types.NewArray(nil, types.A), types.A, }, nil, - ), + )).Description("pairs of `path` and `value`: `path` is an array representing the pointer to `value` in `x`"), ), + Categories: graphs, } -// ReachableBuiltin computes the set of reachable nodes in the graph from a set -// of starting nodes. var ReachableBuiltin = &Builtin{ - Name: "graph.reachable", + Name: "graph.reachable", + Description: "Computes the set of reachable nodes in the graph from a set of starting nodes.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("graph", types.NewObject( nil, types.NewDynamicProperty( types.A, @@ -1989,19 +2264,19 @@ var ReachableBuiltin = &Builtin{ types.NewSet(types.A), types.NewArray(nil, types.A)), )), - types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A)), + ).Description("object containing a set or array of neighboring vertices"), + types.Named("initial", types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A))).Description("set or array of root vertices"), ), - types.NewSet(types.A), + types.Named("output", types.NewSet(types.A)).Description("set of vertices reachable from the `initial` vertices in the directed `graph`"), ), } -// ReachablePathsBuiltin computes the set of reachable paths in the graph from a set -// of starting nodes. var ReachablePathsBuiltin = &Builtin{ - Name: "graph.reachable_paths", + Name: "graph.reachable_paths", + Description: "Computes the set of reachable paths in the graph from a set of starting nodes.", Decl: types.NewFunction( types.Args( - types.NewObject( + types.Named("graph", types.NewObject( nil, types.NewDynamicProperty( types.A, @@ -2009,109 +2284,100 @@ var ReachablePathsBuiltin = &Builtin{ types.NewSet(types.A), types.NewArray(nil, types.A)), )), - types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A)), - ), - types.NewSet(types.NewArray(nil, types.A)), - ), -} - -/** - * Sorting - */ - -// Sort returns a sorted array. -var Sort = &Builtin{ - Name: "sort", - Decl: types.NewFunction( - types.Args( - types.NewAny( - types.NewArray(nil, types.A), - types.NewSet(types.A), - ), + ).Description("object containing a set or array of root vertices"), + types.Named("initial", types.NewAny(types.NewSet(types.A), types.NewArray(nil, types.A))).Description("initial paths"), // TODO(sr): copied. 
is that correct? ), - types.NewArray(nil, types.A), + types.Named("output", types.NewSet(types.NewArray(nil, types.A))).Description("paths reachable from the `initial` vertices in the directed `graph`"), ), } /** * Type */ +var typesCat = category("types") -// IsNumber returns true if the input value is a number var IsNumber = &Builtin{ - Name: "is_number", + Name: "is_number", + Description: "Returns `true` if the input value is a number.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is a number, `false` otherwise."), ), + Categories: typesCat, } -// IsString returns true if the input value is a string. var IsString = &Builtin{ - Name: "is_string", + Name: "is_string", + Description: "Returns `true` if the input value is a string.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is a string, `false` otherwise."), ), + Categories: typesCat, } -// IsBoolean returns true if the input value is a boolean. var IsBoolean = &Builtin{ - Name: "is_boolean", + Name: "is_boolean", + Description: "Returns `true` if the input value is a boolean.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is a boolean, `false` otherwise."), ), + Categories: typesCat, } -// IsArray returns true if the input value is an array. var IsArray = &Builtin{ - Name: "is_array", + Name: "is_array", + Description: "Returns `true` if the input value is an array.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is an array, `false` otherwise."), ), + Categories: typesCat, } -// IsSet returns true if the input value is a set. var IsSet = &Builtin{ - Name: "is_set", + Name: "is_set", + Description: "Returns `true` if the input value is a set.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is a set, `false` otherwise."), ), + Categories: typesCat, } -// IsObject returns true if the input value is an object. var IsObject = &Builtin{ - Name: "is_object", + Name: "is_object", + Description: "Returns `true` if the input value is an object.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is an object, `false` otherwise."), ), + Categories: typesCat, } -// IsNull returns true if the input value is null. var IsNull = &Builtin{ - Name: "is_null", + Name: "is_null", + Description: "Returns `true` if the input value is null.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("x", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `x` is null, `false` otherwise."), ), + Categories: typesCat, } /** @@ -2135,14 +2401,91 @@ var TypeNameBuiltin = &Builtin{ * HTTP Request */ -// HTTPSend returns a HTTP response to the given HTTP request.
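graph.reachable, declared above, is easiest to see against a tiny adjacency object. A minimal sketch via the rego package; the graph literal is a made-up example:

package main

import (
    "context"
    "fmt"

    "github.com/open-policy-agent/opa/rego"
)

func main() {
    // "a" reaches "b" and transitively "c"; the result is the set
    // {"a", "b", "c"}, rendered as a JSON array in the Go result.
    q := `graph.reachable({"a": ["b"], "b": ["c"], "c": []}, {"a"})`
    rs, err := rego.New(rego.Query(q)).Eval(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println(rs[0].Expressions[0].Value)
}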
var HTTPSend = &Builtin{ - Name: "http.send", + Name: "http.send", + Description: "Returns a HTTP response to the given HTTP request.", Decl: types.NewFunction( types.Args( - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Named("request", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))), ), - types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), + types.Named("response", types.NewObject(nil, types.NewDynamicProperty(types.A, types.A))), + ), +} + +/** + * GraphQL + */ + +// GraphQLParse returns a pair of AST objects from parsing/validation. +var GraphQLParse = &Builtin{ + Name: "graphql.parse", + Description: "Returns AST objects for a given GraphQL query and schema after validating the query against the schema. Returns undefined if errors were encountered during parsing or validation.", + Decl: types.NewFunction( + types.Args( + types.Named("query", types.S), + types.Named("schema", types.S), + ), + types.Named("output", types.NewArray([]types.Type{ + types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), + types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), + }, nil)).Description("`output` is of the form `[query_ast, schema_ast]`. If the GraphQL query is valid given the provided schema, then `query_ast` and `schema_ast` are objects describing the ASTs for the query and schema."), + ), +} + +// GraphQLParseAndVerify returns a boolean and a pair of AST object from parsing/validation. +var GraphQLParseAndVerify = &Builtin{ + Name: "graphql.parse_and_verify", + Description: "Returns a boolean indicating success or failure alongside the parsed ASTs for a given GraphQL query and schema after validating the query against the schema.", + Decl: types.NewFunction( + types.Args( + types.Named("query", types.S), + types.Named("schema", types.S), + ), + types.Named("output", types.NewArray([]types.Type{ + types.B, + types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), + types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), + }, nil)).Description(" `output` is of the form `[valid, query_ast, schema_ast]`. If the query is valid given the provided schema, then `valid` is `true`, and `query_ast` and `schema_ast` are objects describing the ASTs for the GraphQL query and schema. Otherwise, `valid` is `false` and `query_ast` and `schema_ast` are `{}`."), + ), +} + +// GraphQLParseQuery parses the input GraphQL query and returns a JSON +// representation of its AST. +var GraphQLParseQuery = &Builtin{ + Name: "graphql.parse_query", + Description: "Returns an AST object for a GraphQL query.", + Decl: types.NewFunction( + types.Args( + types.Named("query", types.S), + ), + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.A, types.A))).Description("AST object for the GraphQL query."), + ), +} + +// GraphQLParseSchema parses the input GraphQL schema and returns a JSON +// representation of its AST. +var GraphQLParseSchema = &Builtin{ + Name: "graphql.parse_schema", + Description: "Returns an AST object for a GraphQL schema.", + Decl: types.NewFunction( + types.Args( + types.Named("schema", types.S), + ), + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.A, types.A))).Description("AST object for the GraphQL schema."), + ), +} + +// GraphQLIsValid returns true if a GraphQL query is valid with a given +// schema, and returns false for all other inputs. 
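The GraphQL builtins above are new with this OPA bump (they are also listed in the vendored v0.41.0 capabilities file below). A minimal sketch of graphql.parse_query; the query string is illustrative:

package main

import (
    "context"
    "fmt"

    "github.com/open-policy-agent/opa/rego"
)

func main() {
    // Parses a trivial GraphQL query; the result is the query AST as a
    // nested object, per the declaration above.
    q := `graphql.parse_query("{ hello }")`
    rs, err := rego.New(rego.Query(q)).Eval(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println(rs[0].Expressions[0].Value)
}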
+var GraphQLIsValid = &Builtin{ + Name: "graphql.is_valid", + Description: "Checks that a GraphQL query is valid against a given schema.", + Decl: types.NewFunction( + types.Args( + types.Named("query", types.S), + types.Named("schema", types.S), + ), + types.Named("output", types.B).Description("`true` if the query is valid under the given schema. `false` otherwise."), ), } @@ -2150,34 +2493,37 @@ var HTTPSend = &Builtin{ * Rego */ -// RegoParseModule parses the input Rego file and returns a JSON representation -// of the AST. var RegoParseModule = &Builtin{ - Name: "rego.parse_module", + Name: "rego.parse_module", + Description: "Parses the input Rego string and returns an object representation of the AST.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("filename", types.S).Description("file name to attach to AST nodes' locations"), + types.Named("rego", types.S).Description("Rego module"), ), - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), // TODO(tsandall): import AST schema + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))), // TODO(tsandall): import AST schema ), } -// RegoMetadataChain returns the chain of metadata for the active rule var RegoMetadataChain = &Builtin{ Name: "rego.metadata.chain", + Description: `Returns the chain of metadata for the active rule. +Ordered starting at the active rule, going outward to the most distant node in its package ancestry. +A chain entry is a JSON document with two members: "path", an array representing the path of the node; and "annotations", a JSON document containing the annotations declared for the node. +The first entry in the chain always points to the active rule, even if it has no declared annotations (in which case the "annotations" member is not present).`, Decl: types.NewFunction( types.Args(), - types.NewArray(nil, types.A), + types.Named("chain", types.NewArray(nil, types.A)).Description("each array entry represents a node in the path ancestry (chain) of the active rule that also has declared annotations"), ), } // RegoMetadataRule returns the metadata for the active rule var RegoMetadataRule = &Builtin{ - Name: "rego.metadata.rule", + Name: "rego.metadata.rule", + Description: "Returns annotations declared for the active rule and using the _rule_ scope.", Decl: types.NewFunction( types.Args(), - types.A, + types.Named("output", types.A).Description("\"rule\" scope annotations for this rule; empty object if no annotations exist"), ), } @@ -2185,147 +2531,131 @@ var RegoMetadataRule = &Builtin{ * OPA */ -// OPARuntime returns an object containing OPA runtime information such as the -// configuration that OPA was booted with. var OPARuntime = &Builtin{ - Name: "opa.runtime", + Name: "opa.runtime", + Description: "Returns an object that describes the runtime environment where OPA is deployed.", Decl: types.NewFunction( nil, - types.NewObject(nil, types.NewDynamicProperty(types.S, types.A)), + types.Named("output", types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))). + Description("includes a `config` key if OPA was started with a configuration file; an `env` key containing the environment variables that the OPA process was started with; includes `version` and `commit` keys containing the version and build commit of OPA."), ), } /** * Trace */ +var tracing = category("tracing") -// Trace prints a note that is included in the query explanation. 
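rego.parse_module, declared above, follows the same evaluation pattern. A sketch with a made-up filename and module source; note the \n escapes are interpreted by the Rego string literal, not by Go:

package main

import (
    "context"
    "fmt"

    "github.com/open-policy-agent/opa/rego"
)

func main() {
    // Returns an object representation of the module's AST, with the
    // filename attached to node locations as described above.
    q := `rego.parse_module("example.rego", "package example\nallow { true }")`
    rs, err := rego.New(rego.Query(q)).Eval(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println(rs[0].Expressions[0].Value)
}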
var Trace = &Builtin{ - Name: "trace", - Decl: types.NewFunction( - types.Args( - types.S, - ), - types.B, - ), -} - -/** - * Set - */ - -// Intersection returns the intersection of the given input sets -var Intersection = &Builtin{ - Name: "intersection", - Decl: types.NewFunction( - types.Args( - types.NewSet(types.NewSet(types.A)), - ), - types.NewSet(types.A), - ), -} - -// Union returns the union of the given input sets -var Union = &Builtin{ - Name: "union", + Name: "trace", + Description: "Emits `note` as a `Note` event in the query explanation. Query explanations show the exact expressions evaluated by OPA during policy execution. For example, `trace(\"Hello There!\")` includes `Note \"Hello There!\"` in the query explanation. To include variables in the message, use `sprintf`. For example, `person := \"Bob\"; trace(sprintf(\"Hello There! %v\", [person]))` will emit `Note \"Hello There! Bob\"` inside of the explanation.", Decl: types.NewFunction( types.Args( - types.NewSet(types.NewSet(types.A)), + types.Named("note", types.S).Description("the note to include"), ), - types.NewSet(types.A), + types.Named("result", types.B).Description("always `true`"), ), + Categories: tracing, } /** * Glob */ -// GlobMatch - not to be confused with regex.globs_match - parses and matches strings against the glob notation. var GlobMatch = &Builtin{ - Name: "glob.match", + Name: "glob.match", + Description: "Parses and matches strings against the glob notation. Not to be confused with `regex.globs_match`.", Decl: types.NewFunction( types.Args( - types.S, - types.NewArray(nil, types.S), - types.S, + types.Named("pattern", types.S), + types.Named("delimiters", types.NewArray(nil, types.S)).Description("glob pattern delimiters, e.g. `[\".\", \":\"]`, defaults to `[\".\"]` if unset."), + types.Named("match", types.S), ), - types.B, + types.Named("result", types.B).Description("true if `match` can be found in `pattern` which is separated by `delimiters`"), ), } -// GlobQuoteMeta returns a string which represents a version of the pattern where all asterisks have been escaped. +// GlobQuoteMeta var GlobQuoteMeta = &Builtin{ - Name: "glob.quote_meta", + Name: "glob.quote_meta", + Description: "Returns a string which represents a version of the pattern where all asterisks have been escaped.", Decl: types.NewFunction( types.Args( - types.S, + types.Named("pattern", types.S), ), - types.S, + types.Named("output", types.S).Description("the escaped string of `pattern`"), ), + // TODO(sr): example for this was: Calling ``glob.quote_meta("*.github.com", output)`` returns ``\\*.github.com`` as ``output``. } /** * Networking */ -// NetCIDRIntersects checks if a cidr intersects with another cidr and returns true or false var NetCIDRIntersects = &Builtin{ - Name: "net.cidr_intersects", + Name: "net.cidr_intersects", + Description: "Checks if a CIDR intersects with another CIDR (e.g. `192.168.0.0/16` overlaps with `192.168.1.0/24`). Supports both IPv4 and IPv6 notations.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("cidr1", types.S), + types.Named("cidr2", types.S), ), - types.B, + types.Named("result", types.B), ), } -// NetCIDRExpand returns a set of hosts inside the specified cidr. 
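glob.match, declared above with its (pattern, delimiters, match) argument order, can be checked the same way. A sketch with made-up pattern and input:

package main

import (
    "context"
    "fmt"

    "github.com/open-policy-agent/opa/rego"
)

func main() {
    // Matches "api.github.com" against "*.github.com" with "." as the
    // delimiter; prints true.
    q := `glob.match("*.github.com", ["."], "api.github.com")`
    rs, err := rego.New(rego.Query(q)).Eval(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println(rs[0].Expressions[0].Value)
}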
var NetCIDRExpand = &Builtin{ - Name: "net.cidr_expand", + Name: "net.cidr_expand", + Description: "Expands CIDR to set of hosts (e.g., `net.cidr_expand(\"192.168.0.0/30\")` generates 4 hosts: `{\"192.168.0.0\", \"192.168.0.1\", \"192.168.0.2\", \"192.168.0.3\"}`).", Decl: types.NewFunction( types.Args( - types.S, + types.Named("cidr", types.S), ), - types.NewSet(types.S), + types.Named("hosts", types.NewSet(types.S)).Description("set of IP addresses the CIDR `cidr` expands to"), ), } -// NetCIDRContains checks if a cidr or ip is contained within another cidr and returns true or false var NetCIDRContains = &Builtin{ - Name: "net.cidr_contains", + Name: "net.cidr_contains", + Description: "Checks if a CIDR or IP is contained within another CIDR. `result` is `true` if `cidr_or_ip` (e.g. `127.0.0.64/26` or `127.0.0.1`) is contained within `cidr` (e.g. `127.0.0.1/24`) and `false` otherwise. Supports both IPv4 and IPv6 notations.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("cidr", types.S), + types.Named("cidr_or_ip", types.S), ), - types.B, + types.Named("result", types.B), ), } -// NetCIDRContainsMatches checks if collections of cidrs or ips are contained within another collection of cidrs and returns matches. var NetCIDRContainsMatches = &Builtin{ Name: "net.cidr_contains_matches", + Description: "Checks if collections of cidrs or ips are contained within another collection of cidrs and returns matches. " + + "This function is similar to `net.cidr_contains` except it allows callers to pass collections of CIDRs or IPs as arguments and returns the matches (as opposed to a boolean result indicating a match between two CIDRs/IPs).", Decl: types.NewFunction( - types.Args(netCidrContainsMatchesOperandType, netCidrContainsMatchesOperandType), - types.NewSet(types.NewArray([]types.Type{types.A, types.A}, nil)), + types.Args( + types.Named("cidrs", netCidrContainsMatchesOperandType), + types.Named("cidrs_or_ips", netCidrContainsMatchesOperandType), + ), + types.Named("output", types.NewSet(types.NewArray([]types.Type{types.A, types.A}, nil))).Description("tuples identifying matches where `cidrs_or_ips` are contained within `cidrs`"), ), } -// NetCIDRMerge merges IP addresses and subnets into the smallest possible list of CIDRs. var NetCIDRMerge = &Builtin{ Name: "net.cidr_merge", + Description: "Merges IP addresses and subnets into the smallest possible list of CIDRs (e.g., `net.cidr_merge([\"192.0.128.0/24\", \"192.0.129.0/24\"])` generates `{\"192.0.128.0/23\"}`. " + + `This function merges adjacent subnets where possible, those contained within others and also removes any duplicates. +Supports both IPv4 and IPv6 notations. IPv6 inputs need a prefix length (e.g. 
"/128").`, Decl: types.NewFunction( - types.Args(netCidrMergeOperandType), - types.NewSet(types.S), + types.Args( + types.Named("addrs", types.NewAny( + types.NewArray(nil, types.NewAny(types.S)), + types.NewSet(types.S), + )).Description("CIDRs or IP addresses"), + ), + types.Named("output", types.NewSet(types.S)).Description("smallest possible set of CIDRs obtained after merging the provided list of IP addresses and subnets in `addrs`"), ), } -var netCidrMergeOperandType = types.NewAny( - types.NewArray(nil, types.NewAny(types.S)), - types.NewSet(types.S), -) - var netCidrContainsMatchesOperandType = types.NewAny( types.S, types.NewArray(nil, types.NewAny( @@ -2345,14 +2675,14 @@ var netCidrContainsMatchesOperandType = types.NewAny( )), ) -// NetLookupIPAddr returns the set of IP addresses (as strings, both v4 and v6) -// that the passed-in name (string) resolves to using the standard name resolution -// mechanisms available. var NetLookupIPAddr = &Builtin{ - Name: "net.lookup_ip_addr", + Name: "net.lookup_ip_addr", + Description: "Returns the set of IP addresses (both v4 and v6) that the passed-in `name` resolves to using the standard name resolution mechanisms available.", Decl: types.NewFunction( - types.Args(types.S), - types.NewSet(types.S), + types.Args( + types.Named("name", types.S).Description("domain name to resolve"), + ), + types.Named("addrs", types.NewSet(types.S)).Description("IP addresses (v4 and v6) that `name` resolves to"), ), } @@ -2360,29 +2690,26 @@ var NetLookupIPAddr = &Builtin{ * Semantic Versions */ -// SemVerIsValid validiates a the term is a valid SemVer as a string, returns -// false for all other input var SemVerIsValid = &Builtin{ - Name: "semver.is_valid", + Name: "semver.is_valid", + Description: "Validates that the input is a valid SemVer string.", Decl: types.NewFunction( types.Args( - types.A, + types.Named("vsn", types.A), ), - types.B, + types.Named("result", types.B).Description("`true` if `vsn` is a valid SemVer; `false` otherwise"), ), } -// SemVerCompare compares valid SemVer formatted version strings. Given two -// version strings, if A < B returns -1, if A > B returns 1. If A == B, returns -// 0 var SemVerCompare = &Builtin{ - Name: "semver.compare", + Name: "semver.compare", + Description: "Compares valid SemVer formatted version strings.", Decl: types.NewFunction( types.Args( - types.S, - types.S, + types.Named("a", types.S), + types.Named("b", types.S), ), - types.N, + types.Named("result", types.N).Description("`-1` if `a < b`; `1` if `b > a`; `0` if `a == b`"), ), } @@ -2541,13 +2868,25 @@ var Any = &Builtin{ // Builtin represents a built-in function supported by OPA. Every built-in // function is uniquely identified by a name. type Builtin struct { - Name string `json:"name"` // Unique name of built-in function, e.g., (arg1,arg2,...,argN) + Name string `json:"name"` // Unique name of built-in function, e.g., (arg1,arg2,...,argN) + Description string `json:"description,omitempty"` // Description of what the built-in function does. + + // Categories of the built-in function. Omitted for namespaced + // built-ins, i.e. "array.concat" is taken to be of the "array" category. + // "minus" for example, is part of two categories: numbers and sets. (NOTE(sr): aspirational) + Categories []string `json:"categories,omitempty"` + Decl *types.Function `json:"decl"` // Built-in function type declaration. Infix string `json:"infix,omitempty"` // Unique name of infix operator. Default should be unset. 
Relation bool `json:"relation,omitempty"` // Indicates if the built-in acts as a relation. deprecated bool // Indicates if the built-in has been deprecated. } +// category is a helper for specifying a Builtin's Categories +func category(cs ...string) []string { + return cs +} + // IsDeprecated returns true if the Builtin function is deprecated and will be removed in a future release. func (b *Builtin) IsDeprecated() bool { return b.deprecated diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/capabilities.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/capabilities.go index 02fc3c277..40a949d30 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/capabilities.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/capabilities.go @@ -49,7 +49,8 @@ func CapabilitiesForThisVersion() *Capabilities { f.WasmABIVersions = append(f.WasmABIVersions, WasmABIVersion{Version: vers[0], Minor: vers[1]}) } - f.Builtins = append(f.Builtins, Builtins...) + f.Builtins = make([]*Builtin, len(Builtins)) + copy(f.Builtins, Builtins) sort.Slice(f.Builtins, func(i, j int) bool { return f.Builtins[i].Name < f.Builtins[j].Name }) diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/check.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/check.go index 43f5ad2c7..fd35d017a 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/check.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/check.go @@ -314,17 +314,19 @@ func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error { } fargs := ftpe.FuncArgs() + namedFargs := ftpe.NamedFuncArgs() if ftpe.Result() != nil { fargs.Args = append(fargs.Args, ftpe.Result()) + namedFargs.Args = append(namedFargs.Args, ftpe.NamedResult()) } if len(args) > len(fargs.Args) && fargs.Variadic == nil { - return newArgError(expr.Location, name, "too many arguments", pre, fargs) + return newArgError(expr.Location, name, "too many arguments", pre, namedFargs) } if len(args) < len(ftpe.FuncArgs().Args) { - return newArgError(expr.Location, name, "too few arguments", pre, fargs) + return newArgError(expr.Location, name, "too few arguments", pre, namedFargs) } for i := range args { @@ -333,7 +335,7 @@ func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error { for i := range args { post[i] = env.Get(args[i]) } - return newArgError(expr.Location, name, "invalid argument(s)", post, fargs) + return newArgError(expr.Location, name, "invalid argument(s)", post, namedFargs) } } @@ -380,7 +382,7 @@ func (tc *typeChecker) checkExprWith(env *TypeEnv, expr *Expr, i int) *Error { switch v := valueType.(type) { case *types.Function: // ...by function if !unifies(targetType, valueType) { - return newArgError(expr.With[i].Loc(), target.Value.(Ref), "arity mismatch", v.Args(), t.FuncArgs()) + return newArgError(expr.With[i].Loc(), target.Value.(Ref), "arity mismatch", v.Args(), t.NamedFuncArgs()) } default: // ... 
by value, nothing to check } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/compile.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/compile.go index df4695331..42b1736d0 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/compile.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/compile.go @@ -901,7 +901,7 @@ func arityMismatchError(env *TypeEnv, f Ref, expr *Expr, exp, act int) *Error { for i, op := range expr.Operands() { have[i] = env.Get(op) } - return newArgError(expr.Loc(), f, "arity mismatch", have, want.FuncArgs()) + return newArgError(expr.Loc(), f, "arity mismatch", have, want.NamedFuncArgs()) } if act != 1 { return NewError(TypeErr, expr.Loc(), "function %v has arity %d, got %d arguments", f, exp, act) diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/location/location.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/location/location.go index 13ae6e35d..5bdce013c 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/location/location.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/location/location.go @@ -39,7 +39,7 @@ func (loc *Location) Errorf(f string, a ...interface{}) error { // Wrapf returns a new error value that wraps an existing error with a message formatted // to include the location info (e.g., line, column, filename, etc.) func (loc *Location) Wrapf(err error, f string, a ...interface{}) error { - return errors.Wrap(err, loc.Format(f, a...)) + return fmt.Errorf(loc.Format(f, a...)+": %w", err) } // Format returns a formatted string prefixed with the location information. diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/parser.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/parser.go index 1084341a2..f38f62fa8 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/parser.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/parser.go @@ -16,7 +16,6 @@ import ( "strconv" "strings" - "github.com/pkg/errors" "gopkg.in/yaml.v2" "github.com/open-policy-agent/opa/ast/internal/scanner" @@ -2088,7 +2087,7 @@ func (b *metadataParser) Parse() (*Annotations, error) { case map[interface{}]interface{}: w, err := convertYAMLMapKeyTypes(v, nil) if err != nil { - return nil, errors.Wrap(err, "invalid schema definition") + return nil, fmt.Errorf("invalid schema definition: %w", err) } a.Definition = &w default: diff --git a/constraint/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go b/constraint/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go index da2b75158..77ff09b5a 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/ast/parser_ext.go @@ -12,11 +12,10 @@ package ast import ( "bytes" + "errors" "fmt" "strings" "unicode" - - "github.com/pkg/errors" ) // MustParseBody returns a parsed body. 
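The hunks in this file and in the bundle package below mechanically replace github.com/pkg/errors with standard-library error wrapping. A minimal sketch of why the two spellings are interchangeable for callers that unwrap (fs.ErrNotExist is just a stand-in sentinel); note the stack traces pkg/errors captured are not preserved by fmt.Errorf:

package main

import (
    "errors"
    "fmt"
    "io/fs"
)

func main() {
    base := fs.ErrNotExist

    // Before: errors.Wrap(base, "bundle read failed") from github.com/pkg/errors.
    // After, as throughout this patch: fmt.Errorf with the %w verb.
    wrapped := fmt.Errorf("bundle read failed: %w", base)

    // %w preserves the chain, so stdlib unwrapping still sees the sentinel.
    fmt.Println(errors.Is(wrapped, fs.ErrNotExist)) // true
}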
@@ -464,7 +463,7 @@ func ParseBodyWithOpts(input string, popts ParserOptions) (Body, error) { func ParseExpr(input string) (*Expr, error) { body, err := ParseBody(input) if err != nil { - return nil, errors.Wrap(err, "failed to parse expression") + return nil, fmt.Errorf("failed to parse expression: %w", err) } if len(body) != 1 { return nil, fmt.Errorf("expected exactly one expression but got: %v", body) @@ -491,7 +490,7 @@ func ParsePackage(input string) (*Package, error) { func ParseTerm(input string) (*Term, error) { body, err := ParseBody(input) if err != nil { - return nil, errors.Wrap(err, "failed to parse term") + return nil, fmt.Errorf("failed to parse term: %w", err) } if len(body) != 1 { return nil, fmt.Errorf("expected exactly one term but got: %v", body) @@ -507,7 +506,7 @@ func ParseTerm(input string) (*Term, error) { func ParseRef(input string) (Ref, error) { term, err := ParseTerm(input) if err != nil { - return nil, errors.Wrap(err, "failed to parse ref") + return nil, fmt.Errorf("failed to parse ref: %w", err) } ref, ok := term.Value.(Ref) if !ok { diff --git a/constraint/vendor/github.com/open-policy-agent/opa/bundle/bundle.go b/constraint/vendor/github.com/open-policy-agent/opa/bundle/bundle.go index 41419ee35..cfa342df7 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/bundle/bundle.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/bundle/bundle.go @@ -11,6 +11,7 @@ import ( "compress/gzip" "encoding/hex" "encoding/json" + "errors" "fmt" "io" "net/url" @@ -18,8 +19,6 @@ import ( "reflect" "strings" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/format" "github.com/open-policy-agent/opa/internal/file/archive" @@ -499,7 +498,7 @@ func (r *Reader) Read() (Bundle, error) { r.metrics.Timer(metrics.RegoDataParse).Stop() if err != nil { - return bundle, errors.Wrapf(err, "bundle load failed on %v", r.fullPath(path)) + return bundle, fmt.Errorf("bundle load failed on %v: %w", r.fullPath(path), err) } if err := insertValue(&bundle, path, value); err != nil { @@ -515,7 +514,7 @@ func (r *Reader) Read() (Bundle, error) { r.metrics.Timer(metrics.RegoDataParse).Stop() if err != nil { - return bundle, errors.Wrapf(err, "bundle load failed on %v", r.fullPath(path)) + return bundle, fmt.Errorf("bundle load failed on %v: %w", r.fullPath(path), err) } if err := insertValue(&bundle, path, value); err != nil { @@ -524,7 +523,7 @@ func (r *Reader) Read() (Bundle, error) { } else if strings.HasSuffix(path, ManifestExt) { if err := util.NewJSONDecoder(&buf).Decode(&bundle.Manifest); err != nil { - return bundle, errors.Wrap(err, "bundle load failed on manifest decode") + return bundle, fmt.Errorf("bundle load failed on manifest decode: %w", err) } } } @@ -577,18 +576,18 @@ func (r *Reader) Read() (Bundle, error) { b, err := json.Marshal(&bundle.Manifest) if err != nil { - return bundle, errors.Wrap(err, "bundle load failed on manifest marshal") + return bundle, fmt.Errorf("bundle load failed on manifest marshal: %w", err) } err = util.UnmarshalJSON(b, &metadata) if err != nil { - return bundle, errors.Wrap(err, "bundle load failed on manifest unmarshal") + return bundle, fmt.Errorf("bundle load failed on manifest unmarshal: %w", err) } // For backwards compatibility always write to the old unnamed manifest path // This will *not* be correct if >1 bundle is in use... 
if err := bundle.insertData(legacyManifestStoragePath, metadata); err != nil { - return bundle, errors.Wrapf(err, "bundle load failed on %v", legacyRevisionStoragePath) + return bundle, fmt.Errorf("bundle load failed on %v: %w", legacyRevisionStoragePath, err) } } @@ -852,25 +851,29 @@ func hashBundleFiles(hash SignatureHasher, b *Bundle) ([]FileInfo, error) { files = append(files, NewFile(strings.TrimPrefix(planmodule.Path, "/"), hex.EncodeToString(bs), defaultHashingAlg)) } - // Parse the manifest into a JSON structure; + // If the manifest is essentially empty, don't add it to the signatures since it + // won't be written to the bundle. Otherwise: + // parse the manifest into a JSON structure; // then recursively order the fields of all objects alphabetically and then apply // the hash function to result to compute the hash. - mbs, err := json.Marshal(b.Manifest) - if err != nil { - return files, err - } + if !b.Manifest.Equal(Manifest{}) { + mbs, err := json.Marshal(b.Manifest) + if err != nil { + return files, err + } - var result map[string]interface{} - if err := util.Unmarshal(mbs, &result); err != nil { - return files, err - } + var result map[string]interface{} + if err := util.Unmarshal(mbs, &result); err != nil { + return files, err + } - bs, err = hash.HashFile(result) - if err != nil { - return files, err - } + bs, err = hash.HashFile(result) + if err != nil { + return files, err + } - files = append(files, NewFile(strings.TrimPrefix(ManifestExt, "/"), hex.EncodeToString(bs), defaultHashingAlg)) + files = append(files, NewFile(strings.TrimPrefix(ManifestExt, "/"), hex.EncodeToString(bs), defaultHashingAlg)) + } return files, err } @@ -1209,7 +1212,7 @@ func insertValue(b *Bundle, path string, value interface{}) error { key = strings.Split(dirpath, "/") } if err := b.insertData(key, value); err != nil { - return errors.Wrapf(err, "bundle load failed on %v", path) + return fmt.Errorf("bundle load failed on %v: %w", path, err) } return nil } @@ -1264,7 +1267,7 @@ func preProcessBundle(loader DirectoryLoader, skipVerify bool, sizeLimitBytes in } if err != nil { - return signatures, patch, nil, errors.Wrap(err, "bundle read failed") + return signatures, patch, nil, fmt.Errorf("bundle read failed: %w", err) } // check for the signatures file @@ -1275,7 +1278,7 @@ func preProcessBundle(loader DirectoryLoader, skipVerify bool, sizeLimitBytes in } if err := util.NewJSONDecoder(&buf).Decode(&signatures); err != nil { - return signatures, patch, nil, errors.Wrap(err, "bundle load failed on signatures decode") + return signatures, patch, nil, fmt.Errorf("bundle load failed on signatures decode: %w", err) } } else if !strings.HasSuffix(f.Path(), SignaturesFile) { descriptors = append(descriptors, f) @@ -1292,7 +1295,7 @@ func preProcessBundle(loader DirectoryLoader, skipVerify bool, sizeLimitBytes in } if err := util.NewJSONDecoder(&buf).Decode(&patch); err != nil { - return signatures, patch, nil, errors.Wrap(err, "bundle load failed on patch decode") + return signatures, patch, nil, fmt.Errorf("bundle load failed on patch decode: %w", err) } f.reader = &b diff --git a/constraint/vendor/github.com/open-policy-agent/opa/bundle/file.go b/constraint/vendor/github.com/open-policy-agent/opa/bundle/file.go index 040c0af3f..d5d26c408 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/bundle/file.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/bundle/file.go @@ -4,14 +4,13 @@ import ( "archive/tar" "bytes" "compress/gzip" + "fmt" "io" "os" "path" "path/filepath" 
"strings" "sync" - - "github.com/pkg/errors" ) // Descriptor contains information about a file and @@ -43,7 +42,7 @@ func (f *lazyFile) Read(b []byte) (int, error) { if f.file == nil { if f.file, err = os.Open(f.path); err != nil { - return 0, errors.Wrapf(err, "failed to open file %s", f.path) + return 0, fmt.Errorf("failed to open file %s: %w", f.path, err) } } @@ -154,7 +153,7 @@ func (d *dirLoader) NextFile() (*Descriptor, error) { return nil }) if err != nil { - return nil, errors.Wrap(err, "failed to list files") + return nil, fmt.Errorf("failed to list files: %w", err) } } @@ -220,7 +219,7 @@ func (t *tarballLoader) NextFile() (*Descriptor, error) { if t.tr == nil { gr, err := gzip.NewReader(t.r) if err != nil { - return nil, errors.Wrap(err, "archive read failed") + return nil, fmt.Errorf("archive read failed: %w", err) } t.tr = tar.NewReader(gr) @@ -245,7 +244,7 @@ func (t *tarballLoader) NextFile() (*Descriptor, error) { var buf bytes.Buffer if _, err := io.Copy(&buf, t.tr); err != nil { - return nil, errors.Wrapf(err, "failed to copy file %s", header.Name) + return nil, fmt.Errorf("failed to copy file %s: %w", header.Name, err) } f.reader = &buf diff --git a/constraint/vendor/github.com/open-policy-agent/opa/bundle/verify.go b/constraint/vendor/github.com/open-policy-agent/opa/bundle/verify.go index f76baf2c8..e85be835b 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/bundle/verify.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/bundle/verify.go @@ -16,8 +16,6 @@ import ( "github.com/open-policy-agent/opa/internal/jwx/jws" "github.com/open-policy-agent/opa/internal/jwx/jws/verify" "github.com/open-policy-agent/opa/util" - - "github.com/pkg/errors" ) const defaultVerifierID = "_default" @@ -92,12 +90,12 @@ func verifyJWTSignature(token string, bvc *VerificationConfig) (*DecodedSignatur var decodedHeader []byte if decodedHeader, err = base64.RawURLEncoding.DecodeString(parts[0]); err != nil { - return nil, errors.Wrap(err, "failed to base64 decode JWT headers") + return nil, fmt.Errorf("failed to base64 decode JWT headers: %w", err) } var hdr jws.StandardHeaders if err := json.Unmarshal(decodedHeader, &hdr); err != nil { - return nil, errors.Wrap(err, "failed to parse JWT headers") + return nil, fmt.Errorf("failed to parse JWT headers: %w", err) } payload, err := base64.RawURLEncoding.DecodeString(parts[1]) diff --git a/constraint/vendor/github.com/open-policy-agent/opa/capabilities/v0.41.0.json b/constraint/vendor/github.com/open-policy-agent/opa/capabilities/v0.41.0.json new file mode 100644 index 000000000..cf56e6cac --- /dev/null +++ b/constraint/vendor/github.com/open-policy-agent/opa/capabilities/v0.41.0.json @@ -0,0 +1,4007 @@ +{ + "builtins": [ + { + "name": "abs", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "all", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "and", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "\u0026" + }, + { + "name": "any", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + 
}, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "array.concat", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.reverse", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.slice", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "assign", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": ":=" + }, + { + "name": "base64.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "base64url.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode_no_pad", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "bits.and", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.lsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.negate", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.or", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.rsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.xor", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "cast_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "cast_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "cast_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": 
{ + "type": "null" + }, + "type": "function" + } + }, + { + "name": "cast_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "cast_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "cast_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "ceil", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "concat", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "count", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.md5", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha1", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.md5", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.sha1", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.sha256", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificate_request", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": 
"crypto.x509.parse_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_rsa_private_key", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "div", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "/" + }, + { + "name": "endswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "eq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "=" + }, + { + "name": "equal", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "==" + }, + { + "name": "floor", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "format_int", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "glob.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "glob.quote_meta", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "graph.reachable", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graph.reachable_paths", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graphql.is_valid", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "graphql.parse", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": 
"any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_and_verify", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_query", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_schema", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "gt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e" + }, + { + "name": "gte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e=" + }, + { + "name": "hex.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "hex.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "http.send", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "indexof", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "indexof_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "internal.member_2", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.member_3", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.print", + "decl": { + "args": [ + { + "dynamic": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "array" + } + ], + "type": "function" + } + }, + { + "name": "intersection", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + 
"static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode_verify", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "io.jwt.encode_sign", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "io.jwt.encode_sign_raw", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs384", + "decl": { + "args": [ + { + "type": 
"string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_number", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.patch", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "op", + "value": { + "type": "string" + } + }, + { + "key": "path", + "value": { + "type": "any" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "lower", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" 
+ }, + "type": "function" + } + }, + { + "name": "lt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c" + }, + { + "name": "lte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c=" + }, + { + "name": "max", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "min", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "minus", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + "type": "function" + }, + "infix": "-" + }, + { + "name": "mul", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "*" + }, + { + "name": "neq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "!=" + }, + { + "name": "net.cidr_contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_contains_matches", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "static": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_expand", + "decl": { + "args": [ + { + "type": "string" + } + ], 
+ "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_intersects", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_merge", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_overlap", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.lookup_ip_addr", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "numbers.range", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "object.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.get", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union_n", + "decl": { + "args": [ + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "opa.runtime", + "decl": { + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "or", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": 
{ + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "|" + }, + { + "name": "plus", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "+" + }, + { + "name": "print", + "decl": { + "type": "function", + "variadic": { + "type": "any" + } + } + }, + { + "name": "product", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "rand.intn", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "re_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.find_all_string_submatch_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.find_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.globs_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.template_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.chain", + "decl": { + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.rule", + "decl": { + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "rego.parse_module", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rem", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "%" + }, + { + "name": "replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "round", + "decl": { + "args": [ + 
{ + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.compare", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.is_valid", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "set_diff", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "sort", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "sprintf", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "startswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.replace_n", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.reverse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "substring", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "sum", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.add_date", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.clock", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.date", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" 
+ } + }, + { + "name": "time.diff", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.now_ns", + "decl": { + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_duration_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_ns", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_rfc3339_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.weekday", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "to_number", + "decl": { + "args": [ + { + "of": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "trace", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "trim", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_left", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_prefix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_right", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_space", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_suffix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "type_name", + "decl": { + "args": [ + { + "of": [ + { + "type": "any" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "union", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "units.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { 
+ "type": "number" + }, + "type": "function" + } + }, + { + "name": "units.parse_bytes", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "upper", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode_object", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "dynamic": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode_object", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "uuid.rfc4122", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "walk", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + }, + "relation": true + }, + { + "name": "yaml.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "yaml.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "yaml.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + } + ], + "future_keywords": [ + "every", + "in" + ], + "wasm_abi_versions": [ + { + "version": 1, + "minor_version": 1 + }, + { + "version": 1, + "minor_version": 2 + } + ] +} diff --git a/constraint/vendor/github.com/open-policy-agent/opa/format/format.go b/constraint/vendor/github.com/open-policy-agent/opa/format/format.go index c55d7998d..2376c94db 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/format/format.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/format/format.go @@ -300,8 +300,10 @@ func (w *writer) writeRule(rule *ast.Rule, isElse bool, comments []*ast.Comment) if len(rule.Head.Args) > 0 { closeLoc = closingLoc('(', ')', '{', '}', rule.Location) - } else { + } else if rule.Head.Key != nil { closeLoc = closingLoc('[', ']', '{', '}', rule.Location) + } else { + closeLoc = closingLoc(0, 0, '{', '}', rule.Location) } comments = w.insertComments(comments, closeLoc) diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go index c5198f2f7..07d542939 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go +++ 
b/constraint/vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go @@ -8,11 +8,10 @@ package wasm import ( "bytes" "encoding/binary" + "errors" "fmt" "io" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/internal/compiler/wasm/opa" "github.com/open-policy-agent/opa/internal/debug" @@ -185,6 +184,11 @@ var builtinsUsingRE2 = [...]string{ builtinsFunctions[ast.GlobMatch.Name], } +func IsWasmEnabled(bi string) bool { + _, ok := builtinsFunctions[bi] + return ok +} + type externalFunc struct { ID int32 Decl *opatypes.Function @@ -852,7 +856,7 @@ func (c *Compiler) compileFunc(fn *ir.Func) error { for i := range fn.Blocks { instrs, err := c.compileBlock(fn.Blocks[i]) if err != nil { - return errors.Wrapf(err, "block %d", i) + return fmt.Errorf("block %d: %w", i, err) } if i < len(fn.Blocks)-1 { // not the last block: wrap in `block` instr if withControlInstr(instrs) { // unless we don't need to diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/buffer/buffer.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/buffer/buffer.go index ca4ac419b..c383ff3b5 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/buffer/buffer.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/buffer/buffer.go @@ -7,8 +7,7 @@ import ( "encoding/base64" "encoding/binary" "encoding/json" - - "github.com/pkg/errors" + "fmt" ) // Buffer wraps `[]byte` and provides functions that are often used in @@ -35,7 +34,7 @@ func FromUint(v uint64) Buffer { func FromBase64(v []byte) (Buffer, error) { b := Buffer{} if err := b.Base64Decode(v); err != nil { - return Buffer(nil), errors.Wrap(err, "failed to decode from base64") + return Buffer(nil), fmt.Errorf("failed to decode from base64: %w", err) } return b, nil @@ -85,7 +84,7 @@ func (b *Buffer) Base64Decode(v []byte) error { out := make([]byte, enc.DecodedLen(len(v))) n, err := enc.Decode(out, v) if err != nil { - return errors.Wrap(err, "failed to decode from base64") + return fmt.Errorf("failed to decode from base64: %w", err) } out = out[:n] *b = Buffer(out) @@ -97,7 +96,7 @@ func (b *Buffer) Base64Decode(v []byte) error { func (b Buffer) MarshalJSON() ([]byte, error) { v, err := b.Base64Encode() if err != nil { - return nil, errors.Wrap(err, "failed to encode to base64") + return nil, fmt.Errorf("failed to encode to base64: %w", err) } return json.Marshal(string(v)) } @@ -107,7 +106,7 @@ func (b Buffer) MarshalJSON() ([]byte, error) { func (b *Buffer) UnmarshalJSON(data []byte) error { var x string if err := json.Unmarshal(data, &x); err != nil { - return errors.Wrap(err, "failed to unmarshal JSON") + return fmt.Errorf("failed to unmarshal JSON: %w", err) } return b.Base64Decode([]byte(x)) } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go index 076bd39ed..98f0cc42e 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go @@ -1,9 +1,9 @@ package jwa import ( + "errors" + "fmt" "strconv" - - "github.com/pkg/errors" ) // KeyType represents the key type ("kty") that are supported @@ -29,11 +29,11 @@ func (keyType *KeyType) Accept(value interface{}) error { case KeyType: tmp = x default: - return errors.Errorf(`invalid type for jwa.KeyType: %T`, value) + return fmt.Errorf("invalid 
type for jwa.KeyType: %T", value) } _, ok := keyTypeAlg[tmp.String()] if !ok { - return errors.Errorf("Unknown Key Type algorithm") + return errors.New("unknown Key Type algorithm") } *keyType = tmp @@ -53,14 +53,14 @@ func (keyType *KeyType) UnmarshalJSON(data []byte) error { var err error quoted, err = strconv.Unquote(string(data)) if err != nil { - return errors.Wrap(err, "Failed to process signature algorithm") + return fmt.Errorf("failed to process signature algorithm: %w", err) } } else { quoted = string(data) } _, ok := keyTypeAlg[quoted] if !ok { - return errors.Errorf("Unknown signature algorithm") + return errors.New("unknown signature algorithm") } *keyType = KeyType(quoted) return nil diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go index a0988ecab..45e400176 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go @@ -1,9 +1,9 @@ package jwa import ( + "errors" + "fmt" "strconv" - - "github.com/pkg/errors" ) // SignatureAlgorithm represents the various signature algorithms as described in https://tools.ietf.org/html/rfc7518#section-3.1 @@ -27,6 +27,7 @@ const ( RS384 SignatureAlgorithm = "RS384" // RSASSA-PKCS-v1.5 using SHA-384 RS512 SignatureAlgorithm = "RS512" // RSASSA-PKCS-v1.5 using SHA-512 NoValue SignatureAlgorithm = "" // No value is different from none + Unsupported SignatureAlgorithm = "unsupported" ) // Accept is used when conversion from values given by @@ -39,11 +40,11 @@ func (signature *SignatureAlgorithm) Accept(value interface{}) error { case SignatureAlgorithm: tmp = x default: - return errors.Errorf(`invalid type for jwa.SignatureAlgorithm: %T`, value) + return fmt.Errorf("invalid type for jwa.SignatureAlgorithm: %T", value) } _, ok := signatureAlg[tmp.String()] if !ok { - return errors.Errorf("Unknown signature algorithm") + return errors.New("unknown signature algorithm") } *signature = tmp return nil @@ -62,14 +63,15 @@ func (signature *SignatureAlgorithm) UnmarshalJSON(data []byte) error { var err error quoted, err = strconv.Unquote(string(data)) if err != nil { - return errors.Wrap(err, "Failed to process signature algorithm") + return fmt.Errorf("failed to process signature algorithm: %w", err) } } else { quoted = string(data) } _, ok := signatureAlg[quoted] if !ok { - return errors.Errorf("Unknown signature algorithm") + *signature = Unsupported + return nil } *signature = SignatureAlgorithm(quoted) return nil diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go index 30bee46b4..b46689f03 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go @@ -3,10 +3,10 @@ package jwk import ( "crypto/ecdsa" "crypto/elliptic" + "errors" + "fmt" "math/big" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -15,7 +15,7 @@ func newECDSAPublicKey(key *ecdsa.PublicKey) (*ECDSAPublicKey, error) { var hdr StandardHeaders err := hdr.Set(KeyTypeKey, jwa.EC) if err != nil { - return nil, errors.Wrapf(err, "Failed to set Key Type") + return nil, fmt.Errorf("failed to set Key Type: %w", err) } return &ECDSAPublicKey{ @@ -29,7 +29,7 @@ func newECDSAPrivateKey(key 
*ecdsa.PrivateKey) (*ECDSAPrivateKey, error) { var hdr StandardHeaders err := hdr.Set(KeyTypeKey, jwa.EC) if err != nil { - return nil, errors.Wrapf(err, "Failed to set Key Type") + return nil, fmt.Errorf("failed to set Key Type: %w", err) } return &ECDSAPrivateKey{ @@ -54,7 +54,7 @@ func (k *ECDSAPublicKey) GenerateKey(keyJSON *RawKeyJSON) error { var x, y big.Int if keyJSON.X == nil || keyJSON.Y == nil || keyJSON.Crv == "" { - return errors.Errorf("Missing mandatory key parameters X, Y or Crv") + return errors.New("missing mandatory key parameters X, Y or Crv") } x.SetBytes(keyJSON.X.Bytes()) @@ -69,7 +69,7 @@ func (k *ECDSAPublicKey) GenerateKey(keyJSON *RawKeyJSON) error { case jwa.P521: curve = elliptic.P521() default: - return errors.Errorf(`invalid curve name %s`, keyJSON.Crv) + return fmt.Errorf("invalid curve name %s", keyJSON.Crv) } *k = ECDSAPublicKey{ @@ -87,12 +87,12 @@ func (k *ECDSAPublicKey) GenerateKey(keyJSON *RawKeyJSON) error { func (k *ECDSAPrivateKey) GenerateKey(keyJSON *RawKeyJSON) error { if keyJSON.D == nil { - return errors.Errorf("Missing mandatory key parameter D") + return errors.New("missing mandatory key parameter D") } eCDSAPublicKey := &ECDSAPublicKey{} err := eCDSAPublicKey.GenerateKey(keyJSON) if err != nil { - return errors.Wrap(err, `failed to generate public key`) + return fmt.Errorf("failed to generate public key: %w", err) } dBytes := keyJSON.D.Bytes() // The length of this octet string MUST be ceiling(log-base-2(n)/8) @@ -106,7 +106,7 @@ func (k *ECDSAPrivateKey) GenerateKey(keyJSON *RawKeyJSON) error { n := eCDSAPublicKey.key.Params().N octetLength := (new(big.Int).Sub(n, big.NewInt(1)).BitLen() + 7) >> 3 if octetLength-len(dBytes) != 0 { - return errors.Errorf("Failed to generate private key. Incorrect D value") + return errors.New("failed to generate private key. 
Incorrect D value") } privateKey := &ecdsa.PrivateKey{ PublicKey: *eCDSAPublicKey.key, diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go index cf700ee86..b0fd51e90 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go @@ -1,7 +1,7 @@ package jwk import ( - "github.com/pkg/errors" + "fmt" "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -122,7 +122,7 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { case AlgorithmKey: var acceptor jwa.SignatureAlgorithm if err := acceptor.Accept(value); err != nil { - return errors.Wrapf(err, `invalid value for %s key`, AlgorithmKey) + return fmt.Errorf("invalid value for %s key: %w", AlgorithmKey, err) } h.Algorithm = &acceptor return nil @@ -131,15 +131,15 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { h.KeyID = v return nil } - return errors.Errorf("invalid value for %s key: %T", KeyIDKey, value) + return fmt.Errorf("invalid value for %s key: %T", KeyIDKey, value) case KeyOpsKey: if err := h.KeyOps.Accept(value); err != nil { - return errors.Wrapf(err, "invalid value for %s key", KeyOpsKey) + return fmt.Errorf("invalid value for %s key: %w", KeyOpsKey, err) } return nil case KeyTypeKey: if err := h.KeyType.Accept(value); err != nil { - return errors.Wrapf(err, "invalid value for %s key", KeyTypeKey) + return fmt.Errorf("invalid value for %s key: %w", KeyTypeKey, err) } return nil case KeyUsageKey: @@ -147,15 +147,15 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { h.KeyUsage = v return nil } - return errors.Errorf("invalid value for %s key: %T", KeyUsageKey, value) + return fmt.Errorf("invalid value for %s key: %T", KeyUsageKey, value) case PrivateParamsKey: if v, ok := value.(map[string]interface{}); ok { h.PrivateParams = v return nil } - return errors.Errorf("invalid value for %s key: %T", PrivateParamsKey, value) + return fmt.Errorf("invalid value for %s key: %T", PrivateParamsKey, value) default: - return errors.Errorf(`invalid key: %s`, name) + return fmt.Errorf("invalid key: %s", name) } } @@ -164,14 +164,14 @@ func (h StandardHeaders) Walk(f func(string, interface{}) error) error { for _, key := range []string{AlgorithmKey, KeyIDKey, KeyOpsKey, KeyTypeKey, KeyUsageKey, PrivateParamsKey} { if v, ok := h.Get(key); ok { if err := f(key, v); err != nil { - return errors.Wrapf(err, `walk function returned error for %s`, key) + return fmt.Errorf("walk function returned error for %s: %w", key, err) } } } for k, v := range h.PrivateParams { if err := f(k, v); err != nil { - return errors.Wrapf(err, `walk function returned error for %s`, k) + return fmt.Errorf("walk function returned error for %s: %w", k, err) } } return nil diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go index 22ccf8dfc..aa22a3830 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go @@ -5,8 +5,8 @@ import ( "crypto/ecdsa" "crypto/rsa" "encoding/json" - - "github.com/pkg/errors" + "errors" + "fmt" "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -17,7 +17,7 @@ import ( // public key cannot be deduced, an error is returned func 
GetPublicKey(key interface{}) (interface{}, error) { if key == nil { - return nil, errors.New(`jwk.New requires a non-nil key`) + return nil, errors.New("jwk.New requires a non-nil key") } switch v := key.(type) { @@ -32,7 +32,7 @@ func GetPublicKey(key interface{}) (interface{}, error) { case []byte: return v, nil default: - return nil, errors.Errorf(`invalid key type %T`, key) + return nil, fmt.Errorf("invalid key type %T", key) } } @@ -54,7 +54,7 @@ func GetKeyTypeFromKey(key interface{}) jwa.KeyType { // New creates a jwk.Key from the given key. func New(key interface{}) (Key, error) { if key == nil { - return nil, errors.New(`jwk.New requires a non-nil key`) + return nil, errors.New("jwk.New requires a non-nil key") } switch v := key.(type) { @@ -69,7 +69,7 @@ func New(key interface{}) (Key, error) { case []byte: return newSymmetricKey(v) default: - return nil, errors.Errorf(`invalid key type %T`, key) + return nil, fmt.Errorf("invalid key type %T", key) } } @@ -80,7 +80,7 @@ func parse(jwkSrc string) (*Set, error) { rawKeySetJSON := &RawKeySetJSON{} err := json.Unmarshal([]byte(jwkSrc), rawKeySetJSON) if err != nil { - return nil, errors.Wrap(err, "Failed to unmarshal JWK Set") + return nil, fmt.Errorf("failed to unmarshal JWK Set: %w", err) } if len(rawKeySetJSON.Keys) == 0 { @@ -88,20 +88,23 @@ func parse(jwkSrc string) (*Set, error) { rawKeyJSON := &RawKeyJSON{} err := json.Unmarshal([]byte(jwkSrc), rawKeyJSON) if err != nil { - return nil, errors.Wrap(err, "Failed to unmarshal JWK") + return nil, fmt.Errorf("failed to unmarshal JWK: %w", err) } jwkKey, err = rawKeyJSON.GenerateKey() if err != nil { - return nil, errors.Wrap(err, "Failed to generate key") + return nil, fmt.Errorf("failed to generate key: %w", err) } // Add to set jwkKeySet.Keys = append(jwkKeySet.Keys, jwkKey) } else { for i := range rawKeySetJSON.Keys { rawKeyJSON := rawKeySetJSON.Keys[i] + if rawKeyJSON.Algorithm != nil && *rawKeyJSON.Algorithm == jwa.Unsupported { + continue + } jwkKey, err = rawKeyJSON.GenerateKey() if err != nil { - return nil, errors.Wrap(err, "Failed to generate key: %s") + return nil, fmt.Errorf("failed to generate key: %w", err) } jwkKeySet.Keys = append(jwkKeySet.Keys, jwkKey) } @@ -140,11 +143,11 @@ func (r *RawKeyJSON) GenerateKey() (Key, error) { case jwa.OctetSeq: key = &SymmetricKey{} default: - return nil, errors.Errorf(`Unrecognized key type`) + return nil, errors.New("unrecognized key type") } err := key.GenerateKey(r) if err != nil { - return nil, errors.Wrap(err, "Failed to generate key from JWK") + return nil, fmt.Errorf("failed to generate key from JWK: %w", err) } return key, nil } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go index 1a5cba47b..11b8e3b56 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go @@ -3,10 +3,10 @@ package jwk import ( "crypto/rsa" "encoding/binary" + "errors" + "fmt" "math/big" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -15,7 +15,7 @@ func newRSAPublicKey(key *rsa.PublicKey) (*RSAPublicKey, error) { var hdr StandardHeaders err := hdr.Set(KeyTypeKey, jwa.RSA) if err != nil { - return nil, errors.Wrapf(err, "Failed to set Key Type") + return nil, fmt.Errorf("failed to set Key Type: %w", err) } return &RSAPublicKey{ StandardHeaders: &hdr, @@ -28,7 +28,7 @@ func 
newRSAPrivateKey(key *rsa.PrivateKey) (*RSAPrivateKey, error) { var hdr StandardHeaders err := hdr.Set(KeyTypeKey, jwa.RSA) if err != nil { - return nil, errors.Wrapf(err, "Failed to set Key Type") + return nil, fmt.Errorf("failed to set Key Type: %w", err) } var algoParams jwa.AlgorithmParameters @@ -67,7 +67,7 @@ func newRSAPrivateKey(key *rsa.PrivateKey) (*RSAPrivateKey, error) { // Materialize returns the standard RSA Public Key representation stored in the internal representation func (k *RSAPublicKey) Materialize() (interface{}, error) { if k.key == nil { - return nil, errors.New(`key has no rsa.PublicKey associated with it`) + return nil, errors.New("key has no rsa.PublicKey associated with it") } return k.key, nil } @@ -75,7 +75,7 @@ func (k *RSAPublicKey) Materialize() (interface{}, error) { // Materialize returns the standard RSA Private Key representation stored in the internal representation func (k *RSAPrivateKey) Materialize() (interface{}, error) { if k.key == nil { - return nil, errors.New(`key has no rsa.PrivateKey associated with it`) + return nil, errors.New("key has no rsa.PrivateKey associated with it") } return k.key, nil } @@ -84,7 +84,7 @@ func (k *RSAPrivateKey) Materialize() (interface{}, error) { func (k *RSAPublicKey) GenerateKey(keyJSON *RawKeyJSON) error { if keyJSON.N == nil || keyJSON.E == nil { - return errors.Errorf("Missing mandatory key parameters N or E") + return errors.New("missing mandatory key parameters N or E") } rsaPublicKey := &rsa.PublicKey{ N: (&big.Int{}).SetBytes(keyJSON.N.Bytes()), @@ -101,11 +101,11 @@ func (k *RSAPrivateKey) GenerateKey(keyJSON *RawKeyJSON) error { rsaPublicKey := &RSAPublicKey{} err := rsaPublicKey.GenerateKey(keyJSON) if err != nil { - return errors.Wrap(err, "failed to generate public key") + return fmt.Errorf("failed to generate public key: %w", err) } if keyJSON.D == nil || keyJSON.P == nil || keyJSON.Q == nil { - return errors.Errorf("Missing mandatory key parameters D, P or Q") + return errors.New("missing mandatory key parameters D, P or Q") } privateKey := &rsa.PrivateKey{ PublicKey: *rsaPublicKey.key, diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go index 8a0736155..e0cc0751e 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go @@ -1,7 +1,7 @@ package jwk import ( - "github.com/pkg/errors" + "fmt" "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -11,7 +11,7 @@ func newSymmetricKey(key []byte) (*SymmetricKey, error) { err := hdr.Set(KeyTypeKey, jwa.OctetSeq) if err != nil { - return nil, errors.Wrapf(err, "Failed to set Key Type") + return nil, fmt.Errorf("failed to set Key Type: %w", err) } return &SymmetricKey{ StandardHeaders: &hdr, diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go index 045e38fa1..0c8b35508 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go @@ -1,7 +1,7 @@ package jws import ( - "github.com/pkg/errors" + "fmt" "github.com/open-policy-agent/opa/internal/jwx/jwa" ) @@ -103,7 +103,7 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { switch name { case AlgorithmKey: if 
err := h.Algorithm.Accept(value); err != nil { - return errors.Wrapf(err, `invalid value for %s key`, AlgorithmKey) + return fmt.Errorf("invalid value for %s key: %w", AlgorithmKey, err) } return nil case ContentTypeKey: @@ -111,44 +111,44 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { h.ContentType = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, ContentTypeKey, value) + return fmt.Errorf("invalid value for %s key: %T", ContentTypeKey, value) case CriticalKey: if v, ok := value.([]string); ok { h.Critical = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, CriticalKey, value) + return fmt.Errorf("invalid value for %s key: %T", CriticalKey, value) case JWKKey: if v, ok := value.(string); ok { h.JWK = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, JWKKey, value) + return fmt.Errorf("invalid value for %s key: %T", JWKKey, value) case JWKSetURLKey: if v, ok := value.(string); ok { h.JWKSetURL = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, JWKSetURLKey, value) + return fmt.Errorf("invalid value for %s key: %T", JWKSetURLKey, value) case KeyIDKey: if v, ok := value.(string); ok { h.KeyID = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, KeyIDKey, value) + return fmt.Errorf("invalid value for %s key: %T", KeyIDKey, value) case PrivateParamsKey: if v, ok := value.(map[string]interface{}); ok { h.PrivateParams = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, PrivateParamsKey, value) + return fmt.Errorf("invalid value for %s key: %T", PrivateParamsKey, value) case TypeKey: if v, ok := value.(string); ok { h.Type = v return nil } - return errors.Errorf(`invalid value for %s key: %T`, TypeKey, value) + return fmt.Errorf("invalid value for %s key: %T", TypeKey, value) default: - return errors.Errorf(`invalid key: %s`, name) + return fmt.Errorf("invalid key: %s", name) } } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go index bfa498bb0..c163a3bd5 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go @@ -24,6 +24,8 @@ import ( "crypto/rand" "encoding/base64" "encoding/json" + "errors" + "fmt" "io" "strings" @@ -31,8 +33,6 @@ import ( "github.com/open-policy-agent/opa/internal/jwx/jwk" "github.com/open-policy-agent/opa/internal/jwx/jws/sign" "github.com/open-policy-agent/opa/internal/jwx/jws/verify" - - "github.com/pkg/errors" ) // SignLiteral generates a Signature for the given Payload and Headers, and serializes @@ -50,7 +50,7 @@ func SignLiteral(payload []byte, alg jwa.SignatureAlgorithm, key interface{}, hd ) signer, err := sign.New(alg) if err != nil { - return nil, errors.Wrap(err, `failed to create signer`) + return nil, fmt.Errorf("failed to create signer: %w", err) } var signature []byte @@ -61,7 +61,7 @@ func SignLiteral(payload []byte, alg jwa.SignatureAlgorithm, key interface{}, hd signature, err = signer.Sign([]byte(signingInput), key) } if err != nil { - return nil, errors.Wrap(err, `failed to sign Payload`) + return nil, fmt.Errorf("failed to sign Payload: %w", err) } encodedSignature := base64.RawURLEncoding.EncodeToString(signature) compactSerialization := strings.Join( @@ -83,12 +83,12 @@ func SignWithOption(payload []byte, alg jwa.SignatureAlgorithm, key interface{}) err := headers.Set(AlgorithmKey, 
alg) if err != nil { - return nil, errors.Wrap(err, "Failed to set alg value") + return nil, fmt.Errorf("failed to set alg value: %w", err) } hdrBuf, err := json.Marshal(headers) if err != nil { - return nil, errors.Wrap(err, `failed to marshal Headers`) + return nil, fmt.Errorf("failed to marshal Headers: %w", err) } // NOTE(sr): we don't use SignWithOption -- if we did, this rand.Reader // should come from the BuiltinContext's Seed, too. @@ -104,7 +104,7 @@ func Verify(buf []byte, alg jwa.SignatureAlgorithm, key interface{}) (ret []byte verifier, err := verify.New(alg) if err != nil { - return nil, errors.Wrap(err, "failed to create verifier") + return nil, fmt.Errorf("failed to create verifier: %w", err) } buf = bytes.TrimSpace(buf) @@ -114,7 +114,7 @@ func Verify(buf []byte, alg jwa.SignatureAlgorithm, key interface{}) (ret []byte parts, err := SplitCompact(string(buf[:])) if err != nil { - return nil, errors.Wrap(err, `failed extract from compact serialization format`) + return nil, fmt.Errorf("failed extract from compact serialization format: %w", err) } signingInput := strings.Join( @@ -126,16 +126,16 @@ func Verify(buf []byte, alg jwa.SignatureAlgorithm, key interface{}) (ret []byte decodedSignature, err := base64.RawURLEncoding.DecodeString(parts[2]) if err != nil { - return nil, errors.Wrap(err, "Failed to decode signature") + return nil, fmt.Errorf("failed to decode signature: %w", err) } if err := verifier.Verify([]byte(signingInput), decodedSignature, key); err != nil { - return nil, errors.Wrap(err, "Failed to verify message") + return nil, fmt.Errorf("failed to verify message: %w", err) } if decodedPayload, err := base64.RawURLEncoding.DecodeString(parts[1]); err == nil { return decodedPayload, nil } - return nil, errors.Wrap(err, "Failed to decode Payload") + return nil, fmt.Errorf("failed to decode Payload: %w", err) } // VerifyWithJWK verifies the JWS message using the specified JWK @@ -143,7 +143,7 @@ func VerifyWithJWK(buf []byte, key jwk.Key) (payload []byte, err error) { keyVal, err := key.Materialize() if err != nil { - return nil, errors.Wrap(err, "Failed to materialize key") + return nil, fmt.Errorf("failed to materialize key: %w", err) } return Verify(buf, key.GetAlgorithm(), keyVal) } @@ -179,7 +179,7 @@ func SplitCompact(jwsCompact string) ([]string, error) { parts := strings.Split(jwsCompact, ".") if len(parts) < 3 { - return nil, errors.New("Failed to split compact serialization") + return nil, errors.New("failed to split compact serialization") } return parts, nil } @@ -190,24 +190,24 @@ func parseCompact(str string) (m *Message, err error) { var decodedHeader, decodedPayload, decodedSignature []byte parts, err := SplitCompact(str) if err != nil { - return nil, errors.Wrap(err, `invalid compact serialization format`) + return nil, fmt.Errorf("invalid compact serialization format: %w", err) } if decodedHeader, err = base64.RawURLEncoding.DecodeString(parts[0]); err != nil { - return nil, errors.Wrap(err, `failed to decode Headers`) + return nil, fmt.Errorf("failed to decode Headers: %w", err) } var hdr StandardHeaders if err := json.Unmarshal(decodedHeader, &hdr); err != nil { - return nil, errors.Wrap(err, `failed to parse JOSE Headers`) + return nil, fmt.Errorf("failed to parse JOSE Headers: %w", err) } if decodedPayload, err = base64.RawURLEncoding.DecodeString(parts[1]); err != nil { - return nil, errors.Wrap(err, `failed to decode Payload`) + return nil, fmt.Errorf("failed to decode Payload: %w", err) } if len(parts) > 2 { if decodedSignature, err = 
base64.RawURLEncoding.DecodeString(parts[2]); err != nil { - return nil, errors.Wrap(err, `failed to decode Signature`) + return nil, fmt.Errorf("failed to decode Signature: %w", err) } } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go index 62af72b6c..db1aadec6 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go @@ -4,11 +4,11 @@ import ( "crypto" "crypto/ecdsa" "crypto/rand" + "errors" + "fmt" "io" "github.com/open-policy-agent/opa/internal/jwx/jwa" - - "github.com/pkg/errors" ) var ecdsaSignFuncs = map[jwa.SignatureAlgorithm]ecdsaSignFunc{} @@ -37,7 +37,7 @@ func makeECDSASignFunc(hash crypto.Hash) ecdsaSignFunc { h.Write(payload) r, s, err := ecdsa.Sign(rnd, key, h.Sum(nil)) if err != nil { - return nil, errors.Wrap(err, "failed to sign payload using ecdsa") + return nil, fmt.Errorf("failed to sign payload using ecdsa: %w", err) } rBytes := r.Bytes() @@ -56,7 +56,7 @@ func makeECDSASignFunc(hash crypto.Hash) ecdsaSignFunc { func newECDSA(alg jwa.SignatureAlgorithm) (*ECDSASigner, error) { signfn, ok := ecdsaSignFuncs[alg] if !ok { - return nil, errors.Errorf(`unsupported algorithm while trying to create ECDSA signer: %s`, alg) + return nil, fmt.Errorf("unsupported algorithm while trying to create ECDSA signer: %s", alg) } return &ECDSASigner{ @@ -74,12 +74,12 @@ func (s ECDSASigner) Algorithm() jwa.SignatureAlgorithm { // source (such as `rand.Reader`). func (s ECDSASigner) SignWithRand(payload []byte, key interface{}, r io.Reader) ([]byte, error) { if key == nil { - return nil, errors.New(`missing private key while signing payload`) + return nil, errors.New("missing private key while signing payload") } privateKey, ok := key.(*ecdsa.PrivateKey) if !ok { - return nil, errors.Errorf(`invalid key type %T. *ecdsa.PrivateKey is required`, key) + return nil, fmt.Errorf("invalid key type %T. 
*ecdsa.PrivateKey is required", key) } return s.sign(payload, privateKey, r) } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go index 77e45887a..1f8d04984 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go @@ -2,18 +2,18 @@ package verify import ( "crypto/hmac" - - "github.com/pkg/errors" + "fmt" "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/open-policy-agent/opa/internal/jwx/jws/sign" + "github.com/pkg/errors" ) func newHMAC(alg jwa.SignatureAlgorithm) (*HMACVerifier, error) { s, err := sign.New(alg) if err != nil { - return nil, errors.Wrap(err, `failed to generate HMAC signer`) + return nil, fmt.Errorf("failed to generate HMAC signer: %w", err) } return &HMACVerifier{signer: s}, nil } @@ -23,11 +23,11 @@ func (v HMACVerifier) Verify(signingInput, signature []byte, key interface{}) (e expected, err := v.signer.Sign(signingInput, key) if err != nil { - return errors.Wrap(err, `failed to generated signature`) + return fmt.Errorf("failed to generate signature: %w", err) } if !hmac.Equal(signature, expected) { - return errors.New(`failed to match hmac signature`) + return errors.New("failed to match hmac signature") } return nil } diff --git a/constraint/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go b/constraint/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go index 1ec265126..9c9c391a5 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go @@ -11,8 +11,6 @@ import ( "io" "io/ioutil" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/internal/leb128" "github.com/open-policy-agent/opa/internal/wasm/constant" "github.com/open-policy-agent/opa/internal/wasm/instruction" @@ -27,7 +25,7 @@ func ReadModule(r io.Reader) (*module.Module, error) { wr := &reader{r: r, n: 0} module, err := readModule(wr) if err != nil { - return nil, errors.Wrapf(err, "offset 0x%x", wr.n) + return nil, fmt.Errorf("offset 0x%x: %w", wr.n, err) } return module, nil @@ -39,7 +37,7 @@ func ReadCodeEntry(r io.Reader) (*module.CodeEntry, error) { wr := &reader{r: r, n: 0} entry, err := readCodeEntry(wr) if err != nil { - return nil, errors.Wrapf(err, "offset 0x%x", wr.n) + return nil, fmt.Errorf("offset 0x%x: %w", wr.n, err) } return entry, nil @@ -97,7 +95,7 @@ func readCodeEntry(r io.Reader) (*module.CodeEntry, error) { var entry module.CodeEntry if err := readLocals(r, &entry.Func.Locals); err != nil { - return nil, errors.Wrapf(err, "local declarations") + return nil, fmt.Errorf("local declarations: %w", err) } return &entry, readExpr(r, &entry.Func.Expr) @@ -145,61 +143,61 @@ func readSections(r io.Reader, m *module.Module) error { switch id { case constant.StartSectionID: if err := readStartSection(bufr, &m.Start); err != nil { - return errors.Wrap(err, "start section") + return fmt.Errorf("start section: %w", err) } case constant.CustomSectionID: var name string if err := readByteVectorString(bufr, &name); err != nil { - return errors.Wrap(err, "read custom section type") + return fmt.Errorf("read custom section type: %w", err) } if name == "name" { if err := readCustomNameSections(bufr, &m.Names); err != nil { - return errors.Wrap(err, "custom
'name' section") + return fmt.Errorf("custom 'name' section: %w", err) } } else { if err := readCustomSection(bufr, name, &m.Customs); err != nil { - return errors.Wrap(err, "custom section") + return fmt.Errorf("custom section: %w", err) } } case constant.TypeSectionID: if err := readTypeSection(bufr, &m.Type); err != nil { - return errors.Wrap(err, "type section") + return fmt.Errorf("type section: %w", err) } case constant.ImportSectionID: if err := readImportSection(bufr, &m.Import); err != nil { - return errors.Wrap(err, "import section") + return fmt.Errorf("import section: %w", err) } case constant.TableSectionID: if err := readTableSection(bufr, &m.Table); err != nil { - return errors.Wrap(err, "table section") + return fmt.Errorf("table section: %w", err) } case constant.MemorySectionID: if err := readMemorySection(bufr, &m.Memory); err != nil { - return errors.Wrap(err, "memory section") + return fmt.Errorf("memory section: %w", err) } case constant.GlobalSectionID: if err := readGlobalSection(bufr, &m.Global); err != nil { - return errors.Wrap(err, "global section") + return fmt.Errorf("global section: %w", err) } case constant.FunctionSectionID: if err := readFunctionSection(bufr, &m.Function); err != nil { - return errors.Wrap(err, "function section") + return fmt.Errorf("function section: %w", err) } case constant.ExportSectionID: if err := readExportSection(bufr, &m.Export); err != nil { - return errors.Wrap(err, "export section") + return fmt.Errorf("export section: %w", err) } case constant.ElementSectionID: if err := readElementSection(bufr, &m.Element); err != nil { - return errors.Wrap(err, "element section") + return fmt.Errorf("element section: %w", err) } case constant.DataSectionID: if err := readDataSection(bufr, &m.Data); err != nil { - return errors.Wrap(err, "data section") + return fmt.Errorf("data section: %w", err) } case constant.CodeSectionID: if err := readRawCodeSection(bufr, &m.Code); err != nil { - return errors.Wrap(err, "code section") + return fmt.Errorf("code section: %w", err) } default: return fmt.Errorf("illegal section id") diff --git a/constraint/vendor/github.com/open-policy-agent/opa/topdown/graphql.go b/constraint/vendor/github.com/open-policy-agent/opa/topdown/graphql.go new file mode 100644 index 000000000..74caebe6b --- /dev/null +++ b/constraint/vendor/github.com/open-policy-agent/opa/topdown/graphql.go @@ -0,0 +1,378 @@ +// Copyright 2022 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package topdown + +import ( + "fmt" + "strings" + + gqltop "github.com/vektah/gqlparser/v2" + gqlast "github.com/vektah/gqlparser/v2/ast" + gqlparser "github.com/vektah/gqlparser/v2/parser" + gqlvalidator "github.com/vektah/gqlparser/v2/validator" + + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/topdown/builtins" +) + +// Parses a GraphQL schema, and returns only the Schema object. +// Used in validation of queries. +// NOTE(philipc): The error type here is a gqlerror.Error struct, +// which requires us to treat it specially, since the returned value +// will *always* be non-null. +// See: https://staticcheck.io/docs/checks#SA4023 for details. 
+func loadSchema(schema string) (*gqlast.Schema, error) { + loadedSchema, err := gqltop.LoadSchema(&gqlast.Source{Input: schema}) + if err != nil { + return nil, fmt.Errorf("%s in GraphQL schema string at location %d:%d", err.Message, err.Locations[0].Line, err.Locations[0].Column) + } + return loadedSchema, nil +} + +// Parses a GraphQL schema, and returns the GraphQL AST for the schema. +// NOTE(philipc): The error type here is a gqlerror.Error struct, +// which requires us to treat it specially, since the returned value +// will *always* be non-null. +// See: https://staticcheck.io/docs/checks#SA4023 for details. +func parseSchema(schema string) (*gqlast.SchemaDocument, error) { + // NOTE(philipc): We don't include the "built-in schema defs" from the + // underlying graphql parsing library here, because those definitions + // generate enormous AST blobs. In the future, if there is demand for + // a "full-spec" version of schema ASTs, we may need to provide a + // version of this function that includes the built-in schema + // definitions. + schemaAST, err := gqlparser.ParseSchema(&gqlast.Source{Input: schema}) + if err != nil { + return nil, fmt.Errorf("%s in GraphQL string at location %d:%d", err.Message, err.Locations[0].Line, err.Locations[0].Column) + } + return schemaAST, nil +} + +// Parses a GraphQL query, and returns the GraphQL AST for the query. +// NOTE(philipc): The error type here is a gqlerror.Error struct, +// which requires us to treat it specially, since the returned value +// will *always* be non-null. +// See: https://staticcheck.io/docs/checks#SA4023 for details. +func parseQuery(query string) (*gqlast.QueryDocument, error) { + queryAST, err := gqlparser.ParseQuery(&gqlast.Source{Input: query}) + if err != nil { + return nil, fmt.Errorf("%s in GraphQL string at location %d:%d", err.Message, err.Locations[0].Line, err.Locations[0].Column) + } + return queryAST, nil +} + +// Validates a GraphQL query against a schema, and returns an error. +// In this case, we get a wrapped error list type, and pluck out +// just the first error message in the list. +// NOTE(philipc): The error type from Validate() is a gqlerror.List +// struct, which requires us to treat it specially, since the +// returned value will *always* be non-null. +// See: https://staticcheck.io/docs/checks#SA4023 for details. +func validateQuery(schema *gqlast.Schema, query *gqlast.QueryDocument) error { + // Validate the query against the schema, erroring if there's an issue. + err := gqlvalidator.Validate(schema, query) + if err != nil { + // We use strings.TrimSuffix to remove the '.' characters that the library + // authors include on most of their validation errors. This should be safe, + // since variable names in their error messages are usually quoted, and + // this affects only the last character(s) in the string. + // NOTE(philipc): We know the error location will be in the query string, + // because schema validation always happens before this function is called. + return fmt.Errorf("%s in GraphQL query string at location %d:%d", strings.TrimSuffix(err[0].Message, "."), err[0].Locations[0].Line, err[0].Locations[0].Column) + } + return nil +} + +// Recursively traverses an AST that has been run through InterfaceToValue, +// and prunes away the fields with null or empty values, and all `Position` +// structs. +// NOTE(philipc): We currently prune away null values to reduce the level +// of clutter in the returned AST objects. In the future, if there is demand +// for ASTs that have a more regular/fixed structure, we may need to provide +// a "raw" version of the AST, where we still prune away the `Position` +// structs, but leave in the null fields. +func pruneIrrelevantGraphQLASTNodes(value ast.Value) ast.Value { + // We iterate over the Value we've been provided, and recurse down + // in the case of complex types, such as Arrays/Objects. + // We are guaranteed to only have to deal with standard JSON types, + // so this is much less ugly than what we'd need for supporting every + // extant ast type! + switch x := value.(type) { + case *ast.Array: + result := ast.NewArray() + // Iterate over the array's elements, and do the following: + // - Drop any Nulls + // - Drop any empty object/array value (after running the pruner) + for i := 0; i < x.Len(); i++ { + vTerm := x.Elem(i) + switch v := vTerm.Value.(type) { + case ast.Null: + continue + case *ast.Array: + // Safe, because we knew the type before going to prune it. + va := pruneIrrelevantGraphQLASTNodes(v).(*ast.Array) + if va.Len() > 0 { + result = result.Append(ast.NewTerm(va)) + } + case ast.Object: + // Safe, because we knew the type before going to prune it. + vo := pruneIrrelevantGraphQLASTNodes(v).(ast.Object) + if len(vo.Keys()) > 0 { + result = result.Append(ast.NewTerm(vo)) + } + default: + result = result.Append(vTerm) + } + } + return result + case ast.Object: + result := ast.NewObject() + // Iterate over our object's keys, and do the following: + // - Drop "Position". + // - Drop any key with a Null value. + // - Drop any key with an empty object/array value (after running the pruner) + keys := x.Keys() + for _, k := range keys { + // We drop the "Position" objects because we don't need the + // source-backref/location info they provide for policy rules. + // Note that keys are ast.Strings. + if ast.String("Position").Equal(k.Value) { + continue + } + vTerm := x.Get(k) + switch v := vTerm.Value.(type) { + case ast.Null: + continue + case *ast.Array: + // Safe, because we knew the type before going to prune it. + va := pruneIrrelevantGraphQLASTNodes(v).(*ast.Array) + if va.Len() > 0 { + result.Insert(k, ast.NewTerm(va)) + } + case ast.Object: + // Safe, because we knew the type before going to prune it. + vo := pruneIrrelevantGraphQLASTNodes(v).(ast.Object) + if len(vo.Keys()) > 0 { + result.Insert(k, ast.NewTerm(vo)) + } + default: + result.Insert(k, vTerm) + } + } + return result + default: + return x + } +} + +// Reports errors from parsing/validation. +func builtinGraphQLParse(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + // Get the raw strings from each operand so that we can + // feed them to the GraphQL parser functions. + rawQuery, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + rawSchema, err := builtins.StringOperand(operands[1].Value, 1) + if err != nil { + return err + } + + // Generate ASTs/errors for the GraphQL query. + queryDoc, err := parseQuery(string(rawQuery)) + if err != nil { + return err + } + + // Validate the query against the schema, erroring if there's an issue. + schema, err := loadSchema(string(rawSchema)) + if err != nil { + return err + } + if err := validateQuery(schema, queryDoc); err != nil { + return err + } + + // Generate AST/errors for the GraphQL schema, since the query + // passed validation. + schemaDoc, err := parseSchema(string(rawSchema)) + if err != nil { + return err + } + + // Transform the ASTs into Objects.
+ queryASTValue, err := ast.InterfaceToValue(queryDoc) + if err != nil { + return err + } + schemaASTValue, err := ast.InterfaceToValue(schemaDoc) + if err != nil { + return err + } + + // Recursively remove irrelevant AST structures. + queryResult := pruneIrrelevantGraphQLASTNodes(queryASTValue.(ast.Object)) + querySchema := pruneIrrelevantGraphQLASTNodes(schemaASTValue.(ast.Object)) + + // Construct return value. + verified := ast.ArrayTerm( + ast.NewTerm(queryResult), + ast.NewTerm(querySchema), + ) + + return iter(verified) +} + +// Returns default value when errors occur. +func builtinGraphQLParseAndVerify(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + // Get the raw strings from each operand so that we can + // feed them to the GraphQL parser functions. + rawQuery, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + rawSchema, err := builtins.StringOperand(operands[1].Value, 1) + if err != nil { + return err + } + + unverified := ast.ArrayTerm( + ast.BooleanTerm(false), + ast.NewTerm(ast.NewObject()), + ast.NewTerm(ast.NewObject()), + ) + + // Generate ASTs/errors for the GraphQL query. + queryDoc, err := parseQuery(string(rawQuery)) + if err != nil { + return iter(unverified) + } + + // Validate the query against the schema, erroring if there's an issue. + schema, err := loadSchema(string(rawSchema)) + if err != nil { + return iter(unverified) + } + if err := validateQuery(schema, queryDoc); err != nil { + return iter(unverified) + } + + // Generate AST/errors for the GraphQL schema, since the query + // passed validation. + schemaDoc, err := parseSchema(string(rawSchema)) + if err != nil { + return iter(unverified) + } + + // Transform the ASTs into Objects. + queryASTValue, err := ast.InterfaceToValue(queryDoc) + if err != nil { + return iter(unverified) + } + schemaASTValue, err := ast.InterfaceToValue(schemaDoc) + if err != nil { + return iter(unverified) + } + + // Recursively remove irrelevant AST structures. + queryResult := pruneIrrelevantGraphQLASTNodes(queryASTValue.(ast.Object)) + querySchema := pruneIrrelevantGraphQLASTNodes(schemaASTValue.(ast.Object)) + + // Construct return value. + verified := ast.ArrayTerm( + ast.BooleanTerm(true), + ast.NewTerm(queryResult), + ast.NewTerm(querySchema), + ) + + return iter(verified) +} + +func builtinGraphQLParseQuery(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + raw, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + + // Get the highly-nested AST struct, along with any errors generated. + query, err := parseQuery(string(raw)) + if err != nil { + return err + } + + // Transform the AST into an Object. + value, err := ast.InterfaceToValue(query) + if err != nil { + return err + } + + // Recursively remove irrelevant AST structures. + result := pruneIrrelevantGraphQLASTNodes(value.(ast.Object)) + + return iter(ast.NewTerm(result)) +} + +func builtinGraphQLParseSchema(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + raw, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + + // Get the highly-nested AST struct, along with any errors generated. + schema, err := parseSchema(string(raw)) + if err != nil { + return err + } + + // Transform the AST into an Object. + value, err := ast.InterfaceToValue(schema) + if err != nil { + return err + } + + // Recursively remove irrelevant AST structures. 
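+ // Editor's illustration (hypothetical AST fragment): pruning turns a term like + // {"Alias": null, "Arguments": [], "Name": "user", "Position": {"Line": 1, "Column": 3}} + // into + // {"Name": "user"} + // since null values, empty collections, and "Position" keys are all dropped.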
+ result := pruneIrrelevantGraphQLASTNodes(value.(ast.Object)) + + return iter(ast.NewTerm(result)) +} + +func builtinGraphQLIsValid(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + // Get the raw strings from each operand so that we can + // feed them to the GraphQL parser functions. + rawQuery, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + rawSchema, err := builtins.StringOperand(operands[1].Value, 1) + if err != nil { + return err + } + + // Generate ASTs/errors for the GraphQL schema and query. + schema, err := loadSchema(string(rawSchema)) + if err != nil { + return iter(ast.BooleanTerm(false)) + } + query, err := parseQuery(string(rawQuery)) + if err != nil { + return iter(ast.BooleanTerm(false)) + } + + // Validate the query against the schema, erroring if there's an issue. + if err := validateQuery(schema, query); err != nil { + return iter(ast.BooleanTerm(false)) + } + + // If we got this far, the GraphQL query passed validation. + return iter(ast.BooleanTerm(true)) +} + +func init() { + RegisterBuiltinFunc(ast.GraphQLParse.Name, builtinGraphQLParse) + RegisterBuiltinFunc(ast.GraphQLParseAndVerify.Name, builtinGraphQLParseAndVerify) + RegisterBuiltinFunc(ast.GraphQLParseQuery.Name, builtinGraphQLParseQuery) + RegisterBuiltinFunc(ast.GraphQLParseSchema.Name, builtinGraphQLParseSchema) + RegisterBuiltinFunc(ast.GraphQLIsValid.Name, builtinGraphQLIsValid) +} diff --git a/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go b/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go index 6e58d2327..abafdf068 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_bytes.go @@ -35,14 +35,14 @@ func parseNumBytesError(msg string) error { return fmt.Errorf("%s error: %s", ast.UnitsParseBytes.Name, msg) } -func errUnitNotRecognized(unit string) error { +func errBytesUnitNotRecognized(unit string) error { return parseNumBytesError(fmt.Sprintf("byte unit %s not recognized", unit)) } var ( - errNoAmount = parseNumBytesError("no byte amount provided") - errNumConv = parseNumBytesError("could not parse byte amount to a number") - errIncludesSpaces = parseNumBytesError("spaces not allowed in resource strings") + errBytesValueNoAmount = parseNumBytesError("no byte amount provided") + errBytesValueNumConv = parseNumBytesError("could not parse byte amount to a number") + errBytesValueIncludesSpaces = parseNumBytesError("spaces not allowed in resource strings") ) func builtinNumBytes(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -56,12 +56,12 @@ func builtinNumBytes(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.T s := formatString(raw) if strings.Contains(s, " ") { - return errIncludesSpaces + return errBytesValueIncludesSpaces } num, unit := extractNumAndUnit(s) if num == "" { - return errNoAmount + return errBytesValueNoAmount } switch unit { @@ -92,12 +92,12 @@ func builtinNumBytes(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.T case "eib", "ei": m.SetUint64(ei) default: - return errUnitNotRecognized(unit) + return errBytesUnitNotRecognized(unit) } numFloat, ok := new(big.Float).SetString(num) if !ok { - return errNumConv + return errBytesValueNumConv } var total big.Int diff --git a/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_units.go 
b/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_units.go new file mode 100644 index 000000000..800ee3001 --- /dev/null +++ b/constraint/vendor/github.com/open-policy-agent/opa/topdown/parse_units.go @@ -0,0 +1,115 @@ +// Copyright 2022 The OPA Authors. All rights reserved. +// Use of this source code is governed by an Apache2 +// license that can be found in the LICENSE file. + +package topdown + +import ( + "fmt" + "math/big" + "strings" + + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/topdown/builtins" +) + +// Decimal SI unit constants are defined here; the binary SI unit constants +// (ki, mi, gi, ...) are borrowed from topdown/parse_bytes. +const milli float64 = 0.001 +const ( + k uint64 = 1000 + m = k * 1000 + g = m * 1000 + t = g * 1000 + p = t * 1000 + e = p * 1000 +) + +func parseUnitsError(msg string) error { + return fmt.Errorf("%s error: %s", ast.UnitsParse.Name, msg) +} + +func errUnitNotRecognized(unit string) error { + return parseUnitsError(fmt.Sprintf("unit %s not recognized", unit)) +} + +var ( + errNoAmount = parseUnitsError("no amount provided") + errNumConv = parseUnitsError("could not parse amount to a number") + errIncludesSpaces = parseUnitsError("spaces not allowed in resource strings") +) + +// Accepts both normal SI and binary SI units. +func builtinUnits(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { + var x big.Float + + raw, err := builtins.StringOperand(operands[0].Value, 1) + if err != nil { + return err + } + + // We remove escaped quotes from strings here to retain parity with units.parse_bytes. + s := string(raw) + s = strings.Replace(s, "\"", "", -1) + + if strings.Contains(s, " ") { + return errIncludesSpaces + } + + num, unit := extractNumAndUnit(s) + if num == "" { + return errNoAmount + } + + // Unlike in units.parse_bytes, we only lowercase after the first letter, + // so that we can distinguish between 'm' and 'M'.
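+ // Editor's note, with examples derived from the constants above (not upstream docs): + // units.parse("1m") == 0.001 (milli) + // units.parse("1M") == 1000000 (decimal mega) + // units.parse("1Mi") == 1048576 (binary mega, 2^20) + // Only the first letter keeps its case, so "1KI", "1Ki", and "1ki" all parse as 1024.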
+ if len(unit) > 1 { + lower := strings.ToLower(unit[1:]) + unit = unit[:1] + lower + } + + switch unit { + case "m": + x.SetFloat64(milli) + case "": + x.SetUint64(none) + case "k", "K": + x.SetUint64(k) + case "ki", "Ki": + x.SetUint64(ki) + case "M": + x.SetUint64(m) + case "mi", "Mi": + x.SetUint64(mi) + case "g", "G": + x.SetUint64(g) + case "gi", "Gi": + x.SetUint64(gi) + case "t", "T": + x.SetUint64(t) + case "ti", "Ti": + x.SetUint64(ti) + case "p", "P": + x.SetUint64(p) + case "pi", "Pi": + x.SetUint64(pi) + case "e", "E": + x.SetUint64(e) + case "ei", "Ei": + x.SetUint64(ei) + default: + return errUnitNotRecognized(unit) + } + + numFloat, ok := new(big.Float).SetString(num) + if !ok { + return errNumConv + } + + numFloat.Mul(numFloat, &x) + return iter(ast.NewTerm(builtins.FloatToNumber(numFloat))) +} + +func init() { + RegisterBuiltinFunc(ast.UnitsParse.Name, builtinUnits) +} diff --git a/constraint/vendor/github.com/open-policy-agent/opa/topdown/tokens.go b/constraint/vendor/github.com/open-policy-agent/opa/topdown/tokens.go index 64828831f..c23fa1b25 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/topdown/tokens.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/topdown/tokens.go @@ -15,14 +15,14 @@ import ( "encoding/hex" "encoding/json" "encoding/pem" + "errors" "fmt" "hash" "math/big" "strings" - "github.com/pkg/errors" - "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/open-policy-agent/opa/internal/jwx/jwk" "github.com/open-policy-agent/opa/internal/jwx/jws" "github.com/open-policy-agent/opa/topdown/builtins" @@ -269,9 +269,16 @@ func verifyES(publicKey interface{}, digest []byte, signature []byte) error { return fmt.Errorf("ECDSA signature verification error") } -// getKeyFromCertOrJWK returns the public key found in a X.509 certificate or JWK key(s). +type verificationKey struct { + alg string + kid string + key interface{} +} + +// getKeysFromCertOrJWK returns the public key found in an X.509 certificate or JWK key(s). // A valid PEM block is never valid JSON (and vice versa), hence can try parsing both. -func getKeyFromCertOrJWK(certificate string) ([]interface{}, error) { +// When provided a JWKS, each key typically also includes a key ID and the key algorithm.
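+// Editor's sketch of the two accepted input shapes (hypothetical values): either a PEM block, e.g. +// -----BEGIN CERTIFICATE----- ... -----END CERTIFICATE----- +// or a JWKS document, e.g. +// {"keys": [{"kty": "RSA", "kid": "key-1", "alg": "RS256", "n": "...", "e": "AQAB"}]} +// Since a valid PEM block is never valid JSON, the two formats can safely be probed in turn.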
+func getKeysFromCertOrJWK(certificate string) ([]verificationKey, error) { if block, rest := pem.Decode([]byte(certificate)); block != nil { if len(rest) > 0 { return nil, fmt.Errorf("extra data after a PEM certificate block") @@ -280,19 +287,18 @@ func getKeyFromCertOrJWK(certificate string) ([]interface{}, error) { if block.Type == blockTypeCertificate { cert, err := x509.ParseCertificate(block.Bytes) if err != nil { - return nil, errors.Wrap(err, "failed to parse a PEM certificate") + return nil, fmt.Errorf("failed to parse a PEM certificate: %w", err) } - - return []interface{}{cert.PublicKey}, nil + return []verificationKey{{key: cert.PublicKey}}, nil } if block.Type == "PUBLIC KEY" { key, err := x509.ParsePKIXPublicKey(block.Bytes) if err != nil { - return nil, errors.Wrap(err, "failed to parse a PEM public key") + return nil, fmt.Errorf("failed to parse a PEM public key: %w", err) } - return []interface{}{key}, nil + return []verificationKey{{key: key}}, nil } return nil, fmt.Errorf("failed to extract a Key from the PEM certificate") @@ -300,21 +306,34 @@ func getKeyFromCertOrJWK(certificate string) ([]interface{}, error) { jwks, err := jwk.ParseString(certificate) if err != nil { - return nil, errors.Wrap(err, "failed to parse a JWK key (set)") + return nil, fmt.Errorf("failed to parse a JWK key (set): %w", err) } - var keys []interface{} + var keys []verificationKey for _, k := range jwks.Keys { key, err := k.Materialize() if err != nil { return nil, err } - keys = append(keys, key) + keys = append(keys, verificationKey{ + alg: k.GetAlgorithm().String(), + kid: k.GetKeyID(), + key: key, + }) } return keys, nil } +func getKeyByKid(kid string, keys []verificationKey) *verificationKey { + for _, key := range keys { + if key.kid == kid { + return &key + } + } + return nil +} + // Implements JWT signature verification. func builtinJWTVerify(a ast.Value, b ast.Value, hasher func() hash.Hash, verify func(publicKey interface{}, digest []byte, signature []byte) error) (ast.Value, error) { token, err := decodeJWT(a) @@ -327,7 +346,7 @@ func builtinJWTVerify(a ast.Value, b ast.Value, hasher func() hash.Hash, verify return nil, err } - keys, err := getKeyFromCertOrJWK(string(s)) + keys, err := getKeysFromCertOrJWK(string(s)) if err != nil { return nil, err } @@ -337,14 +356,45 @@ func builtinJWTVerify(a ast.Value, b ast.Value, hasher func() hash.Hash, verify return nil, err } + err = token.decodeHeader() + if err != nil { + return nil, err + } + header, err := parseTokenHeader(token) + if err != nil { + return nil, err + } + // Validate the JWT signature - for _, key := range keys { - err = verify(key, - getInputSHA([]byte(token.header+"."+token.payload), hasher), - []byte(signature)) - if err == nil { - return ast.Boolean(true), nil + // First, check if there's a matching key ID (`kid`) in both token header and key(s). + // If a match is found, verify using only that key. Only applicable when a JWKS was provided. 
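+ // Editor's example (hypothetical token): given a JWT header of + // {"alg": "RS256", "kid": "key-1"} + // and a JWKS entry with kid "key-1", only that key is tried; if that single + // verification fails, the builtin returns false without falling back to other keys.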
+ if header.kid != "" { + if key := getKeyByKid(header.kid, keys); key != nil { + err = verify(key.key, getInputSHA([]byte(token.header+"."+token.payload), hasher), []byte(signature)) + + return ast.Boolean(err == nil), nil + } + } + + // If no key ID matched, try to verify using any key in the set + // If an alg is present in both the JWT header and the key, skip verification unless they match + for _, key := range keys { + if key.alg == "" { + // No algorithm provided for the key - this is likely a certificate and not a JWKS, so + // we'll need to verify to find out + err = verify(key.key, getInputSHA([]byte(token.header+"."+token.payload), hasher), []byte(signature)) + if err == nil { + return ast.Boolean(true), nil + } + } else { + if header.alg != key.alg { + continue + } + err = verify(key.key, getInputSHA([]byte(token.header+"."+token.payload), hasher), []byte(signature)) + if err == nil { + return ast.Boolean(true), nil + } } } @@ -446,7 +496,7 @@ func builtinJWTVerifyHS512(bctx BuiltinContext, args []*ast.Term, iter func(*ast // tokenConstraints holds decoded JWT verification constraints. type tokenConstraints struct { // The set of asymmetric keys we can verify with. - keys []interface{} + keys []verificationKey // The single symmetric key we will verify with. secret string @@ -496,10 +546,11 @@ func tokenConstraintCert(value ast.Value, constraints *tokenConstraints) error { return fmt.Errorf("cert constraint: must be a string") } - keys, err := getKeyFromCertOrJWK(string(s)) + keys, err := getKeysFromCertOrJWK(string(s)) if err != nil { return err } + constraints.keys = keys return nil } @@ -596,14 +647,36 @@ func (constraints *tokenConstraints) verify(kid, alg, header, payload, signature } // If we're configured with asymmetric key(s) then only trust that if constraints.keys != nil { + if kid != "" { + if key := getKeyByKid(kid, constraints.keys); key != nil { + err := a.verify(key.key, a.hash, plaintext, []byte(signature)) + if err != nil { + return errSignatureNotVerified + } + return nil + } + } + verified := false for _, key := range constraints.keys { - err := a.verify(key, a.hash, plaintext, []byte(signature)) - if err == nil { - verified = true - break + if key.alg == "" { + err := a.verify(key.key, a.hash, plaintext, []byte(signature)) + if err == nil { + verified = true + break + } + } else { + if alg != key.alg { + continue + } + err := a.verify(key.key, a.hash, plaintext, []byte(signature)) + if err == nil { + verified = true + break + } } } + if !verified { return errSignatureNotVerified } @@ -844,6 +917,9 @@ func commonBuiltinJWTEncodeSign(bctx BuiltinContext, inputHeaders, jwsPayload, j return err } alg := standardHeaders.GetAlgorithm() + if alg == jwa.Unsupported { + return fmt.Errorf("unknown signature algorithm") + } if (standardHeaders.Type == "" || standardHeaders.Type == headerJwt) && !json.Valid([]byte(jwsPayload)) { return fmt.Errorf("type is JWT but payload is not JSON") diff --git a/constraint/vendor/github.com/open-policy-agent/opa/types/types.go b/constraint/vendor/github.com/open-policy-agent/opa/types/types.go index 9b5cc66c4..cd7138245 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/types/types.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/types/types.go @@ -48,6 +48,42 @@ func NewNull() Null { return Null{} } +type NamedType struct { + Name, Descr string + Type Type +} + +func (n *NamedType) typeMarker() string { return n.Type.typeMarker() } +func (n *NamedType) String() string { return n.Name + ": " + n.Type.String() 
} +func (n *NamedType) MarshalJSON() ([]byte, error) { + var obj map[string]interface{} + switch x := n.Type.(type) { + case interface{ toMap() map[string]interface{} }: + obj = x.toMap() + default: + obj = map[string]interface{}{ + "type": n.Type.typeMarker(), + } + } + obj["name"] = n.Name + if n.Descr != "" { + obj["description"] = n.Descr + } + return json.Marshal(obj) +} + +func (n *NamedType) Description(d string) *NamedType { + n.Descr = d + return n +} + +func Named(name string, t Type) *NamedType { + return &NamedType{ + Type: t, + Name: name, + } +} + // MarshalJSON returns the JSON encoding of t. func (t Null) MarshalJSON() ([]byte, error) { return json.Marshal(map[string]interface{}{ @@ -55,6 +91,15 @@ func (t Null) MarshalJSON() ([]byte, error) { }) } +func unwrap(t Type) Type { + switch t := t.(type) { + case *NamedType: + return t.Type + default: + return t + } +} + func (t Null) String() string { return typeNull } @@ -100,7 +145,7 @@ func (t String) MarshalJSON() ([]byte, error) { }) } -func (t String) String() string { +func (String) String() string { return typeString } @@ -142,6 +187,10 @@ func NewArray(static []Type, dynamic Type) *Array { // MarshalJSON returns the JSON encoding of t. func (t *Array) MarshalJSON() ([]byte, error) { + return json.Marshal(t.toMap()) +} + +func (t *Array) toMap() map[string]interface{} { repr := map[string]interface{}{ "type": t.typeMarker(), } @@ -151,7 +200,7 @@ func (t *Array) MarshalJSON() ([]byte, error) { if t.dynamic != nil { repr["dynamic"] = t.dynamic } - return json.Marshal(repr) + return repr } func (t *Array) String() string { @@ -207,13 +256,17 @@ func NewSet(of Type) *Set { // MarshalJSON returns the JSON encoding of t. func (t *Set) MarshalJSON() ([]byte, error) { + return json.Marshal(t.toMap()) +} + +func (t *Set) toMap() map[string]interface{} { repr := map[string]interface{}{ "type": t.typeMarker(), } if t.of != nil { repr["of"] = t.of } - return json.Marshal(repr) + return repr } func (t *Set) String() string { @@ -332,6 +385,10 @@ func (t *Object) Keys() []interface{} { // MarshalJSON returns the JSON encoding of t. func (t *Object) MarshalJSON() ([]byte, error) { + return json.Marshal(t.toMap()) +} + +func (t *Object) toMap() map[string]interface{} { repr := map[string]interface{}{ "type": t.typeMarker(), } @@ -341,7 +398,7 @@ func (t *Object) MarshalJSON() ([]byte, error) { if t.dynamic != nil { repr["dynamic"] = t.dynamic } - return json.Marshal(repr) + return repr } // Select returns the type of the named property. @@ -395,13 +452,17 @@ func (t Any) Contains(other Type) bool { // MarshalJSON returns the JSON encoding of t. func (t Any) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + return json.Marshal(t.toMap()) +} + +func (t Any) toMap() map[string]interface{} { + repr := map[string]interface{}{ "type": t.typeMarker(), } if len(t) != 0 { - data["of"] = []Type(t) + repr["of"] = []Type(t) } - return json.Marshal(data) + return repr } // Merge return a new Any type that is the superset of t and other. @@ -487,8 +548,7 @@ func Arity(x Type) int { return len(f.FuncArgs().Args) } -// NewFunction returns a new Function object where xs[:len(xs)-1] are arguments -// and xs[len(xs)-1] is the result type. +// NewFunction returns a new Function object of the given argument and result types. 
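+// Editor's usage sketch, assuming the package's exported S (string) and B (boolean) types: +// f := NewFunction([]Type{S, S}, B) // a builtin taking two strings and returning a boolean +// g := NewFunction([]Type{Named("x", S).Description("input string")}, B) // named, documented argument +// Named argument and result types are unwrapped again by Args and Result.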
func NewFunction(args []Type, result Type) *Function { return &Function{ args: args, result: result, } } @@ -512,19 +572,34 @@ func NewVariadicFunction(args []Type, varargs Type, result Type) *Function { // FuncArgs returns the function's arguments. func (t *Function) FuncArgs() FuncArgs { - return FuncArgs{Args: t.Args(), Variadic: t.variadic} + return FuncArgs{Args: t.Args(), Variadic: unwrap(t.variadic)} +} + +// NamedFuncArgs returns the function's arguments, with a name and +// description if available. +func (t *Function) NamedFuncArgs() FuncArgs { + args := make([]Type, len(t.args)) + copy(args, t.args) + return FuncArgs{Args: args, Variadic: t.variadic} } // Args returns the function's arguments as a slice, ignoring variadic arguments. // Deprecated: Use FuncArgs instead. func (t *Function) Args() []Type { cpy := make([]Type, len(t.args)) - copy(cpy, t.args) + for i := range t.args { + cpy[i] = unwrap(t.args[i]) + } return cpy } // Result returns the function's result type. func (t *Function) Result() Type { + return unwrap(t.result) +} + +// NamedResult returns the function's result type, without stripping name and description. +func (t *Function) NamedResult() Type { return t.result } @@ -566,12 +641,13 @@ func (t *Function) UnmarshalJSON(bs []byte) error { return nil } -// Union returns a new function represnting the union of t and other. Functions +// Union returns a new function representing the union of t and other. Functions // must have the same arity to be unioned. func (t *Function) Union(other *Function) *Function { if other == nil { return t - } else if t == nil { + } + if t == nil { return other } @@ -618,6 +694,7 @@ func (a FuncArgs) String() string { return "(" + strings.Join(buf, ", ") + ")" } +// Arg returns the nth argument's type. func (a FuncArgs) Arg(x int) Type { if x < len(a.Args) { return a.Args[x] } @@ -627,6 +704,7 @@ // Compare returns -1, 0, 1 based on comparison between a and b. func Compare(a, b Type) int { + a, b = unwrap(a), unwrap(b) x := typeOrder(a) y := typeOrder(b) if x > y { @@ -731,7 +809,7 @@ // Contains returns true if a is a superset or equal to b. func Contains(a, b Type) bool { - if any, ok := a.(Any); ok { + if any, ok := unwrap(a).(Any); ok { return any.Contains(b) } return Compare(a, b) == 0 @@ -740,6 +818,7 @@ // Or returns a type that represents the union of a and b. If one type is a // superset of the other, the superset is returned unchanged. func Or(a, b Type) Type { + a, b = unwrap(a), unwrap(b) if a == nil { return b } else if b == nil { @@ -768,7 +847,7 @@ // Select returns a property or item of a. func Select(a Type, x interface{}) Type { - switch a := a.(type) { + switch a := unwrap(a).(type) { case *Array: n, ok := x.(json.Number) if !ok { @@ -811,7 +890,7 @@ // keys are always number types, for objects the keys are always string types, // and for sets the keys are always the type of the set element. func Keys(a Type) Type { - switch a := a.(type) { + switch a := unwrap(a).(type) { case *Array: return N case *Object: @@ -841,7 +920,7 @@ // Values returns the type of values that can be enumerated for a. func Values(a Type) Type { - switch a := a.(type) { + switch a := unwrap(a).(type) { case *Array: var tpe Type for i := range a.static { @@ -874,7 +953,7 @@ // Nil returns true if a's type is unknown.
func Nil(a Type) bool { - switch a := a.(type) { + switch a := unwrap(a).(type) { case nil: return true case *Function: @@ -969,7 +1048,7 @@ func typeSliceCompare(a, b []Type) int { } func typeOrder(x Type) int { - switch x.(type) { + switch unwrap(x).(type) { case Null: return 0 case Boolean: diff --git a/constraint/vendor/github.com/open-policy-agent/opa/util/json.go b/constraint/vendor/github.com/open-policy-agent/opa/util/json.go index cfc94e064..283c49697 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/util/json.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/util/json.go @@ -105,6 +105,9 @@ func Reference(x interface{}) *interface{} { // Unmarshal decodes a YAML or JSON value into the specified type. func Unmarshal(bs []byte, v interface{}) error { + if json.Valid(bs) { + return UnmarshalJSON(bs, v) + } bs, err := yaml.YAMLToJSON(bs) if err != nil { return err diff --git a/constraint/vendor/github.com/open-policy-agent/opa/version/version.go b/constraint/vendor/github.com/open-policy-agent/opa/version/version.go index 4ec44a279..9d6ad92d9 100644 --- a/constraint/vendor/github.com/open-policy-agent/opa/version/version.go +++ b/constraint/vendor/github.com/open-policy-agent/opa/version/version.go @@ -10,7 +10,7 @@ import ( ) // Version is the canonical version of OPA. -var Version = "0.40.0" +var Version = "0.41.0" // GoVersion is the version of Go this was built with var GoVersion = runtime.Version() diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/.gitignore b/constraint/vendor/github.com/vektah/gqlparser/v2/.gitignore new file mode 100644 index 000000000..877392a76 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/.gitignore @@ -0,0 +1,5 @@ +/vendor +/validator/imported/node_modules +/validator/imported/graphql-js + +.idea/ diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/LICENSE b/constraint/vendor/github.com/vektah/gqlparser/v2/LICENSE new file mode 100644 index 000000000..1221b9d38 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2018 Adam Scarr + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/argmap.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/argmap.go new file mode 100644 index 000000000..43f6a3d6f --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/argmap.go @@ -0,0 +1,37 @@ +package ast + +func arg2map(defs ArgumentDefinitionList, args ArgumentList, vars map[string]interface{}) map[string]interface{} { + result := map[string]interface{}{} + var err error + + for _, argDef := range defs { + var val interface{} + var hasValue bool + + if argValue := args.ForName(argDef.Name); argValue != nil { + if argValue.Value.Kind == Variable { + val, hasValue = vars[argValue.Value.Raw] + } else { + val, err = argValue.Value.Value(vars) + if err != nil { + panic(err) + } + hasValue = true + } + } + + if !hasValue && argDef.DefaultValue != nil { + val, err = argDef.DefaultValue.Value(vars) + if err != nil { + panic(err) + } + hasValue = true + } + + if hasValue { + result[argDef.Name] = val + } + } + + return result +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/collections.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/collections.go new file mode 100644 index 000000000..94b800ee2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/collections.go @@ -0,0 +1,148 @@ +package ast + +type FieldList []*FieldDefinition + +func (l FieldList) ForName(name string) *FieldDefinition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type EnumValueList []*EnumValueDefinition + +func (l EnumValueList) ForName(name string) *EnumValueDefinition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type DirectiveList []*Directive + +func (l DirectiveList) ForName(name string) *Directive { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +func (l DirectiveList) ForNames(name string) []*Directive { + resp := []*Directive{} + for _, it := range l { + if it.Name == name { + resp = append(resp, it) + } + } + return resp +} + +type OperationList []*OperationDefinition + +func (l OperationList) ForName(name string) *OperationDefinition { + if name == "" && len(l) == 1 { + return l[0] + } + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type FragmentDefinitionList []*FragmentDefinition + +func (l FragmentDefinitionList) ForName(name string) *FragmentDefinition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type VariableDefinitionList []*VariableDefinition + +func (l VariableDefinitionList) ForName(name string) *VariableDefinition { + for _, it := range l { + if it.Variable == name { + return it + } + } + return nil +} + +type ArgumentList []*Argument + +func (l ArgumentList) ForName(name string) *Argument { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type ArgumentDefinitionList []*ArgumentDefinition + +func (l ArgumentDefinitionList) ForName(name string) *ArgumentDefinition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type SchemaDefinitionList []*SchemaDefinition + +type DirectiveDefinitionList []*DirectiveDefinition + +func (l DirectiveDefinitionList) ForName(name string) *DirectiveDefinition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type DefinitionList []*Definition + +func (l DefinitionList) ForName(name string) 
*Definition { + for _, it := range l { + if it.Name == name { + return it + } + } + return nil +} + +type OperationTypeDefinitionList []*OperationTypeDefinition + +func (l OperationTypeDefinitionList) ForType(name string) *OperationTypeDefinition { + for _, it := range l { + if it.Type == name { + return it + } + } + return nil +} + +type ChildValueList []*ChildValue + +func (v ChildValueList) ForName(name string) *Value { + for _, f := range v { + if f.Name == name { + return f.Value + } + } + return nil +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/definition.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/definition.go new file mode 100644 index 000000000..d20390816 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/definition.go @@ -0,0 +1,94 @@ +package ast + +type DefinitionKind string + +const ( + Scalar DefinitionKind = "SCALAR" + Object DefinitionKind = "OBJECT" + Interface DefinitionKind = "INTERFACE" + Union DefinitionKind = "UNION" + Enum DefinitionKind = "ENUM" + InputObject DefinitionKind = "INPUT_OBJECT" +) + +// Definition is the core type definition object, it includes all of the definable types +// but does *not* cover schema or directives. +// +// @vektah: Javascript implementation has different types for all of these, but they are +// more similar than different and don't define any behaviour. I think this style of +// "some hot" struct works better, at least for go. +// +// Type extensions are also represented by this same struct. +type Definition struct { + Kind DefinitionKind + Description string + Name string + Directives DirectiveList + Interfaces []string // object and input object + Fields FieldList // object and input object + Types []string // union + EnumValues EnumValueList // enum + + Position *Position `dump:"-"` + BuiltIn bool `dump:"-"` +} + +func (d *Definition) IsLeafType() bool { + return d.Kind == Enum || d.Kind == Scalar +} + +func (d *Definition) IsAbstractType() bool { + return d.Kind == Interface || d.Kind == Union +} + +func (d *Definition) IsCompositeType() bool { + return d.Kind == Object || d.Kind == Interface || d.Kind == Union +} + +func (d *Definition) IsInputType() bool { + return d.Kind == Scalar || d.Kind == Enum || d.Kind == InputObject +} + +func (d *Definition) OneOf(types ...string) bool { + for _, t := range types { + if d.Name == t { + return true + } + } + return false +} + +type FieldDefinition struct { + Description string + Name string + Arguments ArgumentDefinitionList // only for objects + DefaultValue *Value // only for input objects + Type *Type + Directives DirectiveList + Position *Position `dump:"-"` +} + +type ArgumentDefinition struct { + Description string + Name string + DefaultValue *Value + Type *Type + Directives DirectiveList + Position *Position `dump:"-"` +} + +type EnumValueDefinition struct { + Description string + Name string + Directives DirectiveList + Position *Position `dump:"-"` +} + +type DirectiveDefinition struct { + Description string + Name string + Arguments ArgumentDefinitionList + Locations []DirectiveLocation + IsRepeatable bool + Position *Position `dump:"-"` +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/directive.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/directive.go new file mode 100644 index 000000000..5f6e8531f --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/directive.go @@ -0,0 +1,43 @@ +package ast + +type DirectiveLocation string + +const ( + // Executable + 
LocationQuery DirectiveLocation = `QUERY` + LocationMutation DirectiveLocation = `MUTATION` + LocationSubscription DirectiveLocation = `SUBSCRIPTION` + LocationField DirectiveLocation = `FIELD` + LocationFragmentDefinition DirectiveLocation = `FRAGMENT_DEFINITION` + LocationFragmentSpread DirectiveLocation = `FRAGMENT_SPREAD` + LocationInlineFragment DirectiveLocation = `INLINE_FRAGMENT` + + // Type System + LocationSchema DirectiveLocation = `SCHEMA` + LocationScalar DirectiveLocation = `SCALAR` + LocationObject DirectiveLocation = `OBJECT` + LocationFieldDefinition DirectiveLocation = `FIELD_DEFINITION` + LocationArgumentDefinition DirectiveLocation = `ARGUMENT_DEFINITION` + LocationInterface DirectiveLocation = `INTERFACE` + LocationUnion DirectiveLocation = `UNION` + LocationEnum DirectiveLocation = `ENUM` + LocationEnumValue DirectiveLocation = `ENUM_VALUE` + LocationInputObject DirectiveLocation = `INPUT_OBJECT` + LocationInputFieldDefinition DirectiveLocation = `INPUT_FIELD_DEFINITION` + LocationVariableDefinition DirectiveLocation = `VARIABLE_DEFINITION` +) + +type Directive struct { + Name string + Arguments ArgumentList + Position *Position `dump:"-"` + + // Requires validation + ParentDefinition *Definition + Definition *DirectiveDefinition + Location DirectiveLocation +} + +func (d *Directive) ArgumentMap(vars map[string]interface{}) map[string]interface{} { + return arg2map(d.Definition.Arguments, d.Arguments, vars) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/document.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/document.go new file mode 100644 index 000000000..43bfb54ff --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/document.go @@ -0,0 +1,79 @@ +package ast + +type QueryDocument struct { + Operations OperationList + Fragments FragmentDefinitionList + Position *Position `dump:"-"` +} + +type SchemaDocument struct { + Schema SchemaDefinitionList + SchemaExtension SchemaDefinitionList + Directives DirectiveDefinitionList + Definitions DefinitionList + Extensions DefinitionList + Position *Position `dump:"-"` +} + +func (d *SchemaDocument) Merge(other *SchemaDocument) { + d.Schema = append(d.Schema, other.Schema...) + d.SchemaExtension = append(d.SchemaExtension, other.SchemaExtension...) + d.Directives = append(d.Directives, other.Directives...) + d.Definitions = append(d.Definitions, other.Definitions...) + d.Extensions = append(d.Extensions, other.Extensions...) 
+} + +type Schema struct { + Query *Definition + Mutation *Definition + Subscription *Definition + + Types map[string]*Definition + Directives map[string]*DirectiveDefinition + + PossibleTypes map[string][]*Definition + Implements map[string][]*Definition + + Description string +} + +// AddTypes is the helper to add types definition to the schema +func (s *Schema) AddTypes(defs ...*Definition) { + if s.Types == nil { + s.Types = make(map[string]*Definition) + } + for _, def := range defs { + s.Types[def.Name] = def + } +} + +func (s *Schema) AddPossibleType(name string, def *Definition) { + s.PossibleTypes[name] = append(s.PossibleTypes[name], def) +} + +// GetPossibleTypes will enumerate all the definitions for a given interface or union +func (s *Schema) GetPossibleTypes(def *Definition) []*Definition { + return s.PossibleTypes[def.Name] +} + +func (s *Schema) AddImplements(name string, iface *Definition) { + s.Implements[name] = append(s.Implements[name], iface) +} + +// GetImplements returns all the interface and union definitions that the given definition satisfies +func (s *Schema) GetImplements(def *Definition) []*Definition { + return s.Implements[def.Name] +} + +type SchemaDefinition struct { + Description string + Directives DirectiveList + OperationTypes OperationTypeDefinitionList + Position *Position `dump:"-"` +} + +type OperationTypeDefinition struct { + Operation Operation + Type string + Position *Position `dump:"-"` +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go new file mode 100644 index 000000000..dbb7a7efa --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go @@ -0,0 +1,159 @@ +package ast + +import ( + "bytes" + "fmt" + "reflect" + "strconv" + "strings" +) + +// Dump turns ast into a stable string format for assertions in tests +func Dump(i interface{}) string { + v := reflect.ValueOf(i) + + d := dumper{Buffer: &bytes.Buffer{}} + d.dump(v) + + return d.String() +} + +type dumper struct { + *bytes.Buffer + indent int +} + +type Dumpable interface { + Dump() string +} + +func (d *dumper) dump(v reflect.Value) { + if dumpable, isDumpable := v.Interface().(Dumpable); isDumpable { + d.WriteString(dumpable.Dump()) + return + } + switch v.Kind() { + case reflect.Bool: + if v.Bool() { + d.WriteString("true") + } else { + d.WriteString("false") + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + d.WriteString(fmt.Sprintf("%d", v.Int())) + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + d.WriteString(fmt.Sprintf("%d", v.Uint())) + + case reflect.Float32, reflect.Float64: + d.WriteString(fmt.Sprintf("%.2f", v.Float())) + + case reflect.String: + if v.Type().Name() != "string" { + d.WriteString(v.Type().Name() + "(" + strconv.Quote(v.String()) + ")") + } else { + d.WriteString(strconv.Quote(v.String())) + } + + case reflect.Array, reflect.Slice: + d.dumpArray(v) + + case reflect.Interface, reflect.Ptr: + d.dumpPtr(v) + + case reflect.Struct: + d.dumpStruct(v) + + default: + panic(fmt.Errorf("unsupported kind: %s\n buf: %s", v.Kind().String(), d.String())) + } +} + +func (d *dumper) writeIndent() { + d.Buffer.WriteString(strings.Repeat(" ", d.indent)) +} + +func (d *dumper) nl() { + d.Buffer.WriteByte('\n') + d.writeIndent() +} + +func typeName(t reflect.Type) string { + if t.Kind() == reflect.Ptr { + return typeName(t.Elem()) + } + return t.Name() +} + +func (d *dumper) dumpArray(v 
reflect.Value) { + d.WriteString("[" + typeName(v.Type().Elem()) + "]") + + for i := 0; i < v.Len(); i++ { + d.nl() + d.WriteString("- ") + d.indent++ + d.dump(v.Index(i)) + d.indent-- + } +} + +func (d *dumper) dumpStruct(v reflect.Value) { + d.WriteString("<" + v.Type().Name() + ">") + d.indent++ + + typ := v.Type() + for i := 0; i < v.NumField(); i++ { + f := v.Field(i) + if typ.Field(i).Tag.Get("dump") == "-" { + continue + } + + if isZero(f) { + continue + } + d.nl() + d.WriteString(typ.Field(i).Name) + d.WriteString(": ") + d.dump(v.Field(i)) + } + + d.indent-- +} + +func isZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Ptr, reflect.Interface: + return v.IsNil() + case reflect.Func, reflect.Map: + return v.IsNil() + + case reflect.Array, reflect.Slice: + if v.IsNil() { + return true + } + z := true + for i := 0; i < v.Len(); i++ { + z = z && isZero(v.Index(i)) + } + return z + case reflect.Struct: + z := true + for i := 0; i < v.NumField(); i++ { + z = z && isZero(v.Field(i)) + } + return z + case reflect.String: + return v.String() == "" + } + + // Compare other types directly: + return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type())) +} + +func (d *dumper) dumpPtr(v reflect.Value) { + if v.IsNil() { + d.WriteString("nil") + return + } + d.dump(v.Elem()) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go new file mode 100644 index 000000000..57ab56c7c --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go @@ -0,0 +1,38 @@ +package ast + +type FragmentSpread struct { + Name string + Directives DirectiveList + + // Require validation + ObjectDefinition *Definition + Definition *FragmentDefinition + + Position *Position `dump:"-"` +} + +type InlineFragment struct { + TypeCondition string + Directives DirectiveList + SelectionSet SelectionSet + + // Require validation + ObjectDefinition *Definition + + Position *Position `dump:"-"` +} + +type FragmentDefinition struct { + Name string + // Note: fragment variable definitions are experimental and may be changed + // or removed in the future. 
+ VariableDefinition VariableDefinitionList + TypeCondition string + Directives DirectiveList + SelectionSet SelectionSet + + // Require validation + Definition *Definition + + Position *Position `dump:"-"` +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/operation.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/operation.go new file mode 100644 index 000000000..3b37f81bf --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/operation.go @@ -0,0 +1,30 @@ +package ast + +type Operation string + +const ( + Query Operation = "query" + Mutation Operation = "mutation" + Subscription Operation = "subscription" +) + +type OperationDefinition struct { + Operation Operation + Name string + VariableDefinitions VariableDefinitionList + Directives DirectiveList + SelectionSet SelectionSet + Position *Position `dump:"-"` +} + +type VariableDefinition struct { + Variable string + Type *Type + DefaultValue *Value + Directives DirectiveList + Position *Position `dump:"-"` + + // Requires validation + Definition *Definition + Used bool `dump:"-"` +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/path.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/path.go new file mode 100644 index 000000000..9af168438 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/path.go @@ -0,0 +1,67 @@ +package ast + +import ( + "bytes" + "encoding/json" + "fmt" +) + +var _ json.Unmarshaler = (*Path)(nil) + +type Path []PathElement + +type PathElement interface { + isPathElement() +} + +var _ PathElement = PathIndex(0) +var _ PathElement = PathName("") + +func (path Path) String() string { + var str bytes.Buffer + for i, v := range path { + switch v := v.(type) { + case PathIndex: + str.WriteString(fmt.Sprintf("[%d]", v)) + case PathName: + if i != 0 { + str.WriteByte('.') + } + str.WriteString(string(v)) + default: + panic(fmt.Sprintf("unknown type: %T", v)) + } + } + return str.String() +} + +func (path *Path) UnmarshalJSON(b []byte) error { + var vs []interface{} + err := json.Unmarshal(b, &vs) + if err != nil { + return err + } + + *path = make([]PathElement, 0, len(vs)) + for _, v := range vs { + switch v := v.(type) { + case string: + *path = append(*path, PathName(v)) + case int: + *path = append(*path, PathIndex(v)) + case float64: + *path = append(*path, PathIndex(int(v))) + default: + return fmt.Errorf("unknown path element type: %T", v) + } + } + return nil +} + +type PathIndex int + +func (_ PathIndex) isPathElement() {} + +type PathName string + +func (_ PathName) isPathElement() {} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/selection.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/selection.go new file mode 100644 index 000000000..159db8447 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/selection.go @@ -0,0 +1,39 @@ +package ast + +type SelectionSet []Selection + +type Selection interface { + isSelection() + GetPosition() *Position +} + +func (*Field) isSelection() {} +func (*FragmentSpread) isSelection() {} +func (*InlineFragment) isSelection() {} + +func (s *Field) GetPosition() *Position { return s.Position } +func (s *FragmentSpread) GetPosition() *Position { return s.Position } +func (s *InlineFragment) GetPosition() *Position { return s.Position } + +type Field struct { + Alias string + Name string + Arguments ArgumentList + Directives DirectiveList + SelectionSet SelectionSet + Position *Position `dump:"-"` + + // Require validation + Definition 
*FieldDefinition + ObjectDefinition *Definition +} + +type Argument struct { + Name string + Value *Value + Position *Position `dump:"-"` +} + +func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} { + return arg2map(f.Definition.Arguments, f.Arguments, vars) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/source.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/source.go new file mode 100644 index 000000000..2949f83f7 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/source.go @@ -0,0 +1,19 @@ +package ast + +// Source covers a single *.graphql file +type Source struct { + // Name is the filename of the source + Name string + // Input is the actual contents of the source file + Input string + // BuiltIn indicate whether the source is a part of the specification + BuiltIn bool +} + +type Position struct { + Start int // The starting position, in runes, of this token in the input. + End int // The end position, in runes, of this token in the input. + Line int // The line number at the start of this item. + Column int // The column number at the start of this item. + Src *Source // The source document this token belongs to +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/type.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/type.go new file mode 100644 index 000000000..9577fdb48 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/type.go @@ -0,0 +1,68 @@ +package ast + +func NonNullNamedType(named string, pos *Position) *Type { + return &Type{NamedType: named, NonNull: true, Position: pos} +} + +func NamedType(named string, pos *Position) *Type { + return &Type{NamedType: named, NonNull: false, Position: pos} +} + +func NonNullListType(elem *Type, pos *Position) *Type { + return &Type{Elem: elem, NonNull: true, Position: pos} +} + +func ListType(elem *Type, pos *Position) *Type { + return &Type{Elem: elem, NonNull: false, Position: pos} +} + +type Type struct { + NamedType string + Elem *Type + NonNull bool + Position *Position `dump:"-"` +} + +func (t *Type) Name() string { + if t.NamedType != "" { + return t.NamedType + } + + return t.Elem.Name() +} + +func (t *Type) String() string { + nn := "" + if t.NonNull { + nn = "!" 
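+		// The "!" suffix marks a non-null type, e.g. String! or [Arr!]!, as
+		// exercised by the "common types" case in query_test.yml.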
+ } + if t.NamedType != "" { + return t.NamedType + nn + } + + return "[" + t.Elem.String() + "]" + nn +} + +func (t *Type) IsCompatible(other *Type) bool { + if t.NamedType != other.NamedType { + return false + } + + if t.Elem != nil && other.Elem == nil { + return false + } + + if t.Elem != nil && !t.Elem.IsCompatible(other.Elem) { + return false + } + + if other.NonNull { + return t.NonNull + } + + return true +} + +func (v *Type) Dump() string { + return v.String() +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/ast/value.go b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/value.go new file mode 100644 index 000000000..c25ef1505 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/ast/value.go @@ -0,0 +1,120 @@ +package ast + +import ( + "fmt" + "strconv" + "strings" +) + +type ValueKind int + +const ( + Variable ValueKind = iota + IntValue + FloatValue + StringValue + BlockValue + BooleanValue + NullValue + EnumValue + ListValue + ObjectValue +) + +type Value struct { + Raw string + Children ChildValueList + Kind ValueKind + Position *Position `dump:"-"` + + // Require validation + Definition *Definition + VariableDefinition *VariableDefinition + ExpectedType *Type +} + +type ChildValue struct { + Name string + Value *Value + Position *Position `dump:"-"` +} + +func (v *Value) Value(vars map[string]interface{}) (interface{}, error) { + if v == nil { + return nil, nil + } + switch v.Kind { + case Variable: + if value, ok := vars[v.Raw]; ok { + return value, nil + } + if v.VariableDefinition != nil && v.VariableDefinition.DefaultValue != nil { + return v.VariableDefinition.DefaultValue.Value(vars) + } + return nil, nil + case IntValue: + return strconv.ParseInt(v.Raw, 10, 64) + case FloatValue: + return strconv.ParseFloat(v.Raw, 64) + case StringValue, BlockValue, EnumValue: + return v.Raw, nil + case BooleanValue: + return strconv.ParseBool(v.Raw) + case NullValue: + return nil, nil + case ListValue: + var val []interface{} + for _, elem := range v.Children { + elemVal, err := elem.Value.Value(vars) + if err != nil { + return val, err + } + val = append(val, elemVal) + } + return val, nil + case ObjectValue: + val := map[string]interface{}{} + for _, elem := range v.Children { + elemVal, err := elem.Value.Value(vars) + if err != nil { + return val, err + } + val[elem.Name] = elemVal + } + return val, nil + default: + panic(fmt.Errorf("unknown value kind %d", v.Kind)) + } +} + +func (v *Value) String() string { + if v == nil { + return "" + } + switch v.Kind { + case Variable: + return "$" + v.Raw + case IntValue, FloatValue, EnumValue, BooleanValue, NullValue: + return v.Raw + case StringValue, BlockValue: + return strconv.Quote(v.Raw) + case ListValue: + var val []string + for _, elem := range v.Children { + val = append(val, elem.Value.String()) + } + return "[" + strings.Join(val, ",") + "]" + case ObjectValue: + var val []string + for _, elem := range v.Children { + val = append(val, elem.Name+":"+elem.Value.String()) + } + return "{" + strings.Join(val, ",") + "}" + default: + panic(fmt.Errorf("unknown value kind %d", v.Kind)) + } +} + +func (v *Value) Dump() string { + return v.String() +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go b/constraint/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go new file mode 100644 index 000000000..8145061a2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go @@ -0,0 +1,145 @@ +package gqlerror + +import ( + "bytes" + "errors" + 
"fmt" + "strconv" + + "github.com/vektah/gqlparser/v2/ast" +) + +// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors +type Error struct { + err error `json:"-"` + Message string `json:"message"` + Path ast.Path `json:"path,omitempty"` + Locations []Location `json:"locations,omitempty"` + Extensions map[string]interface{} `json:"extensions,omitempty"` + Rule string `json:"-"` +} + +func (err *Error) SetFile(file string) { + if file == "" { + return + } + if err.Extensions == nil { + err.Extensions = map[string]interface{}{} + } + + err.Extensions["file"] = file +} + +type Location struct { + Line int `json:"line,omitempty"` + Column int `json:"column,omitempty"` +} + +type List []*Error + +func (err *Error) Error() string { + var res bytes.Buffer + if err == nil { + return "" + } + filename, _ := err.Extensions["file"].(string) + if filename == "" { + filename = "input" + } + res.WriteString(filename) + + if len(err.Locations) > 0 { + res.WriteByte(':') + res.WriteString(strconv.Itoa(err.Locations[0].Line)) + } + + res.WriteString(": ") + if ps := err.pathString(); ps != "" { + res.WriteString(ps) + res.WriteByte(' ') + } + + res.WriteString(err.Message) + + return res.String() +} + +func (err Error) pathString() string { + return err.Path.String() +} + +func (err Error) Unwrap() error { + return err.err +} + +func (errs List) Error() string { + var buf bytes.Buffer + for _, err := range errs { + buf.WriteString(err.Error()) + buf.WriteByte('\n') + } + return buf.String() +} + +func (errs List) Is(target error) bool { + for _, err := range errs { + if errors.Is(err, target) { + return true + } + } + return false +} + +func (errs List) As(target interface{}) bool { + for _, err := range errs { + if errors.As(err, target) { + return true + } + } + return false +} + +func WrapPath(path ast.Path, err error) *Error { + return &Error{ + err: err, + Message: err.Error(), + Path: path, + } +} + +func Errorf(message string, args ...interface{}) *Error { + return &Error{ + Message: fmt.Sprintf(message, args...), + } +} + +func ErrorPathf(path ast.Path, message string, args ...interface{}) *Error { + return &Error{ + Message: fmt.Sprintf(message, args...), + Path: path, + } +} + +func ErrorPosf(pos *ast.Position, message string, args ...interface{}) *Error { + return ErrorLocf( + pos.Src.Name, + pos.Line, + pos.Column, + message, + args..., + ) +} + +func ErrorLocf(file string, line int, col int, message string, args ...interface{}) *Error { + var extensions map[string]interface{} + if file != "" { + extensions = map[string]interface{}{"file": file} + } + return &Error{ + Message: fmt.Sprintf(message, args...), + Extensions: extensions, + Locations: []Location{ + {Line: line, Column: col}, + }, + } +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/gqlparser.go b/constraint/vendor/github.com/vektah/gqlparser/v2/gqlparser.go new file mode 100644 index 000000000..ace63e14a --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/gqlparser.go @@ -0,0 +1,42 @@ +package gqlparser + +import ( + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/parser" + "github.com/vektah/gqlparser/v2/validator" + _ "github.com/vektah/gqlparser/v2/validator/rules" +) + +func LoadSchema(str ...*ast.Source) (*ast.Schema, *gqlerror.Error) { + return validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...) 
+} + +func MustLoadSchema(str ...*ast.Source) *ast.Schema { + s, err := validator.LoadSchema(append([]*ast.Source{validator.Prelude}, str...)...) + if err != nil { + panic(err) + } + return s +} + +func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) { + query, err := parser.ParseQuery(&ast.Source{Input: str}) + if err != nil { + return nil, gqlerror.List{err} + } + errs := validator.Validate(schema, query) + if errs != nil { + return nil, errs + } + + return query, nil +} + +func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument { + q, err := LoadQuery(schema, str) + if err != nil { + panic(err) + } + return q +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go new file mode 100644 index 000000000..4065a610a --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go @@ -0,0 +1,58 @@ +package lexer + +import ( + "math" + "strings" +) + +// blockStringValue produces the value of a block string from its parsed raw value, similar to +// Coffeescript's block string, Python's docstring trim or Ruby's strip_heredoc. +// +// This implements the GraphQL spec's BlockStringValue() static algorithm. +func blockStringValue(raw string) string { + lines := strings.Split(raw, "\n") + + commonIndent := math.MaxInt32 + for _, line := range lines { + indent := leadingWhitespace(line) + if indent < len(line) && indent < commonIndent { + commonIndent = indent + if commonIndent == 0 { + break + } + } + } + + if commonIndent != math.MaxInt32 && len(lines) > 0 { + for i := 1; i < len(lines); i++ { + if len(lines[i]) < commonIndent { + lines[i] = "" + } else { + lines[i] = lines[i][commonIndent:] + } + } + } + + start := 0 + end := len(lines) + + for start < end && leadingWhitespace(lines[start]) == math.MaxInt32 { + start++ + } + + for start < end && leadingWhitespace(lines[end-1]) == math.MaxInt32 { + end-- + } + + return strings.Join(lines[start:end], "\n") +} + +func leadingWhitespace(str string) int { + for i, r := range str { + if r != ' ' && r != '\t' { + return i + } + } + // this line is made up entirely of whitespace, its leading whitespace doesnt count. 
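+	// e.g. leadingWhitespace("  foo") == 2 and leadingWhitespace("foo") == 0,
+	// while a line of only spaces and tabs yields math.MaxInt32 below.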
+ return math.MaxInt32 +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go new file mode 100644 index 000000000..720dd5b48 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go @@ -0,0 +1,510 @@ +package lexer + +import ( + "bytes" + "unicode/utf8" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +// Lexer turns graphql request and schema strings into tokens +type Lexer struct { + *ast.Source + // An offset into the string in bytes + start int + // An offset into the string in runes + startRunes int + // An offset into the string in bytes + end int + // An offset into the string in runes + endRunes int + // the current line number + line int + // An offset into the string in rune + lineStartRunes int +} + +func New(src *ast.Source) Lexer { + return Lexer{ + Source: src, + line: 1, + } +} + +// take one rune from input and advance end +func (s *Lexer) peek() (rune, int) { + return utf8.DecodeRuneInString(s.Input[s.end:]) +} + +func (s *Lexer) makeToken(kind Type) (Token, *gqlerror.Error) { + return s.makeValueToken(kind, s.Input[s.start:s.end]) +} + +func (s *Lexer) makeValueToken(kind Type, value string) (Token, *gqlerror.Error) { + return Token{ + Kind: kind, + Value: value, + Pos: ast.Position{ + Start: s.startRunes, + End: s.endRunes, + Line: s.line, + Column: s.startRunes - s.lineStartRunes + 1, + Src: s.Source, + }, + }, nil +} + +func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) { + column := s.endRunes - s.lineStartRunes + 1 + return Token{ + Kind: Invalid, + Pos: ast.Position{ + Start: s.startRunes, + End: s.endRunes, + Line: s.line, + Column: column, + Src: s.Source, + }, + }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...) +} + +// ReadToken gets the next token from the source starting at the given position. +// +// This skips over whitespace and comments until it finds the next lexable +// token, then lexes punctuators immediately or calls the appropriate helper +// function for more complicated tokens. +func (s *Lexer) ReadToken() (token Token, err *gqlerror.Error) { + + s.ws() + s.start = s.end + s.startRunes = s.endRunes + + if s.end >= len(s.Input) { + return s.makeToken(EOF) + } + r := s.Input[s.start] + s.end++ + s.endRunes++ + switch r { + case '!': + return s.makeValueToken(Bang, "") + + case '$': + return s.makeValueToken(Dollar, "") + case '&': + return s.makeValueToken(Amp, "") + case '(': + return s.makeValueToken(ParenL, "") + case ')': + return s.makeValueToken(ParenR, "") + case '.': + if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == "..." 
{ + s.end += 2 + s.endRunes += 2 + return s.makeValueToken(Spread, "") + } + case ':': + return s.makeValueToken(Colon, "") + case '=': + return s.makeValueToken(Equals, "") + case '@': + return s.makeValueToken(At, "") + case '[': + return s.makeValueToken(BracketL, "") + case ']': + return s.makeValueToken(BracketR, "") + case '{': + return s.makeValueToken(BraceL, "") + case '}': + return s.makeValueToken(BraceR, "") + case '|': + return s.makeValueToken(Pipe, "") + case '#': + s.readComment() + return s.ReadToken() + + case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': + return s.readName() + + case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return s.readNumber() + + case '"': + if len(s.Input) > s.start+2 && s.Input[s.start:s.start+3] == `"""` { + return s.readBlockString() + } + + return s.readString() + } + + s.end-- + s.endRunes-- + + if r < 0x0020 && r != 0x0009 && r != 0x000a && r != 0x000d { + return s.makeError(`Cannot contain the invalid character "\u%04d"`, r) + } + + if r == '\'' { + return s.makeError(`Unexpected single quote character ('), did you mean to use a double quote (")?`) + } + + return s.makeError(`Cannot parse the unexpected character "%s".`, string(r)) +} + +// ws reads from body starting at startPosition until it finds a non-whitespace +// or commented character, and updates the token end to include all whitespace +func (s *Lexer) ws() { + for s.end < len(s.Input) { + switch s.Input[s.end] { + case '\t', ' ', ',': + s.end++ + s.endRunes++ + case '\n': + s.end++ + s.endRunes++ + s.line++ + s.lineStartRunes = s.endRunes + case '\r': + s.end++ + s.endRunes++ + s.line++ + s.lineStartRunes = s.endRunes + // skip the following newline if its there + if s.end < len(s.Input) && s.Input[s.end] == '\n' { + s.end++ + s.endRunes++ + } + // byte order mark, given ws is hot path we aren't relying on the unicode package here. + case 0xef: + if s.end+2 < len(s.Input) && s.Input[s.end+1] == 0xBB && s.Input[s.end+2] == 0xBF { + s.end += 3 + s.endRunes++ + } else { + return + } + default: + return + } + } +} + +// readComment from the input +// +// #[\u0009\u0020-\uFFFF]* +func (s *Lexer) readComment() (Token, *gqlerror.Error) { + for s.end < len(s.Input) { + r, w := s.peek() + + // SourceCharacter but not LineTerminator + if r > 0x001f || r == '\t' { + s.end += w + s.endRunes++ + } else { + break + } + } + + return s.makeToken(Comment) +} + +// readNumber from the input, either a float +// or an int depending on whether a decimal point appears. +// +// Int: -?(0|[1-9][0-9]*) +// Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)? 
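+// For example "0", "-4" and "9" lex as Int while "4.123", "123e+4" and
+// "-1.123e4567" lex as Float; "00", "1." and "1.0e" are rejected, matching
+// the cases exercised in lexer_test.yml below.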
+func (s *Lexer) readNumber() (Token, *gqlerror.Error) {
+	float := false
+
+	// backup to the first digit
+	s.end--
+	s.endRunes--
+
+	s.acceptByte('-')
+
+	if s.acceptByte('0') {
+		if consumed := s.acceptDigits(); consumed != 0 {
+			s.end -= consumed
+			s.endRunes -= consumed
+			return s.makeError("Invalid number, unexpected digit after 0: %s.", s.describeNext())
+		}
+	} else {
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if s.acceptByte('.') {
+		float = true
+
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if s.acceptByte('e', 'E') {
+		float = true
+
+		s.acceptByte('-', '+')
+
+		if consumed := s.acceptDigits(); consumed == 0 {
+			return s.makeError("Invalid number, expected digit but got: %s.", s.describeNext())
+		}
+	}
+
+	if float {
+		return s.makeToken(Float)
+	} else {
+		return s.makeToken(Int)
+	}
+}
+
+// acceptByte if it matches any of given bytes, returning true if it found anything
+func (s *Lexer) acceptByte(bytes ...uint8) bool {
+	if s.end >= len(s.Input) {
+		return false
+	}
+
+	for _, accepted := range bytes {
+		if s.Input[s.end] == accepted {
+			s.end++
+			s.endRunes++
+			return true
+		}
+	}
+	return false
+}
+
+// acceptDigits from the input, returning the number of digits it found
+func (s *Lexer) acceptDigits() int {
+	consumed := 0
+	for s.end < len(s.Input) && s.Input[s.end] >= '0' && s.Input[s.end] <= '9' {
+		s.end++
+		s.endRunes++
+		consumed++
+	}
+
+	return consumed
+}
+
+// describeNext peeks at the input and returns a human readable string. This will alloc
+// and should only be used in errors
+func (s *Lexer) describeNext() string {
+	if s.end < len(s.Input) {
+		return `"` + string(s.Input[s.end]) + `"`
+	}
+	return "<EOF>"
+}
+
+// readString from the input
+//
+// "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
+func (s *Lexer) readString() (Token, *gqlerror.Error) {
+	inputLen := len(s.Input)
+
+	// this buffer is lazily created only if there are escape characters.
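+	// While buf stays nil the token value is sliced directly from s.Input in
+	// makeToken, so strings without escapes never allocate a copy.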
+ var buf *bytes.Buffer + + // skip the opening quote + s.start++ + s.startRunes++ + + for s.end < inputLen { + r := s.Input[s.end] + if r == '\n' || r == '\r' { + break + } + if r < 0x0020 && r != '\t' { + return s.makeError(`Invalid character within String: "\u%04d".`, r) + } + switch r { + default: + var char = rune(r) + var w = 1 + + // skip unicode overhead if we are in the ascii range + if r >= 127 { + char, w = utf8.DecodeRuneInString(s.Input[s.end:]) + } + s.end += w + s.endRunes++ + + if buf != nil { + buf.WriteRune(char) + } + + case '"': + t, err := s.makeToken(String) + // the token should not include the quotes in its value, but should cover them in its position + t.Pos.Start-- + t.Pos.End++ + + if buf != nil { + t.Value = buf.String() + } + + // skip the close quote + s.end++ + s.endRunes++ + + return t, err + + case '\\': + if s.end+1 >= inputLen { + s.end++ + s.endRunes++ + return s.makeError(`Invalid character escape sequence.`) + } + + if buf == nil { + buf = bytes.NewBufferString(s.Input[s.start:s.end]) + } + + escape := s.Input[s.end+1] + + if escape == 'u' { + if s.end+6 >= inputLen { + s.end++ + s.endRunes++ + return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:]) + } + + r, ok := unhex(s.Input[s.end+2 : s.end+6]) + if !ok { + s.end++ + s.endRunes++ + return s.makeError("Invalid character escape sequence: \\%s.", s.Input[s.end:s.end+5]) + } + buf.WriteRune(r) + s.end += 6 + s.endRunes += 6 + } else { + switch escape { + case '"', '/', '\\': + buf.WriteByte(escape) + case 'b': + buf.WriteByte('\b') + case 'f': + buf.WriteByte('\f') + case 'n': + buf.WriteByte('\n') + case 'r': + buf.WriteByte('\r') + case 't': + buf.WriteByte('\t') + default: + s.end += 1 + s.endRunes += 1 + return s.makeError("Invalid character escape sequence: \\%s.", string(escape)) + } + s.end += 2 + s.endRunes += 2 + } + } + } + + return s.makeError("Unterminated string.") +} + +// readBlockString from the input +// +// """("?"?(\\"""|\\(?!=""")|[^"\\]))*""" +func (s *Lexer) readBlockString() (Token, *gqlerror.Error) { + inputLen := len(s.Input) + + var buf bytes.Buffer + + // skip the opening quote + s.start += 3 + s.startRunes += 3 + s.end += 2 + s.endRunes += 2 + + for s.end < inputLen { + r := s.Input[s.end] + + // Closing triple quote (""") + if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` { + t, err := s.makeValueToken(BlockString, blockStringValue(buf.String())) + + // the token should not include the quotes in its value, but should cover them in its position + t.Pos.Start -= 3 + t.Pos.End += 3 + + // skip the close quote + s.end += 3 + s.endRunes += 3 + + return t, err + } + + // SourceCharacter + if r < 0x0020 && r != '\t' && r != '\n' && r != '\r' { + return s.makeError(`Invalid character within String: "\u%04d".`, r) + } + + if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` { + buf.WriteString(`"""`) + s.end += 4 + s.endRunes += 4 + } else if r == '\r' { + if s.end+1 < inputLen && s.Input[s.end+1] == '\n' { + s.end++ + s.endRunes++ + } + + buf.WriteByte('\n') + s.end++ + s.endRunes++ + } else { + var char = rune(r) + var w = 1 + + // skip unicode overhead if we are in the ascii range + if r >= 127 { + char, w = utf8.DecodeRuneInString(s.Input[s.end:]) + } + s.end += w + s.endRunes++ + buf.WriteRune(char) + } + } + + return s.makeError("Unterminated string.") +} + +func unhex(b string) (v rune, ok bool) { + for _, c := range b { + v <<= 4 + switch { + case '0' <= c && c <= '9': + v |= c - '0' + case 'a' <= c && c <= 
'f': + v |= c - 'a' + 10 + case 'A' <= c && c <= 'F': + v |= c - 'A' + 10 + default: + return 0, false + } + } + + return v, true +} + +// readName from the input +// +// [_A-Za-z][_0-9A-Za-z]* +func (s *Lexer) readName() (Token, *gqlerror.Error) { + for s.end < len(s.Input) { + r, w := s.peek() + + if (r >= '0' && r <= '9') || (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' { + s.end += w + s.endRunes++ + } else { + break + } + } + + return s.makeToken(Name) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml new file mode 100644 index 000000000..e2c26696a --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml @@ -0,0 +1,672 @@ +encoding: + - name: disallows uncommon control characters + input: "\u0007" + error: + message: 'Cannot contain the invalid character "\u0007"' + locations: [{line: 1, column: 1}] + + - name: accepts BOM header + input: "\uFEFF foo" + tokens: + - + kind: NAME + start: 2 + end: 5 + value: 'foo' + +simple tokens: + - name: records line and column + input: "\n \r\n \r foo\n" + tokens: + - + kind: NAME + start: 8 + end: 11 + line: 4 + column: 3 + value: 'foo' + + - name: skips whitespace + input: "\n\n foo\n\n\n" + tokens: + - + kind: NAME + start: 6 + end: 9 + value: 'foo' + + - name: skips comments + input: "\n #comment\n foo#comment\n" + tokens: + - + kind: NAME + start: 18 + end: 21 + value: 'foo' + + - name: skips commas + input: ",,,foo,,," + tokens: + - + kind: NAME + start: 3 + end: 6 + value: 'foo' + + - name: errors respect whitespace + input: "\n\n ?\n\n\n" + error: + message: 'Cannot parse the unexpected character "?".' + locations: [{line: 3, column: 5}] + string: | + Syntax Error: Cannot parse the unexpected character "?". + GraphQL request (3:5) + 2: + 3: ? + ^ + 4: + + - name: lex reports useful information for dashes in names + input: "a-b" + error: + message: 'Invalid number, expected digit but got: "b".' + locations: [{ line: 1, column: 3 }] + tokens: + - + kind: Name + start: 0 + end: 1 + value: a + +lexes strings: + - name: basic + input: '"simple"' + tokens: + - + kind: STRING + start: 0 + end: 8 + value: 'simple' + + - name: whitespace + input: '" white space "' + tokens: + - + kind: STRING + start: 0 + end: 15 + value: ' white space ' + + - name: quote + input: '"quote \""' + tokens: + - + kind: STRING + start: 0 + end: 10 + value: 'quote "' + + - name: escaped + input: '"escaped \n\r\b\t\f"' + tokens: + - + kind: STRING + start: 0 + end: 20 + value: "escaped \n\r\b\t\f" + + - name: slashes + input: '"slashes \\ \/"' + tokens: + - + kind: STRING + start: 0 + end: 15 + value: 'slashes \ /' + + - name: unicode + input: '"unicode \u1234\u5678\u90AB\uCDEF"' + tokens: + - + kind: STRING + start: 0 + end: 34 + value: "unicode \u1234\u5678\u90AB\uCDEF" + +lex reports useful string errors: + - name: unterminated + input: '"' + error: + message: "Unterminated string." + locations: [{ line: 1, column: 2 }] + + - name: no end quote + input: '"no end quote' + error: + message: 'Unterminated string.' + locations: [{ line: 1, column: 14 }] + + - name: single quotes + input: "'single quotes'" + error: + message: "Unexpected single quote character ('), did you mean to use a double quote (\")?" + locations: [{ line: 1, column: 1 }] + + - name: control characters + input: "\"contains unescaped \u0007 control char\"" + error: + message: 'Invalid character within String: "\u0007".' 
+ locations: [{ line: 1, column: 21 }] + + - name: null byte + input: "\"null-byte is not \u0000 end of file\"" + error: + message: 'Invalid character within String: "\u0000".' + locations: [{ line: 1, column: 19 }] + + - name: unterminated newline + input: "\"multi\nline\"" + error: + message: 'Unterminated string.' + locations: [{line: 1, column: 7 }] + + - name: unterminated carriage return + input: "\"multi\rline\"" + error: + message: 'Unterminated string.' + locations: [{ line: 1, column: 7 }] + + - name: bad escape character + input: '"bad \z esc"' + error: + message: 'Invalid character escape sequence: \z.' + locations: [{ line: 1, column: 7 }] + + - name: hex escape sequence + input: '"bad \x esc"' + error: + message: 'Invalid character escape sequence: \x.' + locations: [{ line: 1, column: 7 }] + + - name: short escape sequence + input: '"bad \u1 esc"' + error: + message: 'Invalid character escape sequence: \u1 es.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 1 + input: '"bad \u0XX1 esc"' + error: + message: 'Invalid character escape sequence: \u0XX1.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 2 + input: '"bad \uXXXX esc"' + error: + message: 'Invalid character escape sequence: \uXXXX.' + locations: [{ line: 1, column: 7 }] + + - name: invalid escape sequence 3 + input: '"bad \uFXXX esc"' + error: + message: 'Invalid character escape sequence: \uFXXX.' + locations: [{ line: 1, column: 7 }] + + - name: invalid character escape sequence + input: '"bad \uXXXF esc"' + error: + message: 'Invalid character escape sequence: \uXXXF.' + locations: [{ line: 1, column: 7 }] + +lexes block strings: + - name: simple + input: '"""simple"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 12 + value: 'simple' + + - name: white space + input: '""" white space """' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 19 + value: ' white space ' + + - name: contains quote + input: '"""contains " quote"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 22 + value: 'contains " quote' + + - name: contains triplequote + input: "\"\"\"contains \\\"\"\" triplequote\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 31 + value: 'contains """ triplequote' + + - name: multi line + input: "\"\"\"multi\nline\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 16 + value: "multi\nline" + + - name: multi line normalized + input: "\"\"\"multi\rline\r\nnormalized\"\"\"" + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 28 + value: "multi\nline\nnormalized" + + - name: unescaped + input: '"""unescaped \n\r\b\t\f\u1234"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 32 + value: 'unescaped \n\r\b\t\f\u1234' + + - name: slashes + input: '"""slashes \\ \/"""' + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 19 + value: 'slashes \\ \/' + + - name: multiple lines + input: | + """ + + spans + multiple + lines + + """ + tokens: + - + kind: BLOCK_STRING + start: 0 + end: 36 + value: "spans\n multiple\n lines" + +lex reports useful block string errors: + - name: unterminated string + input: '"""' + error: + message: "Unterminated string." + locations: [{ line: 1, column: 4 }] + + - name: unescaped control characters + input: "\"\"\"contains unescaped \u0007 control char\"\"\"" + error: + message: 'Invalid character within String: "\u0007".' 
+      locations: [{ line: 1, column: 23 }]
+
+  - name: null byte
+    input: "\"\"\"null-byte is not \u0000 end of file\"\"\""
+    error:
+      message: 'Invalid character within String: "\u0000".'
+      locations: [{ line: 1, column: 21 }]
+
+lexes numbers:
+  - name: integer
+    input: "4"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '4'
+
+  - name: float
+    input: "4.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '4.123'
+
+  - name: negative
+    input: "-4"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 2
+        value: '-4'
+
+  - name: nine
+    input: "9"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '9'
+
+  - name: zero
+    input: "0"
+    tokens:
+      -
+        kind: INT
+        start: 0
+        end: 1
+        value: '0'
+
+  - name: negative float
+    input: "-4.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '-4.123'
+
+  - name: float leading zero
+    input: "0.123"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '0.123'
+
+  - name: exponent whole
+    input: "123e4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '123e4'
+
+  - name: exponent uppercase
+    input: "123E4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 5
+        value: '123E4'
+
+  - name: exponent negative power
+    input: "123e-4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '123e-4'
+
+  - name: exponent positive power
+    input: "123e+4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 6
+        value: '123e+4'
+
+  - name: exponent negative base
+    input: "-1.123e4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 8
+        value: '-1.123e4'
+
+  - name: exponent negative base upper
+    input: "-1.123E4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 8
+        value: '-1.123E4'
+
+  - name: exponent negative base negative power
+    input: "-1.123e-4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 9
+        value: '-1.123e-4'
+
+  - name: exponent negative base positive power
+    input: "-1.123e+4"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 9
+        value: '-1.123e+4'
+
+  - name: exponent negative base large power
+    input: "-1.123e4567"
+    tokens:
+      -
+        kind: FLOAT
+        start: 0
+        end: 11
+        value: '-1.123e4567'
+
+lex reports useful number errors:
+  - name: zero
+    input: "00"
+    error:
+      message: 'Invalid number, unexpected digit after 0: "0".'
+      locations: [{ line: 1, column: 2 }]
+
+  - name: positive
+    input: "+1"
+    error:
+      message: 'Cannot parse the unexpected character "+".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: trailing dot
+    input: "1."
+    error:
+      message: 'Invalid number, expected digit but got: <EOF>.'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: trailing dot exponent
+    input: "1.e1"
+    error:
+      message: 'Invalid number, expected digit but got: "e".'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: missing leading zero
+    input: ".123"
+    error:
+      message: 'Cannot parse the unexpected character ".".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: characters
+    input: "1.A"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 3 }]
+
+  - name: negative characters
+    input: "-A"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 2 }]
+
+  - name: missing exponent
+    input: '1.0e'
+    error:
+      message: 'Invalid number, expected digit but got: <EOF>.'
+      locations: [{ line: 1, column: 5 }]
+
+  - name: character exponent
+    input: "1.0eA"
+    error:
+      message: 'Invalid number, expected digit but got: "A".'
+      locations: [{ line: 1, column: 5 }]
+
+lexes punctuation:
+  - name: bang
+    input: "!"
+    tokens:
+      -
+        kind: BANG
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: dollar
+    input: "$"
+    tokens:
+      -
+        kind: DOLLAR
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open paren
+    input: "("
+    tokens:
+      -
+        kind: PAREN_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close paren
+    input: ")"
+    tokens:
+      -
+        kind: PAREN_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: spread
+    input: "..."
+    tokens:
+      -
+        kind: SPREAD
+        start: 0
+        end: 3
+        value: undefined
+
+  - name: colon
+    input: ":"
+    tokens:
+      -
+        kind: COLON
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: equals
+    input: "="
+    tokens:
+      -
+        kind: EQUALS
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: at
+    input: "@"
+    tokens:
+      -
+        kind: AT
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open bracket
+    input: "["
+    tokens:
+      -
+        kind: BRACKET_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close bracket
+    input: "]"
+    tokens:
+      -
+        kind: BRACKET_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: open brace
+    input: "{"
+    tokens:
+      -
+        kind: BRACE_L
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: close brace
+    input: "}"
+    tokens:
+      -
+        kind: BRACE_R
+        start: 0
+        end: 1
+        value: undefined
+
+  - name: pipe
+    input: "|"
+    tokens:
+      -
+        kind: PIPE
+        start: 0
+        end: 1
+        value: undefined
+
+lex reports useful unknown character error:
+  - name: not a spread
+    input: ".."
+    error:
+      message: 'Cannot parse the unexpected character ".".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: question mark
+    input: "?"
+    error:
+      message: 'Cannot parse the unexpected character "?".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: unicode 203
+    input: "\u203B"
+    error:
+      message: 'Cannot parse the unexpected character "â".'
+      locations: [{ line: 1, column: 1 }]
+
+  - name: unicode 200
+    input: "\u200b"
+    error:
+      message: 'Cannot parse the unexpected character "â".'
+      locations: [{ line: 1, column: 1 }]
+
diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/token.go b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/token.go
new file mode 100644
index 000000000..8985a7efb
--- /dev/null
+++ b/constraint/vendor/github.com/vektah/gqlparser/v2/lexer/token.go
@@ -0,0 +1,148 @@
+package lexer
+
+import (
+	"strconv"
+
+	"github.com/vektah/gqlparser/v2/ast"
+)
+
+const (
+	Invalid Type = iota
+	EOF
+	Bang
+	Dollar
+	Amp
+	ParenL
+	ParenR
+	Spread
+	Colon
+	Equals
+	At
+	BracketL
+	BracketR
+	BraceL
+	BraceR
+	Pipe
+	Name
+	Int
+	Float
+	String
+	BlockString
+	Comment
+)
+
+func (t Type) Name() string {
+	switch t {
+	case Invalid:
+		return "Invalid"
+	case EOF:
+		return "EOF"
+	case Bang:
+		return "Bang"
+	case Dollar:
+		return "Dollar"
+	case Amp:
+		return "Amp"
+	case ParenL:
+		return "ParenL"
+	case ParenR:
+		return "ParenR"
+	case Spread:
+		return "Spread"
+	case Colon:
+		return "Colon"
+	case Equals:
+		return "Equals"
+	case At:
+		return "At"
+	case BracketL:
+		return "BracketL"
+	case BracketR:
+		return "BracketR"
+	case BraceL:
+		return "BraceL"
+	case BraceR:
+		return "BraceR"
+	case Pipe:
+		return "Pipe"
+	case Name:
+		return "Name"
+	case Int:
+		return "Int"
+	case Float:
+		return "Float"
+	case String:
+		return "String"
+	case BlockString:
+		return "BlockString"
+	case Comment:
+		return "Comment"
+	}
+	return "Unknown " + strconv.Itoa(int(t))
+}
+
+func (t Type) String() string {
+	switch t {
+	case Invalid:
+		return "<Invalid>"
+	case EOF:
+		return "<EOF>"
+	case Bang:
+		return "!"
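+	// Punctuator kinds print as their literal source characters; the
+	// value-carrying kinds below render as their Name() spelling instead.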
+ case Dollar: + return "$" + case Amp: + return "&" + case ParenL: + return "(" + case ParenR: + return ")" + case Spread: + return "..." + case Colon: + return ":" + case Equals: + return "=" + case At: + return "@" + case BracketL: + return "[" + case BracketR: + return "]" + case BraceL: + return "{" + case BraceR: + return "}" + case Pipe: + return "|" + case Name: + return "Name" + case Int: + return "Int" + case Float: + return "Float" + case String: + return "String" + case BlockString: + return "BlockString" + case Comment: + return "Comment" + } + return "Unknown " + strconv.Itoa(int(t)) +} + +// Kind represents a type of token. The types are predefined as constants. +type Type int + +type Token struct { + Kind Type // The token type. + Value string // The literal value consumed. + Pos ast.Position // The file and line this token was read from +} + +func (t Token) String() string { + if t.Value != "" { + return t.Kind.String() + " " + strconv.Quote(t.Value) + } + return t.Kind.String() +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/parser/parser.go b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/parser.go new file mode 100644 index 000000000..52b8e6840 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/parser.go @@ -0,0 +1,136 @@ +package parser + +import ( + "strconv" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/lexer" +) + +type parser struct { + lexer lexer.Lexer + err *gqlerror.Error + + peeked bool + peekToken lexer.Token + peekError *gqlerror.Error + + prev lexer.Token +} + +func (p *parser) peekPos() *ast.Position { + if p.err != nil { + return nil + } + + peek := p.peek() + return &peek.Pos +} + +func (p *parser) peek() lexer.Token { + if p.err != nil { + return p.prev + } + + if !p.peeked { + p.peekToken, p.peekError = p.lexer.ReadToken() + p.peeked = true + } + + return p.peekToken +} + +func (p *parser) error(tok lexer.Token, format string, args ...interface{}) { + if p.err != nil { + return + } + p.err = gqlerror.ErrorLocf(tok.Pos.Src.Name, tok.Pos.Line, tok.Pos.Column, format, args...) 
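+	// Because of the guard above only the first failure is recorded; once
+	// p.err is set, peek and next simply return p.prev and parsing unwinds.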
+} + +func (p *parser) next() lexer.Token { + if p.err != nil { + return p.prev + } + if p.peeked { + p.peeked = false + p.prev, p.err = p.peekToken, p.peekError + } else { + p.prev, p.err = p.lexer.ReadToken() + } + return p.prev +} + +func (p *parser) expectKeyword(value string) lexer.Token { + tok := p.peek() + if tok.Kind == lexer.Name && tok.Value == value { + return p.next() + } + + p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String()) + return tok +} + +func (p *parser) expect(kind lexer.Type) lexer.Token { + tok := p.peek() + if tok.Kind == kind { + return p.next() + } + + p.error(tok, "Expected %s, found %s", kind, tok.Kind.String()) + return tok +} + +func (p *parser) skip(kind lexer.Type) bool { + if p.err != nil { + return false + } + + tok := p.peek() + + if tok.Kind != kind { + return false + } + p.next() + return true +} + +func (p *parser) unexpectedError() { + p.unexpectedToken(p.peek()) +} + +func (p *parser) unexpectedToken(tok lexer.Token) { + p.error(tok, "Unexpected %s", tok.String()) +} + +func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) { + hasDef := p.skip(start) + if !hasDef { + return + } + + for p.peek().Kind != end && p.err == nil { + cb() + } + p.next() +} + +func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) { + hasDef := p.skip(start) + if !hasDef { + return + } + + called := false + for p.peek().Kind != end && p.err == nil { + called = true + cb() + } + + if !called { + p.error(p.peek(), "expected at least one definition, found %s", p.peek().Kind.String()) + return + } + + p.next() +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query.go b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query.go new file mode 100644 index 000000000..939e8af46 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query.go @@ -0,0 +1,350 @@ +package parser + +import ( + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/lexer" + + . 
"github.com/vektah/gqlparser/v2/ast" +) + +func ParseQuery(source *Source) (*QueryDocument, *gqlerror.Error) { + p := parser{ + lexer: lexer.New(source), + } + return p.parseQueryDocument(), p.err +} + +func (p *parser) parseQueryDocument() *QueryDocument { + var doc QueryDocument + for p.peek().Kind != lexer.EOF { + if p.err != nil { + return &doc + } + doc.Position = p.peekPos() + switch p.peek().Kind { + case lexer.Name: + switch p.peek().Value { + case "query", "mutation", "subscription": + doc.Operations = append(doc.Operations, p.parseOperationDefinition()) + case "fragment": + doc.Fragments = append(doc.Fragments, p.parseFragmentDefinition()) + default: + p.unexpectedError() + } + case lexer.BraceL: + doc.Operations = append(doc.Operations, p.parseOperationDefinition()) + default: + p.unexpectedError() + } + } + + return &doc +} + +func (p *parser) parseOperationDefinition() *OperationDefinition { + if p.peek().Kind == lexer.BraceL { + return &OperationDefinition{ + Position: p.peekPos(), + Operation: Query, + SelectionSet: p.parseRequiredSelectionSet(), + } + } + + var od OperationDefinition + od.Position = p.peekPos() + od.Operation = p.parseOperationType() + + if p.peek().Kind == lexer.Name { + od.Name = p.next().Value + } + + od.VariableDefinitions = p.parseVariableDefinitions() + od.Directives = p.parseDirectives(false) + od.SelectionSet = p.parseRequiredSelectionSet() + + return &od +} + +func (p *parser) parseOperationType() Operation { + tok := p.next() + switch tok.Value { + case "query": + return Query + case "mutation": + return Mutation + case "subscription": + return Subscription + } + p.unexpectedToken(tok) + return "" +} + +func (p *parser) parseVariableDefinitions() VariableDefinitionList { + var defs []*VariableDefinition + p.many(lexer.ParenL, lexer.ParenR, func() { + defs = append(defs, p.parseVariableDefinition()) + }) + + return defs +} + +func (p *parser) parseVariableDefinition() *VariableDefinition { + var def VariableDefinition + def.Position = p.peekPos() + def.Variable = p.parseVariable() + + p.expect(lexer.Colon) + + def.Type = p.parseTypeReference() + + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + + def.Directives = p.parseDirectives(false) + + return &def +} + +func (p *parser) parseVariable() string { + p.expect(lexer.Dollar) + return p.parseName() +} + +func (p *parser) parseOptionalSelectionSet() SelectionSet { + var selections []Selection + p.some(lexer.BraceL, lexer.BraceR, func() { + selections = append(selections, p.parseSelection()) + }) + + return SelectionSet(selections) +} + +func (p *parser) parseRequiredSelectionSet() SelectionSet { + if p.peek().Kind != lexer.BraceL { + p.error(p.peek(), "Expected %s, found %s", lexer.BraceL, p.peek().Kind.String()) + return nil + } + + var selections []Selection + p.some(lexer.BraceL, lexer.BraceR, func() { + selections = append(selections, p.parseSelection()) + }) + + return SelectionSet(selections) +} + +func (p *parser) parseSelection() Selection { + if p.peek().Kind == lexer.Spread { + return p.parseFragment() + } + return p.parseField() +} + +func (p *parser) parseField() *Field { + var field Field + field.Position = p.peekPos() + field.Alias = p.parseName() + + if p.skip(lexer.Colon) { + field.Name = p.parseName() + } else { + field.Name = field.Alias + } + + field.Arguments = p.parseArguments(false) + field.Directives = p.parseDirectives(false) + if p.peek().Kind == lexer.BraceL { + field.SelectionSet = p.parseOptionalSelectionSet() + } + + return &field +} + +func 
(p *parser) parseArguments(isConst bool) ArgumentList { + var arguments ArgumentList + p.many(lexer.ParenL, lexer.ParenR, func() { + arguments = append(arguments, p.parseArgument(isConst)) + }) + + return arguments +} + +func (p *parser) parseArgument(isConst bool) *Argument { + arg := Argument{} + arg.Position = p.peekPos() + arg.Name = p.parseName() + p.expect(lexer.Colon) + + arg.Value = p.parseValueLiteral(isConst) + return &arg +} + +func (p *parser) parseFragment() Selection { + p.expect(lexer.Spread) + + if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" { + return &FragmentSpread{ + Position: p.peekPos(), + Name: p.parseFragmentName(), + Directives: p.parseDirectives(false), + } + } + + var def InlineFragment + def.Position = p.peekPos() + if p.peek().Value == "on" { + p.next() // "on" + + def.TypeCondition = p.parseName() + } + + def.Directives = p.parseDirectives(false) + def.SelectionSet = p.parseRequiredSelectionSet() + return &def +} + +func (p *parser) parseFragmentDefinition() *FragmentDefinition { + var def FragmentDefinition + def.Position = p.peekPos() + p.expectKeyword("fragment") + + def.Name = p.parseFragmentName() + def.VariableDefinition = p.parseVariableDefinitions() + + p.expectKeyword("on") + + def.TypeCondition = p.parseName() + def.Directives = p.parseDirectives(false) + def.SelectionSet = p.parseRequiredSelectionSet() + return &def +} + +func (p *parser) parseFragmentName() string { + if p.peek().Value == "on" { + p.unexpectedError() + return "" + } + + return p.parseName() +} + +func (p *parser) parseValueLiteral(isConst bool) *Value { + token := p.peek() + + var kind ValueKind + switch token.Kind { + case lexer.BracketL: + return p.parseList(isConst) + case lexer.BraceL: + return p.parseObject(isConst) + case lexer.Dollar: + if isConst { + p.unexpectedError() + return nil + } + return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable} + case lexer.Int: + kind = IntValue + case lexer.Float: + kind = FloatValue + case lexer.String: + kind = StringValue + case lexer.BlockString: + kind = BlockValue + case lexer.Name: + switch token.Value { + case "true", "false": + kind = BooleanValue + case "null": + kind = NullValue + default: + kind = EnumValue + } + default: + p.unexpectedError() + return nil + } + + p.next() + + return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind} +} + +func (p *parser) parseList(isConst bool) *Value { + var values ChildValueList + pos := p.peekPos() + p.many(lexer.BracketL, lexer.BracketR, func() { + values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)}) + }) + + return &Value{Children: values, Kind: ListValue, Position: pos} +} + +func (p *parser) parseObject(isConst bool) *Value { + var fields ChildValueList + pos := p.peekPos() + p.many(lexer.BraceL, lexer.BraceR, func() { + fields = append(fields, p.parseObjectField(isConst)) + }) + + return &Value{Children: fields, Kind: ObjectValue, Position: pos} +} + +func (p *parser) parseObjectField(isConst bool) *ChildValue { + field := ChildValue{} + field.Position = p.peekPos() + field.Name = p.parseName() + + p.expect(lexer.Colon) + + field.Value = p.parseValueLiteral(isConst) + return &field +} + +func (p *parser) parseDirectives(isConst bool) []*Directive { + var directives []*Directive + + for p.peek().Kind == lexer.At { + if p.err != nil { + break + } + directives = append(directives, p.parseDirective(isConst)) + } + return directives +} + +func (p *parser) parseDirective(isConst bool) *Directive { + p.expect(lexer.At) + + 
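+	// Go evaluates these composite-literal fields in source order, so Position
+	// is captured before parseName and parseArguments advance the token stream.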
return &Directive{ + Position: p.peekPos(), + Name: p.parseName(), + Arguments: p.parseArguments(isConst), + } +} + +func (p *parser) parseTypeReference() *Type { + var typ Type + + if p.skip(lexer.BracketL) { + typ.Position = p.peekPos() + typ.Elem = p.parseTypeReference() + p.expect(lexer.BracketR) + } else { + typ.Position = p.peekPos() + typ.NamedType = p.parseName() + } + + if p.skip(lexer.Bang) { + typ.Position = p.peekPos() + typ.NonNull = true + } + return &typ +} + +func (p *parser) parseName() string { + token := p.expect(lexer.Name) + + return token.Value +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml new file mode 100644 index 000000000..a46a01e71 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml @@ -0,0 +1,544 @@ +parser provides useful errors: + - name: unclosed paren + input: '{' + error: + message: "Expected Name, found " + locations: [{line: 1, column: 2}] + + - name: missing on in fragment + input: | + { ...MissingOn } + fragment MissingOn Type + error: + message: 'Expected "on", found Name "Type"' + locations: [{ line: 2, column: 20 }] + + - name: missing name after alias + input: '{ field: {} }' + error: + message: "Expected Name, found {" + locations: [{ line: 1, column: 10 }] + + - name: not an operation + input: 'notanoperation Foo { field }' + error: + message: 'Unexpected Name "notanoperation"' + locations: [{ line: 1, column: 1 }] + + - name: a wild splat appears + input: '...' + error: + message: 'Unexpected ...' + locations: [{ line: 1, column: 1}] + +variables: + - name: are allowed in args + input: '{ field(complex: { a: { b: [ $var ] } }) }' + + - name: are not allowed in default args + input: 'query Foo($x: Complex = { a: { b: [ $var ] } }) { field }' + error: + message: 'Unexpected $' + locations: [{ line: 1, column: 37 }] + + - name: can have directives + input: 'query ($withDirective: String @first @second, $withoutDirective: String) { f }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + VariableDefinitions: [VariableDefinition] + - + Variable: "withDirective" + Type: String + Directives: [Directive] + - + Name: "first" + - + Name: "second" + - + Variable: "withoutDirective" + Type: String + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + +fragments: + - name: can not be named 'on' + input: 'fragment on on on { on }' + error: + message: 'Unexpected Name "on"' + locations: [{ line: 1, column: 10 }] + + - name: can not spread fragments called 'on' + input: '{ ...on }' + error: + message: 'Expected Name, found }' + locations: [{ line: 1, column: 9 }] + +encoding: + - name: multibyte characters are supported + input: | + # This comment has a ਊ multi-byte character. + { field(arg: "Has a ਊ multi-byte character.") } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "field" + Name: "field" + Arguments: [Argument] + - + Name: "arg" + Value: "Has a ਊ multi-byte character." + +keywords are allowed anywhere a name is: + - name: on + input: | + query on { + ... a + ... on on { field } + } + fragment a on Type { + on(on: $on) + @on(on: on) + } + + - name: subscription + input: | + query subscription { + ... subscription + ... 
on subscription { field } + } + fragment subscription on Type { + subscription(subscription: $subscription) + @subscription(subscription: subscription) + } + + - name: true + input: | + query true { + ... true + ... on true { field } + } + fragment true on Type { + true(true: $true) + @true(true: true) + } + +operations: + - name: anonymous mutation + input: 'mutation { mutationField }' + + - name: named mutation + input: 'mutation Foo { mutationField }' + + - name: anonymous subscription + input: 'subscription { subscriptionField }' + + - name: named subscription + input: 'subscription Foo { subscriptionField }' + + +ast: + - name: simple query + input: | + { + node(id: 4) { + id, + name + } + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "node" + Name: "node" + Arguments: [Argument] + - + Name: "id" + Value: 4 + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Alias: "name" + Name: "name" + + - name: nameless query with no variables + input: | + query { + node { + id + } + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "node" + Name: "node" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + + - name: fragment defined variables + input: 'fragment a($v: Boolean = false) on t { f(v: $v) }' + ast: | + + Fragments: [FragmentDefinition] + - + Name: "a" + VariableDefinition: [VariableDefinition] + - + Variable: "v" + Type: Boolean + DefaultValue: false + TypeCondition: "t" + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "v" + Value: $v + + +values: + - name: null + input: '{ f(id: null) }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "id" + Value: null + + - name: strings + input: '{ f(long: """long""", short: "short") } ' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "long" + Value: "long" + - + Name: "short" + Value: "short" + + - name: list + input: '{ f(id: [1,2]) }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + Arguments: [Argument] + - + Name: "id" + Value: [1,2] + +types: + - name: common types + input: 'query ($string: String, $int: Int, $arr: [Arr], $notnull: [Arr!]!) { f }' + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + VariableDefinitions: [VariableDefinition] + - + Variable: "string" + Type: String + - + Variable: "int" + Type: Int + - + Variable: "arr" + Type: [Arr] + - + Variable: "notnull" + Type: [Arr!]! + SelectionSet: [Selection] + - + Alias: "f" + Name: "f" + +large queries: + - name: kitchen sink + input: | + # Copyright (c) 2015-present, Facebook, Inc. + # + # This source code is licensed under the MIT license found in the + # LICENSE file in the root directory of this source tree. + + query queryName($foo: ComplexType, $site: Site = MOBILE) { + whoever123is: node(id: [123, 456]) { + id , + ... on User @defer { + field2 { + id , + alias: field1(first:10, after:$foo,) @include(if: $foo) { + id, + ...frag + } + } + } + ... @skip(unless: $foo) { + id + } + ... 
{ + id + } + } + } + + mutation likeStory { + like(story: 123) @defer { + story { + id + } + } + } + + subscription StoryLikeSubscription($input: StoryLikeSubscribeInput) { + storyLikeSubscribe(input: $input) { + story { + likers { + count + } + likeSentence { + text + } + } + } + } + + fragment frag on Friend { + foo(size: $size, bar: $b, obj: {key: "value", block: """ + block string uses \""" + """}) + } + + { + unnamed(truthy: true, falsey: false, nullish: null), + query + } + ast: | + + Operations: [OperationDefinition] + - + Operation: Operation("query") + Name: "queryName" + VariableDefinitions: [VariableDefinition] + - + Variable: "foo" + Type: ComplexType + - + Variable: "site" + Type: Site + DefaultValue: MOBILE + SelectionSet: [Selection] + - + Alias: "whoever123is" + Name: "node" + Arguments: [Argument] + - + Name: "id" + Value: [123,456] + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + TypeCondition: "User" + Directives: [Directive] + - + Name: "defer" + SelectionSet: [Selection] + - + Alias: "field2" + Name: "field2" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Alias: "alias" + Name: "field1" + Arguments: [Argument] + - + Name: "first" + Value: 10 + - + Name: "after" + Value: $foo + Directives: [Directive] + - + Name: "include" + Arguments: [Argument] + - + Name: "if" + Value: $foo + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Name: "frag" + - + Directives: [Directive] + - + Name: "skip" + Arguments: [Argument] + - + Name: "unless" + Value: $foo + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Operation: Operation("mutation") + Name: "likeStory" + SelectionSet: [Selection] + - + Alias: "like" + Name: "like" + Arguments: [Argument] + - + Name: "story" + Value: 123 + Directives: [Directive] + - + Name: "defer" + SelectionSet: [Selection] + - + Alias: "story" + Name: "story" + SelectionSet: [Selection] + - + Alias: "id" + Name: "id" + - + Operation: Operation("subscription") + Name: "StoryLikeSubscription" + VariableDefinitions: [VariableDefinition] + - + Variable: "input" + Type: StoryLikeSubscribeInput + SelectionSet: [Selection] + - + Alias: "storyLikeSubscribe" + Name: "storyLikeSubscribe" + Arguments: [Argument] + - + Name: "input" + Value: $input + SelectionSet: [Selection] + - + Alias: "story" + Name: "story" + SelectionSet: [Selection] + - + Alias: "likers" + Name: "likers" + SelectionSet: [Selection] + - + Alias: "count" + Name: "count" + - + Alias: "likeSentence" + Name: "likeSentence" + SelectionSet: [Selection] + - + Alias: "text" + Name: "text" + - + Operation: Operation("query") + SelectionSet: [Selection] + - + Alias: "unnamed" + Name: "unnamed" + Arguments: [Argument] + - + Name: "truthy" + Value: true + - + Name: "falsey" + Value: false + - + Name: "nullish" + Value: null + - + Alias: "query" + Name: "query" + Fragments: [FragmentDefinition] + - + Name: "frag" + TypeCondition: "Friend" + SelectionSet: [Selection] + - + Alias: "foo" + Name: "foo" + Arguments: [Argument] + - + Name: "size" + Value: $size + - + Name: "bar" + Value: $b + - + Name: "obj" + Value: {key:"value",block:"block string uses \"\"\""} + +fuzzer: +- name: 01 + input: '{__typename{...}}' + error: + message: 'Expected {, found }' + locations: [{ line: 1, column: 16 }] + +- name: 02 + input: '{...{__typename{...{}}}}' + error: + message: 'expected at least one definition, found }' + locations: [{ line: 1, column: 21 }] diff --git 
a/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema.go b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema.go new file mode 100644 index 000000000..9340b78c2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema.go @@ -0,0 +1,535 @@ +package parser + +import ( + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/lexer" +) + +func ParseSchema(source *Source) (*SchemaDocument, *gqlerror.Error) { + p := parser{ + lexer: lexer.New(source), + } + ast, err := p.parseSchemaDocument(), p.err + if err != nil { + return nil, err + } + + for _, def := range ast.Definitions { + def.BuiltIn = source.BuiltIn + } + for _, def := range ast.Extensions { + def.BuiltIn = source.BuiltIn + } + + return ast, nil +} + +func ParseSchemas(inputs ...*Source) (*SchemaDocument, *gqlerror.Error) { + ast := &SchemaDocument{} + for _, input := range inputs { + inputAst, err := ParseSchema(input) + if err != nil { + return nil, err + } + ast.Merge(inputAst) + } + return ast, nil +} + +func (p *parser) parseSchemaDocument() *SchemaDocument { + var doc SchemaDocument + doc.Position = p.peekPos() + for p.peek().Kind != lexer.EOF { + if p.err != nil { + return nil + } + + var description string + if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String { + description = p.parseDescription() + } + + if p.peek().Kind != lexer.Name { + p.unexpectedError() + break + } + + switch p.peek().Value { + case "scalar", "type", "interface", "union", "enum", "input": + doc.Definitions = append(doc.Definitions, p.parseTypeSystemDefinition(description)) + case "schema": + doc.Schema = append(doc.Schema, p.parseSchemaDefinition(description)) + case "directive": + doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description)) + case "extend": + if description != "" { + p.unexpectedToken(p.prev) + } + p.parseTypeSystemExtension(&doc) + default: + p.unexpectedError() + return nil + } + } + + return &doc +} + +func (p *parser) parseDescription() string { + token := p.peek() + + if token.Kind != lexer.BlockString && token.Kind != lexer.String { + return "" + } + + return p.next().Value +} + +func (p *parser) parseTypeSystemDefinition(description string) *Definition { + tok := p.peek() + if tok.Kind != lexer.Name { + p.unexpectedError() + return nil + } + + switch tok.Value { + case "scalar": + return p.parseScalarTypeDefinition(description) + case "type": + return p.parseObjectTypeDefinition(description) + case "interface": + return p.parseInterfaceTypeDefinition(description) + case "union": + return p.parseUnionTypeDefinition(description) + case "enum": + return p.parseEnumTypeDefinition(description) + case "input": + return p.parseInputObjectTypeDefinition(description) + default: + p.unexpectedError() + return nil + } +} + +func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { + p.expectKeyword("schema") + + def := SchemaDefinition{Description: description} + def.Position = p.peekPos() + def.Directives = p.parseDirectives(true) + + p.some(lexer.BraceL, lexer.BraceR, func() { + def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) + }) + return &def +} + +func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition { + var op OperationTypeDefinition + op.Position = p.peekPos() + op.Operation = p.parseOperationType() + p.expect(lexer.Colon) + op.Type = p.parseName() + return &op +} + +func (p 
*parser) parseScalarTypeDefinition(description string) *Definition { + p.expectKeyword("scalar") + + var def Definition + def.Position = p.peekPos() + def.Kind = Scalar + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseObjectTypeDefinition(description string) *Definition { + p.expectKeyword("type") + + var def Definition + def.Position = p.peekPos() + def.Kind = Object + def.Description = description + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + return &def +} + +func (p *parser) parseImplementsInterfaces() []string { + var types []string + if p.peek().Value == "implements" { + p.next() + // optional leading ampersand + p.skip(lexer.Amp) + + types = append(types, p.parseName()) + for p.skip(lexer.Amp) && p.err == nil { + types = append(types, p.parseName()) + } + } + return types +} + +func (p *parser) parseFieldsDefinition() FieldList { + var defs FieldList + p.some(lexer.BraceL, lexer.BraceR, func() { + defs = append(defs, p.parseFieldDefinition()) + }) + return defs +} + +func (p *parser) parseFieldDefinition() *FieldDefinition { + var def FieldDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + def.Arguments = p.parseArgumentDefs() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + def.Directives = p.parseDirectives(true) + + return &def +} + +func (p *parser) parseArgumentDefs() ArgumentDefinitionList { + var args ArgumentDefinitionList + p.some(lexer.ParenL, lexer.ParenR, func() { + args = append(args, p.parseArgumentDef()) + }) + return args +} + +func (p *parser) parseArgumentDef() *ArgumentDefinition { + var def ArgumentDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseInputValueDef() *FieldDefinition { + var def FieldDefinition + def.Position = p.peekPos() + def.Description = p.parseDescription() + def.Name = p.parseName() + p.expect(lexer.Colon) + def.Type = p.parseTypeReference() + if p.skip(lexer.Equals) { + def.DefaultValue = p.parseValueLiteral(true) + } + def.Directives = p.parseDirectives(true) + return &def +} + +func (p *parser) parseInterfaceTypeDefinition(description string) *Definition { + p.expectKeyword("interface") + + var def Definition + def.Position = p.peekPos() + def.Kind = Interface + def.Description = description + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + return &def +} + +func (p *parser) parseUnionTypeDefinition(description string) *Definition { + p.expectKeyword("union") + + var def Definition + def.Position = p.peekPos() + def.Kind = Union + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Types = p.parseUnionMemberTypes() + return &def +} + +func (p *parser) parseUnionMemberTypes() []string { + var types []string + if p.skip(lexer.Equals) { + // optional leading pipe + p.skip(lexer.Pipe) + + types = append(types, p.parseName()) + for p.skip(lexer.Pipe) && p.err == nil { + types = append(types, p.parseName()) + } + } + return types 
+} + +func (p *parser) parseEnumTypeDefinition(description string) *Definition { + p.expectKeyword("enum") + + var def Definition + def.Position = p.peekPos() + def.Kind = Enum + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.EnumValues = p.parseEnumValuesDefinition() + return &def +} + +func (p *parser) parseEnumValuesDefinition() EnumValueList { + var values EnumValueList + p.some(lexer.BraceL, lexer.BraceR, func() { + values = append(values, p.parseEnumValueDefinition()) + }) + return values +} + +func (p *parser) parseEnumValueDefinition() *EnumValueDefinition { + return &EnumValueDefinition{ + Position: p.peekPos(), + Description: p.parseDescription(), + Name: p.parseName(), + Directives: p.parseDirectives(true), + } +} + +func (p *parser) parseInputObjectTypeDefinition(description string) *Definition { + p.expectKeyword("input") + + var def Definition + def.Position = p.peekPos() + def.Kind = InputObject + def.Description = description + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseInputFieldsDefinition() + return &def +} + +func (p *parser) parseInputFieldsDefinition() FieldList { + var values FieldList + p.some(lexer.BraceL, lexer.BraceR, func() { + values = append(values, p.parseInputValueDef()) + }) + return values +} + +func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) { + p.expectKeyword("extend") + + switch p.peek().Value { + case "schema": + doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension()) + case "scalar": + doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension()) + case "type": + doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension()) + case "interface": + doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension()) + case "union": + doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension()) + case "enum": + doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension()) + case "input": + doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension()) + default: + p.unexpectedError() + } +} + +func (p *parser) parseSchemaExtension() *SchemaDefinition { + p.expectKeyword("schema") + + var def SchemaDefinition + def.Position = p.peekPos() + def.Directives = p.parseDirectives(true) + p.some(lexer.BraceL, lexer.BraceR, func() { + def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) + }) + if len(def.Directives) == 0 && len(def.OperationTypes) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseScalarTypeExtension() *Definition { + p.expectKeyword("scalar") + + var def Definition + def.Position = p.peekPos() + def.Kind = Scalar + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + if len(def.Directives) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseObjectTypeExtension() *Definition { + p.expectKeyword("type") + + var def Definition + def.Position = p.peekPos() + def.Kind = Object + def.Name = p.parseName() + def.Interfaces = p.parseImplementsInterfaces() + def.Directives = p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseInterfaceTypeExtension() *Definition { + p.expectKeyword("interface") + + var def Definition + def.Position = p.peekPos() + def.Kind = Interface + def.Name = p.parseName() + def.Directives = 
p.parseDirectives(true) + def.Fields = p.parseFieldsDefinition() + if len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseUnionTypeExtension() *Definition { + p.expectKeyword("union") + + var def Definition + def.Position = p.peekPos() + def.Kind = Union + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.Types = p.parseUnionMemberTypes() + + if len(def.Directives) == 0 && len(def.Types) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseEnumTypeExtension() *Definition { + p.expectKeyword("enum") + + var def Definition + def.Position = p.peekPos() + def.Kind = Enum + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + def.EnumValues = p.parseEnumValuesDefinition() + if len(def.Directives) == 0 && len(def.EnumValues) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseInputObjectTypeExtension() *Definition { + p.expectKeyword("input") + + var def Definition + def.Position = p.peekPos() + def.Kind = InputObject + def.Name = p.parseName() + def.Directives = p.parseDirectives(false) + def.Fields = p.parseInputFieldsDefinition() + if len(def.Directives) == 0 && len(def.Fields) == 0 { + p.unexpectedError() + } + return &def +} + +func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition { + p.expectKeyword("directive") + p.expect(lexer.At) + + var def DirectiveDefinition + def.Position = p.peekPos() + def.Description = description + def.Name = p.parseName() + def.Arguments = p.parseArgumentDefs() + + if peek := p.peek(); peek.Kind == lexer.Name && peek.Value == "repeatable" { + def.IsRepeatable = true + p.skip(lexer.Name) + } + + p.expectKeyword("on") + def.Locations = p.parseDirectiveLocations() + return &def +} + +func (p *parser) parseDirectiveLocations() []DirectiveLocation { + p.skip(lexer.Pipe) + + locations := []DirectiveLocation{p.parseDirectiveLocation()} + + for p.skip(lexer.Pipe) && p.err == nil { + locations = append(locations, p.parseDirectiveLocation()) + } + + return locations +} + +func (p *parser) parseDirectiveLocation() DirectiveLocation { + name := p.expect(lexer.Name) + + switch name.Value { + case `QUERY`: + return LocationQuery + case `MUTATION`: + return LocationMutation + case `SUBSCRIPTION`: + return LocationSubscription + case `FIELD`: + return LocationField + case `FRAGMENT_DEFINITION`: + return LocationFragmentDefinition + case `FRAGMENT_SPREAD`: + return LocationFragmentSpread + case `INLINE_FRAGMENT`: + return LocationInlineFragment + case `VARIABLE_DEFINITION`: + return LocationVariableDefinition + case `SCHEMA`: + return LocationSchema + case `SCALAR`: + return LocationScalar + case `OBJECT`: + return LocationObject + case `FIELD_DEFINITION`: + return LocationFieldDefinition + case `ARGUMENT_DEFINITION`: + return LocationArgumentDefinition + case `INTERFACE`: + return LocationInterface + case `UNION`: + return LocationUnion + case `ENUM`: + return LocationEnum + case `ENUM_VALUE`: + return LocationEnumValue + case `INPUT_OBJECT`: + return LocationInputObject + case `INPUT_FIELD_DEFINITION`: + return LocationInputFieldDefinition + } + + p.unexpectedToken(name) + return "" +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml new file mode 100644 index 000000000..30faf84c5 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml @@ -0,0 +1,646 @@ 
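+# Each case below parses the input SDL and asserts either the dumped AST or the reported parse error.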
+object types: + - name: simple + input: | + type Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: with description + input: | + "Description" + type Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "Description" + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: with block description + input: | + """ + Description + """ + # Even with comments between them + type Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "Description" + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + - name: with field arg + input: | + type Hello { + world(flag: Boolean): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "flag" + Type: Boolean + Type: String + + - name: with field arg and default value + input: | + type Hello { + world(flag: Boolean = true): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "flag" + DefaultValue: true + Type: Boolean + Type: String + + - name: with field list arg + input: | + type Hello { + world(things: [String]): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "things" + Type: [String] + Type: String + + - name: with two args + input: | + type Hello { + world(argOne: Boolean, argTwo: Int): String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Arguments: [ArgumentDefinition] + - + Name: "argOne" + Type: Boolean + - + Name: "argTwo" + Type: Int + Type: String + - name: must define one or more fields + input: | + type Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 13 }] + +type extensions: + - name: Object extension + input: | + extend type Hello { + world: String + } + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: without any fields + input: "extend type Hello implements Greeting" + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Greeting" + + - name: without fields twice + input: | + extend type Hello implements Greeting + extend type Hello implements SecondGreeting + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Greeting" + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "SecondGreeting" + + - name: without anything errors + input: "extend type Hello" + error: + message: "Unexpected " + locations: [{ line: 1, column: 18 }] + + - name: can have descriptions # hmm, this might not be spec compliant... 
+ input: | + "Description" + extend type Hello { + world: String + } + error: + message: 'Unexpected String "Description"' + locations: [{ line: 1, column: 2 }] + + - name: can not have descriptions on types + input: | + extend "Description" type Hello { + world: String + } + error: + message: Unexpected String "Description" + locations: [{ line: 1, column: 9 }] + + - name: all can have directives + input: | + extend scalar Foo @deprecated + extend type Foo @deprecated + extend interface Foo @deprecated + extend union Foo @deprecated + extend enum Foo @deprecated + extend input Foo @deprecated + ast: | + + Extensions: [Definition] + - + Kind: DefinitionKind("SCALAR") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("OBJECT") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("INTERFACE") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("UNION") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("ENUM") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + - + Kind: DefinitionKind("INPUT_OBJECT") + Name: "Foo" + Directives: [Directive] + - + Name: "deprecated" + +schema definition: + - name: simple + input: | + schema { + query: Query + } + ast: | + + Schema: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("query") + Type: "Query" + +schema extensions: + - name: simple + input: | + extend schema { + mutation: Mutation + } + ast: | + + SchemaExtension: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("mutation") + Type: "Mutation" + + - name: directive only + input: "extend schema @directive" + ast: | + + SchemaExtension: [SchemaDefinition] + - + Directives: [Directive] + - + Name: "directive" + + - name: without anything errors + input: "extend schema" + error: + message: "Unexpected " + locations: [{ line: 1, column: 14}] + +inheritance: + - name: single + input: "type Hello implements World { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "World" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + + - name: multi + input: "type Hello implements Wo & rld { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Wo" + - "rld" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + + - name: multi with leading amp + input: "type Hello implements & Wo & rld { field: String }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Interfaces: [string] + - "Wo" + - "rld" + Fields: [FieldDefinition] + - + Name: "field" + Type: String + +enums: + - name: single value + input: "enum Hello { WORLD }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("ENUM") + Name: "Hello" + EnumValues: [EnumValueDefinition] + - + Name: "WORLD" + + - name: double value + input: "enum Hello { WO, RLD }" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("ENUM") + Name: "Hello" + EnumValues: [EnumValueDefinition] + - + Name: "WO" + - + Name: "RLD" + - name: must define one or more unique enum values + input: | + enum Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 13 }] + +interface: + - name: simple + input: | + interface Hello { + 
world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INTERFACE") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + - name: must define one or more fields + input: | + interface Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 18 }] + + - name: may define intermediate interfaces + input: | + interface IA { + id: ID! + } + + interface IIA implements IA { + id: ID! + } + + type A implements IIA { + id: ID! + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INTERFACE") + Name: "IA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + - + Kind: DefinitionKind("INTERFACE") + Name: "IIA" + Interfaces: [string] + - "IA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + - + Kind: DefinitionKind("OBJECT") + Name: "A" + Interfaces: [string] + - "IIA" + Fields: [FieldDefinition] + - + Name: "id" + Type: ID! + +unions: + - name: simple + input: "union Hello = World" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "World" + + - name: with two types + input: "union Hello = Wo | Rld" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "Wo" + - "Rld" + + - name: with leading pipe + input: "union Hello = | Wo | Rld" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("UNION") + Name: "Hello" + Types: [string] + - "Wo" + - "Rld" + + - name: cant be empty + input: "union Hello = || Wo | Rld" + error: + message: "Expected Name, found |" + locations: [{ line: 1, column: 16 }] + + - name: cant double pipe + input: "union Hello = Wo || Rld" + error: + message: "Expected Name, found |" + locations: [{ line: 1, column: 19 }] + + - name: cant have trailing pipe + input: "union Hello = | Wo | Rld |" + error: + message: "Expected Name, found " + locations: [{ line: 1, column: 27 }] + +scalar: + - name: simple + input: "scalar Hello" + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("SCALAR") + Name: "Hello" + +input object: + - name: simple + input: | + input Hello { + world: String + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("INPUT_OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + + - name: can not have args + input: | + input Hello { + world(foo: Int): String + } + error: + message: "Expected :, found (" + locations: [{ line: 2, column: 8 }] + - name: must define one or more input fields + input: | + input Hello {} + error: + message: "expected at least one definition, found }" + locations: [{ line: 1, column: 14 }] + +directives: + - name: simple + input: directive @foo on FIELD + ast: | + + Directives: [DirectiveDefinition] + - + Name: "foo" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: false + + - name: executable + input: | + directive @onQuery on QUERY + directive @onMutation on MUTATION + directive @onSubscription on SUBSCRIPTION + directive @onField on FIELD + directive @onFragmentDefinition on FRAGMENT_DEFINITION + directive @onFragmentSpread on FRAGMENT_SPREAD + directive @onInlineFragment on INLINE_FRAGMENT + directive @onVariableDefinition on VARIABLE_DEFINITION + ast: | + + Directives: [DirectiveDefinition] + - + Name: "onQuery" + Locations: [DirectiveLocation] + - DirectiveLocation("QUERY") + IsRepeatable: false + - + Name: "onMutation" + Locations: [DirectiveLocation] + - 
DirectiveLocation("MUTATION") + IsRepeatable: false + - + Name: "onSubscription" + Locations: [DirectiveLocation] + - DirectiveLocation("SUBSCRIPTION") + IsRepeatable: false + - + Name: "onField" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: false + - + Name: "onFragmentDefinition" + Locations: [DirectiveLocation] + - DirectiveLocation("FRAGMENT_DEFINITION") + IsRepeatable: false + - + Name: "onFragmentSpread" + Locations: [DirectiveLocation] + - DirectiveLocation("FRAGMENT_SPREAD") + IsRepeatable: false + - + Name: "onInlineFragment" + Locations: [DirectiveLocation] + - DirectiveLocation("INLINE_FRAGMENT") + IsRepeatable: false + - + Name: "onVariableDefinition" + Locations: [DirectiveLocation] + - DirectiveLocation("VARIABLE_DEFINITION") + IsRepeatable: false + + - name: repeatable + input: directive @foo repeatable on FIELD + ast: | + + Directives: [DirectiveDefinition] + - + Name: "foo" + Locations: [DirectiveLocation] + - DirectiveLocation("FIELD") + IsRepeatable: true + + - name: invalid location + input: "directive @foo on FIELD | INCORRECT_LOCATION" + error: + message: 'Unexpected Name "INCORRECT_LOCATION"' + locations: [{ line: 1, column: 27 }] + +fuzzer: + - name: 1 + input: "type o{d(g:[" + error: + message: 'Expected Name, found ' + locations: [{ line: 1, column: 13 }] + - name: 2 + input: "\"\"\"\r" + error: + message: 'Unexpected ' + locations: [{ line: 1, column: 5 }] diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/readme.md b/constraint/vendor/github.com/vektah/gqlparser/v2/readme.md new file mode 100644 index 000000000..9f49ec78e --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/readme.md @@ -0,0 +1,17 @@ +gqlparser [![CircleCI](https://badgen.net/circleci/github/vektah/gqlparser/master)](https://circleci.com/gh/vektah/gqlparser) [![Go Report Card](https://goreportcard.com/badge/github.com/vektah/gqlparser/v2)](https://goreportcard.com/report/github.com/vektah/gqlparser/v2) [![Coverage Status](https://badgen.net/coveralls/c/github/vektah/gqlparser)](https://coveralls.io/github/vektah/gqlparser?branch=master) +=== + +This is a parser for graphql, written to mirror the graphql-js reference implementation as closely while remaining idiomatic and easy to use. + +spec target: June 2018 (Schema definition language, block strings as descriptions, error paths & extension) + +This parser is used by [gqlgen](https://github.com/99designs/gqlgen), and it should be reasonably stable. + +Guiding principles: + + - maintainability: It should be easy to stay up to date with the spec + - well tested: It shouldn't need a graphql server to validate itself. Changes to this repo should be self contained. + - server agnostic: It should be usable by any of the graphql server implementations, and any graphql client tooling. + - idiomatic & stable api: It should follow go best practices, especially around forwards compatibility. + - fast: Where it doesn't impact on the above it should be fast. Avoid unnecessary allocs in hot paths. + - close to reference: Where it doesn't impact on the above, it should stay close to the [graphql/graphql-js](https://github.com/graphql/graphql-js) reference implementation. 
diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/error.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/error.go new file mode 100644 index 000000000..f8f76055a --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/error.go @@ -0,0 +1,55 @@ +package validator + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +type ErrorOption func(err *gqlerror.Error) + +func Message(msg string, args ...interface{}) ErrorOption { + return func(err *gqlerror.Error) { + err.Message += fmt.Sprintf(msg, args...) + } +} + +func At(position *ast.Position) ErrorOption { + return func(err *gqlerror.Error) { + if position == nil { + return + } + err.Locations = append(err.Locations, gqlerror.Location{ + Line: position.Line, + Column: position.Column, + }) + if position.Src.Name != "" { + err.SetFile(position.Src.Name) + } + } +} + +func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption { + suggested := SuggestionList(typed, suggestions) + return func(err *gqlerror.Error) { + if len(suggested) > 0 { + err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?" + } + } +} + +func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption { + suggested := SuggestionList(typed, suggestions) + return func(err *gqlerror.Error) { + if len(suggested) > 0 { + err.Message += " " + prefix + " " + OrList(suggested...) + "?" + } + } +} + +func Suggestf(suggestion string, args ...interface{}) ErrorOption { + return func(err *gqlerror.Error) { + err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?" + } +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go new file mode 100644 index 000000000..f1ab5873f --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go @@ -0,0 +1,39 @@ +package validator + +import "bytes" + +// Given [ A, B, C ] return '"A", "B", or "C"'. +func QuotedOrList(items ...string) string { + itemsQuoted := make([]string, len(items)) + for i, item := range items { + itemsQuoted[i] = `"` + item + `"` + } + return OrList(itemsQuoted...) +} + +// Given [ A, B, C ] return 'A, B, or C'. 
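+// At most the first five items are used; anything beyond that is dropped.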
+func OrList(items ...string) string { + var buf bytes.Buffer + + if len(items) > 5 { + items = items[:5] + } + if len(items) == 2 { + buf.WriteString(items[0]) + buf.WriteString(" or ") + buf.WriteString(items[1]) + return buf.String() + } + + for i, item := range items { + if i != 0 { + if i == len(items)-1 { + buf.WriteString(", or ") + } else { + buf.WriteString(", ") + } + } + buf.WriteString(item) + } + return buf.String() +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go new file mode 100644 index 000000000..c354ec0df --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go @@ -0,0 +1,15 @@ +package validator + +import ( + _ "embed" + "github.com/vektah/gqlparser/v2/ast" +) + +//go:embed prelude.graphql +var preludeGraphql string + +var Prelude = &ast.Source{ + Name: "prelude.graphql", + Input: preludeGraphql, + BuiltIn: true, +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql new file mode 100644 index 000000000..bdca0096a --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/prelude.graphql @@ -0,0 +1,121 @@ +# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema. + +"The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1." +scalar Int + +"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point)." +scalar Float + +"The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text." +scalar String + +"The `Boolean` scalar type represents `true` or `false`." +scalar Boolean + +"""The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.""" +scalar ID + +"The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument." +directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument." +directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"The @deprecated built-in directive is used within the type system definition language to indicate deprecated portions of a GraphQL service's schema, such as deprecated fields on a type, arguments on a field, input fields on an input type, or values of an enum type." +directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE + +"The @specifiedBy built-in directive is used within the type system definition language to provide a scalar specification URL for specifying the behavior of custom scalar types."
+directive @specifiedBy(url: String!) on SCALAR + +type __Schema { + description: String + types: [__Type!]! + queryType: __Type! + mutationType: __Type + subscriptionType: __Type + directives: [__Directive!]! +} + +type __Type { + kind: __TypeKind! + name: String + description: String + # must be non-null for OBJECT and INTERFACE, otherwise null. + fields(includeDeprecated: Boolean = false): [__Field!] + # must be non-null for OBJECT and INTERFACE, otherwise null. + interfaces: [__Type!] + # must be non-null for INTERFACE and UNION, otherwise null. + possibleTypes: [__Type!] + # must be non-null for ENUM, otherwise null. + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + # must be non-null for INPUT_OBJECT, otherwise null. + inputFields: [__InputValue!] + # must be non-null for NON_NULL and LIST, otherwise null. + ofType: __Type + # may be non-null for custom SCALAR, otherwise null. + specifiedByURL: String +} + +type __Field { + name: String! + description: String + args: [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String +} + +type __InputValue { + name: String! + description: String + type: __Type! + defaultValue: String +} + +type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String +} + +enum __TypeKind { + SCALAR + OBJECT + INTERFACE + UNION + ENUM + INPUT_OBJECT + LIST + NON_NULL +} + +type __Directive { + name: String! + description: String + locations: [__DirectiveLocation!]! + args: [__InputValue!]! + isRepeatable: Boolean! +} + +enum __DirectiveLocation { + QUERY + MUTATION + SUBSCRIPTION + FIELD + FRAGMENT_DEFINITION + FRAGMENT_SPREAD + INLINE_FRAGMENT + VARIABLE_DEFINITION + SCHEMA + SCALAR + OBJECT + FIELD_DEFINITION + ARGUMENT_DEFINITION + INTERFACE + UNION + ENUM + ENUM_VALUE + INPUT_OBJECT + INPUT_FIELD_DEFINITION +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go new file mode 100644 index 000000000..aa83c6967 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go @@ -0,0 +1,94 @@ +package validator + +import ( + "fmt" + "sort" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.ObjectDefinition == nil || field.Definition != nil { + return + } + + message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name) + + if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil { + message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?" + } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil { + message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?" + } + + addError( + Message(message), + At(field.Position), + ) + }) + }) +} + +// Go through all of the implementations of type, as well as the interfaces +// that they implement. If any of those types include the provided field, +// suggest them, sorted by how often the type is referenced, starting +// with Interfaces. 
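+//
+// For example, when the requested field is only defined on concrete types
+// that all implement a shared interface, that interface is suggested first,
+// ahead of the individual object types.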
+func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) []string { + if !parent.IsAbstractType() { + return nil + } + + var suggestedObjectTypes []string + var suggestedInterfaceTypes []string + interfaceUsageCount := map[string]int{} + + for _, possibleType := range walker.Schema.GetPossibleTypes(parent) { + field := possibleType.Fields.ForName(name) + if field == nil { + continue + } + + suggestedObjectTypes = append(suggestedObjectTypes, possibleType.Name) + + for _, possibleInterface := range possibleType.Interfaces { + interfaceField := walker.Schema.Types[possibleInterface] + if interfaceField != nil && interfaceField.Fields.ForName(name) != nil { + if interfaceUsageCount[possibleInterface] == 0 { + suggestedInterfaceTypes = append(suggestedInterfaceTypes, possibleInterface) + } + interfaceUsageCount[possibleInterface]++ + } + } + } + + suggestedTypes := append(suggestedInterfaceTypes, suggestedObjectTypes...) + + sort.SliceStable(suggestedTypes, func(i, j int) bool { + typeA, typeB := suggestedTypes[i], suggestedTypes[j] + diff := interfaceUsageCount[typeB] - interfaceUsageCount[typeA] + if diff != 0 { + return diff < 0 + } + return strings.Compare(typeA, typeB) < 0 + }) + + return suggestedTypes +} + +// For the field name provided, determine if there are any similar field names +// that may be the result of a typo. +func getSuggestedFieldNames(parent *ast.Definition, name string) []string { + if parent.Kind != ast.Object && parent.Kind != ast.Interface { + return nil + } + + var possibleFieldNames []string + for _, field := range parent.Fields { + possibleFieldNames = append(possibleFieldNames, field.Name) + } + + return SuggestionList(name, possibleFieldNames) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go new file mode 100644 index 000000000..5215f697e --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go @@ -0,0 +1,39 @@ +package validator + +import ( + "fmt" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) { + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + fragmentType := walker.Schema.Types[inlineFragment.TypeCondition] + if fragmentType == nil || fragmentType.IsCompositeType() { + return + } + + message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition) + + addError( + Message(message), + At(inlineFragment.Position), + ) + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() { + return + } + + message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition) + + addError( + Message(message), + At(fragment.Position), + ) + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go new file mode 100644 index 000000000..da5a79621 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go @@ -0,0 +1,57 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) { + // A GraphQL field is only valid if all supplied arguments are defined by that field. + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil || field.ObjectDefinition == nil { + return + } + for _, arg := range field.Arguments { + def := field.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + var suggestions []string + for _, argDef := range field.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + + addError( + Message(`Unknown argument "%s" on field "%s.%s".`, arg.Name, field.ObjectDefinition.Name, field.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + At(field.Position), + ) + } + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + return + } + for _, arg := range directive.Arguments { + def := directive.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + var suggestions []string + for _, argDef := range directive.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + + addError( + Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + At(directive.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go new file mode 100644 index 000000000..18fe41fd7 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go @@ -0,0 +1,47 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) { + type mayNotBeUsedDirective struct { + Name string + Line int + Column int + } + var seen map[mayNotBeUsedDirective]bool = map[mayNotBeUsedDirective]bool{} + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + addError( + Message(`Unknown directive "@%s".`, directive.Name), + At(directive.Position), + ) + return + } + + for _, loc := range directive.Definition.Locations { + if loc == directive.Location { + return + } + } + + // position must be exists if directive.Definition != nil + tmp := mayNotBeUsedDirective{ + Name: directive.Name, + Line: directive.Position.Line, + Column: directive.Position.Column, + } + + if !seen[tmp] { + addError( + Message(`Directive "@%s" may not be used on %s.`, directive.Name, directive.Location), + At(directive.Position), + ) + seen[tmp] = true + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go new file mode 100644 index 000000000..b7427d0d0 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go @@ -0,0 +1,19 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) { + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if fragmentSpread.Definition == nil { + addError( + Message(`Unknown fragment "%s".`, fragmentSpread.Name), + At(fragmentSpread.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go new file mode 100644 index 000000000..7abfbf62f --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go @@ -0,0 +1,59 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) { + observers.OnVariable(func(walker *Walker, variable *ast.VariableDefinition) { + typeName := variable.Type.Name() + typdef := walker.Schema.Types[typeName] + if typdef != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typeName), + At(variable.Position), + ) + }) + + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + typedName := inlineFragment.TypeCondition + if typedName == "" { + return + } + + def := walker.Schema.Types[typedName] + if def != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typedName), + At(inlineFragment.Position), + ) + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + typeName := fragment.TypeCondition + def := walker.Schema.Types[typeName] + if def != nil { + return + } + + var possibleTypes []string + for _, t := range walker.Schema.Types { + possibleTypes = append(possibleTypes, t.Name) + } + + addError( + Message(`Unknown type "%s".`, typeName), + SuggestListQuoted("Did you mean", typeName, possibleTypes), + At(fragment.Position), + ) + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go new file mode 100644 index 000000000..d27528542 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go @@ -0,0 +1,19 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if operation.Name == "" && len(walker.Document.Operations) > 1 { + addError( + Message(`This anonymous operation must be the only defined operation.`), + At(operation.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go new file mode 100644 index 000000000..da73f3499 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go @@ -0,0 +1,93 @@ +package validator + +import ( + "fmt" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) { + visitedFrags := make(map[string]bool) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + var spreadPath []*ast.FragmentSpread + spreadPathIndexByName := make(map[string]int) + + var recursive func(fragment *ast.FragmentDefinition) + recursive = func(fragment *ast.FragmentDefinition) { + if visitedFrags[fragment.Name] { + return + } + + visitedFrags[fragment.Name] = true + + spreadNodes := getFragmentSpreads(fragment.SelectionSet) + if len(spreadNodes) == 0 { + return + } + spreadPathIndexByName[fragment.Name] = len(spreadPath) + + for _, spreadNode := range spreadNodes { + spreadName := spreadNode.Name + + cycleIndex, ok := spreadPathIndexByName[spreadName] + + spreadPath = append(spreadPath, spreadNode) + if !ok { + spreadFragment := walker.Document.Fragments.ForName(spreadName) + if spreadFragment != nil { + recursive(spreadFragment) + } + } else { + cyclePath := spreadPath[cycleIndex : len(spreadPath)-1] + var fragmentNames []string + for _, fs := range cyclePath { + fragmentNames = append(fragmentNames, fmt.Sprintf(`"%s"`, fs.Name)) + } + var via string + if len(fragmentNames) != 0 { + via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", ")) + } + addError( + Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via), + At(spreadNode.Position), + ) + } + + spreadPath = spreadPath[:len(spreadPath)-1] + } + + delete(spreadPathIndexByName, fragment.Name) + } + + recursive(fragment) + }) + }) +} + +func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread { + var spreads []*ast.FragmentSpread + + setsToVisit := []ast.SelectionSet{node} + + for len(setsToVisit) != 0 { + set := setsToVisit[len(setsToVisit)-1] + setsToVisit = setsToVisit[:len(setsToVisit)-1] + + for _, selection := range set { + switch selection := selection.(type) { + case *ast.FragmentSpread: + spreads = append(spreads, selection) + case *ast.Field: + setsToVisit = append(setsToVisit, selection.SelectionSet) + case *ast.InlineFragment: + setsToVisit = append(setsToVisit, selection.SelectionSet) + } + } + } + + return spreads +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go new file mode 100644 index 000000000..91df727a2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go @@ -0,0 +1,28 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil { + return + } + + if walker.CurrentOperation.Name != "" { + addError( + Message(`Variable "%s" is not defined by operation "%s".`, value, walker.CurrentOperation.Name), + At(value.Position), + ) + } else { + addError( + Message(`Variable "%s" is not defined.`, value), + At(value.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go new file mode 100644 index 000000000..dfc896725 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go @@ -0,0 +1,30 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) { + + inFragmentDefinition := false + fragmentNameUsed := make(map[string]bool) + + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if !inFragmentDefinition { + fragmentNameUsed[fragmentSpread.Name] = true + } + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + inFragmentDefinition = true + if !fragmentNameUsed[fragment.Name] { + addError( + Message(`Fragment "%s" is never used.`, fragment.Name), + At(fragment.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go new file mode 100644 index 000000000..df2e5f4b7 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go @@ -0,0 +1,30 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, varDef := range operation.VariableDefinitions { + if varDef.Used { + continue + } + + if operation.Name != "" { + addError( + Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name), + At(varDef.Position), + ) + } else { + addError( + Message(`Variable "$%s" is never used.`, varDef.Variable), + At(varDef.Position), + ) + } + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go new file mode 100644 index 000000000..38e1efa11 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go @@ -0,0 +1,560 @@ +package validator + +import ( + "bytes" + "fmt" + "reflect" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + + AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) { + /** + * Algorithm: + * + * Conflicts occur when two fields exist in a query which will produce the same + * response name, but represent differing values, thus creating a conflict. + * The algorithm below finds all conflicts via making a series of comparisons + * between fields. In order to compare as few fields as possible, this makes + * a series of comparisons "within" sets of fields and "between" sets of fields. + * + * Given any selection set, a collection produces both a set of fields by + * also including all inline fragments, as well as a list of fragments + * referenced by fragment spreads. + * + * A) Each selection set represented in the document first compares "within" its + * collected set of fields, finding any conflicts between every pair of + * overlapping fields. + * Note: This is the *only time* that a the fields "within" a set are compared + * to each other. After this only fields "between" sets are compared. + * + * B) Also, if any fragment is referenced in a selection set, then a + * comparison is made "between" the original set of fields and the + * referenced fragment. + * + * C) Also, if multiple fragments are referenced, then comparisons + * are made "between" each referenced fragment. + * + * D) When comparing "between" a set of fields and a referenced fragment, first + * a comparison is made between each field in the original set of fields and + * each field in the the referenced set of fields. + * + * E) Also, if any fragment is referenced in the referenced selection set, + * then a comparison is made "between" the original set of fields and the + * referenced fragment (recursively referring to step D). + * + * F) When comparing "between" two fragments, first a comparison is made between + * each field in the first referenced set of fields and each field in the the + * second referenced set of fields. + * + * G) Also, any fragments referenced by the first must be compared to the + * second, and any fragments referenced by the second must be compared to the + * first (recursively referring to step F). + * + * H) When comparing two fields, if both have selection sets, then a comparison + * is made "between" both selection sets, first comparing the set of fields in + * the first selection set with the set of fields in the second. + * + * I) Also, if any fragment is referenced in either selection set, then a + * comparison is made "between" the other set of fields and the + * referenced fragment. + * + * J) Also, if two fragments are referenced in both selection sets, then a + * comparison is made "between" the two fragments. 
+		 *
+		 */
+
+		m := &overlappingFieldsCanBeMergedManager{
+			comparedFragmentPairs: pairSet{data: make(map[string]map[string]bool)},
+		}
+
+		observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(operation.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnField(func(walker *Walker, field *ast.Field) {
+			if walker.CurrentOperation == nil {
+				// Skip fields reached while walking a bare FragmentDefinition: they are
+				// checked again when the fragment is reached from an operation, and
+				// checking both would report every conflict twice.
+				return
+			}
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(field.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(inlineFragment.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+		observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) {
+			m.walker = walker
+			conflicts := m.findConflictsWithinSelectionSet(fragment.SelectionSet)
+			for _, conflict := range conflicts {
+				conflict.addFieldsConflictMessage(addError)
+			}
+		})
+	})
+}
+
+type pairSet struct {
+	data map[string]map[string]bool
+}
+
+func (pairSet *pairSet) Add(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) {
+	add := func(a *ast.FragmentSpread, b *ast.FragmentSpread) {
+		m := pairSet.data[a.Name]
+		if m == nil {
+			m = make(map[string]bool)
+			pairSet.data[a.Name] = m
+		}
+		m[b.Name] = areMutuallyExclusive
+	}
+	add(a, b)
+	add(b, a)
+}
+
+func (pairSet *pairSet) Has(a *ast.FragmentSpread, b *ast.FragmentSpread, areMutuallyExclusive bool) bool {
+	am, ok := pairSet.data[a.Name]
+	if !ok {
+		return false
+	}
+	result, ok := am[b.Name]
+	if !ok {
+		return false
+	}
+
+	// areMutuallyExclusive being false is a superset of being true, so if we
+	// want to know whether this pairSet "has" these two with no exclusivity,
+	// we have to ensure it was added as such.
+	if !areMutuallyExclusive {
+		return !result
+	}
+
+	return true
+}
+
+type sequentialFieldsMap struct {
+	// We can't use a plain map[string][]*ast.Field because iteration order over a Go map is not stable.
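+	// (Editor's note, not upstream wording: Push records each response name in
+	// seq the first time it appears, and Iterator/KeyValueIterator replay that
+	// order, so reported conflicts stay deterministic across runs.)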
+	seq  []string
+	data map[string][]*ast.Field
+}
+
+type fieldIterateEntry struct {
+	ResponseName string
+	Fields       []*ast.Field
+}
+
+func (m *sequentialFieldsMap) Push(responseName string, field *ast.Field) {
+	fields, ok := m.data[responseName]
+	if !ok {
+		m.seq = append(m.seq, responseName)
+	}
+	fields = append(fields, field)
+	m.data[responseName] = fields
+}
+
+func (m *sequentialFieldsMap) Get(responseName string) ([]*ast.Field, bool) {
+	fields, ok := m.data[responseName]
+	return fields, ok
+}
+
+func (m *sequentialFieldsMap) Iterator() [][]*ast.Field {
+	fieldsList := make([][]*ast.Field, 0, len(m.seq))
+	for _, responseName := range m.seq {
+		fields := m.data[responseName]
+		fieldsList = append(fieldsList, fields)
+	}
+	return fieldsList
+}
+
+func (m *sequentialFieldsMap) KeyValueIterator() []*fieldIterateEntry {
+	fieldEntriesList := make([]*fieldIterateEntry, 0, len(m.seq))
+	for _, responseName := range m.seq {
+		fields := m.data[responseName]
+		fieldEntriesList = append(fieldEntriesList, &fieldIterateEntry{
+			ResponseName: responseName,
+			Fields:       fields,
+		})
+	}
+	return fieldEntriesList
+}
+
+type conflictMessageContainer struct {
+	Conflicts []*ConflictMessage
+}
+
+type ConflictMessage struct {
+	Message      string
+	ResponseName string
+	Names        []string
+	SubMessage   []*ConflictMessage
+	Position     *ast.Position
+}
+
+func (m *ConflictMessage) String(buf *bytes.Buffer) {
+	if len(m.SubMessage) == 0 {
+		buf.WriteString(m.Message)
+		return
+	}
+
+	for idx, subMessage := range m.SubMessage {
+		buf.WriteString(`subfields "`)
+		buf.WriteString(subMessage.ResponseName)
+		buf.WriteString(`" conflict because `)
+		subMessage.String(buf)
+		if idx != len(m.SubMessage)-1 {
+			buf.WriteString(" and ")
+		}
+	}
+}
+
+func (m *ConflictMessage) addFieldsConflictMessage(addError AddErrFunc) {
+	var buf bytes.Buffer
+	m.String(&buf)
+	addError(
+		Message(`Fields "%s" conflict because %s. Use different aliases on the fields to fetch both if this was intentional.`, m.ResponseName, buf.String()),
+		At(m.Position),
+	)
+}
+
+type overlappingFieldsCanBeMergedManager struct {
+	walker *Walker
+
+	// per walker
+	comparedFragmentPairs pairSet
+	// cachedFieldsAndFragmentNames interface{}
+
+	// per selectionSet
+	comparedFragments map[string]bool
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsWithinSelectionSet(selectionSet ast.SelectionSet) []*ConflictMessage {
+	if len(selectionSet) == 0 {
+		return nil
+	}
+
+	fieldsMap, fragmentSpreads := getFieldsAndFragmentNames(selectionSet)
+
+	var conflicts conflictMessageContainer
+
+	// (A) Find all conflicts "within" the fieldsMap of this selection set.
+	// Note: this is the *only place* `collectConflictsWithin` is called.
+	m.collectConflictsWithin(&conflicts, fieldsMap)
+
+	m.comparedFragments = make(map[string]bool)
+	for idx, fragmentSpreadA := range fragmentSpreads {
+		// (B) Then collect conflicts between these fields and those represented by
+		// each spread fragment name found.
+		m.collectConflictsBetweenFieldsAndFragment(&conflicts, false, fieldsMap, fragmentSpreadA)
+
+		for _, fragmentSpreadB := range fragmentSpreads[idx+1:] {
+			// (C) Then compare this fragment with all other fragments found in this
+			// selection set to collect conflicts between fragments spread together.
+			// This compares each item in the list of fragment names to every other
+			// item in that same list (except for itself).
+			m.collectConflictsBetweenFragments(&conflicts, false, fragmentSpreadA, fragmentSpreadB)
+		}
+	}
+
+	return conflicts.Conflicts
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFragment(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fieldsMap *sequentialFieldsMap, fragmentSpread *ast.FragmentSpread) {
+	if m.comparedFragments[fragmentSpread.Name] {
+		return
+	}
+	m.comparedFragments[fragmentSpread.Name] = true
+
+	if fragmentSpread.Definition == nil {
+		return
+	}
+
+	fieldsMapB, fragmentSpreads := getFieldsAndFragmentNames(fragmentSpread.Definition.SelectionSet)
+
+	// Do not compare a fragment's fieldsMap to itself.
+	if reflect.DeepEqual(fieldsMap, fieldsMapB) {
+		return
+	}
+
+	// (D) First collect any conflicts between the provided collection of fields
+	// and the collection of fields represented by the given fragment.
+	m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMap, fieldsMapB)
+
+	// (E) Then collect any conflicts between the provided collection of fields
+	// and any fragment names found in the given fragment.
+	baseFragmentSpread := fragmentSpread
+	for _, fragmentSpread := range fragmentSpreads {
+		if fragmentSpread.Name == baseFragmentSpread.Name {
+			continue
+		}
+		m.collectConflictsBetweenFieldsAndFragment(conflicts, areMutuallyExclusive, fieldsMap, fragmentSpread)
+	}
+}
+
+func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+	var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread)
+	check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) {
+
+		if fragmentSpreadA.Name == fragmentSpreadB.Name {
+			return
+		}
+
+		if m.comparedFragmentPairs.Has(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive) {
+			return
+		}
+		m.comparedFragmentPairs.Add(fragmentSpreadA, fragmentSpreadB, areMutuallyExclusive)
+
+		if fragmentSpreadA.Definition == nil {
+			return
+		}
+		if fragmentSpreadB.Definition == nil {
+			return
+		}
+
+		fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(fragmentSpreadA.Definition.SelectionSet)
+		fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(fragmentSpreadB.Definition.SelectionSet)
+
+		// (F) First, collect all conflicts between these two collections of fields
+		// (not including any nested fragments).
+		m.collectConflictsBetween(conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB)
+
+		// (G) Then collect conflicts between the first fragment and any nested
+		// fragments spread in the second fragment.
+		for _, fragmentSpread := range fragmentSpreadsB {
+			check(fragmentSpreadA, fragmentSpread)
+		}
+		// (G) Then collect conflicts between the second fragment and any nested
+		// fragments spread in the first fragment.
+		for _, fragmentSpread := range fragmentSpreadsA {
+			check(fragmentSpread, fragmentSpreadB)
+		}
+	}
+
+	check(fragmentSpreadA, fragmentSpreadB)
+}
+
+func (m *overlappingFieldsCanBeMergedManager) findConflictsBetweenSubSelectionSets(areMutuallyExclusive bool, selectionSetA ast.SelectionSet, selectionSetB ast.SelectionSet) *conflictMessageContainer {
+	var conflicts conflictMessageContainer
+
+	fieldsMapA, fragmentSpreadsA := getFieldsAndFragmentNames(selectionSetA)
+	fieldsMapB, fragmentSpreadsB := getFieldsAndFragmentNames(selectionSetB)
+
+	// (H) First, collect all conflicts between these two collections of fields.
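+	// (Editor's illustration, not upstream wording: this is the step reached
+	// when two occurrences of the same response name both carry selection sets,
+	// e.g. merging { dog { owner { name } } } with
+	// { dog { owner { name: nickname } } } descends here via findConflict on
+	// "owner" before the subfield conflict on "name" is reported.)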
+ m.collectConflictsBetween(&conflicts, areMutuallyExclusive, fieldsMapA, fieldsMapB) + + // (I) Then collect conflicts between the first collection of fields and + // those referenced by each fragment name associated with the second. + for _, fragmentSpread := range fragmentSpreadsB { + m.comparedFragments = make(map[string]bool) + m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapA, fragmentSpread) + } + + // (I) Then collect conflicts between the second collection of fields and + // those referenced by each fragment name associated with the first. + for _, fragmentSpread := range fragmentSpreadsA { + m.comparedFragments = make(map[string]bool) + m.collectConflictsBetweenFieldsAndFragment(&conflicts, areMutuallyExclusive, fieldsMapB, fragmentSpread) + } + + // (J) Also collect conflicts between any fragment names by the first and + // fragment names by the second. This compares each item in the first set of + // names to each item in the second set of names. + for _, fragmentSpreadA := range fragmentSpreadsA { + for _, fragmentSpreadB := range fragmentSpreadsB { + m.collectConflictsBetweenFragments(&conflicts, areMutuallyExclusive, fragmentSpreadA, fragmentSpreadB) + } + } + + if len(conflicts.Conflicts) == 0 { + return nil + } + + return &conflicts +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsWithin(conflicts *conflictMessageContainer, fieldsMap *sequentialFieldsMap) { + for _, fields := range fieldsMap.Iterator() { + for idx, fieldA := range fields { + for _, fieldB := range fields[idx+1:] { + conflict := m.findConflict(false, fieldA, fieldB) + if conflict != nil { + conflicts.Conflicts = append(conflicts.Conflicts, conflict) + } + } + } + } +} + +func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetween(conflicts *conflictMessageContainer, parentFieldsAreMutuallyExclusive bool, fieldsMapA *sequentialFieldsMap, fieldsMapB *sequentialFieldsMap) { + for _, fieldsEntryA := range fieldsMapA.KeyValueIterator() { + fieldsB, ok := fieldsMapB.Get(fieldsEntryA.ResponseName) + if !ok { + continue + } + for _, fieldA := range fieldsEntryA.Fields { + for _, fieldB := range fieldsB { + conflict := m.findConflict(parentFieldsAreMutuallyExclusive, fieldA, fieldB) + if conflict != nil { + conflicts.Conflicts = append(conflicts.Conflicts, conflict) + } + } + } + } +} + +func (m *overlappingFieldsCanBeMergedManager) findConflict(parentFieldsAreMutuallyExclusive bool, fieldA *ast.Field, fieldB *ast.Field) *ConflictMessage { + if fieldA.ObjectDefinition == nil || fieldB.ObjectDefinition == nil { + return nil + } + + areMutuallyExclusive := parentFieldsAreMutuallyExclusive + if !areMutuallyExclusive { + tmp := fieldA.ObjectDefinition.Name != fieldB.ObjectDefinition.Name + tmp = tmp && fieldA.ObjectDefinition.Kind == ast.Object + tmp = tmp && fieldB.ObjectDefinition.Kind == ast.Object + tmp = tmp && fieldA.Definition != nil && fieldB.Definition != nil + areMutuallyExclusive = tmp + } + + fieldNameA := fieldA.Name + if fieldA.Alias != "" { + fieldNameA = fieldA.Alias + } + + if !areMutuallyExclusive { + // Two aliases must refer to the same field. + if fieldA.Name != fieldB.Name { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: fmt.Sprintf(`"%s" and "%s" are different fields`, fieldA.Name, fieldB.Name), + Position: fieldB.Position, + } + } + + // Two field calls must have the same arguments. 
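+		// (Editor's illustration, not upstream wording: for example
+		//   { dog { doesKnowCommand(dogCommand: SIT) doesKnowCommand(dogCommand: HEEL) } }
+		// fails this check, because both calls would have to be returned under
+		// the single response name "doesKnowCommand".)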
+ if !sameArguments(fieldA.Arguments, fieldB.Arguments) { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: "they have differing arguments", + Position: fieldB.Position, + } + } + } + + if fieldA.Definition != nil && fieldB.Definition != nil && doTypesConflict(m.walker, fieldA.Definition.Type, fieldB.Definition.Type) { + return &ConflictMessage{ + ResponseName: fieldNameA, + Message: fmt.Sprintf(`they return conflicting types "%s" and "%s"`, fieldA.Definition.Type.String(), fieldB.Definition.Type.String()), + Position: fieldB.Position, + } + } + + // Collect and compare sub-fields. Use the same "visited fragment names" list + // for both collections so fields in a fragment reference are never + // compared to themselves. + conflicts := m.findConflictsBetweenSubSelectionSets(areMutuallyExclusive, fieldA.SelectionSet, fieldB.SelectionSet) + if conflicts == nil { + return nil + } + return &ConflictMessage{ + ResponseName: fieldNameA, + SubMessage: conflicts.Conflicts, + Position: fieldB.Position, + } +} + +func sameArguments(args1 []*ast.Argument, args2 []*ast.Argument) bool { + if len(args1) != len(args2) { + return false + } + for _, arg1 := range args1 { + var matched bool + for _, arg2 := range args2 { + if arg1.Name == arg2.Name && sameValue(arg1.Value, arg2.Value) { + matched = true + break + } + } + if !matched { + return false + } + } + return true +} + +func sameValue(value1 *ast.Value, value2 *ast.Value) bool { + if value1.Kind != value2.Kind { + return false + } + if value1.Raw != value2.Raw { + return false + } + return true +} + +func doTypesConflict(walker *Walker, type1 *ast.Type, type2 *ast.Type) bool { + if type1.Elem != nil { + if type2.Elem != nil { + return doTypesConflict(walker, type1.Elem, type2.Elem) + } + return true + } + if type2.Elem != nil { + return true + } + if type1.NonNull && !type2.NonNull { + return true + } + if !type1.NonNull && type2.NonNull { + return true + } + + t1 := walker.Schema.Types[type1.NamedType] + t2 := walker.Schema.Types[type2.NamedType] + if (t1.Kind == ast.Scalar || t1.Kind == ast.Enum) && (t2.Kind == ast.Scalar || t2.Kind == ast.Enum) { + return t1.Name != t2.Name + } + + return false +} + +func getFieldsAndFragmentNames(selectionSet ast.SelectionSet) (*sequentialFieldsMap, []*ast.FragmentSpread) { + fieldsMap := sequentialFieldsMap{ + data: make(map[string][]*ast.Field), + } + var fragmentSpreads []*ast.FragmentSpread + + var walk func(selectionSet ast.SelectionSet) + walk = func(selectionSet ast.SelectionSet) { + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + responseName := selection.Name + if selection.Alias != "" { + responseName = selection.Alias + } + fieldsMap.Push(responseName, selection) + + case *ast.InlineFragment: + walk(selection.SelectionSet) + + case *ast.FragmentSpread: + fragmentSpreads = append(fragmentSpreads, selection) + } + } + } + walk(selectionSet) + + return &fieldsMap, fragmentSpreads +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go new file mode 100644 index 000000000..a3f795c97 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go @@ -0,0 +1,68 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) { + + validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) { + if parentDef == nil { + return + } + + var parentDefs []*ast.Definition + switch parentDef.Kind { + case ast.Object: + parentDefs = []*ast.Definition{parentDef} + case ast.Interface, ast.Union: + parentDefs = walker.Schema.GetPossibleTypes(parentDef) + default: + return + } + + fragmentDefType := walker.Schema.Types[fragmentName] + if fragmentDefType == nil { + return + } + if !fragmentDefType.IsCompositeType() { + // checked by FragmentsOnCompositeTypes + return + } + fragmentDefs := walker.Schema.GetPossibleTypes(fragmentDefType) + + for _, fragmentDef := range fragmentDefs { + for _, parentDef := range parentDefs { + if parentDef.Name == fragmentDef.Name { + return + } + } + } + + emitError() + } + + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + validate(walker, inlineFragment.ObjectDefinition, inlineFragment.TypeCondition, func() { + addError( + Message(`Fragment cannot be spread here as objects of type "%s" can never be of type "%s".`, inlineFragment.ObjectDefinition.Name, inlineFragment.TypeCondition), + At(inlineFragment.Position), + ) + }) + }) + + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if fragmentSpread.Definition == nil { + return + } + validate(walker, fragmentSpread.ObjectDefinition, fragmentSpread.Definition.TypeCondition, func() { + addError( + Message(`Fragment "%s" cannot be spread here as objects of type "%s" can never be of type "%s".`, fragmentSpread.Name, fragmentSpread.ObjectDefinition.Name, fragmentSpread.Definition.TypeCondition), + At(fragmentSpread.Position), + ) + }) + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go new file mode 100644 index 000000000..d8ed65209 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go @@ -0,0 +1,62 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil { + return + } + + argDef: + for _, argDef := range field.Definition.Arguments { + if !argDef.Type.NonNull { + continue + } + if argDef.DefaultValue != nil { + continue + } + for _, arg := range field.Arguments { + if arg.Name == argDef.Name { + continue argDef + } + } + + addError( + Message(`Field "%s" argument "%s" of type "%s" is required, but it was not provided.`, field.Name, argDef.Name, argDef.Type.String()), + At(field.Position), + ) + } + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + return + } + + argDef: + for _, argDef := range directive.Definition.Arguments { + if !argDef.Type.NonNull { + continue + } + if argDef.DefaultValue != nil { + continue + } + for _, arg := range directive.Arguments { + if arg.Name == argDef.Name { + continue argDef + } + } + + addError( + Message(`Directive "@%s" argument "%s" of type "%s" is required, but it was not provided.`, directive.Definition.Name, argDef.Name, argDef.Type.String()), + At(directive.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go new file mode 100644 index 000000000..718bc6834 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go @@ -0,0 +1,36 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil { + return + } + + fieldType := walker.Schema.Types[field.Definition.Type.Name()] + if fieldType == nil { + return + } + + if fieldType.IsLeafType() && len(field.SelectionSet) > 0 { + addError( + Message(`Field "%s" must not have a selection since type "%s" has no subfields.`, field.Name, fieldType.Name), + At(field.Position), + ) + } + + if !fieldType.IsLeafType() && len(field.SelectionSet) == 0 { + addError( + Message(`Field "%s" of type "%s" must have a selection of subfields.`, field.Name, field.Definition.Type.String()), + Suggestf(`"%s { ... }"`, field.Name), + At(field.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go new file mode 100644 index 000000000..a9e5bf633 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go @@ -0,0 +1,86 @@ +package validator + +import ( + "strconv" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if walker.Schema.Subscription == nil || operation.Operation != ast.Subscription { + return + } + + fields := retrieveTopFieldNames(operation.SelectionSet) + + name := "Anonymous Subscription" + if operation.Name != "" { + name = `Subscription ` + strconv.Quote(operation.Name) + } + + if len(fields) > 1 { + addError( + Message(`%s must select only one top level field.`, name), + At(fields[1].position), + ) + } + + for _, field := range fields { + if strings.HasPrefix(field.name, "__") { + addError( + Message(`%s must not select an introspection top level field.`, name), + At(field.position), + ) + } + } + }) + }) +} + +type topField struct { + name string + position *ast.Position +} + +func retrieveTopFieldNames(selectionSet ast.SelectionSet) []*topField { + fields := []*topField{} + inFragmentRecursive := map[string]bool{} + var walk func(selectionSet ast.SelectionSet) + walk = func(selectionSet ast.SelectionSet) { + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + fields = append(fields, &topField{ + name: selection.Name, + position: selection.GetPosition(), + }) + case *ast.InlineFragment: + walk(selection.SelectionSet) + case *ast.FragmentSpread: + if selection.Definition == nil { + return + } + fragment := selection.Definition.Name + if !inFragmentRecursive[fragment] { + inFragmentRecursive[fragment] = true + walk(selection.Definition.SelectionSet) + } + } + } + } + walk(selectionSet) + + seen := make(map[string]bool, len(fields)) + uniquedFields := make([]*topField, 0, len(fields)) + for _, field := range fields { + if !seen[field.name] { + uniquedFields = append(uniquedFields, field) + } + seen[field.name] = true + } + return uniquedFields +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go new file mode 100644 index 000000000..1d9a50ab2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go @@ -0,0 +1,33 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + checkUniqueArgs(field.Arguments, addError) + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + checkUniqueArgs(directive.Arguments, addError) + }) + }) +} + +func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) { + knownArgNames := map[string]int{} + + for _, arg := range args { + if knownArgNames[arg.Name] == 1 { + addError( + Message(`There can be only one argument named "%s".`, arg.Name), + At(arg.Position), + ) + } + + knownArgNames[arg.Name]++ + } +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go new file mode 100644 index 000000000..52dfb21eb --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go @@ -0,0 +1,24 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) { + observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) { + seen := map[string]bool{} + + for _, dir := range directives { + if dir.Name != "repeatable" && seen[dir.Name] { + addError( + Message(`The directive "@%s" can only be used once at this location.`, dir.Name), + At(dir.Position), + ) + } + seen[dir.Name] = true + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go new file mode 100644 index 000000000..8c348aea0 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go @@ -0,0 +1,22 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) { + seenFragments := map[string]bool{} + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + if seenFragments[fragment.Name] { + addError( + Message(`There can be only one fragment named "%s".`, fragment.Name), + At(fragment.Position), + ) + } + seenFragments[fragment.Name] = true + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go new file mode 100644 index 000000000..092be671c --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go @@ -0,0 +1,27 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Kind != ast.ObjectValue { + return + } + + seen := map[string]bool{} + for _, field := range value.Children { + if seen[field.Name] { + addError( + Message(`There can be only one input field named "%s".`, field.Name), + At(field.Position), + ) + } + seen[field.Name] = true + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go new file mode 100644 index 000000000..4d41b60ae --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go @@ -0,0 +1,22 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) { + seen := map[string]bool{} + + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if seen[operation.Name] { + addError( + Message(`There can be only one operation named "%s".`, operation.Name), + At(operation.Position), + ) + } + seen[operation.Name] = true + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go new file mode 100644 index 000000000..6481ef4cd --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go @@ -0,0 +1,24 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + seen := map[string]int{} + for _, def := range operation.VariableDefinitions { + // add the same error only once per a variable. + if seen[def.Variable] == 1 { + addError( + Message(`There can be only one variable named "$%s".`, def.Variable), + At(def.Position), + ) + } + seen[def.Variable]++ + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go new file mode 100644 index 000000000..22bea7711 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go @@ -0,0 +1,168 @@ +package validator + +import ( + "errors" + "fmt" + "strconv" + + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Definition == nil || value.ExpectedType == nil { + return + } + + if value.Kind == ast.NullValue && value.ExpectedType.NonNull { + addError( + Message(`Expected value of type "%s", found %s.`, value.ExpectedType.String(), value.String()), + At(value.Position), + ) + } + + if value.Definition.Kind == ast.Scalar { + // Skip custom validating scalars + if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") { + return + } + } + + var possibleEnums []string + if value.Definition.Kind == ast.Enum { + for _, val := range value.Definition.EnumValues { + possibleEnums = append(possibleEnums, val.Name) + } + } + + rawVal, err := value.Value(nil) + if err != nil { + unexpectedTypeMessage(addError, value) + } + + switch value.Kind { + case ast.NullValue: + return + case ast.ListValue: + if value.ExpectedType.Elem == nil { + unexpectedTypeMessage(addError, value) + return + } + + case ast.IntValue: + if !value.Definition.OneOf("Int", "Float", "ID") { + unexpectedTypeMessage(addError, value) + } + + case ast.FloatValue: + if !value.Definition.OneOf("Float") { + unexpectedTypeMessage(addError, value) + } + + case ast.StringValue, ast.BlockValue: + if value.Definition.Kind == ast.Enum { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Enum "%s" cannot represent non-enum value: %s.`, value.ExpectedType.String(), value.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } else if !value.Definition.OneOf("String", "ID") { + unexpectedTypeMessage(addError, value) + } + + case ast.EnumValue: + if value.Definition.Kind != ast.Enum { + rawValStr := fmt.Sprint(rawVal) + addError( + unexpectedTypeMessageOnly(value), + SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } else if value.Definition.EnumValues.ForName(value.Raw) == nil { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Value "%s" does not exist in "%s" enum.`, value.String(), value.ExpectedType.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } + + case ast.BooleanValue: + if !value.Definition.OneOf("Boolean") { + unexpectedTypeMessage(addError, value) + } + + case ast.ObjectValue: + + for _, field := range value.Definition.Fields { + if field.Type.NonNull { + fieldValue := value.Children.ForName(field.Name) + if fieldValue == nil && field.DefaultValue == nil { + addError( + Message(`Field "%s.%s" of required type "%s" was not provided.`, value.Definition.Name, field.Name, field.Type.String()), + At(value.Position), + ) + continue + } + } + } + + for _, fieldValue := range value.Children { + if value.Definition.Fields.ForName(fieldValue.Name) == nil { + var suggestions []string + for _, fieldValue := range value.Definition.Fields { + suggestions = append(suggestions, fieldValue.Name) + } + + addError( + Message(`Field "%s" is not defined by type "%s".`, fieldValue.Name, value.Definition.Name), + SuggestListQuoted("Did you mean", fieldValue.Name, suggestions), + At(fieldValue.Position), + ) + } + } + + case ast.Variable: + return + + default: + panic(fmt.Errorf("unhandled %T", value)) + } + }) + }) +} + +func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) { + addError( + unexpectedTypeMessageOnly(v), + At(v.Position), 
+ ) +} + +func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption { + switch v.ExpectedType.String() { + case "Int", "Int!": + if _, err := strconv.ParseInt(v.Raw, 10, 32); err != nil && errors.Is(err, strconv.ErrRange) { + return Message(`Int cannot represent non 32-bit signed integer value: %s`, v.String()) + } + return Message(`Int cannot represent non-integer value: %s`, v.String()) + case "String", "String!", "[String]": + return Message(`String cannot represent a non string value: %s`, v.String()) + case "Boolean", "Boolean!": + return Message(`Boolean cannot represent a non boolean value: %s`, v.String()) + case "Float", "Float!": + return Message(`Float cannot represent non numeric value: %s`, v.String()) + case "ID", "ID!": + return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String()) + //case "Enum": + // return Message(`Enum "%s" cannot represent non-enum value: %s`, v.ExpectedType.String(), v.String()) + default: + if v.Definition.Kind == ast.Enum { + return Message(`Enum "%s" cannot represent non-enum value: %s.`, v.ExpectedType.String(), v.String()) + } + return Message(`Expected value of type "%s", found %s.`, v.ExpectedType.String(), v.String()) + } +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go new file mode 100644 index 000000000..4ea94e5a8 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go @@ -0,0 +1,28 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . "github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, def := range operation.VariableDefinitions { + if def.Definition == nil { + continue + } + if !def.Definition.IsInputType() { + addError( + Message( + `Variable "$%s" cannot be non-input type "%s".`, + def.Variable, + def.Type.String(), + ), + At(def.Position), + ) + } + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go new file mode 100644 index 000000000..eef743540 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go @@ -0,0 +1,38 @@ +package validator + +import ( + "github.com/vektah/gqlparser/v2/ast" + . 
"github.com/vektah/gqlparser/v2/validator" +) + +func init() { + AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil { + return + } + + tmp := *value.ExpectedType + + // todo: move me into walk + // If there is a default non nullable types can be null + if value.VariableDefinition.DefaultValue != nil && value.VariableDefinition.DefaultValue.Kind != ast.NullValue { + if value.ExpectedType.NonNull { + tmp.NonNull = false + } + } + + if !value.VariableDefinition.Type.IsCompatible(&tmp) { + addError( + Message( + `Variable "%s" of type "%s" used in position expecting type "%s".`, + value, + value.VariableDefinition.Type.String(), + value.ExpectedType.String(), + ), + At(value.Position), + ) + } + }) + }) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema.go new file mode 100644 index 000000000..57dc549f9 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema.go @@ -0,0 +1,490 @@ +package validator + +import ( + "sort" + "strconv" + "strings" + + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/parser" +) + +func LoadSchema(inputs ...*Source) (*Schema, *gqlerror.Error) { + ast, err := parser.ParseSchemas(inputs...) + if err != nil { + return nil, err + } + return ValidateSchemaDocument(ast) +} + +func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, *gqlerror.Error) { + schema := Schema{ + Types: map[string]*Definition{}, + Directives: map[string]*DirectiveDefinition{}, + PossibleTypes: map[string][]*Definition{}, + Implements: map[string][]*Definition{}, + } + + for i, def := range ast.Definitions { + if schema.Types[def.Name] != nil { + return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name) + } + schema.Types[def.Name] = ast.Definitions[i] + } + + defs := append(DefinitionList{}, ast.Definitions...) + + for _, ext := range ast.Extensions { + def := schema.Types[ext.Name] + if def == nil { + schema.Types[ext.Name] = &Definition{ + Kind: ext.Kind, + Name: ext.Name, + Position: ext.Position, + } + def = schema.Types[ext.Name] + defs = append(defs, def) + } + + if def.Kind != ext.Kind { + return nil, gqlerror.ErrorPosf(ext.Position, "Cannot extend type %s because the base type is a %s, not %s.", ext.Name, def.Kind, ext.Kind) + } + + def.Directives = append(def.Directives, ext.Directives...) + def.Interfaces = append(def.Interfaces, ext.Interfaces...) + def.Fields = append(def.Fields, ext.Fields...) + def.Types = append(def.Types, ext.Types...) + def.EnumValues = append(def.EnumValues, ext.EnumValues...) 
+	}
+
+	for _, def := range defs {
+		switch def.Kind {
+		case Union:
+			for _, t := range def.Types {
+				schema.AddPossibleType(def.Name, schema.Types[t])
+				schema.AddImplements(t, def)
+			}
+		case InputObject, Object:
+			for _, intf := range def.Interfaces {
+				schema.AddPossibleType(intf, def)
+				schema.AddImplements(def.Name, schema.Types[intf])
+			}
+			schema.AddPossibleType(def.Name, def)
+		case Interface:
+			for _, intf := range def.Interfaces {
+				schema.AddPossibleType(intf, def)
+				schema.AddImplements(def.Name, schema.Types[intf])
+			}
+		}
+	}
+
+	for i, dir := range ast.Directives {
+		if schema.Directives[dir.Name] != nil {
+			return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name)
+		}
+		schema.Directives[dir.Name] = ast.Directives[i]
+	}
+
+	if len(ast.Schema) > 1 {
+		return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.")
+	}
+
+	if len(ast.Schema) == 1 {
+		schema.Description = ast.Schema[0].Description
+		for _, entrypoint := range ast.Schema[0].OperationTypes {
+			def := schema.Types[entrypoint.Type]
+			if def == nil {
+				return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+			}
+			switch entrypoint.Operation {
+			case Query:
+				schema.Query = def
+			case Mutation:
+				schema.Mutation = def
+			case Subscription:
+				schema.Subscription = def
+			}
+		}
+	}
+
+	for _, ext := range ast.SchemaExtension {
+		for _, entrypoint := range ext.OperationTypes {
+			def := schema.Types[entrypoint.Type]
+			if def == nil {
+				return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type)
+			}
+			switch entrypoint.Operation {
+			case Query:
+				schema.Query = def
+			case Mutation:
+				schema.Mutation = def
+			case Subscription:
+				schema.Subscription = def
+			}
+		}
+	}
+
+	if err := validateTypeDefinitions(&schema); err != nil {
+		return nil, err
+	}
+
+	if err := validateDirectiveDefinitions(&schema); err != nil {
+		return nil, err
+	}
+
+	// Root operation type names should be inferred only when a `schema` block is
+	// **not** provided; when it is, `Mutation` and `Subscription` are simply
+	// valid object types and are not assigned as root operations on the schema.
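+	//
+	// (Editor's illustration, not upstream wording: given only
+	//
+	//   type Query { a: String }
+	//   type Mutation { b: String }
+	//
+	// both Query and Mutation are picked up as root operation types below,
+	// whereas with an explicit `schema { query: Query }` block, Mutation would
+	// remain a plain object type.)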
+ if len(ast.Schema) == 0 { + if schema.Query == nil && schema.Types["Query"] != nil { + schema.Query = schema.Types["Query"] + } + + if schema.Mutation == nil && schema.Types["Mutation"] != nil { + schema.Mutation = schema.Types["Mutation"] + } + + if schema.Subscription == nil && schema.Types["Subscription"] != nil { + schema.Subscription = schema.Types["Subscription"] + } + } + + if schema.Query != nil { + schema.Query.Fields = append( + schema.Query.Fields, + &FieldDefinition{ + Name: "__schema", + Type: NonNullNamedType("__Schema", nil), + }, + &FieldDefinition{ + Name: "__type", + Type: NamedType("__Type", nil), + Arguments: ArgumentDefinitionList{ + {Name: "name", Type: NonNullNamedType("String", nil)}, + }, + }, + ) + } + + return &schema, nil +} + +func validateTypeDefinitions(schema *Schema) *gqlerror.Error { + types := make([]string, 0, len(schema.Types)) + for typ := range schema.Types { + types = append(types, typ) + } + sort.Strings(types) + for _, typ := range types { + err := validateDefinition(schema, schema.Types[typ]) + if err != nil { + return err + } + } + return nil +} + +func validateDirectiveDefinitions(schema *Schema) *gqlerror.Error { + directives := make([]string, 0, len(schema.Directives)) + for directive := range schema.Directives { + directives = append(directives, directive) + } + sort.Strings(directives) + for _, directive := range directives { + err := validateDirective(schema, schema.Directives[directive]) + if err != nil { + return err + } + } + return nil +} + +func validateDirective(schema *Schema, def *DirectiveDefinition) *gqlerror.Error { + if err := validateName(def.Position, def.Name); err != nil { + // now, GraphQL spec doesn't have reserved directive name + return err + } + + return validateArgs(schema, def.Arguments, def) +} + +func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error { + for _, field := range def.Fields { + if err := validateName(field.Position, field.Name); err != nil { + // now, GraphQL spec doesn't have reserved field name + return err + } + if err := validateTypeRef(schema, field.Type); err != nil { + return err + } + if err := validateArgs(schema, field.Arguments, nil); err != nil { + return err + } + wantDirLocation := LocationFieldDefinition + if def.Kind == InputObject { + wantDirLocation = LocationInputFieldDefinition + } + if err := validateDirectives(schema, field.Directives, wantDirLocation, nil); err != nil { + return err + } + } + + for _, typ := range def.Types { + typDef := schema.Types[typ] + if typDef == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(typ)) + } + if !isValidKind(typDef.Kind, Object) { + return gqlerror.ErrorPosf(def.Position, "%s type %s must be %s.", def.Kind, strconv.Quote(typ), kindList(Object)) + } + } + + for _, intf := range def.Interfaces { + if err := validateImplements(schema, def, intf); err != nil { + return err + } + } + + switch def.Kind { + case Object, Interface: + if len(def.Fields) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more fields.", def.Kind, def.Name) + } + for _, field := range def.Fields { + if typ, ok := schema.Types[field.Type.Name()]; ok { + if !isValidKind(typ.Kind, Scalar, Object, Interface, Union, Enum) { + return gqlerror.ErrorPosf(field.Position, "%s %s: field must be one of %s.", def.Kind, def.Name, kindList(Scalar, Object, Interface, Union, Enum)) + } + } + } + case Enum: + if len(def.EnumValues) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more 
unique enum values.", def.Kind, def.Name) + } + case InputObject: + if len(def.Fields) == 0 { + return gqlerror.ErrorPosf(def.Position, "%s %s: must define one or more input fields.", def.Kind, def.Name) + } + for _, field := range def.Fields { + if typ, ok := schema.Types[field.Type.Name()]; ok { + if !isValidKind(typ.Kind, Scalar, Enum, InputObject) { + return gqlerror.ErrorPosf(field.Position, "%s %s: field must be one of %s.", typ.Kind, field.Name, kindList(Scalar, Enum, InputObject)) + } + } + } + } + + for idx, field1 := range def.Fields { + for _, field2 := range def.Fields[idx+1:] { + if field1.Name == field2.Name { + return gqlerror.ErrorPosf(field2.Position, "Field %s.%s can only be defined once.", def.Name, field2.Name) + } + } + } + + if !def.BuiltIn { + // GraphQL spec has reserved type names a lot! + err := validateName(def.Position, def.Name) + if err != nil { + return err + } + } + + return validateDirectives(schema, def.Directives, DirectiveLocation(def.Kind), nil) +} + +func validateTypeRef(schema *Schema, typ *Type) *gqlerror.Error { + if schema.Types[typ.Name()] == nil { + return gqlerror.ErrorPosf(typ.Position, "Undefined type %s.", typ.Name()) + } + return nil +} + +func validateArgs(schema *Schema, args ArgumentDefinitionList, currentDirective *DirectiveDefinition) *gqlerror.Error { + for _, arg := range args { + if err := validateName(arg.Position, arg.Name); err != nil { + // now, GraphQL spec doesn't have reserved argument name + return err + } + if err := validateTypeRef(schema, arg.Type); err != nil { + return err + } + def := schema.Types[arg.Type.Name()] + if !def.IsInputType() { + return gqlerror.ErrorPosf( + arg.Position, + "cannot use %s as argument %s because %s is not a valid input type", + arg.Type.String(), + arg.Name, + def.Kind, + ) + } + if err := validateDirectives(schema, arg.Directives, LocationArgumentDefinition, currentDirective); err != nil { + return err + } + } + return nil +} + +func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLocation, currentDirective *DirectiveDefinition) *gqlerror.Error { + for _, dir := range dirs { + if err := validateName(dir.Position, dir.Name); err != nil { + // now, GraphQL spec doesn't have reserved directive name + return err + } + if currentDirective != nil && dir.Name == currentDirective.Name { + return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name) + } + if schema.Directives[dir.Name] == nil { + return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name) + } + validKind := false + for _, dirLocation := range schema.Directives[dir.Name].Locations { + if dirLocation == location { + validKind = true + break + } + } + if !validKind { + return gqlerror.ErrorPosf(dir.Position, "Directive %s is not applicable on %s.", dir.Name, location) + } + dir.Definition = schema.Directives[dir.Name] + } + return nil +} + +func validateImplements(schema *Schema, def *Definition, intfName string) *gqlerror.Error { + // see validation rules at the bottom of + // https://facebook.github.io/graphql/October2021/#sec-Objects + intf := schema.Types[intfName] + if intf == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) + } + if intf.Kind != Interface { + return gqlerror.ErrorPosf(def.Position, "%s is a non interface type %s.", strconv.Quote(intfName), intf.Kind) + } + for _, requiredField := range intf.Fields { + foundField := def.Fields.ForName(requiredField.Name) + if foundField == nil { + return 
gqlerror.ErrorPosf(def.Position, + `For %s to implement %s it must have a field called %s.`, + def.Name, intf.Name, requiredField.Name, + ) + } + + if !isCovariant(schema, requiredField.Type, foundField.Type) { + return gqlerror.ErrorPosf(foundField.Position, + `For %s to implement %s the field %s must have type %s.`, + def.Name, intf.Name, requiredField.Name, requiredField.Type.String(), + ) + } + + for _, requiredArg := range requiredField.Arguments { + foundArg := foundField.Arguments.ForName(requiredArg.Name) + if foundArg == nil { + return gqlerror.ErrorPosf(foundField.Position, + `For %s to implement %s the field %s must have the same arguments but it is missing %s.`, + def.Name, intf.Name, requiredField.Name, requiredArg.Name, + ) + } + + if !requiredArg.Type.IsCompatible(foundArg.Type) { + return gqlerror.ErrorPosf(foundArg.Position, + `For %s to implement %s the field %s must have the same arguments but %s has the wrong type.`, + def.Name, intf.Name, requiredField.Name, requiredArg.Name, + ) + } + } + for _, foundArgs := range foundField.Arguments { + if requiredField.Arguments.ForName(foundArgs.Name) == nil && foundArgs.Type.NonNull && foundArgs.DefaultValue == nil { + return gqlerror.ErrorPosf(foundArgs.Position, + `For %s to implement %s any additional arguments on %s must be optional or have a default value but %s is required.`, + def.Name, intf.Name, foundField.Name, foundArgs.Name, + ) + } + } + } + return validateTypeImplementsAncestors(schema, def, intfName) +} + +// validateTypeImplementsAncestors +// https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/type/validate.ts#L428 +func validateTypeImplementsAncestors(schema *Schema, def *Definition, intfName string) *gqlerror.Error { + intf := schema.Types[intfName] + if intf == nil { + return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) + } + for _, transitive := range intf.Interfaces { + if !containsString(def.Interfaces, transitive) { + if transitive == def.Name { + return gqlerror.ErrorPosf(def.Position, + `Type %s cannot implement %s because it would create a circular reference.`, + def.Name, intfName, + ) + } + return gqlerror.ErrorPosf(def.Position, + `Type %s must implement %s because it is implemented by %s.`, + def.Name, transitive, intfName, + ) + } + } + return nil +} + +func containsString(slice []string, want string) bool { + for _, str := range slice { + if want == str { + return true + } + } + return false +} + +func isCovariant(schema *Schema, required *Type, actual *Type) bool { + if required.NonNull && !actual.NonNull { + return false + } + + if required.NamedType != "" { + if required.NamedType == actual.NamedType { + return true + } + for _, pt := range schema.PossibleTypes[required.NamedType] { + if pt.Name == actual.NamedType { + return true + } + } + return false + } + + if required.Elem != nil && actual.Elem == nil { + return false + } + + return isCovariant(schema, required.Elem, actual.Elem) +} + +func validateName(pos *Position, name string) *gqlerror.Error { + if strings.HasPrefix(name, "__") { + return gqlerror.ErrorPosf(pos, `Name "%s" must not begin with "__", which is reserved by GraphQL introspection.`, name) + } + return nil +} + +func isValidKind(kind DefinitionKind, valid ...DefinitionKind) bool { + for _, k := range valid { + if kind == k { + return true + } + } + return false +} + +func kindList(kinds ...DefinitionKind) string { + s := make([]string, len(kinds)) + for i, k := range kinds { + s[i] = string(k) + } + 
return strings.Join(s, ", ") +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml new file mode 100644 index 000000000..a07707ba0 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml @@ -0,0 +1,665 @@ +types: + - name: cannot be redeclared + input: | + type A { + name: String + } + type A { + name: String + } + error: + message: "Cannot redeclare type A." + locations: [{line: 4, column: 6}] + - name: cannot be duplicated field at same definition 1 + input: | + type A { + name: String + name: String + } + error: + message: "Field A.name can only be defined once." + locations: [{line: 3, column: 3}] + - name: cannot be duplicated field at same definition 2 + input: | + type A { + name: String + } + extend type A { + name: String + } + error: + message: "Field A.name can only be defined once." + locations: [{line: 5, column: 3}] + - name: cannot be duplicated field at same definition 3 + input: | + type A { + name: String + } + extend type A { + age: Int + age: Int + } + error: + message: "Field A.age can only be defined once." + locations: [{line: 6, column: 3}] + +object types: + - name: must define one or more fields + input: | + directive @D on OBJECT + + # This pattern rejected by parser + # type InvalidObject1 {} + + type InvalidObject2 @D + + type ValidObject { + id: ID + } + extend type ValidObject @D + extend type ValidObject { + b: Int + } + error: + message: 'OBJECT InvalidObject2: must define one or more fields.' + locations: [{line: 6, column: 6}] + - name: check reserved names on type name + input: | + type __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 6}] + - name: check reserved names on type field + input: | + type FooBar { + __id: ID + } + error: + message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 2, column: 3}] + + - name: check reserved names on type field argument + input: | + type FooBar { + foo(__bar: ID): ID + } + error: + message: 'Name "__bar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 2, column: 7}] + + - name: must not allow input object as field type + input: | + input Input { + id: ID + } + type Query { + input: Input! + } + error: + message: 'OBJECT Query: field must be one of SCALAR, OBJECT, INTERFACE, UNION, ENUM.' + locations: [{line: 5, column: 3}] + +interfaces: + - name: must exist + input: | + type Thing implements Object { + id: ID! + } + + type Query { + Things: [Thing!]! + } + error: + message: 'Undefined type "Object".' + locations: [{line: 1, column: 6}] + + - name: must be an interface + input: | + type Thing implements Object { + id: ID! + } + + type Query { + Things: [Thing!]! + } + + type Object { + name: String + } + error: + message: '"Object" is a non interface type OBJECT.' + locations: [{line: 1, column: 6}] + + - name: must define one or more fields + input: | + directive @D on INTERFACE + + # This pattern rejected by parser + # interface InvalidInterface1 {} + + interface InvalidInterface2 @D + + interface ValidInterface { + id: ID + } + extend interface ValidInterface @D + extend interface ValidInterface { + b: Int + } + error: + message: 'INTERFACE InvalidInterface2: must define one or more fields.' 
+      locations: [{line: 6, column: 11}] + + - name: check reserved names on type name + input: | + interface __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 11}] + + - name: must not allow input object as field type + input: | + input Input { + id: ID + } + type Query { + foo: Foo! + } + interface Foo { + input: Input! + } + error: + message: 'INTERFACE Foo: field must be one of SCALAR, OBJECT, INTERFACE, UNION, ENUM.' + locations: [{line: 8, column: 3}] + + - name: must have all fields from interface + input: | + type Bar implements BarInterface { + someField: Int! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface it must have a field called id.' + locations: [{line: 1, column: 6}] + + - name: must have same type of fields + input: | + type Bar implements BarInterface { + id: Int! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have type ID!.' + locations: [{line: 2, column: 5}] + + - name: must have all required arguments + input: | + type Bar implements BarInterface { + id: ID! + } + + interface BarInterface { + id(ff: Int!): ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have the same arguments but it is missing ff.' + locations: [{line: 2, column: 5}] + + - name: must have same argument types + input: | + type Bar implements BarInterface { + id(ff: ID!): ID! + } + + interface BarInterface { + id(ff: Int!): ID! + } + error: + message: 'For Bar to implement BarInterface the field id must have the same arguments but ff has the wrong type.' + locations: [{line: 2, column: 8}] + + - name: may define additional nullable arguments + input: | + type Bar implements BarInterface { + id(opt: Int): ID! + } + + interface BarInterface { + id: ID! + } + + - name: may define additional required arguments with defaults + input: | + type Bar implements BarInterface { + id(opt: Int! = 1): ID! + } + + interface BarInterface { + id: ID! + } + + - name: must not define additional required arguments without defaults + input: | + type Bar implements BarInterface { + id(opt: Int!): ID! + } + + interface BarInterface { + id: ID! + } + error: + message: 'For Bar to implement BarInterface any additional arguments on id must be optional or have a default value but opt is required.' + locations: [{line: 2, column: 8}] + + - name: can have covariant field types + input: | + union U = A|B + + type A { name: String } + type B { name: String } + + type Bar implements BarInterface { + f: A! + } + + interface BarInterface { + f: U! + } + + - name: may define intermediate interfaces + input: | + interface IA { + id: ID! + } + + interface IIA implements IA { + id: ID! + } + + type A implements IIA & IA { + id: ID! + } + + - name: Type Foo must implement Baz because it is implemented by Bar + input: | + interface Baz { + baz: String + } + + interface Bar implements Baz { + bar: String + baz: String + } + + type Foo implements Bar { + foo: String + bar: String + baz: String + } + error: + message: 'Type Foo must implement Baz because it is implemented by Bar.' + locations: [{line: 10, column: 6}] + + - name: circular reference error + input: | + interface Circular1 implements Circular2 { + id: ID! + } + + interface Circular2 implements Circular1 { + id: ID!
+ } + error: + message: 'Type Circular1 cannot implement Circular2 because it would create a circular reference.' + locations: [{line: 1, column: 11}] + +inputs: + - name: must define one or more input fields + input: | + directive @D on INPUT_OBJECT + + # This pattern rejected by parser + # input InvalidInput1 {} + + input InvalidInput2 @D + + input ValidInput { + id: ID + } + extend input ValidInput @D + extend input ValidInput { + b: Int + } + error: + message: 'INPUT_OBJECT InvalidInput2: must define one or more input fields.' + locations: [{line: 6, column: 7}] + - name: check reserved names on type name + input: | + input __FooBar { + id: ID + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 7}] + + - name: fields cannot be Objects + input: | + type Object { id: ID } + input Foo { a: Object! } + error: + message: 'OBJECT a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 2, column: 13}] + + - name: fields cannot be Interfaces + input: | + interface Interface { id: ID! } + input Foo { a: Interface! } + error: + message: 'INTERFACE a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 2, column: 13}] + + - name: fields cannot be Unions + input: | + type Object { id: ID } + union Union = Object + input Foo { a: Union! } + error: + message: 'UNION a: field must be one of SCALAR, ENUM, INPUT_OBJECT.' + locations: [{line: 3, column: 13}] + +args: + - name: Valid arg types + input: | + input Input { id: ID } + enum Enum { A } + scalar Scalar + + type Query { + f(a: Input, b: Scalar, c: Enum): Boolean! + } + + - name: Objects not allowed + input: | + type Object { id: ID } + type Query { f(a: Object): Boolean! } + + error: + message: 'cannot use Object as argument a because OBJECT is not a valid input type' + locations: [{line: 2, column: 16}] + + - name: Union not allowed + input: | + type Object { id: ID } + union Union = Object + type Query { f(a: Union): Boolean! } + + error: + message: 'cannot use Union as argument a because UNION is not a valid input type' + locations: [{line: 3, column: 16}] + + - name: Interface not allowed + input: | + interface Interface { id: ID } + type Query { f(a: Interface): Boolean! } + + error: + message: 'cannot use Interface as argument a because INTERFACE is not a valid input type' + locations: [{line: 2, column: 16}] + +enums: + - name: must define one or more unique enum values + input: | + directive @D on ENUM + + # This pattern rejected by parser + # enum InvalidEmum1 {} + + enum InvalidEnum2 @D + + enum ValidEnum { + FOO + } + extend enum ValidEnum @D + extend enum ValidEnum { + BAR + } + error: + message: 'ENUM InvalidEnum2: must define one or more unique enum values.' + locations: [{line: 6, column: 6}] + - name: check reserved names on type name + input: | + enum __FooBar { + A + B + } + error: + message: 'Name "__FooBar" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 6}] + +unions: + - name: union types must be defined + input: | + union Foo = Bar | Baz + type Bar { + id: ID + } + error: + message: "Undefined type \"Baz\"." + locations: [{line: 1, column: 7}] + - name: union types must be objects + input: | + union Foo = Baz + interface Baz { + id: ID + } + error: + message: "UNION type \"Baz\" must be OBJECT." + locations: [{line: 1, column: 7}] + + - name: unions of pure type extensions are valid + input: | + + type Review { + body: String! + author: User! 
@provides(fields: "username") + product: Product! + } + + extend type User @key(fields: "id") { + id: ID! @external + reviews: [Review] + } + + extend type Product @key(fields: "upc") { + upc: String! @external + reviews: [Review] + } + + union Foo = User | Product + scalar _Any + scalar _FieldSet + directive @external on FIELD_DEFINITION + directive @requires(fields: _FieldSet!) on FIELD_DEFINITION + directive @provides(fields: _FieldSet!) on FIELD_DEFINITION + directive @key(fields: _FieldSet!) on OBJECT | INTERFACE + directive @extends on OBJECT + + + +type extensions: + - name: can extend non-existent types + input: | + extend type A { + name: String + } + + + - name: cannot extend existing types of the wrong kind + input: | + scalar A + extend type A { + name: String + } + error: + message: "Cannot extend type A because the base type is a SCALAR, not OBJECT." + locations: [{line: 2, column: 13}] + +directives: + - name: cannot redeclare directives + input: | + directive @A on FIELD_DEFINITION + directive @A on FIELD_DEFINITION + error: + message: "Cannot redeclare directive A." + locations: [{line: 2, column: 12}] + + - name: must be declared + input: | + type User { + name: String @foo + } + error: + message: "Undefined directive foo." + locations: [{line: 2, column: 17}] + + - name: cannot be self-referential + input: | + directive @A(foo: Int! @A) on FIELD_DEFINITION + error: + message: "Directive A cannot refer to itself." + locations: [{line: 1, column: 25}] + - name: check reserved names on type name + input: | + directive @__A on FIELD_DEFINITION + error: + message: 'Name "__A" must not begin with "__", which is reserved by GraphQL introspection.' + locations: [{line: 1, column: 12}] + + - name: Valid arg types + input: | + input Input { id: ID } + enum Enum { A } + scalar Scalar + + directive @A(a: Input, b: Scalar, c: Enum) on FIELD_DEFINITION + + - name: Objects not allowed + input: | + type Object { id: ID } + directive @A(a: Object) on FIELD_DEFINITION + + error: + message: 'cannot use Object as argument a because OBJECT is not a valid input type' + locations: [{line: 2, column: 14}] + + - name: Union not allowed + input: | + type Object { id: ID } + union Union = Object + directive @A(a: Union) on FIELD_DEFINITION + + error: + message: 'cannot use Union as argument a because UNION is not a valid input type' + locations: [{line: 3, column: 14}] + + - name: Interface not allowed + input: | + interface Interface { id: ID } + directive @A(a: Interface) on FIELD_DEFINITION + + error: + message: 'cannot use Interface as argument a because INTERFACE is not a valid input type' + locations: [{line: 2, column: 14}] + + - name: Invalid location usage not allowed + input: | + directive @test on FIELD_DEFINITION + input I1 @test { f: String } + + error: + message: 'Directive test is not applicable on INPUT_OBJECT.' + locations: [{line: 2, column: 11}] + + - name: Valid location usage + input: | + directive @testInputField on INPUT_FIELD_DEFINITION + directive @testField on FIELD_DEFINITION + directive @inp on INPUT_OBJECT + input I1 @inp { f: String @testInputField } + type P { name: String @testField } + interface I { id: ID @testField } + + +entry points: + - name: multiple schema entry points + input: | + schema { + query: Query + } + schema { + query: Query + } + scalar Query + error: + message: "Cannot have multiple schema entry points, consider schema extensions instead."
+ locations: [{line: 4, column: 8}] + + - name: Undefined schema entrypoint + input: | + schema { + query: Query + } + error: + message: "Schema root query refers to a type Query that does not exist." + locations: [{line: 2, column: 3}] + +entry point extensions: + - name: Undefined schema entrypoint + input: | + schema { + query: Query + } + scalar Query + extend schema { + mutation: Mutation + } + error: + message: "Schema root mutation refers to a type Mutation that does not exist." + locations: [{line: 6, column: 3}] + +type references: + - name: Field types + input: | + type User { + posts: Post + } + error: + message: "Undefined type Post." + locations: [{line: 2, column: 10}] + + - name: Arg types + input: | + type User { + posts(foo: FooBar): String + } + error: + message: "Undefined type FooBar." + locations: [{line: 2, column: 14}] + + - name: Directive arg types + input: | + directive @Foo(foo: FooBar) on FIELD_DEFINITION + + error: + message: "Undefined type FooBar." + locations: [{line: 1, column: 21}] diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go new file mode 100644 index 000000000..f0bbc3278 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go @@ -0,0 +1,69 @@ +package validator + +import ( + "math" + "sort" + "strings" + + "github.com/agnivade/levenshtein" +) + +// Given an invalid input string and a list of valid options, returns a filtered +// list of valid options sorted based on their similarity with the input. +func SuggestionList(input string, options []string) []string { + var results []string + optionsByDistance := map[string]int{} + + for _, option := range options { + distance := lexicalDistance(input, option) + threshold := calcThreshold(input) + if distance <= threshold { + results = append(results, option) + optionsByDistance[option] = distance + } + } + + sort.Slice(results, func(i, j int) bool { + return optionsByDistance[results[i]] < optionsByDistance[results[j]] + }) + return results +} + +func calcThreshold(a string) (threshold int) { + // the logic is copied from here + // https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/jsutils/suggestionList.ts#L14 + threshold = int(math.Floor(float64(len(a))*0.4) + 1) + + if threshold < 1 { + threshold = 1 + } + return +} + +// Computes the lexical distance between strings A and B. +// +// The "distance" between two strings is given by counting the minimum number +// of edits needed to transform string A into string B. An edit can be an +// insertion, deletion, or substitution of a single character, or a swap of two +// adjacent characters. +// +// Includes a custom alteration from Damerau-Levenshtein to treat case changes +// as a single edit which helps identify mis-cased values with an edit distance +// of 1. 
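+// For example, "graphql" and "graphqk" are one substitution apart, and a pure +// case change such as "GraphQL" vs "graphql" also counts as a single edit.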
+// +// This distance can be useful for detecting typos in input or sorting +// suggestions by similarity. +func lexicalDistance(a, b string) int { + if a == b { + return 0 + } + + a = strings.ToLower(a) + b = strings.ToLower(b) + + // Any case change counts as a single edit + if a == b { + return 1 + } + + return levenshtein.ComputeDistance(a, b) +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/validator.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/validator.go new file mode 100644 index 000000000..34bf93db3 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/validator.go @@ -0,0 +1,44 @@ +package validator + +import ( + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +type AddErrFunc func(options ...ErrorOption) + +type ruleFunc func(observers *Events, addError AddErrFunc) + +type rule struct { + name string + rule ruleFunc +} + +var rules []rule + +// AddRule adds a rule to the rule set. +// f is called once each time `Validate` is executed. +func AddRule(name string, f ruleFunc) { + rules = append(rules, rule{name: name, rule: f}) +} + +func Validate(schema *Schema, doc *QueryDocument) gqlerror.List { + var errs gqlerror.List + + observers := &Events{} + for i := range rules { + rule := rules[i] + rule.rule(observers, func(options ...ErrorOption) { + err := &gqlerror.Error{ + Rule: rule.name, + } + for _, o := range options { + o(err) + } + errs = append(errs, err) + }) + } + + Walk(schema, doc, observers) + return errs +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/vars.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/vars.go new file mode 100644 index 000000000..c3fd559bb --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/vars.go @@ -0,0 +1,258 @@ +package validator + +import ( + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +var UnexpectedType = fmt.Errorf("Unexpected Type") + +// VariableValues coerces and validates variable values. +func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, *gqlerror.Error) { + coercedVars := map[string]interface{}{} + + validator := varValidator{ + path: ast.Path{ast.PathName("variable")}, + schema: schema, + } + + for _, v := range op.VariableDefinitions { + validator.path = append(validator.path, ast.PathName(v.Variable)) + + if !v.Definition.IsInputType() { + return nil, gqlerror.ErrorPathf(validator.path, "must be an input type") + } + + val, hasValue := variables[v.Variable] + + if !hasValue { + if v.DefaultValue != nil { + var err error + val, err = v.DefaultValue.Value(nil) + if err != nil { + return nil, gqlerror.WrapPath(validator.path, err) + } + hasValue = true + } else if v.Type.NonNull { + return nil, gqlerror.ErrorPathf(validator.path, "must be defined") + } + } + + if hasValue { + if val == nil { + if v.Type.NonNull { + return nil, gqlerror.ErrorPathf(validator.path, "cannot be null") + } + coercedVars[v.Variable] = nil + } else { + rv := reflect.ValueOf(val) + + jsonNumber, isJsonNumber := val.(json.Number) + if isJsonNumber { + if v.Type.NamedType == "Int" { + n, err := jsonNumber.Int64() + if err != nil { + return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %d as %s", n, v.Type.NamedType) + } + rv = reflect.ValueOf(n) + } else if v.Type.NamedType == "Float" { + f, err := jsonNumber.Float64() + if err != nil { +
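// e.g. a json.Number such as "1e999" overflows float64, so Float64 reports an error +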
return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %f as %s", f, v.Type.NamedType) + } + rv = reflect.ValueOf(f) + + } + } + if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface { + rv = rv.Elem() + } + + rval, err := validator.validateVarType(v.Type, rv) + if err != nil { + return nil, err + } + coercedVars[v.Variable] = rval.Interface() + } + } + + validator.path = validator.path[0 : len(validator.path)-1] + } + return coercedVars, nil +} + +type varValidator struct { + path ast.Path + schema *ast.Schema +} + +func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflect.Value, *gqlerror.Error) { + currentPath := v.path + resetPath := func() { + v.path = currentPath + } + defer resetPath() + if typ.Elem != nil { + if val.Kind() != reflect.Slice { + // GraphQL spec says that non-list values should be coerced to a list when possible. + // Hence if the value is not a slice, we create a slice and add val to it. + slc := reflect.MakeSlice(reflect.SliceOf(val.Type()), 0, 0) + slc = reflect.Append(slc, val) + val = slc + } + for i := 0; i < val.Len(); i++ { + resetPath() + v.path = append(v.path, ast.PathIndex(i)) + field := val.Index(i) + if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface { + if typ.Elem.NonNull && field.IsNil() { + return val, gqlerror.ErrorPathf(v.path, "cannot be null") + } + field = field.Elem() + } + _, err := v.validateVarType(typ.Elem, field) + if err != nil { + return val, err + } + } + return val, nil + } + def := v.schema.Types[typ.NamedType] + if def == nil { + panic(fmt.Errorf("missing def for %s", typ.NamedType)) + } + + if !typ.NonNull && !val.IsValid() { + // If the type is nullable and the value is invalid (i.e. null/nil), it's valid + return val, nil + } + + switch def.Kind { + case ast.Enum: + kind := val.Type().Kind() + if kind != reflect.Int && kind != reflect.Int32 && kind != reflect.Int64 && kind != reflect.String { + return val, gqlerror.ErrorPathf(v.path, "enums must be ints or strings") + } + isValidEnum := false + for _, enumVal := range def.EnumValues { + if strings.EqualFold(val.String(), enumVal.Name) { + isValidEnum = true + } + } + if !isValidEnum { + return val, gqlerror.ErrorPathf(v.path, "%s is not a valid %s", val.String(), def.Name) + } + return val, nil + case ast.Scalar: + kind := val.Type().Kind() + switch typ.NamedType { + case "Int": + if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.Float32 || kind == reflect.Float64 || IsValidIntString(val, kind) { + return val, nil + } + case "Float": + if kind == reflect.Float32 || kind == reflect.Float64 || kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || IsValidFloatString(val, kind) { + return val, nil + } + case "String": + if kind == reflect.String { + return val, nil + } + + case "Boolean": + if kind == reflect.Bool { + return val, nil + } + + case "ID": + if kind == reflect.Int || kind == reflect.Int32 || kind == reflect.Int64 || kind == reflect.String { + return val, nil + } + default: + // assume custom scalars are ok + return val, nil + } + return val, gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType) + case ast.InputObject: + if val.Kind() != reflect.Map { + return val, gqlerror.ErrorPathf(v.path, "must be a %s", def.Name) + } + + // check for unknown fields + for _, name := range val.MapKeys() { + val.MapIndex(name) + fieldDef := def.Fields.ForName(name.String()) + resetPath() + v.path = append(v.path,
ast.PathName(name.String())) + + switch { + case name.String() == "__typename": + continue + case fieldDef == nil: + return val, gqlerror.ErrorPathf(v.path, "unknown field") + } + } + + for _, fieldDef := range def.Fields { + resetPath() + v.path = append(v.path, ast.PathName(fieldDef.Name)) + + field := val.MapIndex(reflect.ValueOf(fieldDef.Name)) + if !field.IsValid() { + if fieldDef.Type.NonNull { + if fieldDef.DefaultValue != nil { + var err error + _, err = fieldDef.DefaultValue.Value(nil) + if err == nil { + continue + } + } + return val, gqlerror.ErrorPathf(v.path, "must be defined") + } + continue + } + + if field.Kind() == reflect.Ptr || field.Kind() == reflect.Interface { + if fieldDef.Type.NonNull && field.IsNil() { + return val, gqlerror.ErrorPathf(v.path, "cannot be null") + } + //allow null object field and skip it + if !fieldDef.Type.NonNull && field.IsNil() { + continue + } + field = field.Elem() + } + cval, err := v.validateVarType(fieldDef.Type, field) + if err != nil { + return val, err + } + val.SetMapIndex(reflect.ValueOf(fieldDef.Name), cval) + } + default: + panic(fmt.Errorf("unsupported type %s", def.Kind)) + } + return val, nil +} + +func IsValidIntString(val reflect.Value, kind reflect.Kind) bool { + if kind != reflect.String { + return false + } + _, e := strconv.ParseInt(fmt.Sprintf("%v", val.Interface()), 10, 64) + + return e == nil +} + +func IsValidFloatString(val reflect.Value, kind reflect.Kind) bool { + if kind != reflect.String { + return false + } + _, e := strconv.ParseFloat(fmt.Sprintf("%v", val.Interface()), 64) + return e == nil +} diff --git a/constraint/vendor/github.com/vektah/gqlparser/v2/validator/walk.go b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/walk.go new file mode 100644 index 000000000..6ee69e4c2 --- /dev/null +++ b/constraint/vendor/github.com/vektah/gqlparser/v2/validator/walk.go @@ -0,0 +1,292 @@ +package validator + +import ( + "context" + "fmt" + + "github.com/vektah/gqlparser/v2/ast" +) + +type Events struct { + operationVisitor []func(walker *Walker, operation *ast.OperationDefinition) + field []func(walker *Walker, field *ast.Field) + fragment []func(walker *Walker, fragment *ast.FragmentDefinition) + inlineFragment []func(walker *Walker, inlineFragment *ast.InlineFragment) + fragmentSpread []func(walker *Walker, fragmentSpread *ast.FragmentSpread) + directive []func(walker *Walker, directive *ast.Directive) + directiveList []func(walker *Walker, directives []*ast.Directive) + value []func(walker *Walker, value *ast.Value) + variable []func(walker *Walker, variable *ast.VariableDefinition) +} + +func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) { + o.operationVisitor = append(o.operationVisitor, f) +} +func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) { + o.field = append(o.field, f) +} +func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) { + o.fragment = append(o.fragment, f) +} +func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) { + o.inlineFragment = append(o.inlineFragment, f) +} +func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) { + o.fragmentSpread = append(o.fragmentSpread, f) +} +func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) { + o.directive = append(o.directive, f) +} +func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) { + o.directiveList = append(o.directiveList, 
f) +} +func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) { + o.value = append(o.value, f) +} +func (o *Events) OnVariable(f func(walker *Walker, variable *ast.VariableDefinition)) { + o.variable = append(o.variable, f) +} + +func Walk(schema *ast.Schema, document *ast.QueryDocument, observers *Events) { + w := Walker{ + Observers: observers, + Schema: schema, + Document: document, + } + + w.walk() +} + +type Walker struct { + Context context.Context + Observers *Events + Schema *ast.Schema + Document *ast.QueryDocument + + validatedFragmentSpreads map[string]bool + CurrentOperation *ast.OperationDefinition +} + +func (w *Walker) walk() { + for _, child := range w.Document.Operations { + w.validatedFragmentSpreads = make(map[string]bool) + w.walkOperation(child) + } + for _, child := range w.Document.Fragments { + w.validatedFragmentSpreads = make(map[string]bool) + w.walkFragment(child) + } +} + +func (w *Walker) walkOperation(operation *ast.OperationDefinition) { + w.CurrentOperation = operation + for _, varDef := range operation.VariableDefinitions { + varDef.Definition = w.Schema.Types[varDef.Type.Name()] + for _, v := range w.Observers.variable { + v(w, varDef) + } + if varDef.DefaultValue != nil { + varDef.DefaultValue.ExpectedType = varDef.Type + varDef.DefaultValue.Definition = w.Schema.Types[varDef.Type.Name()] + } + } + + var def *ast.Definition + var loc ast.DirectiveLocation + switch operation.Operation { + case ast.Query, "": + def = w.Schema.Query + loc = ast.LocationQuery + case ast.Mutation: + def = w.Schema.Mutation + loc = ast.LocationMutation + case ast.Subscription: + def = w.Schema.Subscription + loc = ast.LocationSubscription + } + + for _, varDef := range operation.VariableDefinitions { + if varDef.DefaultValue != nil { + w.walkValue(varDef.DefaultValue) + } + w.walkDirectives(varDef.Definition, varDef.Directives, ast.LocationVariableDefinition) + } + + w.walkDirectives(def, operation.Directives, loc) + w.walkSelectionSet(def, operation.SelectionSet) + + for _, v := range w.Observers.operationVisitor { + v(w, operation) + } + w.CurrentOperation = nil +} + +func (w *Walker) walkFragment(it *ast.FragmentDefinition) { + def := w.Schema.Types[it.TypeCondition] + + it.Definition = def + + w.walkDirectives(def, it.Directives, ast.LocationFragmentDefinition) + w.walkSelectionSet(def, it.SelectionSet) + + for _, v := range w.Observers.fragment { + v(w, it) + } +} + +func (w *Walker) walkDirectives(parentDef *ast.Definition, directives []*ast.Directive, location ast.DirectiveLocation) { + for _, dir := range directives { + def := w.Schema.Directives[dir.Name] + dir.Definition = def + dir.ParentDefinition = parentDef + dir.Location = location + + for _, arg := range dir.Arguments { + var argDef *ast.ArgumentDefinition + if def != nil { + argDef = def.Arguments.ForName(arg.Name) + } + + w.walkArgument(argDef, arg) + } + + for _, v := range w.Observers.directive { + v(w, dir) + } + } + + for _, v := range w.Observers.directiveList { + v(w, directives) + } +} + +func (w *Walker) walkValue(value *ast.Value) { + if value.Kind == ast.Variable && w.CurrentOperation != nil { + value.VariableDefinition = w.CurrentOperation.VariableDefinitions.ForName(value.Raw) + if value.VariableDefinition != nil { + value.VariableDefinition.Used = true + } + } + + if value.Kind == ast.ObjectValue { + for _, child := range value.Children { + if value.Definition != nil { + fieldDef := value.Definition.Fields.ForName(child.Name) + if fieldDef != nil { + child.Value.ExpectedType = 
fieldDef.Type + child.Value.Definition = w.Schema.Types[fieldDef.Type.Name()] + } + } + w.walkValue(child.Value) + } + } + + if value.Kind == ast.ListValue { + for _, child := range value.Children { + if value.ExpectedType != nil && value.ExpectedType.Elem != nil { + child.Value.ExpectedType = value.ExpectedType.Elem + child.Value.Definition = value.Definition + } + + w.walkValue(child.Value) + } + } + + for _, v := range w.Observers.value { + v(w, value) + } +} + +func (w *Walker) walkArgument(argDef *ast.ArgumentDefinition, arg *ast.Argument) { + if argDef != nil { + arg.Value.ExpectedType = argDef.Type + arg.Value.Definition = w.Schema.Types[argDef.Type.Name()] + } + + w.walkValue(arg.Value) +} + +func (w *Walker) walkSelectionSet(parentDef *ast.Definition, it ast.SelectionSet) { + for _, child := range it { + w.walkSelection(parentDef, child) + } +} + +func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) { + switch it := it.(type) { + case *ast.Field: + var def *ast.FieldDefinition + if it.Name == "__typename" { + def = &ast.FieldDefinition{ + Name: "__typename", + Type: ast.NamedType("String", nil), + } + } else if parentDef != nil { + def = parentDef.Fields.ForName(it.Name) + } + + it.Definition = def + it.ObjectDefinition = parentDef + + var nextParentDef *ast.Definition + if def != nil { + nextParentDef = w.Schema.Types[def.Type.Name()] + } + + for _, arg := range it.Arguments { + var argDef *ast.ArgumentDefinition + if def != nil { + argDef = def.Arguments.ForName(arg.Name) + } + + w.walkArgument(argDef, arg) + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationField) + w.walkSelectionSet(nextParentDef, it.SelectionSet) + + for _, v := range w.Observers.field { + v(w, it) + } + + case *ast.InlineFragment: + it.ObjectDefinition = parentDef + + nextParentDef := parentDef + if it.TypeCondition != "" { + nextParentDef = w.Schema.Types[it.TypeCondition] + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationInlineFragment) + w.walkSelectionSet(nextParentDef, it.SelectionSet) + + for _, v := range w.Observers.inlineFragment { + v(w, it) + } + + case *ast.FragmentSpread: + def := w.Document.Fragments.ForName(it.Name) + it.Definition = def + it.ObjectDefinition = parentDef + + var nextParentDef *ast.Definition + if def != nil { + nextParentDef = w.Schema.Types[def.TypeCondition] + } + + w.walkDirectives(nextParentDef, it.Directives, ast.LocationFragmentSpread) + + if def != nil && !w.validatedFragmentSpreads[def.Name] { + // prevent infinite recursion + w.validatedFragmentSpreads[def.Name] = true + w.walkSelectionSet(nextParentDef, def.SelectionSet) + } + + for _, v := range w.Observers.fragmentSpread { + v(w, it) + } + + default: + panic(fmt.Errorf("unsupported %T", it)) + } +} diff --git a/constraint/vendor/google.golang.org/grpc/clientconn.go b/constraint/vendor/google.golang.org/grpc/clientconn.go index 3ed6eb8e7..de6d41c23 100644 --- a/constraint/vendor/google.golang.org/grpc/clientconn.go +++ b/constraint/vendor/google.golang.org/grpc/clientconn.go @@ -801,16 +801,31 @@ func (ac *addrConn) connect() error { return nil } +func equalAddresses(a, b []resolver.Address) bool { + if len(a) != len(b) { + return false + } + for i, v := range a { + if !v.Equal(b[i]) { + return false + } + } + return true +} + // tryUpdateAddrs tries to update ac.addrs with the new addresses list. // -// If ac is Connecting, it returns false. The caller should tear down the ac and -// create a new one. Note that the backoff will be reset when this happens.
-// // If ac is TransientFailure, it updates ac.addrs and returns true. The updated // addresses will be picked up by retry in the next iteration after backoff. // // If ac is Shutdown or Idle, it updates ac.addrs and returns true. // +// If the address list is the same as the old list, it does nothing and returns +// true. +// +// If ac is Connecting, it returns false. The caller should tear down the ac and +// create a new one. Note that the backoff will be reset when this happens. +// // If ac is Ready, it checks whether current connected address of ac is in the // new addrs list. // - If true, it updates ac.addrs and returns true. The ac will keep using @@ -827,6 +842,10 @@ func (ac *addrConn) tryUpdateAddrs(addrs []resolver.Address) bool { return true } + if equalAddresses(ac.addrs, addrs) { + return true + } + if ac.state == connectivity.Connecting { return false } @@ -907,14 +926,10 @@ func (cc *ClientConn) healthCheckConfig() *healthCheckConfig { } func (cc *ClientConn) getTransport(ctx context.Context, failfast bool, method string) (transport.ClientTransport, func(balancer.DoneInfo), error) { - t, done, err := cc.blockingpicker.pick(ctx, failfast, balancer.PickInfo{ + return cc.blockingpicker.pick(ctx, failfast, balancer.PickInfo{ Ctx: ctx, FullMethodName: method, }) - if err != nil { - return nil, nil, toRPCErr(err) - } - return t, done, nil } func (cc *ClientConn) applyServiceConfigAndBalancer(sc *ServiceConfig, configSelector iresolver.ConfigSelector, addrs []resolver.Address) { @@ -1223,6 +1238,7 @@ func (ac *addrConn) createTransport(addr resolver.Address, copts transport.Conne ac.mu.Lock() defer ac.mu.Unlock() defer connClosed.Fire() + defer hcancel() if !hcStarted || hctx.Err() != nil { // We didn't start the health check or set the state to READY, so // no need to do anything else here. @@ -1233,7 +1249,6 @@ func (ac *addrConn) createTransport(addr resolver.Address, copts transport.Conne // state, since there may be a new transport in this addrConn. return } - hcancel() ac.transport = nil // Refresh the name resolver ac.cc.resolveNow(resolver.ResolveNowOptions{}) @@ -1256,6 +1271,7 @@ func (ac *addrConn) createTransport(addr resolver.Address, copts transport.Conne newTr, err := transport.NewClientTransport(connectCtx, ac.cc.ctx, addr, copts, func() { prefaceReceived.Fire() }, onGoAway, onClose) if err != nil { // newTr is either nil, or closed. + hcancel() channelz.Warningf(logger, ac.channelzID, "grpc: addrConn.createTransport failed to connect to %s. Err: %v", addr, err) return err } diff --git a/constraint/vendor/google.golang.org/grpc/encoding/encoding.go b/constraint/vendor/google.golang.org/grpc/encoding/encoding.go index 6d84f74c7..18e530fc9 100644 --- a/constraint/vendor/google.golang.org/grpc/encoding/encoding.go +++ b/constraint/vendor/google.golang.org/grpc/encoding/encoding.go @@ -108,7 +108,7 @@ var registeredCodecs = make(map[string]Codec) // more details. // // NOTE: this function must only be called during initialization time (i.e. in -// an init() function), and is not thread-safe. If multiple Compressors are +// an init() function), and is not thread-safe. If multiple Codecs are // registered with the same name, the one registered last will take effect.
func RegisterCodec(codec Codec) { if codec == nil { diff --git a/constraint/vendor/google.golang.org/grpc/internal/transport/controlbuf.go b/constraint/vendor/google.golang.org/grpc/internal/transport/controlbuf.go index 8394d252d..244f4b081 100644 --- a/constraint/vendor/google.golang.org/grpc/internal/transport/controlbuf.go +++ b/constraint/vendor/google.golang.org/grpc/internal/transport/controlbuf.go @@ -137,6 +137,7 @@ type earlyAbortStream struct { streamID uint32 contentSubtype string status *status.Status + rst bool } func (*earlyAbortStream) isTransportResponseFrame() bool { return false } @@ -786,6 +787,11 @@ func (l *loopyWriter) earlyAbortStreamHandler(eas *earlyAbortStream) error { if err := l.writeHeader(eas.streamID, true, headerFields, nil); err != nil { return err } + if eas.rst { + if err := l.framer.fr.WriteRSTStream(eas.streamID, http2.ErrCodeNo); err != nil { + return err + } + } return nil } diff --git a/constraint/vendor/google.golang.org/grpc/internal/transport/http2_client.go b/constraint/vendor/google.golang.org/grpc/internal/transport/http2_client.go index 38ed3d566..24ca59084 100644 --- a/constraint/vendor/google.golang.org/grpc/internal/transport/http2_client.go +++ b/constraint/vendor/google.golang.org/grpc/internal/transport/http2_client.go @@ -631,8 +631,8 @@ func (t *http2Client) getCallAuthData(ctx context.Context, audience string, call // the wire. However, there are two notable exceptions: // // 1. If the stream headers violate the max header list size allowed by the -// server. In this case there is no reason to retry at all, as it is -// assumed the RPC would continue to fail on subsequent attempts. +// server. It's possible this could succeed on another transport, even if +// it's unlikely, but do not transparently retry. // 2. If the credentials errored when requesting their headers. In this case, // it's possible a retry can fix the problem, but indefinitely transparently // retrying is not appropriate as it is likely the credentials, if they can @@ -640,8 +640,7 @@ func (t *http2Client) getCallAuthData(ctx context.Context, audience string, call type NewStreamError struct { Err error - DoNotRetry bool - DoNotTransparentRetry bool + AllowTransparentRetry bool } func (e NewStreamError) Error() string { @@ -650,11 +649,11 @@ func (e NewStreamError) Error() string { // NewStream creates a stream and registers it into the transport as "active" // streams. All non-nil errors returned will be *NewStreamError. -func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (_ *Stream, err error) { +func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, error) { ctx = peer.NewContext(ctx, t.getPeer()) headerFields, err := t.createHeaderFields(ctx, callHdr) if err != nil { - return nil, &NewStreamError{Err: err, DoNotTransparentRetry: true} + return nil, &NewStreamError{Err: err, AllowTransparentRetry: false} } s := t.newStream(ctx, callHdr) cleanup := func(err error) { @@ -754,13 +753,14 @@ func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (_ *Strea return true }, hdr) if err != nil { - return nil, &NewStreamError{Err: err} + // Connection closed. 
+ return nil, &NewStreamError{Err: err, AllowTransparentRetry: true} } if success { break } if hdrListSizeErr != nil { - return nil, &NewStreamError{Err: hdrListSizeErr, DoNotRetry: true} + return nil, &NewStreamError{Err: hdrListSizeErr} } firstTry = false select { @@ -768,9 +768,9 @@ func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (_ *Strea case <-ctx.Done(): return nil, &NewStreamError{Err: ContextErr(ctx.Err())} case <-t.goAway: - return nil, &NewStreamError{Err: errStreamDrain} + return nil, &NewStreamError{Err: errStreamDrain, AllowTransparentRetry: true} case <-t.ctx.Done(): - return nil, &NewStreamError{Err: ErrConnClosing} + return nil, &NewStreamError{Err: ErrConnClosing, AllowTransparentRetry: true} } } if t.statsHandler != nil { diff --git a/constraint/vendor/google.golang.org/grpc/internal/transport/http2_server.go b/constraint/vendor/google.golang.org/grpc/internal/transport/http2_server.go index 0956b500c..45d7bd145 100644 --- a/constraint/vendor/google.golang.org/grpc/internal/transport/http2_server.go +++ b/constraint/vendor/google.golang.org/grpc/internal/transport/http2_server.go @@ -21,7 +21,6 @@ package transport import ( "bytes" "context" - "errors" "fmt" "io" "math" @@ -53,10 +52,10 @@ import ( var ( // ErrIllegalHeaderWrite indicates that setting header is illegal because of // the stream's state. - ErrIllegalHeaderWrite = errors.New("transport: the stream is done or WriteHeader was already called") + ErrIllegalHeaderWrite = status.Error(codes.Internal, "transport: SendHeader called multiple times") // ErrHeaderListSizeLimitViolation indicates that the header list size is larger // than the limit set by peer. - ErrHeaderListSizeLimitViolation = errors.New("transport: trying to send header list size larger than the limit set by peer") + ErrHeaderListSizeLimitViolation = status.Error(codes.Internal, "transport: trying to send header list size larger than the limit set by peer") ) // serverConnectionCounter counts the number of connections a server has seen @@ -449,6 +448,7 @@ func (t *http2Server) operateHeaders(frame *http2.MetaHeadersFrame, handle func( streamID: streamID, contentSubtype: s.contentSubtype, status: status.New(codes.Internal, errMsg), + rst: !frame.StreamEnded(), }) return false } @@ -522,14 +522,16 @@ func (t *http2Server) operateHeaders(frame *http2.MetaHeadersFrame, handle func( } if httpMethod != http.MethodPost { t.mu.Unlock() + errMsg := fmt.Sprintf("http2Server.operateHeaders parsed a :method field: %v which should be POST", httpMethod) if logger.V(logLevel) { - logger.Infof("transport: http2Server.operateHeaders parsed a :method field: %v which should be POST", httpMethod) + logger.Infof("transport: %v", errMsg) } - t.controlBuf.put(&cleanupStream{ - streamID: streamID, - rst: true, - rstCode: http2.ErrCodeProtocol, - onWrite: func() {}, + t.controlBuf.put(&earlyAbortStream{ + httpStatus: 405, + streamID: streamID, + contentSubtype: s.contentSubtype, + status: status.New(codes.Internal, errMsg), + rst: !frame.StreamEnded(), }) s.cancel() return false @@ -550,6 +552,7 @@ func (t *http2Server) operateHeaders(frame *http2.MetaHeadersFrame, handle func( streamID: s.id, contentSubtype: s.contentSubtype, status: stat, + rst: !frame.StreamEnded(), }) return false } @@ -931,11 +934,25 @@ func (t *http2Server) checkForHeaderListSize(it interface{}) bool { return true } +func (t *http2Server) streamContextErr(s *Stream) error { + select { + case <-t.done: + return ErrConnClosing + default: + } + return ContextErr(s.ctx.Err()) +} + 
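+// streamContextErr (above) lets WriteHeader and Write report ErrConnClosing when +// the whole transport is shutting down, and the stream's own context error +// otherwise. +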
// WriteHeader sends the header metadata md back to the client. func (t *http2Server) WriteHeader(s *Stream, md metadata.MD) error { - if s.updateHeaderSent() || s.getState() == streamDone { + if s.updateHeaderSent() { return ErrIllegalHeaderWrite } + + if s.getState() == streamDone { + return t.streamContextErr(s) + } + s.hdrMu.Lock() if md.Len() > 0 { if s.header.Len() > 0 { @@ -946,7 +963,7 @@ func (t *http2Server) WriteHeader(s *Stream, md metadata.MD) error { } if err := t.writeHeaderLocked(s); err != nil { s.hdrMu.Unlock() - return err + return status.Convert(err).Err() } s.hdrMu.Unlock() return nil @@ -1062,23 +1079,12 @@ func (t *http2Server) WriteStatus(s *Stream, st *status.Status) error { func (t *http2Server) Write(s *Stream, hdr []byte, data []byte, opts *Options) error { if !s.isHeaderSent() { // Headers haven't been written yet. if err := t.WriteHeader(s, nil); err != nil { - if _, ok := err.(ConnectionError); ok { - return err - } - // TODO(mmukhi, dfawley): Make sure this is the right code to return. - return status.Errorf(codes.Internal, "transport: %v", err) + return err } } else { // Writing headers checks for this condition. if s.getState() == streamDone { - // TODO(mmukhi, dfawley): Should the server write also return io.EOF? - s.cancel() - select { - case <-t.done: - return ErrConnClosing - default: - } - return ContextErr(s.ctx.Err()) + return t.streamContextErr(s) } } df := &dataFrame{ @@ -1088,12 +1094,7 @@ func (t *http2Server) Write(s *Stream, hdr []byte, data []byte, opts *Options) e onEachWrite: t.setResetPingStrikes, } if err := s.wq.get(int32(len(hdr) + len(data))); err != nil { - select { - case <-t.done: - return ErrConnClosing - default: - } - return ContextErr(s.ctx.Err()) + return t.streamContextErr(s) } return t.controlBuf.put(df) } @@ -1229,10 +1230,6 @@ func (t *http2Server) Close() { // deleteStream deletes the stream s from transport's active streams. func (t *http2Server) deleteStream(s *Stream, eosReceived bool) { - // In case stream sending and receiving are invoked in separate - // goroutines (e.g., bi-directional streaming), cancel needs to be - // called to interrupt the potential blocking on other goroutines. - s.cancel() t.mu.Lock() if _, ok := t.activeStreams[s.id]; ok { @@ -1254,6 +1251,11 @@ func (t *http2Server) deleteStream(s *Stream, eosReceived bool) { // finishStream closes the stream and puts the trailing headerFrame into controlbuf. func (t *http2Server) finishStream(s *Stream, rst bool, rstCode http2.ErrCode, hdr *headerFrame, eosReceived bool) { + // In case stream sending and receiving are invoked in separate + // goroutines (e.g., bi-directional streaming), cancel needs to be + // called to interrupt the potential blocking on other goroutines. + s.cancel() + oldState := s.swapState(streamDone) if oldState == streamDone { // If the stream was already done, return. @@ -1273,6 +1275,11 @@ func (t *http2Server) finishStream(s *Stream, rst bool, rstCode http2.ErrCode, h // closeStream clears the footprint of a stream when the stream is not needed any more. func (t *http2Server) closeStream(s *Stream, rst bool, rstCode http2.ErrCode, eosReceived bool) { + // In case stream sending and receiving are invoked in separate + // goroutines (e.g., bi-directional streaming), cancel needs to be + // called to interrupt the potential blocking on other goroutines. 
+ s.cancel() + s.swapState(streamDone) t.deleteStream(s, eosReceived) diff --git a/constraint/vendor/google.golang.org/grpc/picker_wrapper.go b/constraint/vendor/google.golang.org/grpc/picker_wrapper.go index e8367cb89..843633c91 100644 --- a/constraint/vendor/google.golang.org/grpc/picker_wrapper.go +++ b/constraint/vendor/google.golang.org/grpc/picker_wrapper.go @@ -131,7 +131,7 @@ func (pw *pickerWrapper) pick(ctx context.Context, failfast bool, info balancer. } if _, ok := status.FromError(err); ok { // Status error: end the RPC unconditionally with this status. - return nil, nil, err + return nil, nil, dropError{error: err} } // For all other errors, wait for ready RPCs should block and other // RPCs should fail with unavailable. @@ -175,3 +175,9 @@ func (pw *pickerWrapper) close() { pw.done = true close(pw.blockingCh) } + +// dropError is a wrapper error that indicates the LB policy wishes to drop the +// RPC and not retry it. +type dropError struct { + error +} diff --git a/constraint/vendor/google.golang.org/grpc/server.go b/constraint/vendor/google.golang.org/grpc/server.go index 96431a058..65de84b30 100644 --- a/constraint/vendor/google.golang.org/grpc/server.go +++ b/constraint/vendor/google.golang.org/grpc/server.go @@ -1801,12 +1801,26 @@ func (s *Server) getCodec(contentSubtype string) baseCodec { return codec } -// SetHeader sets the header metadata. -// When called multiple times, all the provided metadata will be merged. -// All the metadata will be sent out when one of the following happens: -// - grpc.SendHeader() is called; -// - The first response is sent out; -// - An RPC status is sent out (error or success). +// SetHeader sets the header metadata to be sent from the server to the client. +// The context provided must be the context passed to the server's handler. +// +// Streaming RPCs should prefer the SetHeader method of the ServerStream. +// +// When called multiple times, all the provided metadata will be merged. All +// the metadata will be sent out when one of the following happens: +// +// - grpc.SendHeader is called, or for streaming handlers, stream.SendHeader. +// - The first response message is sent. For unary handlers, this occurs when +// the handler returns; for streaming handlers, this can happen when stream's +// SendMsg method is called. +// - An RPC status is sent out (error or success). This occurs when the handler +// returns. +// +// SetHeader will fail if called after any of the events above. +// +// The error returned is compatible with the status package. However, the +// status code will often not match the RPC status as seen by the client +// application, and therefore, should not be relied upon for this purpose. func SetHeader(ctx context.Context, md metadata.MD) error { if md.Len() == 0 { return nil @@ -1818,8 +1832,14 @@ func SetHeader(ctx context.Context, md metadata.MD) error { return stream.SetHeader(md) } -// SendHeader sends header metadata. It may be called at most once. -// The provided md and headers set by SetHeader() will be sent. +// SendHeader sends header metadata. It may be called at most once, and may not +// be called after any event that causes headers to be sent (see SetHeader for +// a complete list). The provided md and headers set by SetHeader() will be +// sent. +// +// The error returned is compatible with the status package. However, the +// status code will often not match the RPC status as seen by the client +// application, and therefore, should not be relied upon for this purpose. 
func SendHeader(ctx context.Context, md metadata.MD) error { stream := ServerTransportStreamFromContext(ctx) if stream == nil { @@ -1833,6 +1853,10 @@ func SendHeader(ctx context.Context, md metadata.MD) error { // SetTrailer sets the trailer metadata that will be sent when an RPC returns. // When called more than once, all the provided metadata will be merged. +// +// The error returned is compatible with the status package. However, the +// status code will often not match the RPC status as seen by the client +// application, and therefore, should not be relied upon for this purpose. func SetTrailer(ctx context.Context, md metadata.MD) error { if md.Len() == 0 { return nil diff --git a/constraint/vendor/google.golang.org/grpc/stream.go b/constraint/vendor/google.golang.org/grpc/stream.go index e0b30b46f..236fc17ec 100644 --- a/constraint/vendor/google.golang.org/grpc/stream.go +++ b/constraint/vendor/google.golang.org/grpc/stream.go @@ -303,14 +303,28 @@ func newClientStreamWithParams(ctx context.Context, desc *StreamDesc, cc *Client } cs.binlog = binarylog.GetMethodLogger(method) - if err := cs.newAttemptLocked(false /* isTransparent */); err != nil { + cs.attempt, err = cs.newAttemptLocked(false /* isTransparent */) + if err != nil { cs.finish(err) return nil, err } - op := func(a *csAttempt) error { return a.newStream() } + // Pick the transport to use and create a new stream on the transport. + // Assign cs.attempt upon success. + op := func(a *csAttempt) error { + if err := a.getTransport(); err != nil { + return err + } + if err := a.newStream(); err != nil { + return err + } + // Because this operation is always called either here (while creating + // the clientStream) or by the retry code while locked when replaying + // the operation, it is safe to access cs.attempt directly. + cs.attempt = a + return nil + } if err := cs.withRetry(op, func() { cs.bufferForRetryLocked(0, op) }); err != nil { - cs.finish(err) return nil, err } @@ -349,9 +363,15 @@ func newClientStreamWithParams(ctx context.Context, desc *StreamDesc, cc *Client return cs, nil } -// newAttemptLocked creates a new attempt with a transport. -// If it succeeds, then it replaces clientStream's attempt with this new attempt. -func (cs *clientStream) newAttemptLocked(isTransparent bool) (retErr error) { +// newAttemptLocked creates a new csAttempt without a transport or stream. +func (cs *clientStream) newAttemptLocked(isTransparent bool) (*csAttempt, error) { + if err := cs.ctx.Err(); err != nil { + return nil, toRPCErr(err) + } + if err := cs.cc.ctx.Err(); err != nil { + return nil, ErrClientConnClosing + } + ctx := newContextWithRPCInfo(cs.ctx, cs.callInfo.failFast, cs.callInfo.codec, cs.cp, cs.comp) method := cs.callHdr.Method sh := cs.cc.dopts.copts.StatsHandler @@ -385,44 +405,39 @@ func (cs *clientStream) newAttemptLocked(isTransparent bool) (retErr error) { ctx = trace.NewContext(ctx, trInfo.tr) } - newAttempt := &csAttempt{ + if cs.cc.parsedTarget.Scheme == "xds" { + // Add extra metadata (metadata that will be added by transport) to context + // so the balancer can see them. + ctx = grpcutil.WithExtraMetadata(ctx, metadata.Pairs( + "content-type", grpcutil.ContentType(cs.callHdr.ContentSubtype), + )) + } + + return &csAttempt{ ctx: ctx, beginTime: beginTime, cs: cs, dc: cs.cc.dopts.dc, statsHandler: sh, trInfo: trInfo, - } - defer func() { - if retErr != nil { - // This attempt is not set in the clientStream, so it's finish won't - // be called. Call it here for stats and trace in case they are not - // nil. 
- newAttempt.finish(retErr) - } - }() + }, nil +} - if err := ctx.Err(); err != nil { - return toRPCErr(err) - } +func (a *csAttempt) getTransport() error { + cs := a.cs - if cs.cc.parsedTarget.Scheme == "xds" { - // Add extra metadata (metadata that will be added by transport) to context - // so the balancer can see them. - ctx = grpcutil.WithExtraMetadata(ctx, metadata.Pairs( - "content-type", grpcutil.ContentType(cs.callHdr.ContentSubtype), - )) - } - t, done, err := cs.cc.getTransport(ctx, cs.callInfo.failFast, cs.callHdr.Method) + var err error + a.t, a.done, err = cs.cc.getTransport(a.ctx, cs.callInfo.failFast, cs.callHdr.Method) if err != nil { + if de, ok := err.(dropError); ok { + err = de.error + a.drop = true + } return err } - if trInfo != nil { - trInfo.firstLine.SetRemoteAddr(t.RemoteAddr()) + if a.trInfo != nil { + a.trInfo.firstLine.SetRemoteAddr(a.t.RemoteAddr()) } - newAttempt.t = t - newAttempt.done = done - cs.attempt = newAttempt return nil } @@ -431,12 +446,21 @@ func (a *csAttempt) newStream() error { cs.callHdr.PreviousAttempts = cs.numRetries s, err := a.t.NewStream(a.ctx, cs.callHdr) if err != nil { - // Return without converting to an RPC error so retry code can - // inspect. - return err + nse, ok := err.(*transport.NewStreamError) + if !ok { + // Unexpected. + return err + } + + if nse.AllowTransparentRetry { + a.allowTransparentRetry = true + } + + // Unwrap and convert error. + return toRPCErr(nse.Err) } - cs.attempt.s = s - cs.attempt.p = &parser{r: s} + a.s = s + a.p = &parser{r: s} return nil } @@ -514,6 +538,11 @@ type csAttempt struct { statsHandler stats.Handler beginTime time.Time + + // set for newStream errors that may be transparently retried + allowTransparentRetry bool + // set for pick errors that are returned as a status + drop bool } func (cs *clientStream) commitAttemptLocked() { @@ -533,41 +562,21 @@ func (cs *clientStream) commitAttempt() { // shouldRetry returns nil if the RPC should be retried; otherwise it returns // the error that should be returned by the operation. If the RPC should be // retried, the bool indicates whether it is being retried transparently. -func (cs *clientStream) shouldRetry(err error) (bool, error) { - if cs.attempt.s == nil { - // Error from NewClientStream. - nse, ok := err.(*transport.NewStreamError) - if !ok { - // Unexpected, but assume no I/O was performed and the RPC is not - // fatal, so retry indefinitely. - return true, nil - } - - // Unwrap and convert error. - err = toRPCErr(nse.Err) - - // Never retry DoNotRetry errors, which indicate the RPC should not be - // retried due to max header list size violation, etc. - if nse.DoNotRetry { - return false, err - } +func (a *csAttempt) shouldRetry(err error) (bool, error) { + cs := a.cs - // In the event of a non-IO operation error from NewStream, we never - // attempted to write anything to the wire, so we can retry - // indefinitely. - if !nse.DoNotTransparentRetry { - return true, nil - } - } - if cs.finished || cs.committed { - // RPC is finished or committed; cannot retry. + if cs.finished || cs.committed || a.drop { + // RPC is finished or committed or was dropped by the picker; cannot retry. return false, err } + if a.s == nil && a.allowTransparentRetry { + return true, nil + } // Wait for the trailers. 
unprocessed := false - if cs.attempt.s != nil { - <-cs.attempt.s.Done() - unprocessed = cs.attempt.s.Unprocessed() + if a.s != nil { + <-a.s.Done() + unprocessed = a.s.Unprocessed() } if cs.firstAttempt && unprocessed { // First attempt, stream unprocessed: transparently retry. @@ -579,14 +588,14 @@ func (cs *clientStream) shouldRetry(err error) (bool, error) { pushback := 0 hasPushback := false - if cs.attempt.s != nil { - if !cs.attempt.s.TrailersOnly() { + if a.s != nil { + if !a.s.TrailersOnly() { return false, err } // TODO(retry): Move down if the spec changes to not check server pushback // before considering this a failure for throttling. - sps := cs.attempt.s.Trailer()["grpc-retry-pushback-ms"] + sps := a.s.Trailer()["grpc-retry-pushback-ms"] if len(sps) == 1 { var e error if pushback, e = strconv.Atoi(sps[0]); e != nil || pushback < 0 { @@ -603,10 +612,10 @@ func (cs *clientStream) shouldRetry(err error) (bool, error) { } var code codes.Code - if cs.attempt.s != nil { - code = cs.attempt.s.Status().Code() + if a.s != nil { + code = a.s.Status().Code() } else { - code = status.Convert(err).Code() + code = status.Code(err) } rp := cs.methodConfig.RetryPolicy @@ -651,19 +660,24 @@ func (cs *clientStream) shouldRetry(err error) (bool, error) { } // Returns nil if a retry was performed and succeeded; error otherwise. -func (cs *clientStream) retryLocked(lastErr error) error { +func (cs *clientStream) retryLocked(attempt *csAttempt, lastErr error) error { for { - cs.attempt.finish(toRPCErr(lastErr)) - isTransparent, err := cs.shouldRetry(lastErr) + attempt.finish(toRPCErr(lastErr)) + isTransparent, err := attempt.shouldRetry(lastErr) if err != nil { cs.commitAttemptLocked() return err } cs.firstAttempt = false - if err := cs.newAttemptLocked(isTransparent); err != nil { + attempt, err = cs.newAttemptLocked(isTransparent) + if err != nil { + // Only returns error if the clientconn is closed or the context of + // the stream is canceled. return err } - if lastErr = cs.replayBufferLocked(); lastErr == nil { + // Note that the first op in the replay buffer always sets cs.attempt + // if it is able to pick a transport and create a stream. + if lastErr = cs.replayBufferLocked(attempt); lastErr == nil { return nil } } @@ -673,7 +687,10 @@ func (cs *clientStream) Context() context.Context { cs.commitAttempt() // No need to lock before using attempt, since we know it is committed and // cannot change. 
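+ // cs.attempt.s is nil if a transport stream was never successfully created; + // fall back to the clientStream's own context in that case.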
-	return cs.attempt.s.Context()
+	if cs.attempt.s != nil {
+		return cs.attempt.s.Context()
+	}
+	return cs.ctx
 }
 
 func (cs *clientStream) withRetry(op func(a *csAttempt) error, onSuccess func()) error {
@@ -703,7 +720,7 @@ func (cs *clientStream) withRetry(op func(a *csAttempt) error, onSuccess func())
 			cs.mu.Unlock()
 			return err
 		}
-		if err := cs.retryLocked(err); err != nil {
+		if err := cs.retryLocked(a, err); err != nil {
 			cs.mu.Unlock()
 			return err
 		}
@@ -734,7 +751,7 @@ func (cs *clientStream) Header() (metadata.MD, error) {
 		cs.binlog.Log(logEntry)
 		cs.serverHeaderBinlogged = true
 	}
-	return m, err
+	return m, nil
 }
 
 func (cs *clientStream) Trailer() metadata.MD {
@@ -752,10 +769,9 @@ func (cs *clientStream) Trailer() metadata.MD {
 	return cs.attempt.s.Trailer()
 }
 
-func (cs *clientStream) replayBufferLocked() error {
-	a := cs.attempt
+func (cs *clientStream) replayBufferLocked(attempt *csAttempt) error {
 	for _, f := range cs.buffer {
-		if err := f(a); err != nil {
+		if err := f(attempt); err != nil {
 			return err
 		}
 	}
@@ -803,22 +819,17 @@ func (cs *clientStream) SendMsg(m interface{}) (err error) {
 	if len(payload) > *cs.callInfo.maxSendMessageSize {
 		return status.Errorf(codes.ResourceExhausted, "trying to send message larger than max (%d vs. %d)", len(payload), *cs.callInfo.maxSendMessageSize)
 	}
-	msgBytes := data // Store the pointer before setting to nil. For binary logging.
 	op := func(a *csAttempt) error {
-		err := a.sendMsg(m, hdr, payload, data)
-		// nil out the message and uncomp when replaying; they are only needed for
-		// stats which is disabled for subsequent attempts.
-		m, data = nil, nil
-		return err
+		return a.sendMsg(m, hdr, payload, data)
 	}
 	err = cs.withRetry(op, func() { cs.bufferForRetryLocked(len(hdr)+len(payload), op) })
 	if cs.binlog != nil && err == nil {
 		cs.binlog.Log(&binarylog.ClientMessage{
 			OnClientSide: true,
-			Message:      msgBytes,
+			Message:      data,
 		})
 	}
-	return
+	return err
 }
 
 func (cs *clientStream) RecvMsg(m interface{}) error {
@@ -1370,8 +1381,10 @@ func (as *addrConnStream) finish(err error) {
 
 // ServerStream defines the server-side behavior of a streaming RPC.
 //
-// All errors returned from ServerStream methods are compatible with the
-// status package.
+// Errors returned from ServerStream methods are compatible with the status
+// package. However, the status code will often not match the RPC status as
+// seen by the client application, and therefore, should not be relied upon for
+// this purpose.
 type ServerStream interface {
 	// SetHeader sets the header metadata. It may be called multiple times.
 	// When call multiple times, all the provided metadata will be merged.
diff --git a/constraint/vendor/google.golang.org/grpc/version.go b/constraint/vendor/google.golang.org/grpc/version.go
index 6af76dfe7..5bc03f9b3 100644
--- a/constraint/vendor/google.golang.org/grpc/version.go
+++ b/constraint/vendor/google.golang.org/grpc/version.go
@@ -19,4 +19,4 @@ package grpc
 
 // Version is the current grpc version.
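replayBufferLocked now receives the target attempt explicitly instead of reading cs.attempt, which is what lets retryLocked above replay onto a fresh attempt. As background, the buffer stores each user-visible op as a closure that is re-run, in order, on every new attempt; a sketch of that idea with illustrative types, not grpc-go's (the version.go hunk continues below):

	package main

	import "fmt"

	// attempt stands in for csAttempt: one try of the RPC, with its own
	// transport and stream.
	type attempt struct{ n int }

	// replayBuffer mirrors the clientStream buffer that replayBufferLocked
	// walks: every user-visible op is recorded as a closure and replayed,
	// in order, against each new attempt after a retry.
	type replayBuffer struct {
		ops []func(a *attempt) error
	}

	func (b *replayBuffer) record(op func(a *attempt) error) {
		b.ops = append(b.ops, op)
	}

	func (b *replayBuffer) replay(a *attempt) error {
		for _, op := range b.ops {
			if err := op(a); err != nil {
				return err
			}
		}
		return nil
	}

	func main() {
		var b replayBuffer
		b.record(func(a *attempt) error { fmt.Printf("headers on attempt %d\n", a.n); return nil })
		b.record(func(a *attempt) error { fmt.Printf("message on attempt %d\n", a.n); return nil })
		_ = b.replay(&attempt{n: 2}) // after a retry, the same ops run on the new attempt
	}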
-const Version = "1.46.0"
+const Version = "1.47.0"
diff --git a/constraint/vendor/modules.txt b/constraint/vendor/modules.txt
index 9403a454a..1022884c4 100644
--- a/constraint/vendor/modules.txt
+++ b/constraint/vendor/modules.txt
@@ -7,6 +7,9 @@ github.com/PuerkitoBio/purell
 # github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578
 ## explicit
 github.com/PuerkitoBio/urlesc
+# github.com/agnivade/levenshtein v1.0.1
+## explicit
+github.com/agnivade/levenshtein
 # github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e
 ## explicit; go 1.16
 github.com/antlr/antlr4/runtime/Go/antlr
@@ -161,7 +164,7 @@ github.com/modern-go/reflect2
 # github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822
 ## explicit
 github.com/munnerz/goautoneg
-# github.com/open-policy-agent/opa v0.40.0
+# github.com/open-policy-agent/opa v0.41.0
 ## explicit; go 1.16
 github.com/open-policy-agent/opa/ast
 github.com/open-policy-agent/opa/ast/internal/scanner
@@ -258,6 +261,15 @@ github.com/spf13/pflag
 # github.com/stoewer/go-strcase v1.2.0
 ## explicit; go 1.11
 github.com/stoewer/go-strcase
+# github.com/vektah/gqlparser/v2 v2.4.4
+## explicit; go 1.16
+github.com/vektah/gqlparser/v2
+github.com/vektah/gqlparser/v2/ast
+github.com/vektah/gqlparser/v2/gqlerror
+github.com/vektah/gqlparser/v2/lexer
+github.com/vektah/gqlparser/v2/parser
+github.com/vektah/gqlparser/v2/validator
+github.com/vektah/gqlparser/v2/validator/rules
 # github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb
 ## explicit
 github.com/xeipuuv/gojsonpointer
@@ -270,10 +282,10 @@ github.com/yashtewari/glob-intersection
 # go.opentelemetry.io/contrib v0.20.0
 ## explicit; go 1.14
 go.opentelemetry.io/contrib
-# go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0 => go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.20.0
+# go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.32.0 => go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.20.0
 ## explicit; go 1.14
 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp
-# go.opentelemetry.io/otel v1.6.3 => go.opentelemetry.io/otel v0.20.0
+# go.opentelemetry.io/otel v1.7.0 => go.opentelemetry.io/otel v0.20.0
 ## explicit; go 1.14
 go.opentelemetry.io/otel
 go.opentelemetry.io/otel/attribute
@@ -298,7 +310,7 @@ go.opentelemetry.io/otel/metric
 go.opentelemetry.io/otel/metric/global
 go.opentelemetry.io/otel/metric/number
 go.opentelemetry.io/otel/metric/registry
-# go.opentelemetry.io/otel/sdk v1.6.3 => go.opentelemetry.io/otel/sdk v0.20.0
+# go.opentelemetry.io/otel/sdk v1.7.0 => go.opentelemetry.io/otel/sdk v0.20.0
 ## explicit; go 1.14
 go.opentelemetry.io/otel/sdk/instrumentation
 go.opentelemetry.io/otel/sdk/internal
@@ -321,10 +333,10 @@ go.opentelemetry.io/otel/sdk/metric/controller/basic
 go.opentelemetry.io/otel/sdk/metric/controller/time
 go.opentelemetry.io/otel/sdk/metric/processor/basic
 go.opentelemetry.io/otel/sdk/metric/selector/simple
-# go.opentelemetry.io/otel/trace v1.6.3 => go.opentelemetry.io/otel/trace v0.20.0
+# go.opentelemetry.io/otel/trace v1.7.0 => go.opentelemetry.io/otel/trace v0.20.0
 ## explicit; go 1.14
 go.opentelemetry.io/otel/trace
-# go.opentelemetry.io/proto/otlp v0.15.0 => go.opentelemetry.io/proto/otlp v0.7.0
+# go.opentelemetry.io/proto/otlp v0.16.0 => go.opentelemetry.io/proto/otlp v0.7.0
 ## explicit; go 1.15
 go.opentelemetry.io/proto/otlp/collector/metrics/v1
 go.opentelemetry.io/proto/otlp/collector/trace/v1
@@ -380,7 +392,7 @@ google.golang.org/genproto/googleapis/api/expr/v1alpha1
 google.golang.org/genproto/googleapis/api/httpbody
 google.golang.org/genproto/googleapis/rpc/status
 google.golang.org/genproto/protobuf/field_mask
-# google.golang.org/grpc v1.46.0
+# google.golang.org/grpc v1.47.0
 ## explicit; go 1.14
 google.golang.org/grpc
 google.golang.org/grpc/attributes
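Since this bump takes the vendored gRPC to 1.47.0, the reworked retry path above applies to any method configured with a retryPolicy; transparent retries need no configuration at all. For reference, a minimal client-side sketch in the style of grpc-go's retry example; the target address, service name, and policy values are placeholders, not part of this patch:

	package main

	import (
		"log"

		"google.golang.org/grpc"
		"google.golang.org/grpc/credentials/insecure"
	)

	// serviceConfig enables status-code-driven retries for one service.
	// Server pushback via grpc-retry-pushback-ms overrides the backoff here.
	const serviceConfig = `{
	  "methodConfig": [{
	    "name": [{"service": "echo.Echo"}],
	    "retryPolicy": {
	      "MaxAttempts": 4,
	      "InitialBackoff": ".1s",
	      "MaxBackoff": "1s",
	      "BackoffMultiplier": 2.0,
	      "RetryableStatusCodes": ["UNAVAILABLE"]
	    }
	  }]
	}`

	func main() {
		conn, err := grpc.Dial("localhost:50051",
			grpc.WithTransportCredentials(insecure.NewCredentials()),
			grpc.WithDefaultServiceConfig(serviceConfig),
		)
		if err != nil {
			log.Fatalf("dial: %v", err)
		}
		defer conn.Close()
	}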