diff --git a/.circleci/config.yml b/.circleci/config.yml index 3ff23679d1..2762dbc746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -25,6 +25,7 @@ executors: - image: cimg/redis:7.2.4 - image: jaegertracing/all-in-one:1.54.0 - image: openzipkin/zipkin:2.23.2 + - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.17.0 resource_class: xlarge environment: CARGO_BUILD_JOBS: 4 diff --git a/.config/nextest.toml b/.config/nextest.toml index df8bab66e6..cff09c8ef6 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -1,3 +1,92 @@ +[[profile.default.overrides]] +# These are known flaky tests according to the test flakiness report provided +# in CircleCI insights, based on the `dev` branch: +# +# https://app.circleci.com/insights/github/apollographql/router/workflows/ci_checks/tests +# +# We will retry these tests up to 2 additional times. Retry counts are recorded. +# Items on this list should be prioritized to get improved and removed from this +# list at the time they are fixed. +# +# Frankly, it may be best to just retry all tests in the apollo-router::integration_tests +# module, as they have a high failure rate, in general. 
+retries = 2 +filter = ''' + ( binary_id(=apollo-router) & test(=axum_factory::axum_http_server_factory::tests::request_cancel_log) ) +or ( binary_id(=apollo-router) & test(=axum_factory::axum_http_server_factory::tests::request_cancel_no_log) ) +or ( binary_id(=apollo-router) & test(=notification::tests::it_test_ttl) ) +or ( binary_id(=apollo-router) & test(=plugins::telemetry::metrics::apollo::test::apollo_metrics_enabled) ) +or ( binary_id(=apollo-router) & test(=plugins::telemetry::tests::it_test_prometheus_metrics) ) +or ( binary_id(=apollo-router) & test(=services::subgraph_service::tests::test_subgraph_service_websocket_with_error) ) +or ( binary_id(=apollo-router) & test(=uplink::license_stream::test::license_expander_claim_pause_claim) ) +or ( binary_id(=apollo-router) & test(=uplink::persisted_queries_manifest_stream::test::integration_test) ) +or ( binary_id(=apollo-router-benchmarks) & test(=tests::test) ) +or ( binary_id(=apollo-router::apollo_otel_traces) & test(=test_batch_send_header) ) +or ( binary_id(=apollo-router::apollo_otel_traces) & test(=test_batch_trace_id) ) +or ( binary_id(=apollo-router::apollo_otel_traces) & test(=test_condition_if) ) +or ( binary_id(=apollo-router::apollo_otel_traces) & test(=test_trace_id) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=non_defer) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_batch_stats) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_client_name) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_client_version) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_condition_if) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_send_header) ) +or ( binary_id(=apollo-router::apollo_reports) & test(=test_trace_id) ) +or ( binary_id(=apollo-router::integration_tests) & test(=api_schema_hides_field) ) +or ( binary_id(=apollo-router::integration_tests) & test(=automated_persisted_queries) ) +or ( 
binary_id(=apollo-router::integration_tests) & test(=defer_default_variable) ) +or ( binary_id(=apollo-router::integration_tests) & test(=defer_path) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_batches_with_errors_in_multi_graph) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_batches_with_errors_in_single_graph) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_cancelled_by_coprocessor) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_cancelled_by_rhai) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_indefinite_timeouts) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_short_timeouts) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_single_invalid_graphql) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_single_request_cancelled_by_coprocessor) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_handles_single_request_cancelled_by_rhai) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_supports_multi_subgraph_batching) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::batching::it_supports_single_subgraph_batching) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::coprocessor::test_error_not_propagated_to_client) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_incompatible_query_order) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_invalid_file_order) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_invalid_multipart_order) ) +or ( 
binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_upload_without_file) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_with_file_count_limits) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_with_file_size_limit) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_fails_with_no_boundary_in_multipart) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::file_upload::it_uploads_to_multiple_subgraphs) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::lifecycle::test_graceful_shutdown) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::lifecycle::test_happy) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::lifecycle::test_reload_config_valid) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::lifecycle::test_reload_config_with_broken_plugin) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::lifecycle::test_reload_config_with_broken_plugin_recovery) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::apq) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::connection_failure_blocks_startup) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::entity_cache) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::query_planner_redis_update_defer) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::query_planner_redis_update_query_fragments) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::query_planner_redis_update_reuse_query_fragments) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::redis::test::connection_failure_blocks_startup) ) +or ( 
binary_id(=apollo-router::integration_tests) & test(=integration::telemetry::jaeger::test_decimal_trace_id) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::telemetry::logging::test_json) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::telemetry::logging::test_json_sampler_off) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::telemetry::logging::test_text_sampler_off) ) +or ( binary_id(=apollo-router::integration_tests) & test(=integration::telemetry::metrics::test_subgraph_auth_metrics) ) +or ( binary_id(=apollo-router::samples) & test(=/enterprise/entity-cache/invalidation) ) +or ( binary_id(=apollo-router::samples) & test(=/enterprise/entity-cache/invalidation-subgraph) ) +or ( binary_id(=apollo-router::samples) & test(=/enterprise/entity-cache/invalidation-subgraph-type) ) +or ( binary_id(=apollo-router::samples) & test(=/enterprise/query-planning-redis) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_dependent_fetch_failure) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_list) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_list_of_lists) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_no_typenames) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_type_mismatch) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_union) ) +or ( binary_id(=apollo-router::set_context) & test(=test_set_context_unrelated_fetch_failure) ) +''' + [profile.ci] # Print out output for failing tests as soon as they fail, and also at the end # of the run (for easy scrollability). 
diff --git a/.github/workflows/update_apollo_protobuf.yaml b/.github/workflows/update_apollo_protobuf.yaml index b2c660a66b..cdb6aa84b4 100644 --- a/.github/workflows/update_apollo_protobuf.yaml +++ b/.github/workflows/update_apollo_protobuf.yaml @@ -9,7 +9,7 @@ jobs: Update-Protobuf-Schema: runs-on: ubuntu-latest steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - name: Make changes to pull request run: | curl -f https://usage-reporting.api.apollographql.com/proto/reports.proto > ./apollo-router/src/plugins/telemetry/proto/reports.proto diff --git a/.github/workflows/update_uplink_schema.yml b/.github/workflows/update_uplink_schema.yml index 6e4bdce77f..dd89b1ecdb 100644 --- a/.github/workflows/update_uplink_schema.yml +++ b/.github/workflows/update_uplink_schema.yml @@ -9,7 +9,7 @@ jobs: Update-Uplink-Schema: runs-on: ubuntu-latest steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - name: Install Rover run: | curl -sSL https://rover.apollo.dev/nix/v0.14.1 | sh diff --git a/CHANGELOG.md b/CHANGELOG.md index dccc28aa3f..de369468ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,230 @@ All notable changes to Router will be documented in this file. This project adheres to [Semantic Versioning v2.0.0](https://semver.org/spec/v2.0.0.html). +# [1.51.0] - 2024-07-16 + +## 🚀 Features + +### Support conditional coprocessor execution per stage of request lifecycle ([PR #5557](https://github.com/apollographql/router/pull/5557)) + +The router now supports conditional execution of the coprocessor for each stage of the request lifecycle (except for the `Execution` stage). + +To configure, define conditions for a specific stage by using selectors based on headers or context entries. 
For example, based on a supergraph response you can configure the coprocessor not to execute for any subscription: + + + +```yaml title=router.yaml +coprocessor: + url: http://127.0.0.1:3000 # mandatory URL which is the address of the coprocessor + timeout: 2s # optional timeout (2 seconds in this example). If not set, defaults to 1 second + supergraph: + response: + condition: + not: + eq: + - subscription + - operation_kind: string + body: true +``` + +To learn more, see the documentation about [coprocessor conditions](https://www.apollographql.com/docs/router/customizations/coprocessor/#conditions). + +By [@bnjjj](https://github.com/bnjjj) in https://github.com/apollographql/router/pull/5557 + +### Add option to deactivate introspection response caching ([PR #5583](https://github.com/apollographql/router/pull/5583)) + +The router now supports an option to deactivate introspection response caching. Because the router caches responses as introspection happens in the query planner, cached introspection responses may consume too much of the distributed cache or fill it up. Setting this option prevents introspection responses from filling up the router's distributed cache. + +To deactivate introspection caching, set `supergraph.query_planning.legacy_introspection_caching` to `false`: + + +```yaml +supergraph: + query_planning: + legacy_introspection_caching: false +``` + +By [@Geal](https://github.com/Geal) in https://github.com/apollographql/router/pull/5583 + +### Add 'subgraph_on_graphql_error' selector for subgraph ([PR #5622](https://github.com/apollographql/router/pull/5622)) + +The router now supports the `subgraph_on_graphql_error` selector for the subgraph service, which it already supported for the router and supergraph services. Subgraph service support enables easier detection of GraphQL errors in response bodies of subgraph requests. 
+ +An example configuration with `subgraph_on_graphql_error` configured: + +```yaml +telemetry: + instrumentation: + instruments: + subgraph: + http.client.request.duration: + attributes: + subgraph.graphql.errors: # attribute containing a boolean set to true if response.errors is not empty + subgraph_on_graphql_error: true +``` + +By [@bnjjj](https://github.com/bnjjj) in https://github.com/apollographql/router/pull/5622 + +## 🐛 Fixes + +### Add `response_context` in event selector for `event_*` instruments ([PR #5565](https://github.com/apollographql/router/pull/5565)) + +The router now supports creating custom instruments with a value set to `event_*` and using both a condition executed on an event and the `response_context` selector in attributes. Previous releases didn't support the `response_context` selector in attributes. + +An example configuration: + +```yaml +telemetry: + instrumentation: + instruments: + supergraph: + sf.graphql_router.errors: + value: event_unit + type: counter + unit: count + description: "graphql errors handled by the apollo router" + condition: + eq: + - true + - on_graphql_error: true + attributes: + "operation": + response_context: "operation_name" # This was not working before +``` + +By [@bnjjj](https://github.com/bnjjj) in https://github.com/apollographql/router/pull/5565 + +### Provide valid trace IDs for unsampled traces in Rhai scripts ([PR #5606](https://github.com/apollographql/router/pull/5606)) + +The `traceid()` function in a Rhai script for the router now returns a valid trace ID for all traces. + +Previously, `traceid()` didn't return a trace ID if the trace wasn't selected for sampling. + +By [@bnjjj](https://github.com/bnjjj) in https://github.com/apollographql/router/pull/5606 + +### Allow query batching and entity caching to work together ([PR #5598](https://github.com/apollographql/router/pull/5598)) + +The router now supports entity caching and subgraph batching to run simultaneously. 
Specifically, this change updates entity caching to ignore a subgraph request if the request is part of a batch. + +By [@garypen](https://github.com/garypen) in https://github.com/apollographql/router/pull/5598 + +### Gracefully handle subgraph response with `-1` values inside error locations ([PR #5633](https://github.com/apollographql/router/pull/5633)) + +This router now gracefully handles responses that contain invalid "`-1`" positional values for error locations in queries by ignoring those invalid locations. + +This change resolves the problem of GraphQL Java and GraphQL Kotlin using `{ "line": -1, "column": -1 }` values if they can't determine an error's location in a query, but the GraphQL specification [requires both `line` and `column` to be positive numbers](https://spec.graphql.org/draft/#sel-GAPHRPFCCaCGX5zM). + +As an example, a subgraph can respond with invalid error locations: +```json +{ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "locations": [ + { "line": -1, "column": -1 }, + ], + "path":["topProducts"] + }] +} +``` + +With this change, the router returns a response that ignores the invalid locations: + +```json +{ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "path":["topProducts"] + }] +} +``` + +By [@IvanGoncharov](https://github.com/IvanGoncharov) in https://github.com/apollographql/router/pull/5633 + +### Return request timeout and rate limited error responses as structured errors ([PR #5578](https://github.com/apollographql/router/pull/5578)) + +The router now returns request timeout errors (`408 Request Timeout`) and request rate limited errors (`429 Too Many Requests`) as structured GraphQL errors (for example, `{"errors": [...]}`). Previously, the router returned these as plaintext errors to clients. + +Both types of errors are properly tracked in telemetry, including the `apollo_router_graphql_error_total` metric. 
+ +By [@IvanGoncharov](https://github.com/IvanGoncharov) in https://github.com/apollographql/router/pull/5578 + +### Fix span names and resource mapping for Datadog trace exporter ([Issue #5282](https://github.com/apollographql/router/issues/5282)) + +> [!NOTE] +> This is an **incremental** improvement, but we expect more improvements in Router v1.52.0 after https://github.com/apollographql/router/pull/5609/ lands. + +The router now uses _static span names_ by default. This change fixes the user experience of the Datadog trace exporter when sending traces with Datadog native configuration. + +The router has two ways of sending traces to Datadog: + +1. The [OpenTelemetry for Datadog](https://www.apollographql.com/docs/router/configuration/telemetry/exporters/tracing/datadog/#otlp-configuration) approach (which is the recommended method). This is identified by `otlp` in YAML configuration, and it is *not* impacted by this fix. +2. The ["Datadog native" configuration](https://www.apollographql.com/docs/router/configuration/telemetry/exporters/tracing/datadog/#datadog-native-configuration). This is identified by the use of a `datadog:` key in YAML configuration. + +This change fixes a bug in the latter approach that broke some Datadog experiences, such as the "Resources" section of the [Datadog APM Service Catalog](https://docs.datadoghq.com/service_catalog/) page. + +We now use static span names by default, with resource mappings providing additional context when requested, which enables the desired behavior which was not possible before. 
+ +_If for some reason you wish to maintain the existing behavior, you must either update your spans and resource mappings, or keep your spans and instead configure the router to use _dynamic span names_ and disable resource mapping._ + +Enabling resource mapping and fixed span names is configured by the `enable_span_mapping` and `fixed_span_names` options: + +```yaml +telemetry: + exporters: + tracing: + datadog: + enabled: true + # Enables resource mapping, previously disabled by default, but now enabled. + enable_span_mapping: true + # Enables fixed span names, defaults to true. + fixed_span_names: true + + instrumentation: + spans: + mode: spec_compliant +``` + +With `enable_span_mapping` set to `true` (now default), the following resource mappings are applied: + +| OpenTelemetry Span Name | Datadog Span Operation Name | +|-------------------------|-----------------------------| +| `request` | `http.route` | +| `router` | `http.route` | +| `supergraph` | `graphql.operation.name` | +| `query_planning` | `graphql.operation.name` | +| `subgraph` | `subgraph.name` | +| `subgraph_request` | `graphql.operation.name` | +| `http_request` | `http.route` | + +You can override the default resource mappings by specifying the `resource_mapping` configuration: + +```yaml +telemetry: + exporters: + tracing: + datadog: + enabled: true + resource_mapping: + # Use `my.span.attribute` as the resource name for the `router` span + router: "my.span.attribute" +``` + +To learn more, see the [Datadog trace exporter](https://www.apollographql.com/docs/router/configuration/telemetry/exporters/tracing/datadog) documentation. 
+ +By [@bnjjj](https://github.com/bnjjj) and [@bryncooke](https://github.com/bryncooke) in https://github.com/apollographql/router/pull/5386 + +## 📚 Documentation + +### Update documentation for `ignore_other_prefixes` ([PR #5592](https://github.com/apollographql/router/pull/5592)) + +Update [JWT authentication documentation](https://www.apollographql.com/docs/router/configuration/authn-jwt/) to clarify the behavior of the `ignore_other_prefixes` configuration option. + +By [@andrewmcgivery](https://github.com/andrewmcgivery) in https://github.com/apollographql/router/pull/5592 + + + # [1.50.0] - 2024-07-02 ## 🚀 Features @@ -347,7 +571,7 @@ By [@bnjjj](https://github.com/bnjjj) in https://github.com/apollographql/router ## đŸ§Ș Experimental -### Add experimental extended reference reporting configuration ([Issue #ROUTER-360](https://apollographql.atlassian.net/browse/ROUTER-360)) +### Add experimental extended reference reporting configuration ([PR #5331](https://github.com/apollographql/router/pull/5331)) Adds an experimental configuration to turn on extended references in Apollo usage reports, including references to input object fields and enum values. 
diff --git a/Cargo.lock b/Cargo.lock index 83b1bfec99..0884397df0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -180,9 +180,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.6.0" +version = "4.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1cf67dadb19d7c95e5a299e2dda24193b89d5d4f33a3b9800888ede9e19aa32" +checksum = "1988c02af8d2b718c05bc4aeb6a66395b7cdf32858c2c71131e5637a8c05a9ff" dependencies = [ "actix-codec", "actix-http", @@ -239,9 +239,9 @@ dependencies = [ [[package]] name = "actix-web-codegen" -version = "4.2.2" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1f50ebbb30eca122b188319a4398b3f7bb4a8cdf50ecfb73bfc6a3c3ce54f5" +checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" dependencies = [ "actix-router", "proc-macro2 1.0.76", @@ -388,23 +388,22 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "apollo-compiler" -version = "1.0.0-beta.17" +version = "1.0.0-beta.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a46c3a5e9a23a39089af6bada6fe0976369458434095bd9c26ce94c56f219842" +checksum = "16a61580d9ee85ec35b892efb1f3eec193c520fc957b612989dc823551e2639d" dependencies = [ "apollo-parser", "ariadne", - "indexmap 2.2.3", + "indexmap 2.2.6", "rowan", "serde", "serde_json_bytes", - "sptr", "thiserror", "triomphe", "uuid", @@ -422,12 +421,12 @@ dependencies = [ [[package]] name = "apollo-federation" -version = "1.50.0" +version = "1.51.0" dependencies = [ "apollo-compiler", "derive_more", "hex", - "indexmap 2.2.3", + "indexmap 2.2.6", "insta", "itertools 0.13.0", "lazy_static", @@ -468,7 +467,7 @@ dependencies = [ [[package]] name = 
"apollo-router" -version = "1.50.0" +version = "1.51.0" dependencies = [ "access-json", "anyhow", @@ -488,7 +487,7 @@ dependencies = [ "base64 0.21.7", "basic-toml", "bloomfilter", - "brotli 3.4.0", + "brotli 3.5.0", "buildstructor", "bytes", "bytesize", @@ -511,18 +510,18 @@ dependencies = [ "futures", "futures-test", "graphql_client", - "heck", + "heck 0.4.1", "hex", "hmac", "http 0.2.11", - "http-body", + "http-body 0.4.6", "http-serde", "humantime", "humantime-serde", "hyper", "hyper-rustls", "hyperlocal", - "indexmap 2.2.3", + "indexmap 2.2.6", "insta", "itertools 0.12.1", "jsonpath-rust", @@ -563,8 +562,8 @@ dependencies = [ "paste", "pin-project-lite", "prometheus", - "prost 0.12.3", - "prost-types 0.12.3", + "prost 0.12.6", + "prost-types 0.12.6", "proteus", "rand 0.8.5", "rand_core 0.6.4", @@ -578,7 +577,7 @@ dependencies = [ "rustls-native-certs", "rustls-pemfile", "schemars", - "semver 1.0.22", + "semver 1.0.23", "serde", "serde_derive_default", "serde_json", @@ -632,7 +631,7 @@ dependencies = [ [[package]] name = "apollo-router-benchmarks" -version = "1.50.0" +version = "1.51.0" dependencies = [ "apollo-parser", "apollo-router", @@ -648,7 +647,7 @@ dependencies = [ [[package]] name = "apollo-router-scaffold" -version = "1.50.0" +version = "1.51.0" dependencies = [ "anyhow", "cargo-scaffold", @@ -693,14 +692,14 @@ dependencies = [ [[package]] name = "apollo-smith" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d75bb8c2d542c5fa6b9d8bdff73edb003bba0272ea7b8d196bb4cd803083b3b" +checksum = "901bd689b4c67883d0fde26d7af952a4b2a50815c6b92c790d3fae8f6ea46cd3" dependencies = [ "apollo-compiler", "apollo-parser", "arbitrary", - "indexmap 2.2.3", + "indexmap 2.2.6", "once_cell", "thiserror", ] @@ -716,9 +715,9 @@ dependencies = [ [[package]] name = "arc-swap" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "ariadne" @@ -834,7 +833,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" dependencies = [ - "brotli 3.4.0", + "brotli 3.5.0", "flate2", "futures-core", "memchr", @@ -1158,9 +1157,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aws-config" -version = "1.1.6" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3182c19847238b50b62ae0383a6dbfc14514e552eb5e307e1ea83ccf5840b8a6" +checksum = "2368fb843e9eec932f7789d64d0e05850f4a79067188c657e572f1f5a7589df0" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1183,14 +1182,15 @@ dependencies = [ "time", "tokio", "tracing", + "url", "zeroize", ] [[package]] name = "aws-credential-types" -version = "1.1.6" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5635d8707f265c773282a22abe1ecd4fbe96a8eb2f0f14c0796f8016f11a41a" +checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1200,9 +1200,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.1.6" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f82b9ae2adfd9d6582440d0eeb394c07f74d21b4c0cc72bdb73735c9e1a9c0e" +checksum = "9a4a5e448145999d7de17bf44a886900ecb834953408dae8aaf90465ce91c1dd" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -1214,7 +1214,7 @@ dependencies = [ "bytes", "fastrand 2.0.1", "http 0.2.11", - "http-body", + "http-body 0.4.6", "percent-encoding", "pin-project-lite", "tracing", @@ -1223,9 +1223,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.14.0" 
+version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca7e8097448832fcd22faf6bb227e97d76b40e354509d1307653a885811c7151" +checksum = "b8aee358b755b2738b3ffb8a5b54ee991b28c8a07483a0ff7d49a58305cc2609" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1245,9 +1245,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.14.0" +version = "1.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75073590e23d63044606771afae309fada8eb10ded54a1ce4598347221d3fef" +checksum = "1d5ce026f0ae73e06b20be5932150dd0e9b063417fd7c3acf5ca97018b9cbd64" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1267,9 +1267,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.14.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650e4aaae41547151dea4d8142f7ffcc8ab8ba76d5dccc8933936ef2102c3356" +checksum = "c820248cb02e4ea83630ad2e43d0721cdbccedba5ac902cd0b6fb84d7271f205" dependencies = [ "aws-credential-types", "aws-runtime", @@ -1290,9 +1290,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.6" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "404c64a104188ac70dd1684718765cb5559795458e446480e41984e68e57d888" +checksum = "31eed8d45759b2c5fe7fd304dd70739060e9e0de509209036eabea14d0720cce" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -1313,9 +1313,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.1.6" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ec441341e019c441aa78472ed6d206cfe198026c495277a95ac5bebda520742" +checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c" dependencies = [ "futures-util", "pin-project-lite", @@ -1324,9 +1324,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.5" +version = "0.60.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "85d6a0619f7b67183067fa3b558f94f90753da2df8c04aeb7336d673f804b0b8" +checksum = "d9cd0ae3d97daa0a2bf377a4d8e8e1362cae590c4a1aad0d40058ebca18eb91e" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", @@ -1334,7 +1334,7 @@ dependencies = [ "bytes-utils", "futures-core", "http 0.2.11", - "http-body", + "http-body 0.4.6", "once_cell", "percent-encoding", "pin-project-lite", @@ -1344,18 +1344,18 @@ dependencies = [ [[package]] name = "aws-smithy-json" -version = "0.60.5" +version = "0.60.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1c1b5186b6f5c579bf0de1bcca9dd3d946d6d51361ea1d18131f6a0b64e13ae" +checksum = "4683df9469ef09468dad3473d129960119a0d3593617542b7d52086c8486f2d6" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-query" -version = "0.60.5" +version = "0.60.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c0a2ce65882e788d2cf83ff28b9b16918de0460c47bf66c5da4f6c17b4c9694" +checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" dependencies = [ "aws-smithy-types", "urlencoding", @@ -1363,9 +1363,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.1.6" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b36f1f98c8d7b6256b86d4a3c8c4abb120670267baa9712a485ba477eaac9e9" +checksum = "3df4217d39fe940066174e6238310167bf466bfbebf3be0661e53cacccde6313" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1375,7 +1375,9 @@ dependencies = [ "fastrand 2.0.1", "h2", "http 0.2.11", - "http-body", + "http-body 0.4.6", + "http-body 1.0.0", + "httparse", "hyper", "hyper-rustls", "once_cell", @@ -1388,9 +1390,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.1.6" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"180898ed701a773fb3fadbd94b9e9559125cf88eeb1815ab99e35d4f5f34f7fb" +checksum = "30819352ed0a04ecf6a2f3477e344d2d1ba33d43e0f09ad9047c12e0d923616f" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1405,16 +1407,18 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.1.6" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897f1db4020ad91f2c2421945ec49b7e3eb81cc3fea99e8b5dd5be721e697fed" +checksum = "cfe321a6b21f5d8eabd0ade9c55d3d0335f3c3157fc2b3e87f05f34b539e4df5" dependencies = [ "base64-simd", "bytes", "bytes-utils", - "futures-core", "http 0.2.11", - "http-body", + "http 1.0.0", + "http-body 0.4.6", + "http-body 1.0.0", + "http-body-util", "itoa", "num-integer", "pin-project-lite", @@ -1426,24 +1430,23 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.5" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16f94c9673412b7a72e3c3efec8de89081c320bf59ea12eed34c417a62ad600" +checksum = "d123fbc2a4adc3c301652ba8e149bf4bc1d1725affb9784eb20c953ace06bf55" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.1.6" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fbb5d48aae496f628e7aa2e41991dd4074f606d9e3ade1ce1059f293d40f9a2" +checksum = "2009a9733865d0ebf428a314440bbe357cc10d0c16d86a8e15d32e9b47c1e80e" dependencies = [ "aws-credential-types", "aws-smithy-async", "aws-smithy-runtime-api", "aws-smithy-types", - "http 0.2.11", "rustc_version 0.4.0", "tracing", ] @@ -1462,7 +1465,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.11", - "http-body", + "http-body 0.4.6", "hyper", "itoa", "matchit", @@ -1494,7 +1497,7 @@ dependencies = [ "bytes", "futures-util", "http 0.2.11", - "http-body", + "http-body 0.4.6", "mime", "rustversion", "tower-layer", @@ -1558,9 +1561,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" 
[[package]] name = "basic-toml" -version = "0.1.4" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bfc506e7a2370ec239e1d072507b2a80c833083699d3c6fa176fbb4de8448c6" +checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8" dependencies = [ "serde", ] @@ -1571,7 +1574,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "bit-vec", + "bit-vec 0.6.3", ] [[package]] @@ -1580,6 +1583,12 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +[[package]] +name = "bit-vec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c54ff287cfc0a34f38a6b832ea1bd8e448a330b3e40a50859e6488bee07f22" + [[package]] name = "bitflags" version = "1.3.2" @@ -1619,20 +1628,20 @@ dependencies = [ [[package]] name = "bloomfilter" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b64d54e47a7f4fd723f082e8f11429f3df6ba8adaeca355a76556f9f0602bbcf" +checksum = "bc0bdbcf2078e0ba8a74e1fe0cf36f54054a04485759b61dfd60b174658e9607" dependencies = [ - "bit-vec", + "bit-vec 0.7.0", "getrandom 0.2.10", "siphasher", ] [[package]] name = "brotli" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" +checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -1810,7 +1819,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" dependencies = [ "camino", "cargo-platform", - "semver 1.0.22", + "semver 1.0.23", "serde", "serde_json", ] @@ -1883,9 +1892,9 @@ 
dependencies = [ [[package]] name = "clap" -version = "4.5.1" +version = "4.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da" +checksum = "84b3edb18336f4df585bc9aa31dd99c036dfa5dc5e9a2939a722a188f3a8970d" dependencies = [ "clap_builder", "clap_derive", @@ -1893,9 +1902,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.1" +version = "4.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb" +checksum = "c1c09dd5ada6c6c78075d6fd0da3f90d8080651e2d6cc8eb2f1aaa4034ced708" dependencies = [ "anstream", "anstyle", @@ -1905,11 +1914,11 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.0" +version = "4.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" +checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2 1.0.76", "quote 1.0.35", "syn 2.0.48", @@ -1998,8 +2007,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" dependencies = [ "futures-core", - "prost 0.12.3", - "prost-types 0.12.3", + "prost 0.12.6", + "prost-types 0.12.6", "tonic 0.10.2", "tracing-core", ] @@ -2016,7 +2025,7 @@ dependencies = [ "futures-task", "hdrhistogram", "humantime", - "prost-types 0.12.3", + "prost-types 0.12.6", "serde", "serde_json", "thread_local", @@ -2363,7 +2372,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if 1.0.0", - "hashbrown 0.14.1", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -2783,7 +2792,7 @@ version = "0.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2 1.0.76", "quote 1.0.35", "syn 2.0.48", @@ -3006,9 +3015,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "libz-ng-sys", @@ -3128,7 +3137,7 @@ dependencies = [ "rustls", "rustls-native-certs", "rustls-webpki", - "semver 1.0.22", + "semver 1.0.23", "socket2 0.5.5", "tokio", "tokio-rustls", @@ -3436,7 +3445,7 @@ checksum = "a40f793251171991c4eb75bd84bc640afa8b68ff6907bc89d3b712a22f700506" dependencies = [ "graphql-introspection-query", "graphql-parser", - "heck", + "heck 0.4.1", "lazy_static", "proc-macro2 1.0.76", "quote 1.0.35", @@ -3479,7 +3488,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.11", - "indexmap 2.2.3", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -3528,9 +3537,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.1" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", @@ -3580,6 +3589,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hello-world" version = "0.1.0" @@ -3681,6 +3696,29 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.0.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "pin-project-lite", +] + [[package]] name = "http-range-header" version = "0.3.1" @@ -3767,7 +3805,7 @@ dependencies = [ "futures-util", "h2", "http 0.2.11", - "http-body", + "http-body 0.4.6", "httparse", "httpdate", "itoa", @@ -3865,12 +3903,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.3" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.1", + "hashbrown 0.14.5", "serde", ] @@ -3915,9 +3953,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.38.0" +version = "1.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eab73f58e59ca6526037208f0e98851159ec1633cf17b6cd2e1f2c3fd5d53cc" +checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5" dependencies = [ "console", "lazy_static", @@ -4011,15 +4049,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" -dependencies = 
[ - "either", -] - [[package]] name = "itertools" version = "0.12.1" @@ -4116,9 +4145,9 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "9.2.0" +version = "9.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7ea04a7c5c055c175f189b6dc6ba036fd62306b58c66c9f6389036c503a3f4" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" dependencies = [ "base64 0.21.7", "js-sys", @@ -4201,15 +4230,15 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.153" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libfuzzer-sys" @@ -4258,9 +4287,9 @@ dependencies = [ [[package]] name = "libtest-mimic" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fefdf21230d6143476a28adbee3d930e2b68a3d56443c777cae3fe9340eebff9" +checksum = "cc0bda45ed5b3a2904262c1bb91e526127aa70e7ef3758aba2ef93cf896b9b58" dependencies = [ "clap", "escape8259", @@ -4301,18 +4330,18 @@ dependencies = [ [[package]] name = "linkme" -version = "0.3.23" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a78816ac097580aa7fd9d2e9cc7395dda34367c07267a8657516d4ad5e2e3d3" +checksum = "ccb76662d78edc9f9bf56360d6919bdacc8b7761227727e5082f128eeb90bbf5" dependencies = [ "linkme-impl", ] [[package]] name = "linkme-impl" -version = "0.3.23" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ee9023a564f8bf7fe3da285a50c3e70de0df3e2bf277ff7c4e76d66008ef93b0" +checksum = "f8dccda732e04fa3baf2e17cf835bfe2601c7c2edafd64417c627dabae3a8cda" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -4370,11 +4399,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2c024b41519440580066ba82aab04092b333e09066a5eb86c7c4890df31f22" +checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" dependencies = [ - "hashbrown 0.14.1", + "hashbrown 0.14.5", ] [[package]] @@ -4442,9 +4471,9 @@ checksum = "8878cd8d1b3c8c8ae4b2ba0a36652b7cf192f618a599a7fbdfa25cffd4ea72dd" [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memoffset" @@ -4475,12 +4504,12 @@ dependencies = [ [[package]] name = "memory-stats" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34f79cf9964c5c9545493acda1263f1912f8d2c56c8a2ffee2606cb960acaacc" +checksum = "c73f5c649995a115e1a0220b35e4df0a1294500477f97a91d0660fb5abeb574a" dependencies = [ "libc", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -4999,7 +5028,7 @@ dependencies = [ "hex", "opentelemetry 0.22.0", "opentelemetry_sdk 0.22.1", - "prost 0.12.3", + "prost 0.12.6", "serde", "tonic 0.11.0", ] @@ -5172,9 +5201,9 @@ checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" 
dependencies = [ "lock_api", "parking_lot_core", @@ -5195,9 +5224,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pem" @@ -5275,7 +5304,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.2.3", + "indexmap 2.2.6", "serde", "serde_derive", ] @@ -5302,9 +5331,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -5545,12 +5574,12 @@ dependencies = [ [[package]] name = "prost" -version = "0.12.3" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", - "prost-derive 0.12.3", + "prost-derive 0.12.6", ] [[package]] @@ -5560,7 +5589,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", - "heck", + "heck 0.4.1", "itertools 0.10.5", "lazy_static", "log", @@ -5590,12 +5619,12 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.3" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +checksum = 
"81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.11.0", + "itertools 0.12.1", "proc-macro2 1.0.76", "quote 1.0.35", "syn 2.0.48", @@ -5612,11 +5641,11 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.12.3" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" dependencies = [ - "prost 0.12.3", + "prost 0.12.6", ] [[package]] @@ -5837,9 +5866,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.4" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", @@ -5887,9 +5916,9 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "reqwest" -version = "0.11.24" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "async-compression", "base64 0.21.7", @@ -5899,7 +5928,7 @@ dependencies = [ "futures-util", "h2", "http 0.2.11", - "http-body", + "http-body 0.4.6", "hyper", "hyper-rustls", "ipnet", @@ -6037,9 +6066,9 @@ dependencies = [ [[package]] name = "rhai_codegen" -version = "2.0.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9db7f8dc4c9d48183a17ce550574c42995252b82d267eaca3fcd1b979159856c" +checksum = "a5a11a05ee1ce44058fa3d5961d05194fdbe3ad6b40f904af764d81b86450e6b" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -6118,7 +6147,7 @@ dependencies = [ 
"apollo-compiler", "apollo-parser", "apollo-router", - "apollo-smith 0.7.0", + "apollo-smith 0.8.0", "async-trait", "env_logger 0.10.2", "http 0.2.11", @@ -6162,9 +6191,9 @@ dependencies = [ [[package]] name = "rust-embed" -version = "8.2.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82c0bbc10308ed323529fd3c1dce8badda635aa319a5ff0e6466f33b8101e3f" +checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a" dependencies = [ "rust-embed-impl", "rust-embed-utils", @@ -6173,9 +6202,9 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "8.2.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6227c01b1783cdfee1bcf844eb44594cd16ec71c35305bf1c9fb5aade2735e16" +checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -6186,9 +6215,9 @@ dependencies = [ [[package]] name = "rust-embed-utils" -version = "8.2.0" +version = "8.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb0a25bfbb2d4b4402179c2cf030387d9990857ce08a32592c6238db9fa8665" +checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32" dependencies = [ "globset", "sha2", @@ -6222,7 +6251,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.22", + "semver 1.0.23", ] [[package]] @@ -6254,9 +6283,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.11" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring 0.17.5", @@ -6316,6 +6345,15 @@ dependencies = [ "winapi-util", ] +[[package]] 
+name = "scc" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ad2bbb0ae5100a07b7a6f2ed7ab5fd0045551a4c507989b7a620046ea3efdc" +dependencies = [ + "sdd", +] + [[package]] name = "schannel" version = "0.1.22" @@ -6375,6 +6413,12 @@ dependencies = [ "untrusted 0.7.1", ] +[[package]] +name = "sdd" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84345e4c9bd703274a082fb80caaa99b7612be48dfaa1dd9266577ec412309d" + [[package]] name = "sec1" version = "0.7.3" @@ -6423,9 +6467,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" dependencies = [ "serde", ] @@ -6438,9 +6482,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.199" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9f6e76df036c77cd94996771fb40db98187f096dd0b9af39c6c6e452ba966a" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] @@ -6456,9 +6500,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.199" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11bd257a6541e141e42ca6d24ae26f7714887b47e89aa739099104c7e4d3b7fc" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -6490,11 +6534,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.116" +version = "1.0.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" 
+checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -6508,7 +6552,7 @@ checksum = "0ecd92a088fb2500b2f146c9ddc5da9950bb7264d3f00932cd2a6fb369c26c46" dependencies = [ "ahash", "bytes", - "indexmap 2.2.3", + "indexmap 2.2.6", "jsonpath-rust", "regex", "serde", @@ -6587,23 +6631,23 @@ dependencies = [ [[package]] name = "serial_test" -version = "3.0.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953ad9342b3aaca7cb43c45c097dd008d4907070394bd0751a0aa8817e5a018d" +checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" dependencies = [ - "dashmap", "futures", - "lazy_static", "log", + "once_cell", "parking_lot", + "scc", "serial_test_derive", ] [[package]] name = "serial_test_derive" -version = "3.0.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b93fb4adc70021ac1b47f7d45e8cc4169baaa7ea58483bc5b721d19a26202212" +checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -6695,9 +6739,9 @@ dependencies = [ [[package]] name = "siphasher" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "skeptic" @@ -6802,12 +6846,6 @@ dependencies = [ "der", ] -[[package]] -name = "sptr" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" - [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -6863,7 +6901,7 @@ version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2 1.0.76", "quote 1.0.35", "rustversion", @@ -6876,7 +6914,7 @@ version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2 1.0.76", "quote 1.0.35", "rustversion", @@ -6979,9 +7017,9 @@ checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" [[package]] name = "tempfile" -version = "3.10.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if 1.0.0", "fastrand 2.0.1", @@ -7016,9 +7054,9 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "test-log" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" +checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" dependencies = [ "test-log-macros", "tracing-subscriber", @@ -7026,9 +7064,9 @@ dependencies = [ [[package]] name = "test-log-macros" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" +checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -7084,18 +7122,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.57" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = 
"c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -7175,9 +7213,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", @@ -7198,9 +7236,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -7242,9 +7280,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes", @@ -7272,9 +7310,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = 
"5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -7293,9 +7331,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -7333,9 +7371,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", @@ -7343,7 +7381,6 @@ dependencies = [ "pin-project-lite", "slab", "tokio", - "tracing", ] [[package]] @@ -7373,7 +7410,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.6", "toml_datetime", "winnow", ] @@ -7384,7 +7421,7 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", @@ -7407,7 +7444,7 @@ dependencies = [ "futures-util", "h2", "http 0.2.11", - "http-body", + "http-body 0.4.6", "hyper", "hyper-timeout", "percent-encoding", @@ -7437,12 +7474,12 @@ dependencies = [ "bytes", "h2", "http 0.2.11", - "http-body", + "http-body 0.4.6", "hyper", "hyper-timeout", "percent-encoding", "pin-project", - "prost 0.12.3", + "prost 0.12.6", "tokio", "tokio-stream", "tower", @@ -7461,10 +7498,10 @@ dependencies = [ "base64 0.21.7", 
"bytes", "http 0.2.11", - "http-body", + "http-body 0.4.6", "percent-encoding", "pin-project", - "prost 0.12.3", + "prost 0.12.6", "tokio", "tokio-stream", "tower-layer", @@ -7518,7 +7555,7 @@ dependencies = [ "futures-core", "futures-util", "http 0.2.11", - "http-body", + "http-body 0.4.6", "http-range-header", "pin-project-lite", "tokio", @@ -7556,11 +7593,10 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if 1.0.0", "log", "pin-project-lite", "tracing-attributes", @@ -7569,9 +7605,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -7580,9 +7616,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -7692,9 +7728,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.9" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f" +checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" dependencies = [ "serde", "stable_deref_trait", @@ -7960,9 +7996,9 @@ checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna 0.5.0", @@ -8003,9 +8039,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.7.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" dependencies = [ "getrandom 0.2.10", "serde", @@ -8541,29 +8577,28 @@ checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" [[package]] name = "zstd" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" +checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.0.0" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" +checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.8+zstd.1.5.5" +version = "2.0.11+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +checksum = "75652c55c0b6f3e6f12eb786fe1bc960396bf05a1eb3bf1f3691c3610ac2e6d4" dependencies = [ "cc", - "libc", "pkg-config", ] diff --git a/Cargo.toml b/Cargo.toml index 
8b35b01119..e3ab292476 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -49,7 +49,7 @@ debug = 1 # Dependencies used in more than one place are specified here in order to keep versions in sync: # https://doc.rust-lang.org/cargo/reference/workspaces.html#the-dependencies-table [workspace.dependencies] -apollo-compiler = "=1.0.0-beta.17" +apollo-compiler = "=1.0.0-beta.18" apollo-parser = "0.7.6" apollo-smith = { version = "0.5.0", features = ["parser-impl"] } async-trait = "0.1.77" @@ -73,6 +73,6 @@ serde_json = { version = "1.0.114", features = [ ] } serde_json_bytes = { version = "0.2.4", features = ["preserve_order"] } sha1 = "0.10.6" -tempfile = "3.10.0" +tempfile = "3.10.1" tokio = { version = "1.36.0", features = ["full"] } tower = { version = "0.4.13", features = ["full"] } diff --git a/apollo-federation/Cargo.toml b/apollo-federation/Cargo.toml index 353a837eb4..49868b2989 100644 --- a/apollo-federation/Cargo.toml +++ b/apollo-federation/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-federation" -version = "1.50.0" +version = "1.51.0" authors = ["The Apollo GraphQL Contributors"] edition = "2021" description = "Apollo Federation" @@ -15,7 +15,7 @@ time = { version = "0.3.34", default-features = false, features = [ "local-offset", ] } derive_more = "0.99.17" -indexmap = "2.2.3" +indexmap = "2.2.6" itertools = "0.13.0" lazy_static = "1.4.0" multimap = "0.10.0" diff --git a/apollo-federation/src/compat.rs b/apollo-federation/src/compat.rs index df899de5d8..a4e7242629 100644 --- a/apollo-federation/src/compat.rs +++ b/apollo-federation/src/compat.rs @@ -11,8 +11,8 @@ use apollo_compiler::ast::Value; use apollo_compiler::schema::Directive; use apollo_compiler::schema::ExtendedType; use apollo_compiler::schema::InputValueDefinition; -use apollo_compiler::schema::Name; use apollo_compiler::schema::Type; +use apollo_compiler::Name; use apollo_compiler::Node; use apollo_compiler::Schema; use indexmap::IndexMap; diff --git a/apollo-federation/src/error/mod.rs 
b/apollo-federation/src/error/mod.rs index c89daa5ef6..edbcd43951 100644 --- a/apollo-federation/src/error/mod.rs +++ b/apollo-federation/src/error/mod.rs @@ -4,9 +4,9 @@ use std::fmt::Display; use std::fmt::Formatter; use std::fmt::Write; -use apollo_compiler::ast::InvalidNameError; use apollo_compiler::validation::DiagnosticList; use apollo_compiler::validation::WithErrors; +use apollo_compiler::InvalidNameError; use lazy_static::lazy_static; use crate::subgraph::spec::FederationSpecError; @@ -35,6 +35,9 @@ pub enum SingleFederationError { "An internal error has occurred, please report this bug to Apollo.\n\nDetails: {message}" )] Internal { message: String }, + #[error("An internal error has occurred, please report this bug to Apollo. Details: {0}")] + #[allow(private_interfaces)] // users should not inspect this. + InternalRebaseError(#[from] crate::operation::RebaseError), #[error("{message}")] InvalidGraphQL { message: String }, #[error("{message}")] @@ -199,6 +202,7 @@ impl SingleFederationError { pub fn code(&self) -> ErrorCode { match self { SingleFederationError::Internal { .. } => ErrorCode::Internal, + SingleFederationError::InternalRebaseError { .. } => ErrorCode::Internal, SingleFederationError::InvalidGraphQL { .. } => ErrorCode::InvalidGraphQL, SingleFederationError::DirectiveDefinitionInvalid { .. 
} => { ErrorCode::DirectiveDefinitionInvalid @@ -378,7 +382,7 @@ impl SingleFederationError { impl From for SingleFederationError { fn from(err: InvalidNameError) -> Self { SingleFederationError::InvalidGraphQL { - message: format!("Invalid GraphQL name \"{}\"", err.0), + message: format!("Invalid GraphQL name \"{}\"", err.name), } } } @@ -433,7 +437,7 @@ impl Display for MultipleFederationErrors { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!(f, "The following errors occurred:")?; for error in &self.errors { - write!(f, "\n\n - ")?; + write!(f, "\n - ")?; for c in error.to_string().chars() { if c == '\n' { write!(f, "\n ")?; diff --git a/apollo-federation/src/lib.rs b/apollo-federation/src/lib.rs index cbf2694840..4a92c92f4a 100644 --- a/apollo-federation/src/lib.rs +++ b/apollo-federation/src/lib.rs @@ -28,8 +28,8 @@ pub mod schema; pub mod sources; pub mod subgraph; +use apollo_compiler::ast::NamedType; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; use apollo_compiler::Schema; use link::join_spec_definition::JOIN_VERSIONS; use schema::FederationSchema; @@ -140,6 +140,6 @@ const _: () = { }; /// Returns if the type of the node is a scalar or enum. 
-pub(crate) fn is_leaf_type(schema: &Schema, ty: &NodeStr) -> bool { +pub(crate) fn is_leaf_type(schema: &Schema, ty: &NamedType) -> bool { schema.get_scalar(ty).is_some() || schema.get_enum(ty).is_some() } diff --git a/apollo-federation/src/link/argument.rs b/apollo-federation/src/link/argument.rs index cbaa936d39..662cd0a08d 100644 --- a/apollo-federation/src/link/argument.rs +++ b/apollo-federation/src/link/argument.rs @@ -2,9 +2,8 @@ use std::ops::Deref; use apollo_compiler::ast::Value; use apollo_compiler::schema::Directive; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use crate::error::FederationError; use crate::error::SingleFederationError; @@ -45,13 +44,13 @@ pub(crate) fn directive_required_enum_argument( }) } -pub(crate) fn directive_optional_string_argument( - application: &Node, +pub(crate) fn directive_optional_string_argument<'doc>( + application: &'doc Node, name: &Name, -) -> Result, FederationError> { +) -> Result, FederationError> { match application.argument_by_name(name) { Some(value) => match value.deref() { - Value::String(name) => Ok(Some(name.clone())), + Value::String(name) => Ok(Some(name)), Value::Null => Ok(None), _ => Err(SingleFederationError::Internal { message: format!( @@ -65,10 +64,10 @@ pub(crate) fn directive_optional_string_argument( } } -pub(crate) fn directive_required_string_argument( - application: &Node, +pub(crate) fn directive_required_string_argument<'doc>( + application: &'doc Node, name: &Name, -) -> Result { +) -> Result<&'doc str, FederationError> { directive_optional_string_argument(application, name)?.ok_or_else(|| { SingleFederationError::Internal { message: format!( @@ -80,38 +79,6 @@ pub(crate) fn directive_required_string_argument( }) } -pub(crate) fn directive_optional_fieldset_argument( - application: &Node, - name: &Name, -) -> Result, FederationError> { - match application.argument_by_name(name) { - Some(value) => match 
value.deref() { - Value::String(name) => Ok(Some(name.clone())), - Value::Null => Ok(None), - _ => Err(SingleFederationError::Internal { - message: format!("Invalid value for argument \"{}\": must be a string.", name), - } - .into()), - }, - None => Ok(None), - } -} - -pub(crate) fn directive_required_fieldset_argument( - application: &Node, - name: &Name, -) -> Result { - directive_optional_fieldset_argument(application, name)?.ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Required argument \"{}\" of directive \"@{}\" was not present.", - name, application.name - ), - } - .into() - }) -} - pub(crate) fn directive_optional_boolean_argument( application: &Node, name: &Name, diff --git a/apollo-federation/src/link/federation_spec_definition.rs b/apollo-federation/src/link/federation_spec_definition.rs index 5a4738a19d..b62fb7d762 100644 --- a/apollo-federation/src/link/federation_spec_definition.rs +++ b/apollo-federation/src/link/federation_spec_definition.rs @@ -3,17 +3,16 @@ use apollo_compiler::name; use apollo_compiler::schema::Directive; use apollo_compiler::schema::DirectiveDefinition; use apollo_compiler::schema::ExtendedType; -use apollo_compiler::schema::Name; use apollo_compiler::schema::UnionType; use apollo_compiler::schema::Value; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use lazy_static::lazy_static; use crate::error::FederationError; use crate::error::SingleFederationError; use crate::link::argument::directive_optional_boolean_argument; -use crate::link::argument::directive_required_fieldset_argument; +use crate::link::argument::directive_required_string_argument; use crate::link::spec::Identity; use crate::link::spec::Url; use crate::link::spec::Version; @@ -35,18 +34,19 @@ pub(crate) const FEDERATION_FIELDS_ARGUMENT_NAME: Name = name!("fields"); pub(crate) const FEDERATION_RESOLVABLE_ARGUMENT_NAME: Name = name!("resolvable"); pub(crate) const FEDERATION_REASON_ARGUMENT_NAME: 
Name = name!("reason"); pub(crate) const FEDERATION_FROM_ARGUMENT_NAME: Name = name!("from"); +pub(crate) const FEDERATION_OVERRIDE_LABEL_ARGUMENT_NAME: Name = name!("label"); -pub(crate) struct KeyDirectiveArguments { - pub(crate) fields: NodeStr, +pub(crate) struct KeyDirectiveArguments<'doc> { + pub(crate) fields: &'doc str, pub(crate) resolvable: bool, } -pub(crate) struct RequiresDirectiveArguments { - pub(crate) fields: NodeStr, +pub(crate) struct RequiresDirectiveArguments<'doc> { + pub(crate) fields: &'doc str, } -pub(crate) struct ProvidesDirectiveArguments { - pub(crate) fields: NodeStr, +pub(crate) struct ProvidesDirectiveArguments<'doc> { + pub(crate) fields: &'doc str, } #[derive(Debug)] @@ -108,7 +108,7 @@ impl FederationSpecDefinition { pub(crate) fn key_directive( &self, schema: &FederationSchema, - fields: NodeStr, + fields: &str, resolvable: bool, ) -> Result { let name_in_schema = self @@ -121,7 +121,7 @@ impl FederationSpecDefinition { arguments: vec![ Node::new(Argument { name: FEDERATION_FIELDS_ARGUMENT_NAME, - value: Node::new(Value::String(fields)), + value: Node::new(Value::String(fields.to_owned())), }), Node::new(Argument { name: FEDERATION_RESOLVABLE_ARGUMENT_NAME, @@ -131,12 +131,12 @@ impl FederationSpecDefinition { }) } - pub(crate) fn key_directive_arguments( + pub(crate) fn key_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(KeyDirectiveArguments { - fields: directive_required_fieldset_argument( + fields: directive_required_string_argument( application, &FEDERATION_FIELDS_ARGUMENT_NAME, )?, @@ -219,7 +219,7 @@ impl FederationSpecDefinition { pub(crate) fn external_directive( &self, schema: &FederationSchema, - reason: Option, + reason: Option, ) -> Result { let name_in_schema = self .directive_name_in_schema(schema, &FEDERATION_EXTERNAL_DIRECTIVE_NAME_IN_SPEC)? 
@@ -257,7 +257,7 @@ impl FederationSpecDefinition { pub(crate) fn requires_directive( &self, schema: &FederationSchema, - fields: NodeStr, + fields: String, ) -> Result { let name_in_schema = self .directive_name_in_schema(schema, &FEDERATION_REQUIRES_DIRECTIVE_NAME_IN_SPEC)? @@ -273,12 +273,12 @@ impl FederationSpecDefinition { }) } - pub(crate) fn requires_directive_arguments( + pub(crate) fn requires_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(RequiresDirectiveArguments { - fields: directive_required_fieldset_argument( + fields: directive_required_string_argument( application, &FEDERATION_FIELDS_ARGUMENT_NAME, )?, @@ -303,7 +303,7 @@ impl FederationSpecDefinition { pub(crate) fn provides_directive( &self, schema: &FederationSchema, - fields: NodeStr, + fields: String, ) -> Result { let name_in_schema = self .directive_name_in_schema(schema, &FEDERATION_PROVIDES_DIRECTIVE_NAME_IN_SPEC)? @@ -319,12 +319,12 @@ impl FederationSpecDefinition { }) } - pub(crate) fn provides_directive_arguments( + pub(crate) fn provides_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(ProvidesDirectiveArguments { - fields: directive_required_fieldset_argument( + fields: directive_required_string_argument( application, &FEDERATION_FIELDS_ARGUMENT_NAME, )?, @@ -362,19 +362,29 @@ impl FederationSpecDefinition { pub(crate) fn override_directive( &self, schema: &FederationSchema, - from: NodeStr, + from: String, + label: &Option<&str>, ) -> Result { let name_in_schema = self .directive_name_in_schema(schema, &FEDERATION_OVERRIDE_DIRECTIVE_NAME_IN_SPEC)? 
.ok_or_else(|| SingleFederationError::Internal { message: "Unexpectedly could not find federation spec in schema".to_owned(), })?; + + let mut arguments = vec![Node::new(Argument { + name: FEDERATION_FROM_ARGUMENT_NAME, + value: Node::new(Value::String(from)), + })]; + + if let Some(label) = label { + arguments.push(Node::new(Argument { + name: FEDERATION_OVERRIDE_LABEL_ARGUMENT_NAME, + value: Node::new(Value::String(label.to_string())), + })); + } Ok(Directive { name: name_in_schema, - arguments: vec![Node::new(Argument { - name: FEDERATION_FROM_ARGUMENT_NAME, - value: Node::new(Value::String(from)), - })], + arguments, }) } } diff --git a/apollo-federation/src/link/graphql_definition.rs b/apollo-federation/src/link/graphql_definition.rs index 056747ae52..260bec67b3 100644 --- a/apollo-federation/src/link/graphql_definition.rs +++ b/apollo-federation/src/link/graphql_definition.rs @@ -2,10 +2,9 @@ use std::fmt::Display; use apollo_compiler::ast::Value; use apollo_compiler::executable::Directive; -use apollo_compiler::executable::Name; use apollo_compiler::name; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use crate::error::FederationError; use crate::link::argument::directive_optional_string_argument; @@ -13,21 +12,16 @@ use crate::link::argument::directive_optional_variable_boolean_argument; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct DeferDirectiveArguments { - label: Option, - if_: Option, -} - -impl DeferDirectiveArguments { - pub(crate) fn label(&self) -> Option<&NodeStr> { - self.label.as_ref() - } + pub(crate) label: Option, + pub(crate) if_: Option, } pub(crate) fn defer_directive_arguments( application: &Node, ) -> Result { Ok(DeferDirectiveArguments { - label: directive_optional_string_argument(application, &name!("label"))?, + label: directive_optional_string_argument(application, &name!("label"))? 
+ .map(|s| s.to_owned()), if_: directive_optional_variable_boolean_argument(application, &name!("if"))?, }) } diff --git a/apollo-federation/src/link/inaccessible_spec_definition.rs b/apollo-federation/src/link/inaccessible_spec_definition.rs index 6d3013facf..28887af686 100644 --- a/apollo-federation/src/link/inaccessible_spec_definition.rs +++ b/apollo-federation/src/link/inaccessible_spec_definition.rs @@ -9,8 +9,8 @@ use apollo_compiler::schema::DirectiveLocation; use apollo_compiler::schema::ExtendedType; use apollo_compiler::schema::FieldDefinition; use apollo_compiler::schema::InputValueDefinition; -use apollo_compiler::schema::Name; use apollo_compiler::schema::Value; +use apollo_compiler::Name; use apollo_compiler::Node; use indexmap::IndexMap; use indexmap::IndexSet; @@ -294,13 +294,12 @@ fn validate_inaccessible_in_default_value( // expected. (Value::Enum(_) | Value::String(_), ExtendedType::Enum(type_)) => { let value = match default_value { - Value::Enum(name) => name.clone(), - // It's no problem if this name is invalid. - Value::String(node_str) => Name::new_unchecked(node_str.clone()), + Value::Enum(name) => name.as_str(), + Value::String(s) => s, // Guaranteed to be enum or string by parent match branch. 
_ => unreachable!(), }; - let Some(enum_value) = type_.values.get(&value) else { + let Some(enum_value) = type_.values.get(value) else { return Ok(()); }; let enum_value_position = EnumValueDefinitionPosition { diff --git a/apollo-federation/src/link/join_spec_definition.rs b/apollo-federation/src/link/join_spec_definition.rs index 792a5f3031..1328cbdcb7 100644 --- a/apollo-federation/src/link/join_spec_definition.rs +++ b/apollo-federation/src/link/join_spec_definition.rs @@ -3,16 +3,14 @@ use apollo_compiler::schema::Directive; use apollo_compiler::schema::DirectiveDefinition; use apollo_compiler::schema::EnumType; use apollo_compiler::schema::ExtendedType; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use lazy_static::lazy_static; use crate::error::FederationError; use crate::error::SingleFederationError; use crate::link::argument::directive_optional_boolean_argument; use crate::link::argument::directive_optional_enum_argument; -use crate::link::argument::directive_optional_fieldset_argument; use crate::link::argument::directive_optional_string_argument; use crate::link::argument::directive_required_enum_argument; use crate::link::argument::directive_required_string_argument; @@ -43,41 +41,43 @@ pub(crate) const JOIN_PROVIDES_ARGUMENT_NAME: Name = name!("provides"); pub(crate) const JOIN_TYPE_ARGUMENT_NAME: Name = name!("type"); pub(crate) const JOIN_EXTERNAL_ARGUMENT_NAME: Name = name!("external"); pub(crate) const JOIN_OVERRIDE_ARGUMENT_NAME: Name = name!("override"); +pub(crate) const JOIN_OVERRIDE_LABEL_ARGUMENT_NAME: Name = name!("overrideLabel"); pub(crate) const JOIN_USEROVERRIDDEN_ARGUMENT_NAME: Name = name!("usedOverridden"); pub(crate) const JOIN_INTERFACE_ARGUMENT_NAME: Name = name!("interface"); pub(crate) const JOIN_MEMBER_ARGUMENT_NAME: Name = name!("member"); -pub(crate) struct GraphDirectiveArguments { - pub(crate) name: NodeStr, - pub(crate) url: NodeStr, +pub(crate) 
struct GraphDirectiveArguments<'doc> { + pub(crate) name: &'doc str, + pub(crate) url: &'doc str, } -pub(crate) struct TypeDirectiveArguments { +pub(crate) struct TypeDirectiveArguments<'doc> { pub(crate) graph: Name, - pub(crate) key: Option, + pub(crate) key: Option<&'doc str>, pub(crate) extension: bool, pub(crate) resolvable: bool, pub(crate) is_interface_object: bool, } -pub(crate) struct FieldDirectiveArguments { +pub(crate) struct FieldDirectiveArguments<'doc> { pub(crate) graph: Option, - pub(crate) requires: Option, - pub(crate) provides: Option, - pub(crate) type_: Option, + pub(crate) requires: Option<&'doc str>, + pub(crate) provides: Option<&'doc str>, + pub(crate) type_: Option<&'doc str>, pub(crate) external: Option, - pub(crate) override_: Option, + pub(crate) override_: Option<&'doc str>, + pub(crate) override_label: Option<&'doc str>, pub(crate) user_overridden: Option, } -pub(crate) struct ImplementsDirectiveArguments { +pub(crate) struct ImplementsDirectiveArguments<'doc> { pub(crate) graph: Name, - pub(crate) interface: NodeStr, + pub(crate) interface: &'doc str, } -pub(crate) struct UnionMemberDirectiveArguments { +pub(crate) struct UnionMemberDirectiveArguments<'doc> { pub(crate) graph: Name, - pub(crate) member: NodeStr, + pub(crate) member: &'doc str, } pub(crate) struct EnumValueDirectiveArguments { @@ -136,10 +136,10 @@ impl JoinSpecDefinition { }) } - pub(crate) fn graph_directive_arguments( + pub(crate) fn graph_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(GraphDirectiveArguments { name: directive_required_string_argument(application, &JOIN_NAME_ARGUMENT_NAME)?, url: directive_required_string_argument(application, &JOIN_URL_ARGUMENT_NAME)?, @@ -159,13 +159,13 @@ impl JoinSpecDefinition { }) } - pub(crate) fn type_directive_arguments( + pub(crate) fn type_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc 
Node, + ) -> Result, FederationError> { Ok(TypeDirectiveArguments { graph: directive_required_enum_argument(application, &JOIN_GRAPH_ARGUMENT_NAME)?, - key: directive_optional_fieldset_argument(application, &JOIN_KEY_ARGUMENT_NAME)?, + key: directive_optional_string_argument(application, &JOIN_KEY_ARGUMENT_NAME)?, extension: directive_optional_boolean_argument( application, &JOIN_EXTENSION_ARGUMENT_NAME, @@ -197,17 +197,17 @@ impl JoinSpecDefinition { }) } - pub(crate) fn field_directive_arguments( + pub(crate) fn field_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(FieldDirectiveArguments { graph: directive_optional_enum_argument(application, &JOIN_GRAPH_ARGUMENT_NAME)?, - requires: directive_optional_fieldset_argument( + requires: directive_optional_string_argument( application, &JOIN_REQUIRES_ARGUMENT_NAME, )?, - provides: directive_optional_fieldset_argument( + provides: directive_optional_string_argument( application, &JOIN_PROVIDES_ARGUMENT_NAME, )?, @@ -220,6 +220,10 @@ impl JoinSpecDefinition { application, &JOIN_OVERRIDE_ARGUMENT_NAME, )?, + override_label: directive_optional_string_argument( + application, + &JOIN_OVERRIDE_LABEL_ARGUMENT_NAME, + )?, user_overridden: directive_optional_boolean_argument( application, &JOIN_USEROVERRIDDEN_ARGUMENT_NAME, @@ -244,10 +248,10 @@ impl JoinSpecDefinition { .map(Some) } - pub(crate) fn implements_directive_arguments( + pub(crate) fn implements_directive_arguments<'doc>( &self, - application: &Node, - ) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(ImplementsDirectiveArguments { graph: directive_required_enum_argument(application, &JOIN_GRAPH_ARGUMENT_NAME)?, interface: directive_required_string_argument( @@ -274,10 +278,10 @@ impl JoinSpecDefinition { .map(Some) } - pub(crate) fn union_member_directive_arguments( + pub(crate) fn union_member_directive_arguments<'doc>( &self, - application: &Node, - 
) -> Result { + application: &'doc Node, + ) -> Result, FederationError> { Ok(UnionMemberDirectiveArguments { graph: directive_required_enum_argument(application, &JOIN_GRAPH_ARGUMENT_NAME)?, member: directive_required_string_argument(application, &JOIN_MEMBER_ARGUMENT_NAME)?, diff --git a/apollo-federation/src/link/mod.rs b/apollo-federation/src/link/mod.rs index 16d8379c05..272c5f4adc 100644 --- a/apollo-federation/src/link/mod.rs +++ b/apollo-federation/src/link/mod.rs @@ -4,12 +4,11 @@ use std::str; use std::sync::Arc; use apollo_compiler::ast::Directive; -use apollo_compiler::ast::InvalidNameError; use apollo_compiler::ast::Value; use apollo_compiler::name; -use apollo_compiler::schema::Name; +use apollo_compiler::InvalidNameError; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use thiserror::Error; use crate::error::FederationError; @@ -130,23 +129,23 @@ impl Import { if let Some(directive_name) = str.strip_prefix('@') { Ok(Import { element: Name::new(directive_name)?, is_directive: true, alias: None }) } else { - Ok(Import { element: Name::new(str.clone())?, is_directive: false, alias: None }) + Ok(Import { element: Name::new(str)?, is_directive: false, alias: None }) } }, Value::Object(fields) => { - let mut name: Option = None; - let mut alias: Option = None; + let mut name: Option<&str> = None; + let mut alias: Option<&str> = None; for (k, v) in fields { match k.as_str() { "name" => { - name = Some(v.as_node_str().ok_or_else(|| { + name = Some(v.as_str().ok_or_else(|| { LinkError::BootstrapError("invalid value for `name` field in @link(import:) argument: must be a string".to_string()) - })?.clone()) + })?) }, "as" => { - alias = Some(v.as_node_str().ok_or_else(|| { + alias = Some(v.as_str().ok_or_else(|| { LinkError::BootstrapError("invalid value for `as` field in @link(import:) argument: must be a string".to_string()) - })?.clone()) + })?) 
}, _ => Err(LinkError::BootstrapError(format!("unknown field `{k}` in @link(import:) argument")))? } @@ -157,7 +156,7 @@ impl Import { let Some(alias_str) = alias_str.strip_prefix('@') else { return Err(LinkError::BootstrapError(format!("invalid alias '{}' for import name '{}': should start with '@' since the imported name does", alias_str, element))); }; - alias = Some(alias_str.into()); + alias = Some(alias_str); } Ok(Import { element: Name::new(directive_name)?, @@ -257,7 +256,7 @@ impl Link { self.spec_name_in_schema().clone() } else { // Both sides are `Name`s and we just add valid characters in between. - Name::new_unchecked(format!("{}__{}", self.spec_name_in_schema(), name).into()) + Name::new_unchecked(&format!("{}__{}", self.spec_name_in_schema(), name)) } } @@ -268,7 +267,7 @@ impl Link { import.alias.clone().unwrap_or_else(|| name.clone()) } else { // Both sides are `Name`s and we just add valid characters in between. - Name::new_unchecked(format!("{}__{}", self.spec_name_in_schema(), name).into()) + Name::new_unchecked(&format!("{}__{}", self.spec_name_in_schema(), name)) } } @@ -302,7 +301,7 @@ impl Link { let spec_alias = directive .argument_by_name("as") - .and_then(|arg| arg.as_node_str()) + .and_then(|arg| arg.as_str()) .map(Name::new) .transpose()?; let purpose = if let Some(value) = directive.argument_by_name("for") { diff --git a/apollo-federation/src/link/spec.rs b/apollo-federation/src/link/spec.rs index e5982a10dd..25dd24c4b2 100644 --- a/apollo-federation/src/link/spec.rs +++ b/apollo-federation/src/link/spec.rs @@ -2,8 +2,8 @@ use std::fmt; use std::str; -use apollo_compiler::ast::Name; use apollo_compiler::name; +use apollo_compiler::Name; use thiserror::Error; use crate::error::FederationError; @@ -219,7 +219,7 @@ impl str::FromStr for Url { // So we pretend that it's fine. 
You can't reference an imported element by the // namespaced name because it's not valid GraphQL to do so--but you can // explicitly import elements from a spec with an invalid name. - .map(|segment| Name::new_unchecked(segment.into()))?; + .map(Name::new_unchecked)?; let scheme = url.scheme(); if !scheme.starts_with("http") { return Err(SpecError::ParseError("invalid `@link` specification url: only http(s) urls are supported currently".to_string())); diff --git a/apollo-federation/src/link/spec_definition.rs b/apollo-federation/src/link/spec_definition.rs index 8ba7e01836..5826f8f4d9 100644 --- a/apollo-federation/src/link/spec_definition.rs +++ b/apollo-federation/src/link/spec_definition.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use apollo_compiler::schema::DirectiveDefinition; use apollo_compiler::schema::ExtendedType; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; use crate::error::FederationError; diff --git a/apollo-federation/src/merge.rs b/apollo-federation/src/merge.rs index 095504a9e1..2a71aa92c7 100644 --- a/apollo-federation/src/merge.rs +++ b/apollo-federation/src/merge.rs @@ -20,14 +20,13 @@ use apollo_compiler::schema::ExtendedType; use apollo_compiler::schema::InputObjectType; use apollo_compiler::schema::InputValueDefinition; use apollo_compiler::schema::InterfaceType; -use apollo_compiler::schema::Name; use apollo_compiler::schema::ObjectType; use apollo_compiler::schema::ScalarType; use apollo_compiler::schema::UnionType; use apollo_compiler::ty; use apollo_compiler::validation::Valid; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use apollo_compiler::Schema; use indexmap::map::Entry::Occupied; use indexmap::map::Entry::Vacant; @@ -38,10 +37,15 @@ use itertools::Itertools; use crate::error::FederationError; use crate::link::federation_spec_definition::FEDERATION_EXTERNAL_DIRECTIVE_NAME_IN_SPEC; +use 
crate::link::federation_spec_definition::FEDERATION_FIELDS_ARGUMENT_NAME; +use crate::link::federation_spec_definition::FEDERATION_FROM_ARGUMENT_NAME; use crate::link::federation_spec_definition::FEDERATION_INTERFACEOBJECT_DIRECTIVE_NAME_IN_SPEC; use crate::link::federation_spec_definition::FEDERATION_KEY_DIRECTIVE_NAME_IN_SPEC; +use crate::link::federation_spec_definition::FEDERATION_OVERRIDE_DIRECTIVE_NAME_IN_SPEC; +use crate::link::federation_spec_definition::FEDERATION_OVERRIDE_LABEL_ARGUMENT_NAME; use crate::link::federation_spec_definition::FEDERATION_PROVIDES_DIRECTIVE_NAME_IN_SPEC; use crate::link::federation_spec_definition::FEDERATION_REQUIRES_DIRECTIVE_NAME_IN_SPEC; +use crate::link::join_spec_definition::JOIN_OVERRIDE_LABEL_ARGUMENT_NAME; use crate::link::spec::Identity; use crate::link::LinksMetadata; use crate::schema::ValidFederationSchema; @@ -203,8 +207,15 @@ impl Merger { type_name.clone(), value, ), - ExtendedType::Scalar(_value) => { - // DO NOTHING + ExtendedType::Scalar(value) => { + if !value.is_built_in() { + self.merge_scalar_type( + &mut supergraph.types, + subgraph_name.clone(), + type_name.clone(), + value, + ); + } } } } @@ -433,6 +444,10 @@ impl Merger { }) .unwrap_or(FEDERATION_INTERFACEOBJECT_DIRECTIVE_NAME_IN_SPEC); + let override_directive_name = federation_identity + .map(|link| link.directive_name_in_schema(&FEDERATION_OVERRIDE_DIRECTIVE_NAME_IN_SPEC)) + .unwrap_or(FEDERATION_OVERRIDE_DIRECTIVE_NAME_IN_SPEC); + let is_interface_object = object.directives.has(&interface_object_directive_name); let existing_type = types .entry(object_name.clone()) @@ -492,23 +507,29 @@ impl Merger { } } - let requires_directive_option = Option::and_then( - field.directives.get_all(&requires_directive_name).next(), - |p| { - let requires_fields = - directive_string_arg_value(p, &name!("fields")).unwrap(); - Some(requires_fields.as_str()) - }, - ); + let requires_directive_option = field + .directives + .get_all(&requires_directive_name) + .next() + 
.and_then(|p| directive_string_arg_value(p, &FEDERATION_FIELDS_ARGUMENT_NAME)); - let provides_directive_option = Option::and_then( - field.directives.get_all(&provides_directive_name).next(), - |p| { - let provides_fields = - directive_string_arg_value(p, &name!("fields")).unwrap(); - Some(provides_fields.as_str()) - }, - ); + let provides_directive_option = field + .directives + .get_all(&provides_directive_name) + .next() + .and_then(|p| directive_string_arg_value(p, &FEDERATION_FIELDS_ARGUMENT_NAME)); + + let overrides_directive_option = field + .directives + .get_all(&override_directive_name) + .next() + .and_then(|p| { + let overrides_from = + directive_string_arg_value(p, &FEDERATION_FROM_ARGUMENT_NAME); + let overrides_label = + directive_string_arg_value(p, &FEDERATION_OVERRIDE_LABEL_ARGUMENT_NAME); + overrides_from.map(|from| (from, overrides_label)) + }); let external_field = field .directives @@ -521,6 +542,7 @@ impl Merger { requires_directive_option, provides_directive_option, external_field, + overrides_directive_option, ); supergraph_field @@ -569,13 +591,32 @@ impl Merger { }), Node::new(Argument { name: name!("member"), - value: Node::new(Value::String(NodeStr::new(union_member))), + value: union_member.as_str().into(), }), ], })); } } } + + fn merge_scalar_type( + &self, + types: &mut IndexMap, + subgraph_name: Name, + scalar_name: NamedType, + ty: &Node, + ) { + let existing_type = types + .entry(scalar_name.clone()) + .or_insert(copy_scalar_type(scalar_name, ty)); + if let ExtendedType::Scalar(s) = existing_type { + let join_type_directives = + join_type_applied_directive(subgraph_name.clone(), iter::empty(), false); + s.make_mut().directives.extend(join_type_directives); + } else { + // conflict? 
+ } + } } const EXECUTABLE_DIRECTIVE_LOCATIONS: [DirectiveLocation; 8] = [ @@ -605,6 +646,14 @@ fn is_mergeable_type(type_name: &str) -> bool { !FEDERATION_TYPES.contains(&type_name) } +fn copy_scalar_type(scalar_name: Name, scalar_type: &Node) -> ExtendedType { + ExtendedType::Scalar(Node::new(ScalarType { + description: scalar_type.description.clone(), + name: scalar_name, + directives: Default::default(), + })) +} + fn copy_enum_type(enum_name: Name, enum_type: &Node) -> ExtendedType { ExtendedType::Enum(Node::new(EnumType { description: enum_type.description.clone(), @@ -713,7 +762,7 @@ fn copy_fields( new_fields } -fn copy_union_type(union_name: Name, description: Option) -> ExtendedType { +fn copy_union_type(union_name: Name, description: Option>) -> ExtendedType { ExtendedType::Union(Node::new(UnionType { description, name: union_name, @@ -749,7 +798,7 @@ fn join_type_applied_directive<'a>( .arguments .push(Node::new(Argument { name: name!("key"), - value: Node::new(Value::String(NodeStr::new(field_set.as_str()))), + value: field_set.into(), })); let resolvable = @@ -786,7 +835,7 @@ fn join_implements_applied_directive( }), Node::new(Argument { name: name!("interface"), - value: Node::new(Value::String(intf_name.to_string().into())), + value: intf_name.as_str().into(), }), ], }) @@ -800,10 +849,7 @@ fn directive_arg_value<'a>(directive: &'a Directive, arg_name: &Name) -> Option< .map(|arg| arg.value.as_ref()) } -fn directive_string_arg_value<'a>( - directive: &'a Directive, - arg_name: &Name, -) -> Option<&'a NodeStr> { +fn directive_string_arg_value<'a>(directive: &'a Directive, arg_name: &Name) -> Option<&'a str> { match directive_arg_value(directive, arg_name) { Some(Value::String(value)) => Some(value), _ => None, @@ -828,9 +874,7 @@ fn add_core_feature_link(supergraph: &mut Schema) { name: name!("link"), arguments: vec![Node::new(Argument { name: name!("url"), - value: Node::new(Value::String(NodeStr::new( - "https://specs.apollo.dev/link/v1.0", - ))), 
+ value: Node::new("https://specs.apollo.dev/link/v1.0".into()), })], })); @@ -914,16 +958,16 @@ fn link_purpose_enum_type() -> (Name, EnumType) { values: IndexMap::new(), }; let link_purpose_security_value = EnumValueDefinition { - description: Some(NodeStr::new( - r"SECURITY features provide metadata necessary to securely resolve fields.", - )), + description: Some( + r"SECURITY features provide metadata necessary to securely resolve fields.".into(), + ), directives: Default::default(), value: name!("SECURITY"), }; let link_purpose_execution_value = EnumValueDefinition { - description: Some(NodeStr::new( - r"EXECUTION features provide metadata necessary for operation execution.", - )), + description: Some( + r"EXECUTION features provide metadata necessary for operation execution.".into(), + ), directives: Default::default(), value: name!("EXECUTION"), }; @@ -953,9 +997,7 @@ fn add_core_feature_join( arguments: vec![ Node::new(Argument { name: name!("url"), - value: Node::new(Value::String(NodeStr::new( - "https://specs.apollo.dev/join/v0.3", - ))), + value: "https://specs.apollo.dev/join/v0.3".into(), }), Node::new(Argument { name: name!("for"), @@ -1088,6 +1130,13 @@ fn join_field_directive_definition() -> DirectiveDefinition { ty: ty!(String).into(), default_value: None, }), + Node::new(InputValueDefinition { + name: JOIN_OVERRIDE_LABEL_ARGUMENT_NAME, + description: None, + directives: Default::default(), + ty: ty!(String).into(), + default_value: None, + }), Node::new(InputValueDefinition { name: name!("usedOverridden"), description: None, @@ -1109,6 +1158,7 @@ fn join_field_applied_directive( requires: Option<&str>, provides: Option<&str>, external: bool, + overrides: Option<(&str, Option<&str>)>, // from, label ) -> Directive { let mut join_field_directive = Directive { name: name!("join__field"), @@ -1120,21 +1170,33 @@ fn join_field_applied_directive( if let Some(required_fields) = requires { join_field_directive.arguments.push(Node::new(Argument { name: 
name!("requires"), - value: Node::new(Value::String(NodeStr::new(required_fields))), + value: required_fields.into(), })); } if let Some(provided_fields) = provides { join_field_directive.arguments.push(Node::new(Argument { name: name!("provides"), - value: Node::new(Value::String(NodeStr::new(provided_fields))), + value: provided_fields.into(), })); } if external { join_field_directive.arguments.push(Node::new(Argument { name: name!("external"), - value: Node::new(Value::Boolean(external)), + value: external.into(), })); } + if let Some((from, label)) = overrides { + join_field_directive.arguments.push(Node::new(Argument { + name: name!("override"), + value: Node::new(Value::String(from.to_string())), + })); + if let Some(label) = label { + join_field_directive.arguments.push(Node::new(Argument { + name: name!("overrideLabel"), + value: Node::new(Value::String(label.to_string())), + })); + } + } join_field_directive } @@ -1296,11 +1358,11 @@ fn join_graph_enum_type( arguments: vec![ (Node::new(Argument { name: name!("name"), - value: Node::new(Value::String(NodeStr::new(s.name.as_str()))), + value: s.name.as_str().into(), })), (Node::new(Argument { name: name!("url"), - value: Node::new(Value::String(NodeStr::new(s.url.as_str()))), + value: s.url.as_str().into(), })), ], }; @@ -1409,4 +1471,40 @@ mod tests { assert_snapshot!(schema.serialize()); } + + #[test] + fn test_basic() { + let one_sdl = include_str!("./sources/connect/expand/merge/basic_1.graphql"); + let two_sdl = include_str!("./sources/connect/expand/merge/basic_2.graphql"); + + let mut subgraphs = ValidFederationSubgraphs::new(); + subgraphs + .add(ValidFederationSubgraph { + name: "basic_1".to_string(), + url: "".to_string(), + schema: ValidFederationSchema::new( + Schema::parse_and_validate(one_sdl, "./basic_1.graphql").unwrap(), + ) + .unwrap(), + }) + .unwrap(); + subgraphs + .add(ValidFederationSubgraph { + name: "basic_2".to_string(), + url: "".to_string(), + schema: ValidFederationSchema::new( + 
Schema::parse_and_validate(two_sdl, "./basic_2.graphql").unwrap(), + ) + .unwrap(), + }) + .unwrap(); + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); + } } diff --git a/apollo-federation/src/operation/contains.rs b/apollo-federation/src/operation/contains.rs index 9e9ecd2fa3..d947a8faf2 100644 --- a/apollo-federation/src/operation/contains.rs +++ b/apollo-federation/src/operation/contains.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; -use apollo_compiler::ast::Name; use apollo_compiler::executable; +use apollo_compiler::Name; use apollo_compiler::Node; use super::FieldSelection; @@ -276,12 +276,10 @@ impl Selection { impl FieldSelection { pub fn containment(&self, other: &FieldSelection, options: ContainmentOptions) -> Containment { - let self_field = self.field.data(); - let other_field = other.field.data(); - if self_field.name() != other_field.name() - || self_field.alias != other_field.alias - || !same_arguments(&self_field.arguments, &other_field.arguments) - || !same_directives(&self_field.directives, &other_field.directives) + if self.field.name() != other.field.name() + || self.field.alias != other.field.alias + || !same_arguments(&self.field.arguments, &other.field.arguments) + || !same_directives(&self.field.directives, &other.field.directives) { return Containment::NotContained; } diff --git a/apollo-federation/src/operation/mod.rs b/apollo-federation/src/operation/mod.rs index 86226f3769..d9dea7294b 100644 --- a/apollo-federation/src/operation/mod.rs +++ b/apollo-federation/src/operation/mod.rs @@ -24,11 +24,10 @@ use std::sync::Arc; use std::sync::OnceLock; use apollo_compiler::executable; -use apollo_compiler::executable::Name; use apollo_compiler::name; use apollo_compiler::validation::Valid; +use apollo_compiler::Name; use apollo_compiler::Node; 
-use apollo_compiler::NodeStr; use indexmap::IndexMap; use indexmap::IndexSet; @@ -40,9 +39,8 @@ use crate::query_plan::conditions::Conditions; use crate::query_plan::FetchDataKeyRenamer; use crate::query_plan::FetchDataPathElement; use crate::query_plan::FetchDataRewrite; -use crate::schema::definitions::is_composite_type; use crate::schema::definitions::types_can_be_merged; -use crate::schema::definitions::AbstractType; +use crate::schema::position::AbstractTypeDefinitionPosition; use crate::schema::position::CompositeTypeDefinitionPosition; use crate::schema::position::FieldDefinitionPosition; use crate::schema::position::InterfaceTypeDefinitionPosition; @@ -52,11 +50,12 @@ use crate::schema::ValidFederationSchema; mod contains; mod optimize; mod rebase; +mod simplify; #[cfg(test)] mod tests; -pub use contains::*; -pub use rebase::*; +pub(crate) use contains::*; +pub(crate) use rebase::*; pub(crate) const TYPENAME_FIELD: Name = name!("__typename"); @@ -97,7 +96,7 @@ pub struct Operation { pub(crate) struct NormalizedDefer { pub operation: Operation, pub has_defers: bool, - pub assigned_defer_labels: HashSet, + pub assigned_defer_labels: HashSet, pub defer_conditions: IndexMap>, } @@ -713,24 +712,24 @@ impl Selection { pub(crate) fn schema(&self) -> &ValidFederationSchema { match self { - Selection::Field(field_selection) => &field_selection.field.data().schema, + Selection::Field(field_selection) => &field_selection.field.schema, Selection::FragmentSpread(fragment_spread_selection) => { - &fragment_spread_selection.spread.data().schema + &fragment_spread_selection.spread.schema } Selection::InlineFragment(inline_fragment_selection) => { - &inline_fragment_selection.inline_fragment.data().schema + &inline_fragment_selection.inline_fragment.schema } } } fn directives(&self) -> &Arc { match self { - Selection::Field(field_selection) => &field_selection.field.data().directives, + Selection::Field(field_selection) => &field_selection.field.directives, 
Selection::FragmentSpread(fragment_spread_selection) => { - &fragment_spread_selection.spread.data().directives + &fragment_spread_selection.spread.directives } Selection::InlineFragment(inline_fragment_selection) => { - &inline_fragment_selection.inline_fragment.data().directives + &inline_fragment_selection.inline_fragment.directives } } } @@ -785,6 +784,10 @@ impl Selection { } } + fn sub_selection_type_position(&self) -> Option { + Some(self.try_selection_set()?.type_position.clone()) + } + pub(crate) fn conditions(&self) -> Result { let self_conditions = Conditions::from_directives(self.directives())?; if let Conditions::Boolean(false) = self_conditions { @@ -850,33 +853,13 @@ impl Selection { } Selection::FragmentSpread(fragment) => { let current_count = aggregator - .entry(fragment.spread.data().fragment_name.clone()) + .entry(fragment.spread.fragment_name.clone()) .or_default(); *current_count += 1; } } } - fn normalize( - &self, - parent_type: &CompositeTypeDefinitionPosition, - named_fragments: &NamedFragments, - schema: &ValidFederationSchema, - option: NormalizeSelectionOption, - ) -> Result, FederationError> { - match self { - Selection::Field(field) => { - field.normalize(parent_type, named_fragments, schema, option) - } - Selection::FragmentSpread(spread) => { - spread.normalize(parent_type, named_fragments, schema) - } - Selection::InlineFragment(inline) => { - inline.normalize(parent_type, named_fragments, schema, option) - } - } - } - pub(crate) fn with_updated_selection_set( &self, selection_set: Option, @@ -1066,11 +1049,12 @@ mod field_selection { use std::collections::HashSet; use std::hash::Hash; use std::hash::Hasher; + use std::ops::Deref; use std::sync::Arc; use apollo_compiler::ast; use apollo_compiler::executable; - use apollo_compiler::executable::Name; + use apollo_compiler::Name; use apollo_compiler::Node; use crate::error::FederationError; @@ -1183,6 +1167,14 @@ mod field_selection { } } + impl Deref for Field { + type Target = 
FieldData; + + fn deref(&self) -> &Self::Target { + &self.data + } + } + impl Field { pub(crate) fn new(data: FieldData) -> Self { let mut arguments = data.arguments.as_ref().clone(); @@ -1237,8 +1229,7 @@ mod field_selection { "Field and its selection set should point to the same type position [field position: {}, selection position: {}]", field_type_position, selection_set.type_position, ); debug_assert_eq!( - self.data().schema, - selection_set.schema, + self.schema, selection_set.schema, "Field and its selection set should point to the same schema", ); } else { @@ -1292,17 +1283,17 @@ mod field_selection { } pub(crate) fn as_path_element(&self) -> FetchDataPathElement { - FetchDataPathElement::Key(self.data().response_name().into()) + FetchDataPathElement::Key(self.response_name()) } pub(crate) fn collect_variables<'selection>( &'selection self, variables: &mut HashSet<&'selection Name>, ) { - for arg in self.data().arguments.iter() { + for arg in self.arguments.iter() { collect_variables_from_argument(arg, variables) } - for dir in self.data().directives.iter() { + for dir in self.directives.iter() { collect_variables_from_directive(dir, variables) } } @@ -1421,10 +1412,11 @@ pub(crate) use field_selection::FieldSelection; pub(crate) use field_selection::SiblingTypename; mod fragment_spread_selection { + use std::ops::Deref; use std::sync::Arc; use apollo_compiler::executable; - use apollo_compiler::executable::Name; + use apollo_compiler::Name; use crate::operation::is_deferred_selection; use crate::operation::sort_directives; @@ -1470,6 +1462,14 @@ mod fragment_spread_selection { impl Eq for FragmentSpread {} + impl Deref for FragmentSpread { + type Target = FragmentSpreadData; + + fn deref(&self) -> &Self::Target { + &self.data + } + } + impl FragmentSpread { pub(crate) fn new(data: FragmentSpreadData) -> Self { Self { @@ -1478,13 +1478,13 @@ mod fragment_spread_selection { } } - pub(super) fn directives_mut(&mut self) -> &mut Arc { - &mut 
self.data.directives - } - pub(crate) fn data(&self) -> &FragmentSpreadData { &self.data } + + pub(super) fn directives_mut(&mut self) -> &mut Arc { + &mut self.data.directives + } } impl HasSelectionKey for FragmentSpread { @@ -1533,7 +1533,7 @@ pub(crate) use fragment_spread_selection::FragmentSpreadSelection; impl FragmentSpreadSelection { pub(crate) fn has_defer(&self) -> bool { - self.spread.data().directives.has("defer") || self.selection_set.has_defer() + self.spread.directives.has("defer") || self.selection_set.has_defer() } /// Copies fragment spread selection and assigns it a new unique selection ID. @@ -1579,65 +1579,28 @@ impl FragmentSpreadSelection { fragment_spread: FragmentSpread, named_fragments: &NamedFragments, ) -> Result { - let fragment_name = &fragment_spread.data().fragment_name; + let fragment_name = &fragment_spread.fragment_name; let fragment = named_fragments.get(fragment_name).ok_or_else(|| { FederationError::internal(format!("Fragment {} not found", fragment_name)) })?; - debug_assert_eq!(fragment_spread.data().schema, fragment.schema); + debug_assert_eq!(fragment_spread.schema, fragment.schema); Ok(Self { spread: fragment_spread, selection_set: fragment.selection_set.clone(), }) } - fn normalize( - &self, - parent_type: &CompositeTypeDefinitionPosition, - named_fragments: &NamedFragments, - schema: &ValidFederationSchema, - ) -> Result, FederationError> { - let this_condition = self.spread.data().type_condition_position.clone(); - // This method assumes by contract that `parent_type` runtimes intersects `self.inline_fragment.data().parent_type_position`'s, - // but `parent_type` runtimes may be a subset. So first check if the selection should not be discarded on that account (that - // is, we should not keep the selection if its condition runtimes don't intersect at all with those of - // `parent_type` as that would ultimately make an invalid selection set). 
- if (self.spread.data().schema != *schema || this_condition != *parent_type) - && !runtime_types_intersect(&this_condition, parent_type, schema) - { - return Ok(None); - } - - // We must update the spread parent type if necessary since we're not going deeper, - // or we'll be fundamentally losing context. - if self.spread.data().schema != *schema { - return Err(FederationError::internal( - "Should not try to normalize using a type from another schema", - )); - } - - if let Some(rebased_fragment_spread) = self.rebase_on( - parent_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )? { - Ok(Some(SelectionOrSet::Selection(rebased_fragment_spread))) - } else { - unreachable!("We should always be able to either rebase the fragment spread OR throw an exception"); - } - } - pub(crate) fn any_element( &self, parent_type_position: CompositeTypeDefinitionPosition, predicate: &mut impl FnMut(OpPathElement) -> Result, ) -> Result { let inline_fragment = InlineFragment::new(InlineFragmentData { - schema: self.spread.data().schema.clone(), + schema: self.spread.schema.clone(), parent_type_position, - type_condition_position: Some(self.spread.data().type_condition_position.clone()), - directives: self.spread.data().directives.clone(), - selection_id: self.spread.data().selection_id.clone(), + type_condition_position: Some(self.spread.type_condition_position.clone()), + directives: self.spread.directives.clone(), + selection_id: self.spread.selection_id.clone(), }); if predicate(inline_fragment.into())? 
{ return Ok(true); @@ -1651,11 +1614,11 @@ impl FragmentSpreadSelection { callback: &mut impl FnMut(OpPathElement) -> Result<(), FederationError>, ) -> Result<(), FederationError> { let inline_fragment = InlineFragment::new(InlineFragmentData { - schema: self.spread.data().schema.clone(), + schema: self.spread.schema.clone(), parent_type_position, - type_condition_position: Some(self.spread.data().type_condition_position.clone()), - directives: self.spread.data().directives.clone(), - selection_id: self.spread.data().selection_id.clone(), + type_condition_position: Some(self.spread.type_condition_position.clone()), + directives: self.spread.directives.clone(), + selection_id: self.spread.selection_id.clone(), }); callback(inline_fragment.into())?; self.selection_set.for_each_element(callback) @@ -1682,10 +1645,11 @@ mod inline_fragment_selection { use std::collections::HashSet; use std::hash::Hash; use std::hash::Hasher; + use std::ops::Deref; use std::sync::Arc; use apollo_compiler::executable; - use apollo_compiler::executable::Name; + use apollo_compiler::Name; use super::field_selection::collect_variables_from_directive; use crate::error::FederationError; @@ -1776,6 +1740,14 @@ mod inline_fragment_selection { } } + impl Deref for InlineFragment { + type Target = InlineFragmentData; + + fn deref(&self) -> &Self::Target { + &self.data + } + } + impl InlineFragment { pub(crate) fn new(data: InlineFragmentData) -> Self { Self { @@ -1814,10 +1786,10 @@ mod inline_fragment_selection { } pub(crate) fn as_path_element(&self) -> Option { - let condition = self.data().type_condition_position.clone()?; + let condition = self.type_condition_position.clone()?; Some(FetchDataPathElement::TypenameEquals( - condition.type_name().clone().into(), + condition.type_name().clone(), )) } @@ -2055,7 +2027,7 @@ impl SelectionSet { return Err(Internal { message: format!( "Field selection key for field \"{}\" references non-field selection", - field.data().field_position, + 
field.field_position, ), } .into()); @@ -2233,7 +2205,7 @@ impl SelectionSet { return Err(Internal { message: format!( "Field selection key for field \"{}\" references non-field selection", - self_field_selection.field.data().field_position, + self_field_selection.field.field_position, ), }.into()); }; @@ -2249,7 +2221,7 @@ impl SelectionSet { return Err(Internal { message: format!( "Fragment spread selection key for fragment \"{}\" references non-field selection", - self_fragment_spread_selection.spread.data().fragment_name, + self_fragment_spread_selection.spread.fragment_name, ), }.into()); }; @@ -2265,8 +2237,8 @@ impl SelectionSet { return Err(Internal { message: format!( "Inline fragment selection key under parent type \"{}\" {}references non-field selection", - self_inline_fragment_selection.inline_fragment.data().parent_type_position, - self_inline_fragment_selection.inline_fragment.data().type_condition_position.clone() + self_inline_fragment_selection.inline_fragment.parent_type_position, + self_inline_fragment_selection.inline_fragment.type_condition_position.clone() .map_or_else( String::new, |cond| format!("(type condition: {}) ", cond), @@ -2353,11 +2325,11 @@ impl SelectionSet { )) } Selection::FragmentSpread(spread_selection) => { - let fragment_spread_data = spread_selection.spread.data(); // We can hoist/collapse named fragments if their type condition is on the // parent type and they don't have any directives. 
- if fragment_spread_data.type_condition_position == selection_set.type_position - && fragment_spread_data.directives.is_empty() + if spread_selection.spread.type_condition_position + == selection_set.type_position + && spread_selection.spread.directives.is_empty() { SelectionSet::expand_selection_set( destination, @@ -2431,7 +2403,7 @@ impl SelectionSet { for (key, entry) in mutable_selection_map.iter_mut() { match entry { SelectionValue::Field(mut field_selection) => { - if field_selection.get().field.data().name() == &TYPENAME_FIELD + if field_selection.get().field.name() == &TYPENAME_FIELD && !is_interface_object && typename_field_key.is_none() { @@ -2455,7 +2427,7 @@ impl SelectionSet { return Err(FederationError::internal( format!( "Error while optimizing sibling typename information, selection set contains {} named fragment", - fragment_spread.get().spread.data().fragment_name + fragment_spread.get().spread.fragment_name ) )); } @@ -2474,7 +2446,7 @@ impl SelectionSet { ) { // Note that as we tag the element, we also record the alias used if any since that // needs to be preserved. - let sibling_typename = match &typename_field.field.data().alias { + let sibling_typename = match &typename_field.field.alias { None => SiblingTypename::Unaliased, Some(alias) => SiblingTypename::Aliased(alias.clone()), }; @@ -2563,20 +2535,12 @@ impl SelectionSet { let Some(second) = iter.next() else { // Optimize for the simple case of a single selection, as we don't have to do anything // complex to merge the sub-selections. - return first - .rebase_on( - parent_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )? - .ok_or_else(|| FederationError::internal("Unable to rebase selection updates")); + return first.rebase_on(parent_type, named_fragments, schema); }; - let element = - first - .operation_element()? - .rebase_on_or_error(parent_type, schema, named_fragments)?; + let element = first + .operation_element()? 
+ .rebase_on(parent_type, schema, named_fragments)?; let sub_selection_parent_type: Option = element.sub_selection_type_position()?; @@ -2722,7 +2686,7 @@ impl SelectionSet { pub(crate) fn add_typename_field_for_abstract_types( &self, - parent_type_if_abstract: Option, + parent_type_if_abstract: Option, ) -> Result { let mut selection_map = SelectionMap::new(); if let Some(parent) = parent_type_if_abstract { @@ -2736,7 +2700,9 @@ impl SelectionSet { } for selection in self.selections.values() { selection_map.insert(if let Some(selection_set) = selection.selection_set()? { - let type_if_abstract = subselection_type_if_abstract(selection)?; + let type_if_abstract = selection + .sub_selection_type_position() + .and_then(|ty| ty.try_into().ok()); let updated_selection_set = selection_set.add_typename_field_for_abstract_types(type_if_abstract)?; @@ -2833,12 +2799,8 @@ impl SelectionSet { selection_set: &SelectionSet, named_fragments: &NamedFragments, ) -> Result<(), FederationError> { - let rebased = selection_set.rebase_on( - &self.type_position, - named_fragments, - &self.schema, - RebaseErrorHandlingOption::ThrowError, - )?; + let rebased = + selection_set.rebase_on(&self.type_position, named_fragments, &self.schema)?; self.add_local_selection_set(&rebased) } @@ -2876,7 +2838,7 @@ impl SelectionSet { match path.split_first() { // If we have a sub-path, recurse. Some((ele, path @ &[_, ..])) => { - let element = ele.rebase_on_or_error(&self.type_position, &self.schema)?; + let element = ele.rebase_on(&self.type_position, &self.schema)?; let Some(sub_selection_type) = element.sub_selection_type_position()? else { return Err(FederationError::internal("unexpected error: add_at_path encountered a field that is not of a composite type".to_string())); }; @@ -2909,7 +2871,7 @@ impl SelectionSet { // turn the path and selection set into a selection. Because we are mutating things // in-place, we eagerly construct the selection that needs to be rebased on the target // schema. 
- let element = ele.rebase_on_or_error(&self.type_position, &self.schema)?; + let element = ele.rebase_on(&self.type_position, &self.schema)?; if selection_set.is_none() || selection_set.is_some_and(|s| s.is_empty()) { // This is a somewhat common case when dealing with `@key` "conditions" that we can // end up with trying to add empty sub selection set on a non-leaf node. There is @@ -2932,7 +2894,6 @@ impl SelectionSet { })?, &NamedFragments::default(), &self.schema, - RebaseErrorHandlingOption::ThrowError, ) }) .transpose()? @@ -2957,113 +2918,6 @@ impl SelectionSet { .for_each(|(_, s)| s.collect_used_fragment_names(aggregator)); } - /// Applies some normalization rules to this selection set in the context of the provided `parent_type`. - /// - /// Normalization mostly removes unnecessary/redundant inline fragments, so that for instance, with a schema: - /// ```graphql - /// type Query { - /// t1: T1 - /// i: I - /// } - /// - /// interface I { - /// id: ID! - /// } - /// - /// type T1 implements I { - /// id: ID! - /// v1: Int - /// } - /// - /// type T2 implements I { - /// id: ID! - /// v2: Int - /// } - /// ``` - /// We can perform following normalization - /// ```graphql - /// normalize({ - /// t1 { - /// ... on I { - /// id - /// } - /// } - /// i { - /// ... on T1 { - /// ... on I { - /// ... on T1 { - /// v1 - /// } - /// ... on T2 { - /// v2 - /// } - /// } - /// } - /// ... on T2 { - /// ... on I { - /// id - /// } - /// } - /// } - /// }) === { - /// t1 { - /// id - /// } - /// i { - /// ... on T1 { - /// v1 - /// } - /// ... on T2 { - /// id - /// } - /// } - /// } - /// ``` - /// - /// For this operation to be valid (to not throw), `parent_type` must be such that every field selection in - /// this selection set is such that its type position intersects with passed `parent_type` (there is no limitation - /// on the fragment selections, though any fragment selections whose condition do not intersects `parent_type` - /// will be discarded). 
Note that `self.normalize(self.type_condition)` is always valid and useful, but it is - /// also possible to pass a `parent_type` that is more "restrictive" than the selection current type position - /// (as long as the top-level fields of this selection set can be rebased on that type). - /// - /// Passing the option `recursive == false` makes the normalization only apply at the top-level, removing - /// any unnecessary top-level inline fragments, possibly multiple layers of them, but we never recurse - /// inside the sub-selection of an selection that is not removed by the normalization. - // PORT_NOTE: this is now module-private, because it looks like it *can* be. If some place - // outside this module *does* need it, feel free to mark it pub(crate). - fn normalize( - &self, - parent_type: &CompositeTypeDefinitionPosition, - named_fragments: &NamedFragments, - schema: &ValidFederationSchema, - option: NormalizeSelectionOption, - ) -> Result { - let mut normalized_selections = Self { - schema: schema.clone(), - type_position: parent_type.clone(), - selections: Default::default(), // start empty - }; - for (_, selection) in self.selections.iter() { - if let Some(selection_or_set) = - selection.normalize(parent_type, named_fragments, schema, option)? - { - match selection_or_set { - SelectionOrSet::Selection(normalized_selection) => { - normalized_selections.add_local_selection(&normalized_selection)?; - } - SelectionOrSet::SelectionSet(normalized_set) => { - // Since the `selection` has been expanded/lifted, we use - // `add_selection_set_with_fragments` to make sure it's rebased. - normalized_selections - .add_selection_set_with_fragments(&normalized_set, named_fragments)?; - } - } - } - } - Ok(normalized_selections) - } - /// Removes the @defer directive from all selections without removing that selection. 
fn without_defer(&mut self) { for (_key, mut selection) in Arc::make_mut(&mut self.selections).iter_mut() { @@ -3102,7 +2956,7 @@ impl SelectionSet { response_name, alias, }| { - path.push(FetchDataPathElement::Key(alias.into())); + path.push(FetchDataPathElement::Key(alias)); Arc::new(FetchDataRewrite::KeyRenamer(FetchDataKeyRenamer { path, rename_key_to: response_name, @@ -3193,8 +3047,18 @@ impl SelectionSet { }) } - // - `self.selections` must be fragment-spread-free. - pub(crate) fn fields_in_set(&self) -> Vec { + /// In a normalized selection set containing only fields and inline fragments, + /// iterate over all the fields that may be selected. + /// + /// # Preconditions + /// The selection set must not contain named fragment spreads. + pub(crate) fn field_selections(&self) -> FieldSelectionsIter<'_> { + FieldSelectionsIter::new(self.selections.values()) + } + + /// # Preconditions + /// The selection set must not contain named fragment spreads. + fn fields_in_set(&self) -> Vec { let mut fields = Vec::new(); for (_key, selection) in self.selections.iter() { @@ -3212,12 +3076,11 @@ impl SelectionSet { Selection::InlineFragment(inline_fragment) => { let condition = inline_fragment .inline_fragment - .data() .type_condition_position .as_ref(); let header = match condition { Some(cond) => vec![FetchDataPathElement::TypenameEquals( - cond.type_name().clone().into(), + cond.type_name().clone(), )], None => vec![], }; @@ -3351,6 +3214,36 @@ impl IntoIterator for SelectionSet { } } +pub(crate) struct FieldSelectionsIter<'sel> { + stack: Vec>, +} + +impl<'sel> FieldSelectionsIter<'sel> { + fn new(iter: indexmap::map::Values<'sel, SelectionKey, Selection>) -> Self { + Self { stack: vec![iter] } + } +} + +impl<'sel> Iterator for FieldSelectionsIter<'sel> { + type Item = &'sel Arc; + + fn next(&mut self) -> Option { + match self.stack.last_mut()?.next() { + None if self.stack.len() == 1 => None, + None => { + self.stack.pop(); + self.next() + } + 
Some(Selection::Field(field)) => Some(field), + Some(Selection::InlineFragment(frag)) => { + self.stack.push(frag.selection_set.selections.values()); + self.next() + } + Some(Selection::FragmentSpread(_frag)) => unreachable!(), + } + } +} + #[derive(Clone, Debug)] pub(crate) struct SelectionSetAtPath { path: Vec, @@ -3359,7 +3252,7 @@ pub(crate) struct SelectionSetAtPath { pub(crate) struct FieldToAlias { path: Vec, - response_name: NodeStr, + response_name: Name, alias: Name, } @@ -3411,19 +3304,19 @@ fn compute_aliases_for_non_merging_fields( for FieldInPath { mut path, field } in selections.iter().flat_map(rebased_fields_in_set) { let field_schema = field.field.schema().schema(); - let field_data = field.field.data(); - let field_name = field_data.name(); - let response_name = field_data.response_name(); - let field_type = &field_data.field_position.get(field_schema)?.ty; + let field_name = field.field.name(); + let response_name = field.field.response_name(); + let field_type = &field.field.field_position.get(field_schema)?.ty; match seen_response_names.get(&response_name) { Some(previous) => { if &previous.field_name == field_name && types_can_be_merged(&previous.field_type, field_type, schema.schema())? { + let output_type = schema.get_type(field_type.inner_named_type().clone())?; // If the type is non-composite, then we're all set. But if it is composite, we need to record the sub-selection to that response name // as we need to "recurse" on the merged of both the previous and this new field. - if is_composite_type(field_type.inner_named_type(), schema.schema())? 
{ + if output_type.is_composite_type() { match &previous.selections { None => { return Err(SingleFederationError::Internal { @@ -3437,7 +3330,7 @@ fn compute_aliases_for_non_merging_fields( Some(s) => { let mut selections = s.clone(); let mut p = path.clone(); - p.push(FetchDataPathElement::Key(response_name.clone().into())); + p.push(FetchDataPathElement::Key(response_name.clone())); selections.push(SelectionSetAtPath { path: p, selections: field.selection_set.clone(), @@ -3463,7 +3356,7 @@ fn compute_aliases_for_non_merging_fields( let selections = match field.selection_set.as_ref() { Some(s) => { let mut p = path.clone(); - p.push(FetchDataPathElement::Key(alias.clone().into())); + p.push(FetchDataPathElement::Key(alias.clone())); Some(vec![SelectionSetAtPath { path: p, selections: Some(s.clone()), @@ -3485,7 +3378,7 @@ fn compute_aliases_for_non_merging_fields( alias_collector.push(FieldToAlias { path, - response_name: response_name.into(), + response_name, alias, }) } @@ -3494,7 +3387,7 @@ fn compute_aliases_for_non_merging_fields( let selections: Option> = match field.selection_set.as_ref() { Some(s) => { - path.push(FetchDataPathElement::Key(response_name.clone().into())); + path.push(FetchDataPathElement::Key(response_name.clone())); Some(vec![SelectionSetAtPath { path, selections: Some(s.clone()), @@ -3526,7 +3419,7 @@ fn compute_aliases_for_non_merging_fields( fn gen_alias_name(base_name: &Name, unavailable_names: &HashMap) -> Name { let mut counter = 0usize; loop { - if let Ok(name) = Name::try_from(NodeStr::new(&format!("{base_name}__alias_{counter}"))) { + if let Ok(name) = Name::try_from(format!("{base_name}__alias_{counter}")) { if !unavailable_names.contains_key(&name) { return name; } @@ -3535,21 +3428,6 @@ fn gen_alias_name(base_name: &Name, unavailable_names: &HashMap Result, FederationError> { - let Some(sub_selection_type) = selection.element()?.sub_selection_type_position()? 
else { - return Ok(None); - }; - match sub_selection_type { - CompositeTypeDefinitionPosition::Interface(interface_type) => { - Ok(Some(interface_type.into())) - } - CompositeTypeDefinitionPosition::Union(union_type) => Ok(Some(union_type.into())), - CompositeTypeDefinitionPosition::Object(_) => Ok(None), - } -} - impl FieldData { fn with_updated_position( &self, @@ -3589,8 +3467,10 @@ impl FieldSelection { // Operation creation and the creation of the ValidFederationSchema, it's safer to just // confirm it exists in this schema. field_position.get(schema.schema())?; - let field_composite_type_result: Result = - schema.get_type(field.selection_set.ty.clone())?.try_into(); + let is_composite = CompositeTypeDefinitionPosition::try_from( + schema.get_type(field.selection_set.ty.clone())?, + ) + .is_ok(); Ok(Some(FieldSelection { field: Field::new(FieldData { @@ -3601,7 +3481,7 @@ impl FieldSelection { directives: Arc::new(field.directives.clone()), sibling_typename: None, }), - selection_set: if field_composite_type_result.is_ok() { + selection_set: if is_composite { Some(SelectionSet::from_selection_set( &field.selection_set, fragments, @@ -3620,90 +3500,6 @@ impl FieldSelection { } } - fn normalize( - &self, - parent_type: &CompositeTypeDefinitionPosition, - named_fragments: &NamedFragments, - schema: &ValidFederationSchema, - option: NormalizeSelectionOption, - ) -> Result, FederationError> { - let field_position = - if self.field.schema() == schema && self.field.parent_type_position() == *parent_type { - self.field.data().field_position.clone() - } else { - parent_type.field(self.field.data().name().clone())? 
- }; - - let field_element = if self.field.schema() == schema - && self.field.data().field_position == field_position - { - self.field.data().clone() - } else { - self.field - .data() - .with_updated_position(schema.clone(), field_position) - }; - - if let Some(selection_set) = &self.selection_set { - let field_composite_type_position: CompositeTypeDefinitionPosition = - field_element.output_base_type()?.try_into()?; - let mut normalized_selection: SelectionSet = - if NormalizeSelectionOption::NormalizeRecursively == option { - selection_set.normalize( - &field_composite_type_position, - named_fragments, - schema, - option, - )? - } else { - selection_set.clone() - }; - - let mut selection = self.with_updated_element(field_element); - if normalized_selection.is_empty() { - // In rare cases, it's possible that everything in the sub-selection was trimmed away and so the - // sub-selection is empty. Which suggest something may be wrong with this part of the query - // intent, but the query was valid while keeping an empty sub-selection isn't. So in that - // case, we just add some "non-included" __typename field just to keep the query valid. 
- let directives = - executable::DirectiveList(vec![Node::new(executable::Directive { - name: name!("include"), - arguments: vec![Node::new(executable::Argument { - name: name!("if"), - value: Node::new(executable::Value::Boolean(false)), - })], - })]); - let non_included_typename = Selection::from_field( - Field::new(FieldData { - schema: schema.clone(), - field_position: field_composite_type_position - .introspection_typename_field(), - alias: None, - arguments: Arc::new(vec![]), - directives: Arc::new(directives), - sibling_typename: None, - }), - None, - ); - let mut typename_selection = SelectionMap::new(); - typename_selection.insert(non_included_typename); - - normalized_selection.selections = Arc::new(typename_selection); - selection.selection_set = Some(normalized_selection); - } else { - selection.selection_set = Some(normalized_selection); - } - Ok(Some(SelectionOrSet::Selection(Selection::from(selection)))) - } else { - // JS PORT NOTE: In JS implementation field selection stores field definition information, - // in RS version we only store the field position reference so we don't need to update the - // underlying elements - Ok(Some(SelectionOrSet::Selection(Selection::from( - self.with_updated_element(field_element), - )))) - } - } - pub(crate) fn has_defer(&self) -> bool { self.field.has_defer() || self.selection_set.as_ref().is_some_and(|s| s.has_defer()) } @@ -3746,18 +3542,18 @@ impl<'a> FieldSelectionValue<'a> { let mut selection_sets = vec![]; for other in others { let other_field = &other.field; - if other_field.data().schema != self_field.data().schema { + if other_field.schema != self_field.schema { return Err(Internal { message: "Cannot merge field selections from different schemas".to_owned(), } .into()); } - if other_field.data().field_position != self_field.data().field_position { + if other_field.field_position != self_field.field_position { return Err(Internal { message: format!( "Cannot merge field selection for field \"{}\" into a 
field selection for field \"{}\"", - other_field.data().field_position, - self_field.data().field_position, + other_field.field_position, + self_field.field_position, ), }.into()); } @@ -3766,7 +3562,7 @@ impl<'a> FieldSelectionValue<'a> { return Err(Internal { message: format!( "Field \"{}\" has composite type but not a selection set", - other_field.data().field_position, + other_field.field_position, ), } .into()); @@ -3776,7 +3572,7 @@ impl<'a> FieldSelectionValue<'a> { return Err(Internal { message: format!( "Field \"{}\" has non-composite type but also has a selection set", - other_field.data().field_position, + other_field.field_position, ), } .into()); @@ -3796,12 +3592,12 @@ impl Field { } pub(crate) fn parent_type_position(&self) -> CompositeTypeDefinitionPosition { - self.data().field_position.parent() + self.field_position.parent() } pub(crate) fn types_can_be_merged(&self, other: &Self) -> Result { - let self_definition = self.data().field_position.get(self.schema().schema())?; - let other_definition = other.data().field_position.get(self.schema().schema())?; + let self_definition = self.field_position.get(self.schema().schema())?; + let other_definition = other.field_position.get(self.schema().schema())?; types_can_be_merged( &self_definition.ty, &other_definition.ty, @@ -3820,7 +3616,7 @@ impl<'a> FragmentSpreadSelectionValue<'a> { let self_fragment_spread = &self.get().spread; for other in others { let other_fragment_spread = &other.spread; - if other_fragment_spread.data().schema != self_fragment_spread.data().schema { + if other_fragment_spread.schema != self_fragment_spread.schema { return Err(Internal { message: "Cannot merge fragment spread from different schemas".to_owned(), } @@ -3839,13 +3635,12 @@ impl<'a> FragmentSpreadSelectionValue<'a> { impl InlineFragmentSelection { pub(crate) fn new(inline_fragment: InlineFragment, selection_set: SelectionSet) -> Self { debug_assert_eq!( - inline_fragment.data().casted_type(), + 
inline_fragment.casted_type(), selection_set.type_position, "Inline fragment type condition and its selection set should point to the same type position", ); debug_assert_eq!( - inline_fragment.data().schema, - selection_set.schema, + inline_fragment.schema, selection_set.schema, "Inline fragment and its selection set should point to the same schema", ); Self { @@ -3902,15 +3697,19 @@ impl InlineFragmentSelection { parent_type_position: CompositeTypeDefinitionPosition, fragment_spread_selection: &Arc, ) -> Result { - let fragment_spread_data = fragment_spread_selection.spread.data(); - // Note: We assume that fragment_spread_data.type_condition_position is the same as + // Note: We assume that fragment_spread_selection.spread.type_condition_position is the same as // fragment_spread_selection.selection_set.type_position. Ok(InlineFragmentSelection::new( InlineFragment::new(InlineFragmentData { - schema: fragment_spread_data.schema.clone(), + schema: fragment_spread_selection.spread.schema.clone(), parent_type_position, - type_condition_position: Some(fragment_spread_data.type_condition_position.clone()), - directives: fragment_spread_data.directives.clone(), + type_condition_position: Some( + fragment_spread_selection + .spread + .type_condition_position + .clone(), + ), + directives: fragment_spread_selection.spread.directives.clone(), selection_id: SelectionId::new(), }), fragment_spread_selection @@ -3936,288 +3735,15 @@ impl InlineFragmentSelection { InlineFragmentSelection::new(InlineFragment::new(inline_fragment_data), selection_set) } - fn normalize( - &self, - parent_type: &CompositeTypeDefinitionPosition, - named_fragments: &NamedFragments, - schema: &ValidFederationSchema, - option: NormalizeSelectionOption, - ) -> Result, FederationError> { - let this_condition = self.inline_fragment.data().type_condition_position.clone(); - // This method assumes by contract that `parent_type` runtimes intersects `self.inline_fragment.data().parent_type_position`'s, - 
// but `parent_type` runtimes may be a subset. So first check if the selection should not be discarded on that account (that - // is, we should not keep the selection if its condition runtimes don't intersect at all with those of - // `parent_type` as that would ultimately make an invalid selection set). - if let Some(ref type_condition) = this_condition { - if (self.inline_fragment.data().schema != *schema - || self.inline_fragment.data().parent_type_position != *parent_type) - && !runtime_types_intersect(type_condition, parent_type, schema) - { - return Ok(None); - } - } - - // We know the condition is "valid", but it may not be useful. That said, if the condition has directives, - // we preserve the fragment no matter what. - if self.inline_fragment.data().directives.is_empty() { - // There is a number of cases where a fragment is not useful: - // 1. if there is no type condition (remember it also has no directives). - // 2. if it's the same type as the current type: it's not restricting types further. - // 3. if the current type is an object more generally: because in that case the condition - // cannot be restricting things further (it's typically a less precise interface/union). - let useless_fragment = match this_condition { - None => true, - Some(ref c) => self.inline_fragment.data().schema == *schema && c == parent_type, - }; - if useless_fragment || parent_type.is_object_type() { - // Try to skip this fragment and normalize self.selection_set with `parent_type`, - // instead of its original type. - let normalized_selection_set = - self.selection_set - .normalize(parent_type, named_fragments, schema, option)?; - return if normalized_selection_set.is_empty() { - Ok(None) - } else { - // We need to rebase since the parent type for the selection set could be - // changed. - // Note: Rebasing after normalization, since rebasing before that can error out. - // Or, `normalize` could `rebase` at the same time. 
- let normalized_selection_set = if useless_fragment { - normalized_selection_set.clone() - } else { - normalized_selection_set.rebase_on( - parent_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )? - }; - Ok(Some(SelectionOrSet::SelectionSet(normalized_selection_set))) - }; - } - } - - // We preserve the current fragment, so we only recurse within the sub-selection if we're asked to be recursive. - // (note that even if we're not recursive, we may still have some "lifting" to do) - // Note: This normalized_selection_set is not rebased here yet. It will be rebased later as necessary. - let normalized_selection_set = if NormalizeSelectionOption::NormalizeRecursively == option { - let normalized = self.selection_set.normalize( - &self.selection_set.type_position, - named_fragments, - &self.selection_set.schema, - option, - )?; - // It could be that nothing was satisfiable. - if normalized.is_empty() { - if self.inline_fragment.data().directives.is_empty() { - return Ok(None); - } else if let Some(rebased_fragment) = self.inline_fragment.rebase_on( - parent_type, - schema, - RebaseErrorHandlingOption::ThrowError, - )? { - // We should be able to rebase, or there is a bug, so error if that is the case. - // If we rebased successfully then we add "non-included" __typename field selection - // just to keep the query valid. 
- let directives = - executable::DirectiveList(vec![Node::new(executable::Directive { - name: name!("include"), - arguments: vec![Node::new(executable::Argument { - name: name!("if"), - value: Node::new(executable::Value::Boolean(false)), - })], - })]); - let parent_typename_field = if let Some(condition) = this_condition { - condition.introspection_typename_field() - } else { - parent_type.introspection_typename_field() - }; - let typename_field_selection = Selection::from_field( - Field::new(FieldData { - schema: schema.clone(), - field_position: parent_typename_field, - alias: None, - arguments: Arc::new(vec![]), - directives: Arc::new(directives), - sibling_typename: None, - }), - None, - ); - - // Return `... [on ] { __typename @include(if: false) }` - let rebased_casted_type = rebased_fragment.data().casted_type(); - return Ok(Some(SelectionOrSet::Selection( - InlineFragmentSelection::new( - rebased_fragment, - SelectionSet::from_selection( - rebased_casted_type, - typename_field_selection, - ), - ) - .into(), - ))); - } - } - normalized - } else { - self.selection_set.clone() - }; - - // Second, we check if some of the sub-selection fragments can be "lifted" outside of this fragment. This can happen if: - // 1. the current fragment is an abstract type, - // 2. the sub-fragment is an object type, - // 3. the sub-fragment type is a valid runtime of the current type. 
- if self.inline_fragment.data().directives.is_empty() - && this_condition.is_some_and(|c| c.is_abstract_type()) - { - let mut liftable_selections = SelectionMap::new(); - for (_, selection) in normalized_selection_set.selections.iter() { - match selection { - Selection::FragmentSpread(spread_selection) => { - let type_condition = spread_selection - .spread - .data() - .type_condition_position - .clone(); - if type_condition.is_object_type() - && runtime_types_intersect(parent_type, &type_condition, schema) - { - liftable_selections - .insert(Selection::FragmentSpread(spread_selection.clone())); - } - } - Selection::InlineFragment(inline_fragment_selection) => { - if let Some(type_condition) = inline_fragment_selection - .inline_fragment - .data() - .type_condition_position - .clone() - { - if type_condition.is_object_type() - && runtime_types_intersect(parent_type, &type_condition, schema) - { - liftable_selections.insert(Selection::InlineFragment( - inline_fragment_selection.clone(), - )); - } - }; - } - _ => continue, - } - } - - // If we can lift all selections, then that just mean we can get rid of the current fragment altogether - if liftable_selections.len() == normalized_selection_set.selections.len() { - // Rebasing is necessary since this normalized sub-selection set changed its parent. - let rebased_selection_set = normalized_selection_set.rebase_on( - parent_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )?; - return Ok(Some(SelectionOrSet::SelectionSet(rebased_selection_set))); - } - - // Otherwise, if there are "liftable" selections, we must return a set comprised of those lifted selection, - // and the current fragment _without_ those lifted selections. - if liftable_selections.len() > 0 { - // Converting `... [on T] { }` into - // `{ ... [on T] { } }`. 
- // PORT_NOTE: It appears that this lifting could be repeatable (meaning lifted - // selection could be broken down further and lifted again), but normalize is not - // applied recursively. This could be worth investigating. - let Some(rebased_inline_fragment) = self.inline_fragment.rebase_on( - parent_type, - schema, - RebaseErrorHandlingOption::ThrowError, - )? - else { - return Err(FederationError::internal( - "Rebase should've thrown an error", - )); - }; - let mut mutable_selections = self.selection_set.selections.clone(); - let final_fragment_selections = Arc::make_mut(&mut mutable_selections); - final_fragment_selections.retain(|k, _| !liftable_selections.contains_key(k)); - let rebased_casted_type = rebased_inline_fragment.data().casted_type(); - let final_inline_fragment: Selection = InlineFragmentSelection::new( - rebased_inline_fragment, - SelectionSet { - schema: schema.clone(), - type_position: rebased_casted_type, - selections: Arc::new(final_fragment_selections.clone()), - }, - ) - .into(); - - // Since liftable_selections are changing their parent, we need to rebase them. - liftable_selections = liftable_selections - .into_iter() - .map(|(_key, sel)| { - sel.rebase_on( - parent_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )? 
- .ok_or_else(|| { - FederationError::internal("Unable to rebase selection updates") - }) - }) - .collect::>()?; - - let mut final_selection_map = SelectionMap::new(); - final_selection_map.insert(final_inline_fragment); - final_selection_map.extend(liftable_selections); - let final_selections = SelectionSet { - schema: schema.clone(), - type_position: parent_type.clone(), - selections: final_selection_map.into(), - }; - return Ok(Some(SelectionOrSet::SelectionSet(final_selections))); - } - } - - if self.inline_fragment.data().schema == *schema - && self.inline_fragment.data().parent_type_position == *parent_type - && self.selection_set == normalized_selection_set - { - // normalization did not change the fragment - Ok(Some(SelectionOrSet::Selection(Selection::InlineFragment( - Arc::new(self.clone()), - )))) - } else if let Some(rebased_inline_fragment) = self.inline_fragment.rebase_on( - parent_type, - schema, - RebaseErrorHandlingOption::ThrowError, - )? { - let rebased_casted_type = rebased_inline_fragment.data().casted_type(); - let rebased_selection_set = normalized_selection_set.rebase_on( - &rebased_casted_type, - named_fragments, - schema, - RebaseErrorHandlingOption::ThrowError, - )?; - Ok(Some(SelectionOrSet::Selection(Selection::InlineFragment( - Arc::new(InlineFragmentSelection::new( - rebased_inline_fragment, - rebased_selection_set, - )), - )))) - } else { - unreachable!("We should always be able to either rebase the inline fragment OR throw an exception"); - } - } - pub(crate) fn casted_type(&self) -> &CompositeTypeDefinitionPosition { - let data = self.inline_fragment.data(); - data.type_condition_position + self.inline_fragment + .type_condition_position .as_ref() - .unwrap_or(&data.parent_type_position) + .unwrap_or(&self.inline_fragment.parent_type_position) } pub(crate) fn has_defer(&self) -> bool { - self.inline_fragment.data().directives.has("defer") + self.inline_fragment.directives.has("defer") || self .selection_set .selections @@ -4231,9 
+3757,8 @@ impl InlineFragmentSelection { /// * it has no applied directives /// * has no type condition OR type condition is same as passed in `maybe_parent` fn is_unnecessary(&self, maybe_parent: &CompositeTypeDefinitionPosition) -> bool { - let inline_fragment = self.inline_fragment.data(); - let inline_fragment_type_condition = inline_fragment.type_condition_position.clone(); - inline_fragment.directives.is_empty() + let inline_fragment_type_condition = self.inline_fragment.type_condition_position.clone(); + self.inline_fragment.directives.is_empty() && (inline_fragment_type_condition.is_none() || inline_fragment_type_condition.is_some_and(|t| t == *maybe_parent)) } @@ -4268,20 +3793,20 @@ impl<'a> InlineFragmentSelectionValue<'a> { let mut selection_sets = vec![]; for other in others { let other_inline_fragment = &other.inline_fragment; - if other_inline_fragment.data().schema != self_inline_fragment.data().schema { + if other_inline_fragment.schema != self_inline_fragment.schema { return Err(Internal { message: "Cannot merge inline fragment from different schemas".to_owned(), } .into()); } - if other_inline_fragment.data().parent_type_position - != self_inline_fragment.data().parent_type_position + if other_inline_fragment.parent_type_position + != self_inline_fragment.parent_type_position { return Err(Internal { message: format!( "Cannot merge inline fragment of parent type \"{}\" into an inline fragment of parent type \"{}\"", - other_inline_fragment.data().parent_type_position, - self_inline_fragment.data().parent_type_position, + other_inline_fragment.parent_type_position, + self_inline_fragment.parent_type_position, ), }.into()); } @@ -4309,13 +3834,6 @@ pub(crate) fn merge_selection_sets( Ok(selection_sets.into_iter().next().unwrap()) } -/// Options for normalizing the selection sets -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub(crate) enum NormalizeSelectionOption { - NormalizeRecursively, - NormalizeSingleSelection, -} - /// This uses internal 
copy-on-write optimization to make `Clone` cheap. /// However a cloned `NamedFragments` still behaves like a deep copy: /// unlike in JS where we can have multiple references to a mutable map, @@ -4531,7 +4049,7 @@ pub(crate) struct RebasedFragments { pub(crate) original_fragments: NamedFragments, // JS PORT NOTE: In JS implementation values were optional /// Map key: subgraph name - rebased_fragments: Arc>, + rebased_fragments: Arc, NamedFragments>>, } impl RebasedFragments { @@ -4544,7 +4062,7 @@ impl RebasedFragments { pub(crate) fn for_subgraph( &mut self, - subgraph_name: impl Into, + subgraph_name: impl Into>, subgraph_schema: &ValidFederationSchema, ) -> &NamedFragments { Arc::make_mut(&mut self.rebased_fragments) @@ -4636,9 +4154,8 @@ impl TryFrom<&Field> for executable::Field { fn try_from(normalized_field: &Field) -> Result { let definition = normalized_field - .data() .field_position - .get(normalized_field.data().schema.schema())? + .get(normalized_field.schema.schema())? .node .to_owned(); let selection_set = executable::SelectionSet { @@ -4647,10 +4164,10 @@ impl TryFrom<&Field> for executable::Field { }; Ok(Self { definition, - alias: normalized_field.data().alias.to_owned(), - name: normalized_field.data().name().to_owned(), - arguments: normalized_field.data().arguments.deref().to_owned(), - directives: normalized_field.data().directives.deref().to_owned(), + alias: normalized_field.alias.to_owned(), + name: normalized_field.name().to_owned(), + arguments: normalized_field.arguments.deref().to_owned(), + directives: normalized_field.directives.deref().to_owned(), selection_set, }) } @@ -4673,24 +4190,18 @@ impl TryFrom<&InlineFragment> for executable::InlineFragment { fn try_from(normalized_inline_fragment: &InlineFragment) -> Result { let type_condition = normalized_inline_fragment - .data() .type_condition_position .as_ref() .map(|pos| pos.type_name().clone()); let ty = type_condition.clone().unwrap_or_else(|| { normalized_inline_fragment - 
.data() .parent_type_position .type_name() .clone() }); Ok(Self { type_condition, - directives: normalized_inline_fragment - .data() - .directives - .deref() - .to_owned(), + directives: normalized_inline_fragment.directives.deref().to_owned(), selection_set: executable::SelectionSet { ty, selections: Vec::new(), @@ -4714,12 +4225,8 @@ impl From<&FragmentSpreadSelection> for executable::FragmentSpread { fn from(val: &FragmentSpreadSelection) -> Self { let normalized_fragment_spread = &val.spread; Self { - fragment_name: normalized_fragment_spread.data().fragment_name.to_owned(), - directives: normalized_fragment_spread - .data() - .directives - .deref() - .to_owned(), + fragment_name: normalized_fragment_spread.fragment_name.to_owned(), + directives: normalized_fragment_spread.directives.deref().to_owned(), } } } @@ -4833,7 +4340,7 @@ impl Display for InlineFragment { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { // We can't use the same trick we did with `Field`'s display logic, since // selection sets are non-optional for inline fragment selections. - let data = self.data(); + let data = self; if let Some(type_name) = &data.type_condition_position { f.write_str("... on ")?; f.write_str(type_name.type_name())?; @@ -4846,7 +4353,7 @@ impl Display for InlineFragment { impl Display for FragmentSpread { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let data = self.data(); + let data = self; f.write_str("...")?; f.write_str(&data.fragment_name)?; data.directives.serialize().no_indent().fmt(f) @@ -4883,16 +4390,16 @@ pub(crate) fn normalize_operation( normalized_selection_set = normalized_selection_set.expand_all_fragments()?; // We clear up the fragments since we've expanded all. // Also note that expanding fragment usually generate unnecessary fragments/inefficient - // selections, so it basically always make sense to normalize afterwards. 
Besides, fragment - // reuse (done by `optimize`) rely on the fact that its input is normalized to work properly, + // selections, so it basically always make sense to flatten afterwards. Besides, fragment + // reuse (done by `optimize`) relies on the fact that its input is normalized to work properly, // so all the more reason to do it here. // PORT_NOTE: This was done in `Operation.expandAllFragments`, but it's moved here. - normalized_selection_set = normalized_selection_set.normalize( + normalized_selection_set = normalized_selection_set.flatten_unnecessary_fragments( &normalized_selection_set.type_position, &named_fragments, schema, - NormalizeSelectionOption::NormalizeRecursively, )?; + remove_introspection(&mut normalized_selection_set); normalized_selection_set.optimize_sibling_typenames(interface_types_with_interface_objects)?; let normalized_operation = Operation { @@ -4907,21 +4414,72 @@ pub(crate) fn normalize_operation( Ok(normalized_operation) } +// PORT_NOTE: This is a port of `withoutIntrospection` from JS version. +fn remove_introspection(selection_set: &mut SelectionSet) { + // Note that, because we only apply this to the top-level selections, we skip all + // introspection, including __typename. In general, we don't want to ignore __typename during + // query plans, but at top-level, we can let the router execution deal with it rather than + // querying some service for that. + + Arc::make_mut(&mut selection_set.selections).retain(|_, selection| { + !matches!(selection, + Selection::Field(field_selection) if + field_selection.field.field_position.is_introspection_typename_field() + ) + }); +} + +/// Check if the runtime types of two composite types intersect. +/// +/// This avoids using `possible_runtime_types` and instead implements fast paths. 
fn runtime_types_intersect( type1: &CompositeTypeDefinitionPosition, type2: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, ) -> bool { - if type1 == type2 { - return true; - } - - if let (Ok(runtimes_1), Ok(runtimes_2)) = ( - schema.possible_runtime_types(type1.clone()), - schema.possible_runtime_types(type2.clone()), - ) { - return runtimes_1.intersection(&runtimes_2).next().is_some(); + use CompositeTypeDefinitionPosition::*; + match (type1, type2) { + (Object(left), Object(right)) => left == right, + (Object(object), Union(union_)) | (Union(union_), Object(object)) => union_ + .get(schema.schema()) + .is_ok_and(|union_| union_.members.contains(&object.type_name)), + (Object(object), Interface(interface)) | (Interface(interface), Object(object)) => schema + .referencers() + .get_interface_type(&interface.type_name) + .is_ok_and(|referencers| referencers.object_types.contains(object)), + (Union(left), Union(right)) if left == right => true, + (Union(left), Union(right)) => { + match (left.get(schema.schema()), right.get(schema.schema())) { + (Ok(left), Ok(right)) => left.members.intersection(&right.members).next().is_some(), + _ => false, + } + } + (Interface(left), Interface(right)) if left == right => true, + (Interface(left), Interface(right)) => { + let r = schema.referencers(); + match ( + r.get_interface_type(&left.type_name), + r.get_interface_type(&right.type_name), + ) { + (Ok(left), Ok(right)) => left + .object_types + .intersection(&right.object_types) + .next() + .is_some(), + _ => false, + } + } + (Union(union_), Interface(interface)) | (Interface(interface), Union(union_)) => match ( + union_.get(schema.schema()), + schema + .referencers() + .get_interface_type(&interface.type_name), + ) { + (Ok(union_), Ok(referencers)) => referencers + .object_types + .iter() + .any(|implementer| union_.members.contains(&implementer.type_name)), + _ => false, + }, } - - false } diff --git a/apollo-federation/src/operation/optimize.rs 
b/apollo-federation/src/operation/optimize.rs index 94a2e21835..36cd4a7bbe 100644 --- a/apollo-federation/src/operation/optimize.rs +++ b/apollo-federation/src/operation/optimize.rs @@ -18,7 +18,7 @@ //! conflicts. //! //! ## Matching fragments with selection set -//! `try_optimize_with_fragments` tries to match all applicable fragments one by one. +//! `try_apply_fragments` tries to match all applicable fragments one by one. //! They are expanded into selection sets in order to match against given selection set. //! Set-intersection/-minus/-containment operations are used to narrow down to fewer number of //! fragments that can be used to optimize the selection set. If there is a single fragment that @@ -32,7 +32,7 @@ //! Optimization of named fragment definitions in query documents based on the usage of //! fragments in (optimized) operations. //! -//! ## `optimize` methods (putting everything together) +//! ## `reuse_fragments` methods (putting everything together) //! Recursive optimization of selection and selection sets. use std::collections::HashMap; @@ -41,10 +41,9 @@ use std::ops::Not; use std::sync::Arc; use apollo_compiler::executable; -use apollo_compiler::executable::Name; +use apollo_compiler::Name; use apollo_compiler::Node; -use super::CollectedFieldInSet; use super::Containment; use super::ContainmentOptions; use super::Field; @@ -53,7 +52,6 @@ use super::Fragment; use super::FragmentSpreadSelection; use super::InlineFragmentSelection; use super::NamedFragments; -use super::NormalizeSelectionOption; use super::Operation; use super::Selection; use super::SelectionKey; @@ -78,15 +76,17 @@ impl NamedFragments { let mut result = NamedFragments::default(); // Note: `self.fragments` has insertion order topologically sorted. 
for fragment in self.fragments.values() { - let expanded_selection_set = fragment.selection_set.expand_all_fragments()?.normalize( - &fragment.type_condition_position, - &Default::default(), - &fragment.schema, - NormalizeSelectionOption::NormalizeRecursively, - )?; + let expanded_selection_set = fragment + .selection_set + .expand_all_fragments()? + .flatten_unnecessary_fragments( + &fragment.type_condition_position, + &Default::default(), + &fragment.schema, + )?; let mut mapped_selection_set = mapper(&expanded_selection_set)?; // `mapped_selection_set` must be fragment-spread-free. - mapped_selection_set.optimize_at_root(&result)?; + mapped_selection_set.reuse_fragments(&result)?; let updated = Fragment { selection_set: mapped_selection_set, schema: fragment.schema.clone(), @@ -128,7 +128,7 @@ impl NamedFragments { // } // } // but that's not ideal because the inner-most `__typename` is already within `InnerX`. And that - // gets in the way to re-adding fragments (the `SelectionSet.optimize` method) because if we start + // gets in the way to re-adding fragments (the `SelectionSet::reuse_fragments` method) because if we start // with: // { // a { @@ -341,19 +341,17 @@ impl Fragment { } impl NamedFragments { - /// Returns a list of fragments that can be applied directly at the given type. - fn get_all_may_apply_directly_at_type( - &self, - ty: &CompositeTypeDefinitionPosition, - ) -> Result>, FederationError> { - self.iter() - .filter_map(|fragment| { - fragment - .can_apply_directly_at_type(ty) - .map(|can_apply| can_apply.then_some(fragment.clone())) - .transpose() - }) - .collect::, _>>() + /// Returns fragments that can be applied directly at the given type. 
+ fn get_all_may_apply_directly_at_type<'a>( + &'a self, + ty: &'a CompositeTypeDefinitionPosition, + ) -> impl Iterator, FederationError>> + 'a { + self.iter().filter_map(|fragment| { + fragment + .can_apply_directly_at_type(ty) + .map(|can_apply| can_apply.then_some(fragment)) + .transpose() + }) } } @@ -369,31 +367,27 @@ struct FieldsConflictValidator { } impl FieldsConflictValidator { - // `selection_set` must be fragment-spread-free. + /// Build a field merging validator for a selection set. + /// + /// # Preconditions + /// The selection set must not contain named fragment spreads. fn from_selection_set(selection_set: &SelectionSet) -> Self { - Self::for_level(&selection_set.fields_in_set()) + Self::for_level(&[selection_set]) } - fn for_level(level: &[CollectedFieldInSet]) -> Self { + fn for_level<'a>(level: &[&'a SelectionSet]) -> Self { // Group `level`'s fields by the response-name/field - let mut at_level: HashMap>>> = - HashMap::new(); - for collected_field in level { - let response_name = collected_field.field().field.data().response_name(); - let at_response_name = at_level.entry(response_name).or_default(); - if let Some(ref field_selection_set) = collected_field.field().selection_set { - at_response_name - .entry(collected_field.field().field.clone()) - .or_default() - .get_or_insert_with(Default::default) - .extend(field_selection_set.fields_in_set()); - } else { - // Note that whether a `FieldSelection` has a sub-selection set or not is entirely - // determined by whether the field type is a composite type or not, so even if - // we've seen a previous version of `field` before, we know it's guaranteed to have - // no selection set here, either. So the `set` below may overwrite a previous - // entry, but it would be a `None` so no harm done. 
- at_response_name.insert(collected_field.field().field.clone(), None); + let mut at_level: HashMap>> = HashMap::new(); + for selection_set in level { + for field_selection in selection_set.field_selections() { + let response_name = field_selection.field.response_name(); + let at_response_name = at_level.entry(response_name).or_default(); + let entry = at_response_name + .entry(field_selection.field.clone()) + .or_default(); + if let Some(ref field_selection_set) = field_selection.selection_set { + entry.push(field_selection_set); + } } } @@ -402,22 +396,26 @@ impl FieldsConflictValidator { for (response_name, fields) in at_level { let mut at_response_name: HashMap>> = HashMap::new(); - for (field, collected_fields) in fields { - let validator = collected_fields - .map(|collected_fields| Arc::new(Self::for_level(&collected_fields))); - at_response_name.insert(field, validator); + for (field, selection_sets) in fields { + if selection_sets.is_empty() { + at_response_name.insert(field, None); + } else { + let validator = Arc::new(Self::for_level(&selection_sets)); + at_response_name.insert(field, Some(validator)); + } } by_response_name.insert(response_name, at_response_name); } Self { by_response_name } } - fn for_field(&self, field: &Field) -> Vec> { - let Some(by_response_name) = self.by_response_name.get(&field.data().response_name()) - else { - return Vec::new(); - }; - by_response_name.values().flatten().cloned().collect() + fn for_field<'v>(&'v self, field: &Field) -> impl Iterator> + 'v { + self.by_response_name + .get(&field.response_name()) + .into_iter() + .flat_map(|by_response_name| by_response_name.values()) + .flatten() + .cloned() } fn has_same_response_shape( @@ -469,26 +467,26 @@ impl FieldsConflictValidator { if p1 == p2 || !p1.is_object_type() || !p2.is_object_type() { // Additional checks of `FieldsInSetCanMerge` when same parent type or one // isn't object - if self_field.data().name() != other_field.data().name() - || 
self_field.data().arguments != other_field.data().arguments + if self_field.name() != other_field.name() + || self_field.arguments != other_field.arguments { return Ok(false); } - if let Some(self_validator) = self_validator { - if let Some(other_validator) = other_validator { - if !self_validator.do_merge_with(other_validator)? { - return Ok(false); - } + if let (Some(self_validator), Some(other_validator)) = + (self_validator, other_validator) + { + if !self_validator.do_merge_with(other_validator)? { + return Ok(false); } } } else { // Otherwise, the sub-selection must pass // [SameResponseShape](https://spec.graphql.org/draft/#SameResponseShape()). - if let Some(self_validator) = self_validator { - if let Some(other_validator) = other_validator { - if !self_validator.has_same_response_shape(other_validator)? { - return Ok(false); - } + if let (Some(self_validator), Some(other_validator)) = + (self_validator, other_validator) + { + if !self_validator.has_same_response_shape(other_validator)? { + return Ok(false); } } } @@ -502,7 +500,7 @@ impl FieldsConflictValidator { &self, mut iter: impl Iterator, ) -> Result { - iter.try_fold(true, |acc, v| Ok(acc && self.do_merge_with(v)?)) + iter.try_fold(true, |acc, v| Ok(acc && v.do_merge_with(self)?)) } } @@ -602,6 +600,29 @@ struct FragmentRestrictionAtType { validator: Option>, } +#[derive(Default)] +struct FragmentRestrictionAtTypeCache { + map: HashMap<(Name, CompositeTypeDefinitionPosition), Arc>, +} + +impl FragmentRestrictionAtTypeCache { + fn expanded_selection_set_at_type( + &mut self, + fragment: &Fragment, + ty: &CompositeTypeDefinitionPosition, + ) -> Result, FederationError> { + // I would like to avoid the Arc here, it seems unnecessary, but with `.entry()` + // the lifetime does not really want to work out. 
+ // (&'cache mut self) -> Result<&'cache FragmentRestrictionAtType> + match self.map.entry((fragment.name.clone(), ty.clone())) { + std::collections::hash_map::Entry::Occupied(entry) => Ok(Arc::clone(entry.get())), + std::collections::hash_map::Entry::Vacant(entry) => Ok(Arc::clone( + entry.insert(Arc::new(fragment.expanded_selection_set_at_type(ty)?)), + )), + } + } +} + impl FragmentRestrictionAtType { fn new(selections: SelectionSet, validator: Option) -> Self { Self { @@ -631,7 +652,6 @@ impl FragmentRestrictionAtType { fn is_useless(&self) -> bool { match self.selections.selections.as_slice().split_first() { None => true, - Some((first, rest)) => rest.is_empty() && first.0.is_typename_field(), } } @@ -640,18 +660,15 @@ impl FragmentRestrictionAtType { impl Fragment { /// Computes the expanded selection set of this fragment along with its validator to check /// against other fragments applied under the same selection set. - // PORT_NOTE: The JS version memoizes the result of this function. But, the current Rust port - // does not. fn expanded_selection_set_at_type( &self, ty: &CompositeTypeDefinitionPosition, ) -> Result { let expanded_selection_set = self.selection_set.expand_all_fragments()?; - let normalized_selection_set = expanded_selection_set.normalize( + let normalized_selection_set = expanded_selection_set.flatten_unnecessary_fragments( ty, /*named_fragments*/ &Default::default(), &self.schema, - NormalizeSelectionOption::NormalizeRecursively, )?; if !self.type_condition_position.is_object_type() { @@ -817,7 +834,7 @@ impl SelectionSet { // technically better to return only `F4`. However, this feels niche, and it might be // costly to verify such inclusions, so not doing it for now. fn reduce_applicable_fragments( - applicable_fragments: &mut Vec<(Node, FragmentRestrictionAtType)>, + applicable_fragments: &mut Vec<(Node, Arc)>, ) { // Note: It's not possible for two fragments to include each other. So, we don't need to // worry about inclusion cycles. 
@@ -834,17 +851,19 @@ impl SelectionSet { applicable_fragments.retain(|(fragment, _)| !included_fragments.contains(&fragment.name)); } - /// Try to optimize the selection set by re-using existing fragments. + /// Try to reuse existing fragments to optimize this selection set. /// Returns either /// - a new selection set partially optimized by re-using given `fragments`, or /// - a single fragment that covers the full selection set. // PORT_NOTE: Moved from `Selection` class in JS code to SelectionSet struct in Rust. // PORT_NOTE: `parent_type` argument seems always to be the same as `self.type_position`. - fn try_optimize_with_fragments( + // PORT_NOTE: In JS, this was called `tryOptimizeWithFragments`. + fn try_apply_fragments( &self, parent_type: &CompositeTypeDefinitionPosition, fragments: &NamedFragments, validator: &mut FieldsConflictMultiBranchValidator, + fragments_at_type: &mut FragmentRestrictionAtTypeCache, full_match_condition: FullMatchingFragmentCondition, ) -> Result { // We limit to fragments whose selection could be applied "directly" at `parent_type`, @@ -854,10 +873,7 @@ impl SelectionSet { // fragment whose type _is_ the fragment condition (at which point, this // `can_apply_directly_at_type` method will apply. Also note that this is because we have // this restriction that calling `expanded_selection_set_at_type` is ok. - let candidates = fragments.get_all_may_apply_directly_at_type(parent_type)?; - if candidates.is_empty() { - return Ok(self.clone().into()); // Not optimizable - } + let candidates = fragments.get_all_may_apply_directly_at_type(parent_type); // First, we check which of the candidates do apply inside the selection set, if any. If we // find a candidate that applies to the whole selection set, then we stop and only return @@ -865,14 +881,12 @@ impl SelectionSet { // that applies to a subset. 
let mut applicable_fragments = Vec::new(); for candidate in candidates { - let at_type = candidate.expanded_selection_set_at_type(parent_type)?; + let candidate = candidate?; + let at_type = + fragments_at_type.expanded_selection_set_at_type(candidate, parent_type)?; if at_type.is_useless() { continue; } - if !validator.check_can_reuse_fragment_and_track_it(&at_type)? { - // We cannot use it at all, so no point in adding to `applicable_fragments`. - continue; - } // As we check inclusion, we ignore the case where the fragment queries __typename // but the `self` does not. The rational is that querying `__typename` @@ -895,19 +909,25 @@ impl SelectionSet { ignore_missing_typename: true, }, ); - if matches!(res, Containment::NotContained) { - continue; // Not eligible; Skip it. - } - if matches!(res, Containment::Equal) && full_match_condition.check(&candidate) { - // Special case: Found a fragment that covers the full selection set. - return Ok(candidate.into()); - } - // Note that if a fragment applies to only a subset of the sub-selections, then we - // really only can use it if that fragment is defined _without_ directives. - if !candidate.directives.is_empty() { - continue; // Not eligible as a partial selection; Skip it. + match res { + Containment::Equal if full_match_condition.check(candidate) => { + if !validator.check_can_reuse_fragment_and_track_it(&at_type)? { + // We cannot use it at all, so no point in adding to `applicable_fragments`. + continue; + } + // Special case: Found a fragment that covers the full selection set. + return Ok(candidate.clone().into()); + } + // Note that if a fragment applies to only a subset of the sub-selections, then we + // really only can use it if that fragment is defined _without_ directives. + Containment::Equal | Containment::StrictlyContained + if candidate.directives.is_empty() => + { + applicable_fragments.push((candidate.clone(), at_type)); + } + // Not eligible; Skip it. 
+ _ => (), } - applicable_fragments.push((candidate, at_type)); } if applicable_fragments.is_empty() { @@ -922,6 +942,9 @@ impl SelectionSet { let mut not_covered_so_far = self.clone(); let mut optimized = SelectionSet::empty(self.schema.clone(), self.type_position.clone()); for (fragment, at_type) in applicable_fragments { + if !validator.check_can_reuse_fragment_and_track_it(&at_type)? { + continue; + } let not_covered = self.minus(&at_type.selections)?; not_covered_so_far = not_covered_so_far.intersection(¬_covered)?; @@ -936,13 +959,7 @@ impl SelectionSet { } optimized.add_local_selection_set(¬_covered_so_far)?; - Ok(SelectionSet::make_selection_set( - &self.schema, - parent_type, - optimized.selections.values().map(std::iter::once), - fragments, - )? - .into()) + Ok(optimized.into()) } } @@ -960,15 +977,15 @@ impl Selection { ) -> Result { match self { Selection::FragmentSpread(fragment) => { - if fragments_to_keep.contains(&fragment.spread.data().fragment_name) { + if fragments_to_keep.contains(&fragment.spread.fragment_name) { // Keep this spread Ok(self.clone().into()) } else { // Expand the fragment let expanded_sub_selections = fragment.selection_set.retain_fragments(fragments_to_keep)?; - if *parent_type == fragment.spread.data().type_condition_position - && fragment.spread.data().directives.is_empty() + if *parent_type == fragment.spread.type_condition_position + && fragment.spread.directives.is_empty() { // The fragment is of the same type as the parent, so we can just use // the expanded sub-selections directly. 
@@ -978,7 +995,7 @@ impl Selection { let inline = InlineFragmentSelection::from_selection_set( parent_type.clone(), expanded_sub_selections, - fragment.spread.data().directives.clone(), + fragment.spread.directives.clone(), ); Ok(Selection::from(inline).into()) } @@ -1086,11 +1103,11 @@ impl NamedFragments { // If we've expanded some fragments but kept others, then it's not 100% impossible that // some fragment was used multiple times in some expanded fragment(s), but that // post-expansion all of it's usages are "dead" branches that are removed by the final - // `normalize`. In that case though, we need to ensure we don't include the now-unused + // `flatten_unnecessary_fragments`. In that case though, we need to ensure we don't include the now-unused // fragment in the final list of fragments. // TODO: remark that the same reasoning could leave a single instance of a fragment // usage, so if we really really want to never have less than `minUsagesToOptimize`, we - // could do some loop of `expand then normalize` unless all fragments are provably used + // could do some loop of `expand then flatten` unless all fragments are provably used // enough. We don't bother, because leaving this is not a huge deal and it's not worth // the complexity, but it could be that we can refactor all this later to avoid this // case without additional complexity. @@ -1162,11 +1179,10 @@ impl NamedFragments { Node::make_mut(fragment).selection_set = fragment .selection_set .retain_fragments(&fragments_to_keep)? 
- .normalize( + .flatten_unnecessary_fragments( &fragment.selection_set.type_position, &fragments_to_keep, &fragment.schema, - NormalizeSelectionOption::NormalizeRecursively, )?; } @@ -1179,13 +1195,12 @@ impl NamedFragments { let reduced_selection_set = selection_set.retain_fragments(self)?; // Expanding fragments could create some "inefficiencies" that we wouldn't have if we - // hadn't re-optimized the fragments to de-optimize it later, so we do a final "normalize" + // hadn't re-optimized the fragments to de-optimize it later, so we do a final "flatten" // pass to remove those. - reduced_selection_set.normalize( + reduced_selection_set.flatten_unnecessary_fragments( &reduced_selection_set.type_position, self, &selection_set.schema, - NormalizeSelectionOption::NormalizeRecursively, ) } @@ -1200,32 +1215,36 @@ impl NamedFragments { } //============================================================================= -// `optimize` methods (putting everything together) +// `reuse_fragments` methods (putting everything together) impl Selection { - fn optimize( + fn reuse_fragments_inner( &self, fragments: &NamedFragments, validator: &mut FieldsConflictMultiBranchValidator, + fragments_at_type: &mut FragmentRestrictionAtTypeCache, ) -> Result { match self { - Selection::Field(field) => Ok(field.optimize(fragments, validator)?.into()), + Selection::Field(field) => Ok(field + .reuse_fragments_inner(fragments, validator, fragments_at_type)? + .into()), Selection::FragmentSpread(_) => Ok(self.clone()), // Do nothing - Selection::InlineFragment(inline_fragment) => { - Ok(inline_fragment.optimize(fragments, validator)?.into()) - } + Selection::InlineFragment(inline_fragment) => Ok(inline_fragment + .reuse_fragments_inner(fragments, validator, fragments_at_type)? 
+ .into()), } } } impl FieldSelection { - fn optimize( + fn reuse_fragments_inner( &self, fragments: &NamedFragments, validator: &mut FieldsConflictMultiBranchValidator, + fragments_at_type: &mut FragmentRestrictionAtTypeCache, ) -> Result { let Some(base_composite_type): Option = - self.field.data().output_base_type()?.try_into().ok() + self.field.output_base_type()?.try_into().ok() else { return Ok(self.clone()); }; @@ -1236,10 +1255,11 @@ impl FieldSelection { let mut field_validator = validator.for_field(&self.field); // First, see if we can reuse fragments for the selection of this field. - let opt = selection_set.try_optimize_with_fragments( + let opt = selection_set.try_apply_fragments( &base_composite_type, fragments, &mut field_validator, + fragments_at_type, FullMatchingFragmentCondition::ForFieldSelection, )?; @@ -1257,45 +1277,48 @@ impl FieldSelection { optimized = selection_set; } } - optimized = optimized.optimize(fragments, &mut field_validator)?; + optimized = + optimized.reuse_fragments_inner(fragments, &mut field_validator, fragments_at_type)?; Ok(self.with_updated_selection_set(Some(optimized))) } } -/// Return type for `InlineFragmentSelection::optimize`. +/// Return type for `InlineFragmentSelection::reuse_fragments`. #[derive(derive_more::From)] -enum InlineOrFragmentSelection { +enum FragmentSelection { // Note: Enum variants are named to match those of `Selection`. 
InlineFragment(InlineFragmentSelection), FragmentSpread(FragmentSpreadSelection), } -impl From for Selection { - fn from(value: InlineOrFragmentSelection) -> Self { +impl From for Selection { + fn from(value: FragmentSelection) -> Self { match value { - InlineOrFragmentSelection::InlineFragment(inline_fragment) => inline_fragment.into(), - InlineOrFragmentSelection::FragmentSpread(fragment_spread) => fragment_spread.into(), + FragmentSelection::InlineFragment(inline_fragment) => inline_fragment.into(), + FragmentSelection::FragmentSpread(fragment_spread) => fragment_spread.into(), } } } impl InlineFragmentSelection { - fn optimize( + fn reuse_fragments_inner( &self, fragments: &NamedFragments, validator: &mut FieldsConflictMultiBranchValidator, - ) -> Result { + fragments_at_type: &mut FragmentRestrictionAtTypeCache, + ) -> Result { let mut optimized = self.selection_set.clone(); - let type_condition_position = &self.inline_fragment.data().type_condition_position; + let type_condition_position = &self.inline_fragment.type_condition_position; if let Some(type_condition_position) = type_condition_position { - let opt = self.selection_set.try_optimize_with_fragments( + let opt = self.selection_set.try_apply_fragments( type_condition_position, fragments, validator, + fragments_at_type, FullMatchingFragmentCondition::ForInlineFragmentSelection { type_condition_position, - directives: &self.inline_fragment.data().directives, + directives: &self.inline_fragment.directives, }, )?; @@ -1316,7 +1339,6 @@ impl InlineFragmentSelection { // is handled differently in Rust version (see `FragmentSpreadData`). 
let directives: executable::DirectiveList = self .inline_fragment - .data() .directives .iter() .filter(|d1| !fragment.directives.iter().any(|d2| *d1 == d2)) @@ -1347,36 +1369,48 @@ impl InlineFragmentSelection { } // Then, recurse inside the field sub-selection (note that if we matched some fragments - // above, this recursion will "ignore" those as `FragmentSpreadSelection`'s `optimize()` is - // a no-op). - optimized = optimized.optimize(fragments, validator)?; + // above, this recursion will "ignore" those as `FragmentSpreadSelection`'s + // `reuse_fragments()` is a no-op). + optimized = optimized.reuse_fragments_inner(fragments, validator, fragments_at_type)?; Ok(InlineFragmentSelection::new(self.inline_fragment.clone(), optimized).into()) } } impl SelectionSet { - /// Recursively call `optimize` on each selection in the selection set. - fn optimize( + fn reuse_fragments_inner( &self, fragments: &NamedFragments, validator: &mut FieldsConflictMultiBranchValidator, + fragments_at_type: &mut FragmentRestrictionAtTypeCache, ) -> Result { self.lazy_map(fragments, |selection| { - Ok(vec![selection.optimize(fragments, validator)?].into()) + Ok(selection + .reuse_fragments_inner(fragments, validator, fragments_at_type)? + .into()) }) } - // Specialized version of `optimize` for top-level sub-selections under Operation - // or Fragment. - // - `self` must be fragment-spread-free. - pub(crate) fn optimize_at_root( - &mut self, - fragments: &NamedFragments, - ) -> Result<(), FederationError> { + fn contains_fragment_spread(&self) -> bool { + self.iter().any(|selection| { + matches!(selection, Selection::FragmentSpread(_)) + || selection + .try_selection_set() + .map(|subselection| subselection.contains_fragment_spread()) + .unwrap_or(false) + }) + } + + /// ## Errors + /// Returns an error if the selection set contains a named fragment spread. 
+ fn reuse_fragments(&mut self, fragments: &NamedFragments) -> Result<(), FederationError> { if fragments.is_empty() { return Ok(()); } + if self.contains_fragment_spread() { + return Err(FederationError::internal("reuse_fragments() must only be used on selection sets that do not contain named fragment spreads")); + } + // Calling optimize() will not match a fragment that would have expanded at // top-level. That is, say we have the selection set `{ x y }` for a top-level `Query`, and // we have a fragment @@ -1401,25 +1435,22 @@ impl SelectionSet { let mut validator = FieldsConflictMultiBranchValidator::from_initial_validator( FieldsConflictValidator::from_selection_set(self), ); - let optimized = wrapped.optimize(fragments, &mut validator)?; + let optimized = wrapped.reuse_fragments_inner( + fragments, + &mut validator, + &mut FragmentRestrictionAtTypeCache::default(), + )?; // Now, it's possible we matched a full fragment, in which case `optimized` will be just // the named fragment, and in that case we return a singleton selection with just that. // Otherwise, it's our wrapping inline fragment with the sub-selections optimized, and we // just return that subselection. - match optimized { - InlineOrFragmentSelection::FragmentSpread(_) => { - let self_selections = Arc::make_mut(&mut self.selections); - self_selections.clear(); - self_selections.insert(optimized.into()); + *self = match optimized { + FragmentSelection::FragmentSpread(spread) => { + SelectionSet::from_selection(self.type_position.clone(), spread.into()) } - - InlineOrFragmentSelection::InlineFragment(inline_fragment) => { - // Note: `inline_fragment.selection_set` can't be moved (since it's inside Arc). - // So, it's cloned. 
- *self = inline_fragment.selection_set.clone(); - } - } + FragmentSelection::InlineFragment(inline_fragment) => inline_fragment.selection_set, + }; Ok(()) } } @@ -1431,7 +1462,7 @@ impl Operation { // `fragments` - rebased fragment definitions for the operation's subgraph // - `self.selection_set` must be fragment-spread-free. - fn optimize_internal( + fn reuse_fragments_inner( &mut self, fragments: &NamedFragments, min_usages_to_optimize: u32, @@ -1442,7 +1473,7 @@ impl Operation { // Optimize the operation's selection set by re-using existing fragments. let before_optimization = self.selection_set.clone(); - self.selection_set.optimize_at_root(fragments)?; + self.selection_set.reuse_fragments(fragments)?; if before_optimization == self.selection_set { return Ok(()); } @@ -1457,29 +1488,41 @@ impl Operation { Ok(()) } + /// Optimize the parsed size of the operation by applying fragment spreads. Fragment spreads + /// are reused from the original user-provided fragments. + /// /// `fragments` - rebased fragment definitions for the operation's subgraph - pub(crate) fn optimize(&mut self, fragments: &NamedFragments) -> Result<(), FederationError> { - self.optimize_internal(fragments, Self::DEFAULT_MIN_USAGES_TO_OPTIMIZE) + /// + // PORT_NOTE: In JS, this function was called "optimize". + pub(crate) fn reuse_fragments( + &mut self, + fragments: &NamedFragments, + ) -> Result<(), FederationError> { + self.reuse_fragments_inner(fragments, Self::DEFAULT_MIN_USAGES_TO_OPTIMIZE) } /// Used by legacy roundtrip tests. /// - This lowers `min_usages_to_optimize` to `1` in order to make it easier to write unit tests. 
- fn optimize_for_roundtrip_test( + #[cfg(test)] + fn reuse_fragments_for_roundtrip_test( &mut self, fragments: &NamedFragments, ) -> Result<(), FederationError> { - self.optimize_internal(fragments, /*min_usages_to_optimize*/ 1) + self.reuse_fragments_inner(fragments, /*min_usages_to_optimize*/ 1) } // PORT_NOTE: This mirrors the JS version's `Operation.expandAllFragments`. But this method is // mainly for unit tests. The actual port of `expandAllFragments` is in `normalize_operation`. + #[cfg(test)] fn expand_all_fragments_and_normalize(&self) -> Result { - let selection_set = self.selection_set.expand_all_fragments()?.normalize( - &self.selection_set.type_position, - &self.named_fragments, - &self.schema, - NormalizeSelectionOption::NormalizeRecursively, - )?; + let selection_set = self + .selection_set + .expand_all_fragments()? + .flatten_unnecessary_fragments( + &self.selection_set.type_position, + &self.named_fragments, + &self.schema, + )?; Ok(Self { named_fragments: Default::default(), selection_set, @@ -1507,7 +1550,7 @@ mod tests { macro_rules! 
assert_optimized { ($operation: expr, $named_fragments: expr, @$expected: literal) => {{ let mut optimized = $operation.clone(); - optimized.optimize(&$named_fragments).unwrap(); + optimized.reuse_fragments(&$named_fragments).unwrap(); validate_operation(&$operation.schema, &optimized.to_string()); insta::assert_snapshot!(optimized, @$expected) }}; @@ -1885,14 +1928,14 @@ mod tests { insta::assert_snapshot!(without_fragments, @$expanded); let mut optimized = without_fragments; - optimized.optimize(&operation.named_fragments).unwrap(); + optimized.reuse_fragments(&operation.named_fragments).unwrap(); validate_operation(&operation.schema, &optimized.to_string()); assert_eq!(optimized.to_string(), operation.to_string()); }}; } /// Tests ported from JS codebase rely on special behavior of - /// `Operation::optimize_for_roundtrip_test` that is specific for testing, since it makes it + /// `Operation::reuse_fragments_for_roundtrip_test` that is specific for testing, since it makes it /// easier to write tests. macro_rules! test_fragments_roundtrip_legacy { ($schema_doc: expr, $query: expr, @$expanded: literal) => {{ @@ -1902,7 +1945,7 @@ mod tests { insta::assert_snapshot!(without_fragments, @$expanded); let mut optimized = without_fragments; - optimized.optimize_for_roundtrip_test(&operation.named_fragments).unwrap(); + optimized.reuse_fragments_for_roundtrip_test(&operation.named_fragments).unwrap(); validate_operation(&operation.schema, &optimized.to_string()); assert_eq!(optimized.to_string(), operation.to_string()); }}; @@ -2673,7 +2716,7 @@ mod tests { // } // } // and so `Inner` will not be expanded (it's used twice). Except that - // the `normalize` code is apply then and will _remove_ both instances + // the `flatten_unnecessary_fragments` code is apply then and will _remove_ both instances // of `.... Inner`. Which is ok, but we must make sure the fragment // itself is removed since it is not used now, which this test ensures. 
assert_optimized!(expanded, operation.named_fragments, @r###" diff --git a/apollo-federation/src/operation/rebase.rs b/apollo-federation/src/operation/rebase.rs index 42cbaf9bae..1c9da5dc0d 100644 --- a/apollo-federation/src/operation/rebase.rs +++ b/apollo-federation/src/operation/rebase.rs @@ -1,5 +1,9 @@ -use std::sync::Arc; +//! Rebasing takes a selection or a selection set and updates its parent type. +//! +//! Often, the change is between equivalent types from different schemas, but selections can also +//! be rebased from one type to another in the same schema. +use apollo_compiler::Name; use itertools::Itertools; use super::runtime_types_intersect; @@ -13,7 +17,6 @@ use super::InlineFragment; use super::InlineFragmentData; use super::InlineFragmentSelection; use super::NamedFragments; -use super::NormalizeSelectionOption; use super::OperationElement; use super::Selection; use super::SelectionId; @@ -43,33 +46,56 @@ fn print_possible_runtimes( } /// Options for handling rebasing errors. -#[derive(Clone, Copy)] -pub enum RebaseErrorHandlingOption { - IgnoreError, - ThrowError, +#[derive(Clone, Copy, Default)] +enum OnNonRebaseableSelection { + /// Drop the selection that can't be rebased and continue. + Drop, + /// Propagate the rebasing error. 
+ #[default] + Error, } impl Selection { - pub fn rebase_on( + fn rebase_inner( &self, parent_type: &CompositeTypeDefinitionPosition, named_fragments: &NamedFragments, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { + on_non_rebaseable_selection: OnNonRebaseableSelection, + ) -> Result { match self { - Selection::Field(field) => { - field.rebase_on(parent_type, named_fragments, schema, error_handling) - } - Selection::FragmentSpread(spread) => { - spread.rebase_on(parent_type, named_fragments, schema, error_handling) - } - Selection::InlineFragment(inline) => { - inline.rebase_on(parent_type, named_fragments, schema, error_handling) - } + Selection::Field(field) => field + .rebase_inner( + parent_type, + named_fragments, + schema, + on_non_rebaseable_selection, + ) + .map(|field| field.into()), + Selection::FragmentSpread(spread) => spread.rebase_inner( + parent_type, + named_fragments, + schema, + on_non_rebaseable_selection, + ), + Selection::InlineFragment(inline) => inline.rebase_inner( + parent_type, + named_fragments, + schema, + on_non_rebaseable_selection, + ), } } + pub(crate) fn rebase_on( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + self.rebase_inner(parent_type, named_fragments, schema, Default::default()) + } + fn can_add_to( &self, parent_type: &CompositeTypeDefinitionPosition, @@ -86,20 +112,70 @@ impl Selection { } } +#[derive(Debug, Clone, thiserror::Error)] +pub(crate) enum RebaseError { + #[error("Cannot add selection of field `{field_position}` to selection set of parent type `{parent_type}`")] + CannotRebase { + field_position: crate::schema::position::FieldDefinitionPosition, + parent_type: CompositeTypeDefinitionPosition, + }, + #[error("Cannot add selection of field `{field_position}` to selection set of parent type `{parent_type}` that is potentially an interface object type at 
runtime")] + InterfaceObjectTypename { + field_position: crate::schema::position::FieldDefinitionPosition, + parent_type: CompositeTypeDefinitionPosition, + }, + #[error("Cannot rebase composite field selection because its subselection is empty")] + EmptySelectionSet, + #[error("Cannot rebase {fragment_name} fragment if it isn't part of the provided fragments")] + MissingFragment { fragment_name: Name }, + #[error( + "Cannot add fragment of condition `{}` (runtimes: [{}]) to parent type `{}` (runtimes: [{}])", + type_condition.as_ref().map_or_else(Default::default, |t| t.to_string()), + type_condition.as_ref().map_or_else( + || "undefined".to_string(), + |t| print_possible_runtimes(t, schema), + ), + parent_type, + print_possible_runtimes(parent_type, schema) + )] + NonIntersectingCondition { + type_condition: Option, + parent_type: CompositeTypeDefinitionPosition, + schema: ValidFederationSchema, + }, +} + +impl FederationError { + fn is_rebase_error(&self) -> bool { + matches!( + self, + crate::error::FederationError::SingleFederationError { + inner: crate::error::SingleFederationError::InternalRebaseError(_), + .. 
+ } + ) + } +} + +impl From for FederationError { + fn from(value: RebaseError) -> Self { + crate::error::SingleFederationError::from(value).into() + } +} + impl Field { - pub fn rebase_on( + pub(crate) fn rebase_on( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { - let field_parent = self.data().field_position.parent(); - if self.data().schema == *schema && field_parent == *parent_type { + ) -> Result { + let field_parent = self.field_position.parent(); + if self.schema == *schema && field_parent == *parent_type { // pointing to the same parent -> return self - return Ok(Some(self.clone())); + return Ok(self.clone()); } - if self.data().name() == &TYPENAME_FIELD { + if self.name() == &TYPENAME_FIELD { // TODO interface object info should be precomputed in QP constructor return if schema .possible_runtime_types(parent_type.clone())? @@ -107,39 +183,33 @@ impl Field { .map(|t| schema.is_interface_object_type(t.clone().into())) .process_results(|mut iter| iter.any(|b| b))? 
{ - if let RebaseErrorHandlingOption::ThrowError = error_handling { - Err(FederationError::internal( - format!("Cannot add selection of field \"{}\" to selection set of parent type \"{}\" that is potentially an interface object type at runtime", - self.data().field_position, - parent_type - ))) - } else { - Ok(None) + Err(RebaseError::InterfaceObjectTypename { + field_position: self.field_position.clone(), + parent_type: parent_type.clone(), } + .into()) } else { let mut updated_field_data = self.data().clone(); updated_field_data.schema = schema.clone(); updated_field_data.field_position = parent_type.introspection_typename_field(); - Ok(Some(Field::new(updated_field_data))) + Ok(Field::new(updated_field_data)) }; } - let field_from_parent = parent_type.field(self.data().name().clone())?; + let field_from_parent = parent_type.field(self.name().clone())?; return if field_from_parent.try_get(schema.schema()).is_some() && self.can_rebase_on(parent_type)? { let mut updated_field_data = self.data().clone(); updated_field_data.schema = schema.clone(); updated_field_data.field_position = field_from_parent; - Ok(Some(Field::new(updated_field_data))) - } else if let RebaseErrorHandlingOption::IgnoreError = error_handling { - Ok(None) + Ok(Field::new(updated_field_data)) } else { - Err(FederationError::internal(format!( - "Cannot add selection of field \"{}\" to selection set of parent type \"{}\"", - self.data().field_position, - parent_type - ))) + Err(RebaseError::CannotRebase { + field_position: self.field_position.clone(), + parent_type: parent_type.clone(), + } + .into()) }; } @@ -157,14 +227,13 @@ impl Field { &self, parent_type: &CompositeTypeDefinitionPosition, ) -> Result { - let field_parent_type = self.data().field_position.parent(); + let field_parent_type = self.field_position.parent(); // case 1 if field_parent_type.type_name() == parent_type.type_name() { return Ok(true); } // case 2 let is_interface_object_type = self - .data() .schema 
.is_interface_object_type(field_parent_type.clone().into())?; Ok(field_parent_type.is_interface_type() || is_interface_object_type) @@ -175,7 +244,7 @@ impl Field { parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, ) -> Result, FederationError> { - let data = self.data(); + let data = self; if data.field_position.parent() == *parent_type && data.schema == *schema { let base_ty_name = data .field_position @@ -213,37 +282,28 @@ impl Field { } impl FieldSelection { - /// Returns a field selection "equivalent" to the one represented by this object, but such that its parent type - /// is the one provided as argument. - /// - /// Obviously, this operation will only succeed if this selection (both the field itself and its subselections) - /// make sense from the provided parent type. If this is not the case, this method will throw. - pub fn rebase_on( + fn rebase_inner( &self, parent_type: &CompositeTypeDefinitionPosition, named_fragments: &NamedFragments, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { - if &self.field.data().schema == schema - && &self.field.data().field_position.parent() == parent_type - { + on_non_rebaseable_selection: OnNonRebaseableSelection, + ) -> Result { + if &self.field.schema == schema && &self.field.field_position.parent() == parent_type { // we are rebasing field on the same parent within the same schema - we can just return self - return Ok(Some(Selection::from(self.clone()))); + return Ok(self.clone()); } - let Some(rebased) = self.field.rebase_on(parent_type, schema, error_handling)? 
else { - // rebasing failed but we are ignoring errors - return Ok(None); - }; - + let rebased = self.field.rebase_on(parent_type, schema)?; let Some(selection_set) = &self.selection_set else { // leaf field - return Ok(Some(Selection::from_field(rebased, None))); + return Ok(FieldSelection { + field: rebased, + selection_set: None, + }); }; let rebased_type_name = rebased - .data() .field_position .get(schema.schema())? .ty @@ -252,37 +312,50 @@ impl FieldSelection { schema.get_type(rebased_type_name.clone())?.try_into()?; let selection_set_type = &selection_set.type_position; - if self.field.data().schema == rebased.data().schema - && &rebased_base_type == selection_set_type - { + if self.field.schema == rebased.schema && &rebased_base_type == selection_set_type { // we are rebasing within the same schema and the same base type - return Ok(Some(Selection::from_field( - rebased.clone(), - self.selection_set.clone(), - ))); + return Ok(FieldSelection { + field: rebased, + selection_set: self.selection_set.clone(), + }); } - let rebased_selection_set = - selection_set.rebase_on(&rebased_base_type, named_fragments, schema, error_handling)?; + let rebased_selection_set = selection_set.rebase_inner( + &rebased_base_type, + named_fragments, + schema, + on_non_rebaseable_selection, + )?; if rebased_selection_set.selections.is_empty() { - // empty selection set - Ok(None) + Err(RebaseError::EmptySelectionSet.into()) } else { - Ok(Some(Selection::from_field( - rebased.clone(), - Some(rebased_selection_set), - ))) + Ok(FieldSelection { + field: rebased, + selection_set: Some(rebased_selection_set), + }) } } + /// Returns a field selection "equivalent" to the one represented by this object, but such that its parent type + /// is the one provided as argument. + /// + /// Obviously, this operation will only succeed if this selection (both the field itself and its subselections) + /// make sense from the provided parent type. If this is not the case, this method will throw. 
+ pub(crate) fn rebase_on( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + self.rebase_inner(parent_type, named_fragments, schema, Default::default()) + } + fn can_add_to( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, ) -> Result { - if self.field.data().schema == *schema - && self.field.data().field_position.parent() == *parent_type - { + if self.field.schema == *schema && self.field.field_position.parent() == *parent_type { return Ok(true); } @@ -309,48 +382,49 @@ impl FragmentSpread { parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, named_fragments: &NamedFragments, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { - let Some(named_fragment) = named_fragments.get(&self.data().fragment_name) else { - return if let RebaseErrorHandlingOption::ThrowError = error_handling { - Err(FederationError::internal(format!( - "Cannot rebase {} fragment if it isn't part of the provided fragments", - self.data().fragment_name - ))) - } else { - Ok(None) - }; + ) -> Result { + let Some(named_fragment) = named_fragments.get(&self.fragment_name) else { + return Err(RebaseError::MissingFragment { + fragment_name: self.fragment_name.clone(), + } + .into()); }; debug_assert_eq!( - *schema, - self.data().schema, + *schema, self.schema, "Fragment spread should only be rebased within the same subgraph" ); debug_assert_eq!( *schema, named_fragment.schema, "Referenced named fragment should've been rebased for the subgraph" ); - if !runtime_types_intersect( + if runtime_types_intersect( parent_type, &named_fragment.type_condition_position, - &self.data().schema, + &self.schema, ) { - return Ok(None); + Ok(FragmentSpread::new(FragmentSpreadData::from_fragment( + &named_fragment, + &self.directives, + ))) + } else { + Err(RebaseError::NonIntersectingCondition { + type_condition: 
named_fragment.type_condition_position.clone().into(), + parent_type: parent_type.clone(), + schema: schema.clone(), + } + .into()) } - Ok(Some(FragmentSpread::new( - FragmentSpreadData::from_fragment(&named_fragment, &self.data().directives), - ))) } } impl FragmentSpreadSelection { - pub(crate) fn rebase_on( + fn rebase_inner( &self, parent_type: &CompositeTypeDefinitionPosition, named_fragments: &NamedFragments, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { + on_non_rebaseable_selection: OnNonRebaseableSelection, + ) -> Result { // We preserve the parent type here, to make sure we don't lose context, but we actually don't // want to expand the spread as that would compromise the code that optimize subgraph fetches to re-use named // fragments. @@ -360,26 +434,20 @@ impl FragmentSpreadSelection { // QP code works on selections with fully expanded fragments, so this code (and that of `can_add_to` // on come into play in the code for reusing fragments, and that code calls those methods // appropriately. - if self.spread.data().schema == *schema - && self.spread.data().type_condition_position == *parent_type - { - return Ok(Some(Selection::FragmentSpread(Arc::new(self.clone())))); + if self.spread.schema == *schema && self.spread.type_condition_position == *parent_type { + return Ok(self.clone().into()); } - let rebase_on_same_schema = self.spread.data().schema == *schema; - let Some(named_fragment) = named_fragments.get(&self.spread.data().fragment_name) else { + let rebase_on_same_schema = self.spread.schema == *schema; + let Some(named_fragment) = named_fragments.get(&self.spread.fragment_name) else { // If we're rebasing on another schema (think a subgraph), then named fragments will have been rebased on that, and some // of them may not contain anything that is on that subgraph, in which case they will not have been included at all. 
// If so, then as long as we're not asked to error if we cannot rebase, then we're happy to skip that spread (since again, // it expands to nothing that applies on the schema). - return if let RebaseErrorHandlingOption::ThrowError = error_handling { - Err(FederationError::internal(format!( - "Cannot rebase {} fragment if it isn't part of the provided fragments", - self.spread.data().fragment_name - ))) - } else { - Ok(None) - }; + return Err(RebaseError::MissingFragment { + fragment_name: self.spread.fragment_name.clone(), + } + .into()); }; // Lastly, if we rebase on a different schema, it's possible the fragment type does not intersect the @@ -403,46 +471,52 @@ impl FragmentSpreadSelection { // important because the very logic we're hitting here may need to happen inside the rebase on the // fragment selection, but that logic would not be triggered if we used the rebased `named_fragment` since // `rebase_on_same_schema` would then be 'true'. - let expanded_selection_set = self.selection_set.rebase_on( + let expanded_selection_set = self.selection_set.rebase_inner( parent_type, named_fragments, schema, - error_handling, + on_non_rebaseable_selection, )?; // In theory, we could return the selection set directly, but making `SelectionSet.rebase_on` sometimes // return a `SelectionSet` complicate things quite a bit. So instead, we encapsulate the selection set // in an "empty" inline fragment. This make for non-really-optimal selection sets in the (relatively // rare) case where this is triggered, but in practice this "inefficiency" is removed by future calls - // to `normalize`. + // to `flatten_unnecessary_fragments`. 
return if expanded_selection_set.selections.is_empty() { - Ok(None) + Err(RebaseError::EmptySelectionSet.into()) } else { - Ok(Some( - InlineFragmentSelection::new( - InlineFragment::new(InlineFragmentData { - schema: schema.clone(), - parent_type_position: parent_type.clone(), - type_condition_position: None, - directives: Default::default(), - selection_id: SelectionId::new(), - }), - expanded_selection_set, - ) - .into(), - )) + Ok(InlineFragmentSelection::new( + InlineFragment::new(InlineFragmentData { + schema: schema.clone(), + parent_type_position: parent_type.clone(), + type_condition_position: None, + directives: Default::default(), + selection_id: SelectionId::new(), + }), + expanded_selection_set, + ) + .into()) }; } let spread = FragmentSpread::new(FragmentSpreadData::from_fragment( &named_fragment, - &self.spread.data().directives, + &self.spread.directives, )); - Ok(Some(Selection::FragmentSpread(Arc::new( - FragmentSpreadSelection { - spread, - selection_set: named_fragment.selection_set.clone(), - }, - )))) + Ok(FragmentSpreadSelection { + spread, + selection_set: named_fragment.selection_set.clone(), + } + .into()) + } + + pub(crate) fn rebase_on( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + self.rebase_inner(parent_type, named_fragments, schema, Default::default()) } } @@ -472,58 +546,42 @@ impl InlineFragmentData { }; match schema .get_type(ty.type_name().clone()) - .and_then(CompositeTypeDefinitionPosition::try_from) + .ok() + .and_then(|ty| CompositeTypeDefinitionPosition::try_from(ty).ok()) { - Ok(ty) if runtime_types_intersect(parent_type, &ty, schema) => (true, Some(ty)), + Some(ty) if runtime_types_intersect(parent_type, &ty, schema) => (true, Some(ty)), _ => (false, None), } } } impl InlineFragment { - pub fn rebase_on( + pub(crate) fn rebase_on( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, - 
error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { - if self.data().schema == *schema && self.data().parent_type_position == *parent_type { - return Ok(Some(self.clone())); + ) -> Result { + if self.schema == *schema && self.parent_type_position == *parent_type { + return Ok(self.clone()); } - let type_condition = self.data().type_condition_position.clone(); + let type_condition = self.type_condition_position.clone(); // This usually imply that the fragment is not from the same subgraph than the selection. So we need // to update the source type of the fragment, but also "rebase" the condition to the selection set // schema. let (can_rebase, rebased_condition) = self.can_rebase_on(parent_type, schema); if !can_rebase { - if let RebaseErrorHandlingOption::ThrowError = error_handling { - let printable_type_condition = self - .data() - .type_condition_position - .clone() - .map_or_else(|| "".to_string(), |t| t.to_string()); - let printable_runtimes = type_condition.map_or_else( - || "undefined".to_string(), - |t| print_possible_runtimes(&t, schema), - ); - let printable_parent_runtimes = print_possible_runtimes(parent_type, schema); - Err(FederationError::internal( - format!("Cannot add fragment of condition \"{}\" (runtimes: [{}]) to parent type \"{}\" (runtimes: [{}])", - printable_type_condition, - printable_runtimes, - parent_type, - printable_parent_runtimes, - ), - )) - } else { - Ok(None) + Err(RebaseError::NonIntersectingCondition { + type_condition, + parent_type: parent_type.clone(), + schema: schema.clone(), } + .into()) } else { let mut rebased_fragment_data = self.data().clone(); rebased_fragment_data.type_condition_position = rebased_condition; rebased_fragment_data.schema = schema.clone(); - Ok(Some(InlineFragment::new(rebased_fragment_data))) + Ok(InlineFragment::new(rebased_fragment_data)) } } @@ -532,13 +590,12 @@ impl InlineFragment { parent_type: &CompositeTypeDefinitionPosition, parent_schema: &ValidFederationSchema, ) -> 
(bool, Option) { - if self.data().type_condition_position.is_none() { + if self.type_condition_position.is_none() { // can_rebase = true, condition = undefined return (true, None); } if let Some(Ok(rebased_condition)) = self - .data() .type_condition_position .clone() .and_then(|condition_position| { @@ -564,67 +621,64 @@ impl InlineFragment { } impl InlineFragmentSelection { - pub fn rebase_on( + fn rebase_inner( &self, parent_type: &CompositeTypeDefinitionPosition, named_fragments: &NamedFragments, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, - ) -> Result, FederationError> { - if &self.inline_fragment.data().schema == schema - && self.inline_fragment.data().parent_type_position == *parent_type + on_non_rebaseable_selection: OnNonRebaseableSelection, + ) -> Result { + if &self.inline_fragment.schema == schema + && self.inline_fragment.parent_type_position == *parent_type { // we are rebasing inline fragment on the same parent within the same schema - we can just return self - return Ok(Some(Selection::from(self.clone()))); + return Ok(self.clone().into()); } - let Some(rebased_fragment) = - self.inline_fragment - .rebase_on(parent_type, schema, error_handling)? 
- else { - // rebasing failed but we are ignoring errors - return Ok(None); - }; - - let rebased_casted_type = rebased_fragment.data().casted_type(); - if &self.inline_fragment.data().schema == schema - && self.inline_fragment.data().casted_type() == rebased_casted_type + let rebased_fragment = self.inline_fragment.rebase_on(parent_type, schema)?; + let rebased_casted_type = rebased_fragment.casted_type(); + if &self.inline_fragment.schema == schema + && self.inline_fragment.casted_type() == rebased_casted_type { // we are within the same schema - selection set does not have to be rebased - Ok(Some( - InlineFragmentSelection::new(rebased_fragment, self.selection_set.clone()).into(), - )) + Ok(InlineFragmentSelection::new(rebased_fragment, self.selection_set.clone()).into()) } else { - let rebased_selection_set = self.selection_set.rebase_on( + let rebased_selection_set = self.selection_set.rebase_inner( &rebased_casted_type, named_fragments, schema, - error_handling, + on_non_rebaseable_selection, )?; if rebased_selection_set.selections.is_empty() { // empty selection set - Ok(None) + Err(RebaseError::EmptySelectionSet.into()) } else { - Ok(Some( - InlineFragmentSelection::new(rebased_fragment, rebased_selection_set).into(), - )) + Ok(InlineFragmentSelection::new(rebased_fragment, rebased_selection_set).into()) } } } + pub(crate) fn rebase_on( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + self.rebase_inner(parent_type, named_fragments, schema, Default::default()) + } + fn can_add_to( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, ) -> Result { - if self.inline_fragment.data().schema == *schema - && self.inline_fragment.data().parent_type_position == *parent_type + if self.inline_fragment.schema == *schema + && self.inline_fragment.parent_type_position == *parent_type { return Ok(true); } let Some(ty) = self .inline_fragment - 
.data() .casted_type_if_add_to(parent_type, schema) else { return Ok(false); @@ -648,70 +702,78 @@ impl InlineFragmentSelection { } impl OperationElement { - pub(crate) fn rebase_on_or_error( + pub(crate) fn rebase_on( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, named_fragments: &NamedFragments, ) -> Result { - let result: Option = match self { - OperationElement::Field(field) => field - .rebase_on(parent_type, schema, RebaseErrorHandlingOption::ThrowError) - .map(|val| val.map(Into::into)), - OperationElement::FragmentSpread(fragment) => fragment - .rebase_on( - parent_type, - schema, - named_fragments, - RebaseErrorHandlingOption::ThrowError, - ) - .map(|val| val.map(Into::into)), - OperationElement::InlineFragment(inline) => inline - .rebase_on(parent_type, schema, RebaseErrorHandlingOption::ThrowError) - .map(|val| val.map(Into::into)), - }?; - result.ok_or_else(|| { - FederationError::internal(format!( - "Cannot rebase operation element {} on {}", - self, parent_type - )) - }) + match self { + OperationElement::Field(field) => Ok(field.rebase_on(parent_type, schema)?.into()), + OperationElement::FragmentSpread(fragment) => Ok(fragment + .rebase_on(parent_type, schema, named_fragments)? + .into()), + OperationElement::InlineFragment(inline) => { + Ok(inline.rebase_on(parent_type, schema)?.into()) + } + } } pub(crate) fn sub_selection_type_position( &self, ) -> Result, FederationError> { match self { - OperationElement::Field(field) => Ok(field.data().output_base_type()?.try_into().ok()), + OperationElement::Field(field) => Ok(field.output_base_type()?.try_into().ok()), OperationElement::FragmentSpread(_) => Ok(None), // No sub-selection set - OperationElement::InlineFragment(inline) => Ok(Some(inline.data().casted_type())), + OperationElement::InlineFragment(inline) => Ok(Some(inline.casted_type())), } } } impl SelectionSet { - /// Rebase this selection set so it applies to the given schema and type. 
- pub fn rebase_on( + fn rebase_inner( &self, parent_type: &CompositeTypeDefinitionPosition, named_fragments: &NamedFragments, schema: &ValidFederationSchema, - error_handling: RebaseErrorHandlingOption, + on_non_rebaseable_selection: OnNonRebaseableSelection, ) -> Result { let rebased_results = self .selections .iter() - .filter_map(|(_, selection)| { - selection - .rebase_on(parent_type, named_fragments, schema, error_handling) - .transpose() + .map(|(_, selection)| { + selection.rebase_inner( + parent_type, + named_fragments, + schema, + on_non_rebaseable_selection, + ) }) - .collect::, _>>()?; - Ok(SelectionSet::from_raw_selections( - schema.clone(), - parent_type.clone(), - rebased_results, - )) + // Remove selections with rebase errors if requested + .filter(|result| { + matches!(on_non_rebaseable_selection, OnNonRebaseableSelection::Error) + || !result.as_ref().is_err_and(|err| err.is_rebase_error()) + }); + + Ok(SelectionSet { + schema: schema.clone(), + type_position: parent_type.clone(), + selections: rebased_results + .collect::>()? + .into(), + }) + } + + /// Rebase this selection set so it applies to the given schema and type. + /// + /// This can return an empty selection set. 
+ pub(crate) fn rebase_on( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + self.rebase_inner(parent_type, named_fragments, schema, Default::default()) } /// Returns true if the selection set would select cleanly from the given type in the given @@ -737,23 +799,23 @@ impl NamedFragments { ) -> Result { let mut rebased_fragments = NamedFragments::default(); for fragment in self.fragments.values() { - if let Ok(rebased_type) = schema + if let Some(rebased_type) = schema .get_type(fragment.type_condition_position.type_name().clone()) - .and_then(CompositeTypeDefinitionPosition::try_from) + .ok() + .and_then(|ty| CompositeTypeDefinitionPosition::try_from(ty).ok()) { - if let Ok(mut rebased_selection) = fragment.selection_set.rebase_on( + if let Ok(mut rebased_selection) = fragment.selection_set.rebase_inner( &rebased_type, &rebased_fragments, schema, - RebaseErrorHandlingOption::IgnoreError, + OnNonRebaseableSelection::Drop, ) { // Rebasing can leave some inefficiencies in some case (particularly when a spread has to be "expanded", see `FragmentSpreadSelection.rebaseOn`), // so we do a top-level normalization to keep things clean. 
- rebased_selection = rebased_selection.normalize( + rebased_selection = rebased_selection.flatten_unnecessary_fragments( &rebased_type, &rebased_fragments, schema, - NormalizeSelectionOption::NormalizeRecursively, )?; if NamedFragments::is_selection_set_worth_using(&rebased_selection) { let fragment = Fragment { diff --git a/apollo-federation/src/operation/simplify.rs b/apollo-federation/src/operation/simplify.rs new file mode 100644 index 0000000000..95ae8ad243 --- /dev/null +++ b/apollo-federation/src/operation/simplify.rs @@ -0,0 +1,484 @@ +use std::sync::Arc; + +use apollo_compiler::executable; +use apollo_compiler::name; +use apollo_compiler::Node; + +use super::runtime_types_intersect; +use super::Field; +use super::FieldData; +use super::FieldSelection; +use super::FragmentSpreadSelection; +use super::InlineFragmentSelection; +use super::NamedFragments; +use super::Selection; +use super::SelectionMap; +use super::SelectionSet; +use crate::error::FederationError; +use crate::schema::position::CompositeTypeDefinitionPosition; +use crate::schema::ValidFederationSchema; + +#[derive(Debug, Clone, PartialEq, Eq, derive_more::From)] +pub(crate) enum SelectionOrSet { + Selection(Selection), + SelectionSet(SelectionSet), +} + +impl Selection { + fn flatten_unnecessary_fragments( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result, FederationError> { + match self { + Selection::Field(field) => { + field.flatten_unnecessary_fragments(parent_type, named_fragments, schema) + } + Selection::FragmentSpread(spread) => { + spread.flatten_unnecessary_fragments(parent_type, named_fragments, schema) + } + Selection::InlineFragment(inline) => { + inline.flatten_unnecessary_fragments(parent_type, named_fragments, schema) + } + } + } +} + +impl FieldSelection { + fn flatten_unnecessary_fragments( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + 
schema: &ValidFederationSchema, + ) -> Result, FederationError> { + let field_position = + if self.field.schema() == schema && self.field.parent_type_position() == *parent_type { + self.field.field_position.clone() + } else { + parent_type.field(self.field.name().clone())? + }; + + let field_element = + if self.field.schema() == schema && self.field.field_position == field_position { + self.field.data().clone() + } else { + self.field + .with_updated_position(schema.clone(), field_position) + }; + + if let Some(selection_set) = &self.selection_set { + let field_composite_type_position: CompositeTypeDefinitionPosition = + field_element.output_base_type()?.try_into()?; + let mut normalized_selection: SelectionSet = selection_set + .flatten_unnecessary_fragments( + &field_composite_type_position, + named_fragments, + schema, + )?; + + let mut selection = self.with_updated_element(field_element); + if normalized_selection.is_empty() { + // In rare cases, it's possible that everything in the sub-selection was trimmed away and so the + // sub-selection is empty. Which suggest something may be wrong with this part of the query + // intent, but the query was valid while keeping an empty sub-selection isn't. So in that + // case, we just add some "non-included" __typename field just to keep the query valid. 
+ let directives = + executable::DirectiveList(vec![Node::new(executable::Directive { + name: name!("include"), + arguments: vec![Node::new(executable::Argument { + name: name!("if"), + value: Node::new(executable::Value::Boolean(false)), + })], + })]); + let non_included_typename = Selection::from_field( + Field::new(FieldData { + schema: schema.clone(), + field_position: field_composite_type_position + .introspection_typename_field(), + alias: None, + arguments: Arc::new(vec![]), + directives: Arc::new(directives), + sibling_typename: None, + }), + None, + ); + let mut typename_selection = SelectionMap::new(); + typename_selection.insert(non_included_typename); + + normalized_selection.selections = Arc::new(typename_selection); + selection.selection_set = Some(normalized_selection); + } else { + selection.selection_set = Some(normalized_selection); + } + Ok(Some(SelectionOrSet::Selection(Selection::from(selection)))) + } else { + Ok(Some(SelectionOrSet::Selection(Selection::from( + self.with_updated_element(field_element), + )))) + } + } +} + +impl FragmentSpreadSelection { + fn flatten_unnecessary_fragments( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result, FederationError> { + let this_condition = self.spread.type_condition_position.clone(); + // This method assumes by contract that `parent_type` runtimes intersects `self.inline_fragment.parent_type_position`'s, + // but `parent_type` runtimes may be a subset. So first check if the selection should not be discarded on that account (that + // is, we should not keep the selection if its condition runtimes don't intersect at all with those of + // `parent_type` as that would ultimately make an invalid selection set). 
+ if (self.spread.schema != *schema || this_condition != *parent_type) + && !runtime_types_intersect(&this_condition, parent_type, schema) + { + return Ok(None); + } + + // We must update the spread parent type if necessary since we're not going deeper, + // or we'll be fundamentally losing context. + if self.spread.schema != *schema { + return Err(FederationError::internal( + "Should not try to flatten_unnecessary_fragments using a type from another schema", + )); + } + + let rebased_fragment_spread = self.rebase_on(parent_type, named_fragments, schema)?; + Ok(Some(SelectionOrSet::Selection(rebased_fragment_spread))) + } +} + +impl InlineFragmentSelection { + fn flatten_unnecessary_fragments( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result, FederationError> { + let this_condition = self.inline_fragment.type_condition_position.clone(); + // This method assumes by contract that `parent_type` runtimes intersects `self.inline_fragment.parent_type_position`'s, + // but `parent_type` runtimes may be a subset. So first check if the selection should not be discarded on that account (that + // is, we should not keep the selection if its condition runtimes don't intersect at all with those of + // `parent_type` as that would ultimately make an invalid selection set). + if let Some(ref type_condition) = this_condition { + if (self.inline_fragment.schema != *schema + || self.inline_fragment.parent_type_position != *parent_type) + && !runtime_types_intersect(type_condition, parent_type, schema) + { + return Ok(None); + } + } + + // We know the condition is "valid", but it may not be useful. That said, if the condition has directives, + // we preserve the fragment no matter what. + if self.inline_fragment.directives.is_empty() { + // There is a number of cases where a fragment is not useful: + // 1. if there is no type condition (remember it also has no directives). + // 2. 
if it's the same type as the current type: it's not restricting types further. + // 3. if the current type is an object more generally: because in that case the condition + // cannot be restricting things further (it's typically a less precise interface/union). + let useless_fragment = match this_condition { + None => true, + Some(ref c) => self.inline_fragment.schema == *schema && c == parent_type, + }; + if useless_fragment || parent_type.is_object_type() { + // Try to skip this fragment and flatten_unnecessary_fragments self.selection_set with `parent_type`, + // instead of its original type. + let selection_set = self.selection_set.flatten_unnecessary_fragments( + parent_type, + named_fragments, + schema, + )?; + return if selection_set.is_empty() { + Ok(None) + } else { + // We need to rebase since the parent type for the selection set could be + // changed. + // Note: Rebasing after flattening, since rebasing before that can error out. + // Or, `flatten_unnecessary_fragments` could `rebase` at the same time. + let selection_set = if useless_fragment { + selection_set.clone() + } else { + selection_set.rebase_on(parent_type, named_fragments, schema)? + }; + Ok(Some(SelectionOrSet::SelectionSet(selection_set))) + }; + } + } + + // Note: This selection_set is not rebased here yet. It will be rebased later as necessary. + let selection_set = self.selection_set.flatten_unnecessary_fragments( + &self.selection_set.type_position, + named_fragments, + &self.selection_set.schema, + )?; + // It could be that nothing was satisfiable. + if selection_set.is_empty() { + if self.inline_fragment.directives.is_empty() { + return Ok(None); + } else { + let rebased_fragment = self.inline_fragment.rebase_on(parent_type, schema)?; + // We should be able to rebase, or there is a bug, so error if that is the case. + // If we rebased successfully then we add "non-included" __typename field selection + // just to keep the query valid. 
+ let directives = + executable::DirectiveList(vec![Node::new(executable::Directive { + name: name!("include"), + arguments: vec![Node::new(executable::Argument { + name: name!("if"), + value: Node::new(executable::Value::Boolean(false)), + })], + })]); + let parent_typename_field = if let Some(condition) = this_condition { + condition.introspection_typename_field() + } else { + parent_type.introspection_typename_field() + }; + let typename_field_selection = Selection::from_field( + Field::new(FieldData { + schema: schema.clone(), + field_position: parent_typename_field, + alias: None, + arguments: Arc::new(vec![]), + directives: Arc::new(directives), + sibling_typename: None, + }), + None, + ); + + // Return `... [on ] { __typename @include(if: false) }` + let rebased_casted_type = rebased_fragment.casted_type(); + return Ok(Some(SelectionOrSet::Selection( + InlineFragmentSelection::new( + rebased_fragment, + SelectionSet::from_selection(rebased_casted_type, typename_field_selection), + ) + .into(), + ))); + } + } + + // Second, we check if some of the sub-selection fragments can be "lifted" outside of this fragment. This can happen if: + // 1. the current fragment is an abstract type, + // 2. the sub-fragment is an object type, + // 3. the sub-fragment type is a valid runtime of the current type. 
+ if self.inline_fragment.directives.is_empty() + && this_condition.is_some_and(|c| c.is_abstract_type()) + { + let mut liftable_selections = SelectionMap::new(); + for (_, selection) in selection_set.selections.iter() { + match selection { + Selection::FragmentSpread(spread_selection) => { + let type_condition = + spread_selection.spread.type_condition_position.clone(); + if type_condition.is_object_type() + && runtime_types_intersect(parent_type, &type_condition, schema) + { + liftable_selections + .insert(Selection::FragmentSpread(spread_selection.clone())); + } + } + Selection::InlineFragment(inline_fragment_selection) => { + if let Some(type_condition) = inline_fragment_selection + .inline_fragment + .type_condition_position + .clone() + { + if type_condition.is_object_type() + && runtime_types_intersect(parent_type, &type_condition, schema) + { + liftable_selections.insert(Selection::InlineFragment( + inline_fragment_selection.clone(), + )); + } + }; + } + _ => continue, + } + } + + // If we can lift all selections, then that just mean we can get rid of the current fragment altogether + if liftable_selections.len() == selection_set.selections.len() { + // Rebasing is necessary since this normalized sub-selection set changed its parent. + let rebased_selection_set = + selection_set.rebase_on(parent_type, named_fragments, schema)?; + return Ok(Some(SelectionOrSet::SelectionSet(rebased_selection_set))); + } + + // Otherwise, if there are "liftable" selections, we must return a set comprised of those lifted selection, + // and the current fragment _without_ those lifted selections. + if liftable_selections.len() > 0 { + // Converting `... [on T] { }` into + // `{ ... [on T] { } }`. + // PORT_NOTE: It appears that this lifting could be repeatable (meaning lifted + // selection could be broken down further and lifted again), but + // flatten_unnecessary_fragments is not + // applied recursively. This could be worth investigating. 
+ let rebased_inline_fragment = + self.inline_fragment.rebase_on(parent_type, schema)?; + let mut mutable_selections = self.selection_set.selections.clone(); + let final_fragment_selections = Arc::make_mut(&mut mutable_selections); + final_fragment_selections.retain(|k, _| !liftable_selections.contains_key(k)); + let rebased_casted_type = rebased_inline_fragment.casted_type(); + let final_inline_fragment: Selection = InlineFragmentSelection::new( + rebased_inline_fragment, + SelectionSet { + schema: schema.clone(), + type_position: rebased_casted_type, + selections: Arc::new(final_fragment_selections.clone()), + }, + ) + .into(); + + // Since liftable_selections are changing their parent, we need to rebase them. + liftable_selections = liftable_selections + .into_iter() + .map(|(_key, sel)| sel.rebase_on(parent_type, named_fragments, schema)) + .collect::>()?; + + let mut final_selection_map = SelectionMap::new(); + final_selection_map.insert(final_inline_fragment); + final_selection_map.extend(liftable_selections); + let final_selections = SelectionSet { + schema: schema.clone(), + type_position: parent_type.clone(), + selections: final_selection_map.into(), + }; + return Ok(Some(SelectionOrSet::SelectionSet(final_selections))); + } + } + + if self.inline_fragment.schema == *schema + && self.inline_fragment.parent_type_position == *parent_type + && self.selection_set == selection_set + { + // flattening did not change the fragment + // TODO(@goto-bus-stop): no change, but we still create a non-trivial clone here + Ok(Some(SelectionOrSet::Selection(Selection::InlineFragment( + Arc::new(self.clone()), + )))) + } else { + let rebased_inline_fragment = self.inline_fragment.rebase_on(parent_type, schema)?; + let rebased_casted_type = rebased_inline_fragment.casted_type(); + let rebased_selection_set = + selection_set.rebase_on(&rebased_casted_type, named_fragments, schema)?; + Ok(Some(SelectionOrSet::Selection(Selection::InlineFragment( + 
Arc::new(InlineFragmentSelection::new( + rebased_inline_fragment, + rebased_selection_set, + )), + )))) + } + } +} + +impl SelectionSet { + /// Simplify this selection set in the context of the provided `parent_type`. + /// + /// This removes unnecessary/redundant inline fragments, so that for instance, with a schema: + /// ```graphql + /// type Query { + /// t1: T1 + /// i: I + /// } + /// + /// interface I { + /// id: ID! + /// } + /// + /// type T1 implements I { + /// id: ID! + /// v1: Int + /// } + /// + /// type T2 implements I { + /// id: ID! + /// v2: Int + /// } + /// ``` + /// We can perform following simplification: + /// ```graphql + /// flatten_unnecessary_fragments({ + /// t1 { + /// ... on I { + /// id + /// } + /// } + /// i { + /// ... on T1 { + /// ... on I { + /// ... on T1 { + /// v1 + /// } + /// ... on T2 { + /// v2 + /// } + /// } + /// } + /// ... on T2 { + /// ... on I { + /// id + /// } + /// } + /// } + /// }) === { + /// t1 { + /// id + /// } + /// i { + /// ... on T1 { + /// v1 + /// } + /// ... on T2 { + /// id + /// } + /// } + /// } + /// ``` + /// + /// For this operation to be valid (to not throw), `parent_type` must be such that every field selection in + /// this selection set is such that its type position intersects with passed `parent_type` (there is no limitation + /// on the fragment selections, though any fragment selections whose condition do not intersects `parent_type` + /// will be discarded). Note that `self.flatten_unnecessary_fragments(self.type_condition)` is always valid and useful, but it is + /// also possible to pass a `parent_type` that is more "restrictive" than the selection current type position + /// (as long as the top-level fields of this selection set can be rebased on that type). + /// + // PORT_NOTE: this is now module-private, because it looks like it *can* be. If some place + // outside this module *does* need it, feel free to mark it pub(crate). + // PORT_NOTE: in JS, this was called "normalize". 
+ // PORT_NOTE: in JS, this had a `recursive: false` flag, which would only apply the + // simplification at the top level. This appears to be unused. + pub(super) fn flatten_unnecessary_fragments( + &self, + parent_type: &CompositeTypeDefinitionPosition, + named_fragments: &NamedFragments, + schema: &ValidFederationSchema, + ) -> Result { + let mut normalized_selections = Self { + schema: schema.clone(), + type_position: parent_type.clone(), + selections: Default::default(), // start empty + }; + for selection in self.selections.values() { + if let Some(selection_or_set) = + selection.flatten_unnecessary_fragments(parent_type, named_fragments, schema)? + { + match selection_or_set { + SelectionOrSet::Selection(normalized_selection) => { + normalized_selections.add_local_selection(&normalized_selection)?; + } + SelectionOrSet::SelectionSet(normalized_set) => { + // Since the `selection` has been expanded/lifted, we use + // `add_selection_set_with_fragments` to make sure it's rebased. + normalized_selections + .add_selection_set_with_fragments(&normalized_set, named_fragments)?; + } + } + } + } + Ok(normalized_selections) + } +} diff --git a/apollo-federation/src/operation/tests/mod.rs b/apollo-federation/src/operation/tests/mod.rs index 59b2355ff6..aac5dabbb6 100644 --- a/apollo-federation/src/operation/tests/mod.rs +++ b/apollo-federation/src/operation/tests/mod.rs @@ -1365,8 +1365,8 @@ mod lazy_map_tests { fn field_element( schema: &ValidFederationSchema, - object: apollo_compiler::schema::Name, - field: apollo_compiler::schema::Name, + object: apollo_compiler::Name, + field: apollo_compiler::Name, ) -> OpPathElement { OpPathElement::Field(super::Field::new(super::FieldData { schema: schema.clone(), diff --git a/apollo-federation/src/query_graph/build_query_graph.rs b/apollo-federation/src/query_graph/build_query_graph.rs index 67383512b7..972a2046a0 100644 --- a/apollo-federation/src/query_graph/build_query_graph.rs +++ 
b/apollo-federation/src/query_graph/build_query_graph.rs @@ -2,9 +2,8 @@ use std::sync::Arc; use apollo_compiler::schema::DirectiveList as ComponentDirectiveList; use apollo_compiler::schema::ExtendedType; -use apollo_compiler::schema::Name; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use apollo_compiler::Schema; use indexmap::IndexMap; use indexmap::IndexSet; @@ -60,7 +59,7 @@ pub fn build_federated_query_graph( let for_query_planning = for_query_planning.unwrap_or(true); let mut query_graph = QueryGraph { // Note this name is a dummy initial name that gets overridden as we build the query graph. - current_source: NodeStr::new(""), + current_source: "".into(), graph: Default::default(), sources: Default::default(), subgraphs_by_name: Default::default(), @@ -73,7 +72,7 @@ pub fn build_federated_query_graph( for (subgraph_name, subgraph) in subgraphs { let builder = SchemaQueryGraphBuilder::new( query_graph, - NodeStr::new(&subgraph_name), + subgraph_name, subgraph.schema, Some(api_schema.clone()), for_query_planning, @@ -89,12 +88,12 @@ pub fn build_federated_query_graph( /// /// Assumes the given schemas have been validated. pub fn build_query_graph( - name: NodeStr, + name: Arc, schema: ValidFederationSchema, ) -> Result { let mut query_graph = QueryGraph { // Note this name is a dummy initial name that gets overridden as we build the query graph. 
- current_source: NodeStr::new(""), + current_source: "".into(), graph: Default::default(), sources: Default::default(), subgraphs_by_name: Default::default(), @@ -112,7 +111,7 @@ struct BaseQueryGraphBuilder { } impl BaseQueryGraphBuilder { - fn new(mut query_graph: QueryGraph, source: NodeStr, schema: ValidFederationSchema) -> Self { + fn new(mut query_graph: QueryGraph, source: Arc, schema: ValidFederationSchema) -> Self { query_graph.current_source = source.clone(); query_graph.sources.insert(source.clone(), schema); query_graph @@ -242,7 +241,7 @@ impl SchemaQueryGraphBuilder { /// a subgraph query graph is being built. fn new( query_graph: QueryGraph, - source: NodeStr, + source: Arc, schema: ValidFederationSchema, api_schema: Option, for_query_planning: bool, @@ -963,7 +962,7 @@ impl FederatedQueryGraphBuilder { ) -> Result { let base = BaseQueryGraphBuilder::new( query_graph, - NodeStr::new(FEDERATED_GRAPH_ROOT_SOURCE), + FEDERATED_GRAPH_ROOT_SOURCE.into(), // This is a dummy schema that should never be used, so it's fine if we assume validity // here (note that empty schemas have no Query type, making them invalid GraphQL). ValidFederationSchema::new(Valid::assume_valid(Schema::new()))?, @@ -1153,7 +1152,7 @@ impl FederatedQueryGraphBuilder { let conditions = Arc::new(parse_field_set( schema, type_pos.type_name().clone(), - &application.fields, + application.fields, )?); // Note that each subgraph has a key edge to itself (when head == tail below). @@ -1278,7 +1277,7 @@ impl FederatedQueryGraphBuilder { implementation_type_in_other_subgraph_pos .type_name() .clone(), - &application.fields, + application.fields, ) else { // Ignored on purpose: it just means the key is not usable on this // subgraph. 
@@ -1343,7 +1342,7 @@ impl FederatedQueryGraphBuilder { let conditions = parse_field_set( schema, field_definition_position.parent().type_name().clone(), - &application.fields, + application.fields, )?; all_conditions.push(conditions); } @@ -1408,7 +1407,7 @@ impl FederatedQueryGraphBuilder { let conditions = parse_field_set( schema, field_type_pos.type_name().clone(), - &application.fields, + application.fields, )?; all_conditions.push(conditions); } @@ -1458,7 +1457,7 @@ impl FederatedQueryGraphBuilder { fn add_provides_edges( base: &mut BaseQueryGraphBuilder, - source: &NodeStr, + source: &Arc, head: NodeIndex, provided: &SelectionSet, provide_id: u32, @@ -1484,7 +1483,7 @@ impl FederatedQueryGraphBuilder { return None; }; if field_definition_position.field_name() - == field_selection.field.data().name() + == field_selection.field.name() { Some((edge_ref.id(), edge_ref.target())) } else { @@ -1517,9 +1516,8 @@ impl FederatedQueryGraphBuilder { // fix this below by filtering by provide_id. 
let field = field_selection .field - .data() .field_position - .get(field_selection.field.data().schema.schema())?; + .get(field_selection.field.schema.schema())?; let tail_type = field.ty.inner_named_type(); let possible_tails = base .query_graph @@ -1561,7 +1559,6 @@ impl FederatedQueryGraphBuilder { source: source.clone(), field_definition_position: field_selection .field - .data() .field_position .clone(), is_part_of_provides: true, @@ -1578,7 +1575,6 @@ impl FederatedQueryGraphBuilder { Selection::InlineFragment(inline_fragment_selection) => { if let Some(type_condition_pos) = &inline_fragment_selection .inline_fragment - .data() .type_condition_position { // We should always have an edge: otherwise it would mean we list a type @@ -1951,7 +1947,7 @@ impl FederatedQueryGraphBuilder { const FEDERATED_GRAPH_ROOT_SOURCE: &str = "_"; struct FederatedQueryGraphBuilderSubgraphs { - map: IndexMap, + map: IndexMap, FederatedQueryGraphBuilderSubgraphData>, } impl FederatedQueryGraphBuilderSubgraphs { @@ -2038,11 +2034,11 @@ impl QueryGraphEdgeData { } } -fn resolvable_key_applications( - directives: &ComponentDirectiveList, +fn resolvable_key_applications<'doc>( + directives: &'doc ComponentDirectiveList, key_directive_definition_name: &Name, federation_spec_definition: &'static FederationSpecDefinition, -) -> Result, FederationError> { +) -> Result>, FederationError> { let mut applications = Vec::new(); for directive in directives.get_all(key_directive_definition_name) { let key_directive_application = @@ -2058,8 +2054,7 @@ fn resolvable_key_applications( #[cfg(test)] mod tests { use apollo_compiler::name; - use apollo_compiler::schema::Name; - use apollo_compiler::NodeStr; + use apollo_compiler::Name; use apollo_compiler::Schema; use indexmap::IndexMap; use indexmap::IndexSet; @@ -2080,12 +2075,12 @@ mod tests { use crate::schema::position::SchemaRootDefinitionKind; use crate::schema::ValidFederationSchema; - const SCHEMA_NAME: NodeStr = NodeStr::from_static(&"test"); 
+ const SCHEMA_NAME: &str = "test"; fn test_query_graph_from_schema_sdl(sdl: &str) -> Result { let schema = ValidFederationSchema::new(Schema::parse_and_validate(sdl, "schema.graphql")?)?; - build_query_graph(SCHEMA_NAME, schema) + build_query_graph(SCHEMA_NAME.into(), schema) } fn assert_node_type( @@ -2098,7 +2093,7 @@ mod tests { *query_graph.node_weight(node)?, QueryGraphNode { type_: QueryGraphNodeType::SchemaType(output_type_definition_position), - source: SCHEMA_NAME, + source: SCHEMA_NAME.into(), has_reachable_cross_subgraph_edges: false, provide_id: None, root_kind, @@ -2137,7 +2132,7 @@ mod tests { let schema = query_graph.schema()?; field_pos.get(schema.schema())?; let expected_field_transition = QueryGraphEdgeTransition::FieldCollection { - source: SCHEMA_NAME, + source: SCHEMA_NAME.into(), field_definition_position: field_pos.clone().into(), is_part_of_provides: false, }; diff --git a/apollo-federation/src/query_graph/extract_subgraphs_from_supergraph.rs b/apollo-federation/src/query_graph/extract_subgraphs_from_supergraph.rs index 973681a5ad..9a4f6f72f0 100644 --- a/apollo-federation/src/query_graph/extract_subgraphs_from_supergraph.rs +++ b/apollo-federation/src/query_graph/extract_subgraphs_from_supergraph.rs @@ -2,6 +2,7 @@ use std::collections::BTreeMap; use std::fmt; use std::fmt::Write; use std::ops::Deref; +use std::sync::Arc; use apollo_compiler::ast::FieldDefinition; use apollo_compiler::executable; @@ -19,7 +20,6 @@ use apollo_compiler::schema::ExtensionId; use apollo_compiler::schema::InputObjectType; use apollo_compiler::schema::InputValueDefinition; use apollo_compiler::schema::InterfaceType; -use apollo_compiler::schema::Name; use apollo_compiler::schema::NamedType; use apollo_compiler::schema::ObjectType; use apollo_compiler::schema::ScalarType; @@ -27,8 +27,8 @@ use apollo_compiler::schema::SchemaBuilder; use apollo_compiler::schema::Type; use apollo_compiler::schema::UnionType; use apollo_compiler::validation::Valid; +use 
apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use indexmap::IndexMap; use indexmap::IndexSet; use lazy_static::lazy_static; @@ -169,7 +169,7 @@ pub(crate) fn extract_subgraphs_from_supergraph( type CollectEmptySubgraphsOk = ( FederationSubgraphs, IndexMap, - IndexMap, + IndexMap>, ); fn collect_empty_subgraphs( supergraph_schema: &FederationSchema, @@ -193,8 +193,8 @@ fn collect_empty_subgraphs( })?; let graph_arguments = join_spec_definition.graph_directive_arguments(graph_application)?; let subgraph = FederationSubgraph { - name: graph_arguments.name.as_str().to_owned(), - url: graph_arguments.url.as_str().to_owned(), + name: graph_arguments.name.to_owned(), + url: graph_arguments.url.to_owned(), schema: new_empty_fed_2_subgraph_schema()?, }; let federation_link = &subgraph @@ -213,7 +213,7 @@ fn collect_empty_subgraphs( })?; subgraphs.add(subgraph)?; graph_enum_value_name_to_subgraph_name - .insert(enum_value_name.clone(), graph_arguments.name); + .insert(enum_value_name.clone(), graph_arguments.name.into()); federation_spec_definitions.insert(enum_value_name.clone(), federation_spec_definition); } Ok(( @@ -264,7 +264,7 @@ pub(crate) fn new_empty_fed_2_subgraph_schema() -> Result, + graph_enum_value_name_to_subgraph_name: &IndexMap>, federation_spec_definitions: &IndexMap, join_spec_definition: &'static JoinSpecDefinition, filtered_types: &Vec, @@ -418,7 +418,7 @@ fn extract_subgraphs_from_fed_2_supergraph( fn add_all_empty_subgraph_types( supergraph_schema: &FederationSchema, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, federation_spec_definitions: &IndexMap, join_spec_definition: &'static JoinSpecDefinition, filtered_types: &Vec, @@ -493,7 +493,7 @@ fn add_empty_type( type_definition_position: TypeDefinitionPosition, type_directive_applications: &Vec, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: 
&IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, federation_spec_definitions: &IndexMap, ) -> Result { // In fed2, we always mark all types with `@join__type` but making sure. @@ -659,7 +659,7 @@ fn add_empty_type( if let Some(key) = &type_directive_application.key { let mut key_directive = Component::new(federation_spec_definition.key_directive( &subgraph.schema, - key.clone(), + key, type_directive_application.resolvable, )?); if type_directive_application.extension { @@ -701,7 +701,7 @@ fn add_empty_type( fn extract_object_type_content( supergraph_schema: &FederationSchema, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, federation_spec_definitions: &IndexMap, join_spec_definition: &JoinSpecDefinition, info: &[TypeInfo], @@ -845,7 +845,7 @@ fn extract_object_type_content( fn extract_interface_type_content( supergraph_schema: &FederationSchema, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, federation_spec_definitions: &IndexMap, join_spec_definition: &JoinSpecDefinition, info: &[TypeInfo], @@ -936,17 +936,13 @@ fn extract_interface_type_content( ObjectOrInterfaceTypeDefinitionPosition::Object(pos) => { pos.insert_implements_interface( &mut subgraph.schema, - ComponentName::from(Name::new( - &implements_directive_application.interface, - )?), + ComponentName::from(Name::new(implements_directive_application.interface)?), )?; } ObjectOrInterfaceTypeDefinitionPosition::Interface(pos) => { pos.insert_implements_interface( &mut subgraph.schema, - ComponentName::from(Name::new( - &implements_directive_application.interface, - )?), + ComponentName::from(Name::new(implements_directive_application.interface)?), )?; } } @@ -1036,7 +1032,7 @@ fn extract_interface_type_content( fn extract_union_type_content( supergraph_schema: &FederationSchema, subgraphs: &mut 
FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, join_spec_definition: &JoinSpecDefinition, info: &[TypeInfo], ) -> Result<(), FederationError> { @@ -1116,7 +1112,7 @@ fn extract_union_type_content( // broken @join__unionMember). pos.insert_member( &mut subgraph.schema, - ComponentName::from(Name::new(&union_member_directive_application.member)?), + ComponentName::from(Name::new(union_member_directive_application.member)?), )?; } } @@ -1128,7 +1124,7 @@ fn extract_union_type_content( fn extract_enum_type_content( supergraph_schema: &FederationSchema, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, join_spec_definition: &JoinSpecDefinition, info: &[TypeInfo], ) -> Result<(), FederationError> { @@ -1212,7 +1208,7 @@ fn extract_enum_type_content( fn extract_input_object_type_content( supergraph_schema: &FederationSchema, subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, join_spec_definition: &JoinSpecDefinition, info: &[TypeInfo], ) -> Result<(), FederationError> { @@ -1303,6 +1299,7 @@ fn add_subgraph_field( type_: None, external: None, override_: None, + override_label: None, user_overridden: None, }); let subgraph_field_type = match &field_directive_application.type_ { @@ -1332,12 +1329,14 @@ fn add_subgraph_field( } if let Some(requires) = &field_directive_application.requires { subgraph_field.directives.push(Node::new( - federation_spec_definition.requires_directive(&subgraph.schema, requires.clone())?, + federation_spec_definition + .requires_directive(&subgraph.schema, requires.to_string())?, )); } if let Some(provides) = &field_directive_application.provides { subgraph_field.directives.push(Node::new( - federation_spec_definition.provides_directive(&subgraph.schema, provides.clone())?, + 
federation_spec_definition + .provides_directive(&subgraph.schema, provides.to_string())?, )); } let external = field_directive_application.external.unwrap_or(false); @@ -1350,13 +1349,17 @@ fn add_subgraph_field( if user_overridden { subgraph_field.directives.push(Node::new( federation_spec_definition - .external_directive(&subgraph.schema, Some(NodeStr::new("[overridden]")))?, + .external_directive(&subgraph.schema, Some("[overridden]".to_string()))?, )); } if let Some(override_) = &field_directive_application.override_ { - subgraph_field.directives.push(Node::new( - federation_spec_definition.override_directive(&subgraph.schema, override_.clone())?, - )); + subgraph_field + .directives + .push(Node::new(federation_spec_definition.override_directive( + &subgraph.schema, + override_.to_string(), + &field_directive_application.override_label, + )?)); } if is_shareable && !external && !user_overridden { subgraph_field.directives.push(Node::new( @@ -1390,6 +1393,7 @@ fn add_subgraph_input_field( type_: None, external: None, override_: None, + override_label: None, user_overridden: None, }); let subgraph_input_field_type = match &field_directive_application.type_ { @@ -1422,7 +1426,7 @@ fn decode_type(type_: &str) -> Result { fn get_subgraph<'subgraph>( subgraphs: &'subgraph mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap, + graph_enum_value_name_to_subgraph_name: &IndexMap>, graph_enum_value: &Name, ) -> Result<&'subgraph mut FederationSubgraph, FederationError> { let subgraph_name = graph_enum_value_name_to_subgraph_name @@ -1500,7 +1504,7 @@ pub struct ValidFederationSubgraph { } pub struct ValidFederationSubgraphs { - subgraphs: BTreeMap, + subgraphs: BTreeMap, ValidFederationSubgraph>, } impl fmt::Debug for ValidFederationSubgraphs { @@ -1518,13 +1522,14 @@ impl ValidFederationSubgraphs { } pub(crate) fn add(&mut self, subgraph: ValidFederationSubgraph) -> Result<(), FederationError> { - if self.subgraphs.contains_key(&subgraph.name) 
{ + if self.subgraphs.contains_key(subgraph.name.as_str()) { return Err(SingleFederationError::InvalidFederationSupergraph { message: format!("A subgraph named \"{}\" already exists", subgraph.name), } .into()); } - self.subgraphs.insert(subgraph.name.clone(), subgraph); + self.subgraphs + .insert(subgraph.name.as_str().into(), subgraph); Ok(()) } @@ -1534,8 +1539,8 @@ impl ValidFederationSubgraphs { } impl IntoIterator for ValidFederationSubgraphs { - type Item = as IntoIterator>::Item; - type IntoIter = as IntoIterator>::IntoIter; + type Item = , ValidFederationSubgraph> as IntoIterator>::Item; + type IntoIter = , ValidFederationSubgraph> as IntoIterator>::IntoIter; fn into_iter(self) -> Self::IntoIter { self.subgraphs.into_iter() @@ -1907,14 +1912,14 @@ fn remove_inactive_applications( let mut fields = parse_field_set_without_normalization( valid_schema, parent_type_pos.type_name().clone(), - &fields, + fields, )?; let is_modified = remove_non_external_leaf_fields(schema, &mut fields)?; if is_modified { let replacement_directive = if fields.selections.is_empty() { None } else { - let fields = NodeStr::from(fields.serialize().no_indent().to_string()); + let fields = fields.serialize().no_indent().to_string(); Some(Node::new(match directive_kind { FieldSetDirectiveKind::Provides => { federation_spec_definition.provides_directive(schema, fields)? 
diff --git a/apollo-federation/src/query_graph/graph_path.rs b/apollo-federation/src/query_graph/graph_path.rs index 2af0b4259d..7be5026d17 100644 --- a/apollo-federation/src/query_graph/graph_path.rs +++ b/apollo-federation/src/query_graph/graph_path.rs @@ -10,7 +10,6 @@ use std::sync::Arc; use apollo_compiler::ast::Value; use apollo_compiler::executable::DirectiveList; -use apollo_compiler::NodeStr; use indexmap::IndexMap; use indexmap::IndexSet; use petgraph::graph::EdgeIndex; @@ -31,7 +30,6 @@ use crate::operation::FieldData; use crate::operation::HasSelectionKey; use crate::operation::InlineFragment; use crate::operation::InlineFragmentData; -use crate::operation::RebaseErrorHandlingOption; use crate::operation::SelectionId; use crate::operation::SelectionKey; use crate::operation::SelectionSet; @@ -299,8 +297,8 @@ impl HasSelectionKey for OpPathElement { impl OpPathElement { pub(crate) fn directives(&self) -> &Arc { match self { - OpPathElement::Field(field) => &field.data().directives, - OpPathElement::InlineFragment(inline_fragment) => &inline_fragment.data().directives, + OpPathElement::Field(field) => &field.directives, + OpPathElement::InlineFragment(inline_fragment) => &inline_fragment.directives, } } @@ -313,7 +311,7 @@ impl OpPathElement { pub(crate) fn is_terminal(&self) -> Result { match self { - OpPathElement::Field(field) => field.data().is_leaf(), + OpPathElement::Field(field) => field.is_leaf(), OpPathElement::InlineFragment(_) => Ok(false), } } @@ -327,8 +325,8 @@ impl OpPathElement { pub(crate) fn parent_type_position(&self) -> CompositeTypeDefinitionPosition { match self { - OpPathElement::Field(field) => field.data().field_position.parent(), - OpPathElement::InlineFragment(inline) => inline.data().parent_type_position.clone(), + OpPathElement::Field(field) => field.field_position.parent(), + OpPathElement::InlineFragment(inline) => inline.parent_type_position.clone(), } } @@ -336,8 +334,8 @@ impl OpPathElement { &self, ) -> Result, 
FederationError> { match self { - OpPathElement::Field(field) => Ok(field.data().output_base_type()?.try_into().ok()), - OpPathElement::InlineFragment(inline) => Ok(Some(inline.data().casted_type())), + OpPathElement::Field(field) => Ok(field.output_base_type()?.try_into().ok()), + OpPathElement::InlineFragment(inline) => Ok(Some(inline.casted_type())), } } @@ -399,11 +397,9 @@ impl OpPathElement { pub(crate) fn defer_directive_args(&self) -> Option { match self { OpPathElement::Field(_) => None, // @defer cannot be on field at the moment - OpPathElement::InlineFragment(inline_fragment) => inline_fragment - .data() - .defer_directive_arguments() - .ok() - .flatten(), + OpPathElement::InlineFragment(inline_fragment) => { + inline_fragment.defer_directive_arguments().ok().flatten() + } } } @@ -417,17 +413,16 @@ impl OpPathElement { Self::Field(_) => Some(self.clone()), // unchanged Self::InlineFragment(inline_fragment) => { let updated_directives: DirectiveList = inline_fragment - .data() .directives .get_all("defer") .cloned() .collect(); - if inline_fragment.data().type_condition_position.is_none() + if inline_fragment.type_condition_position.is_none() && updated_directives.is_empty() { return None; } - if inline_fragment.data().directives.len() == updated_directives.len() { + if inline_fragment.directives.len() == updated_directives.len() { Some(self.clone()) } else { // PORT_NOTE: We won't need to port `this.copyAttachementsTo(updated);` line here @@ -439,25 +434,17 @@ impl OpPathElement { } } - pub(crate) fn rebase_on_or_error( + pub(crate) fn rebase_on( &self, parent_type: &CompositeTypeDefinitionPosition, schema: &ValidFederationSchema, ) -> Result { - let result: Option = match self { - OpPathElement::Field(field) => field - .rebase_on(parent_type, schema, RebaseErrorHandlingOption::ThrowError) - .map(|val| val.map(Into::into)), - OpPathElement::InlineFragment(inline) => inline - .rebase_on(parent_type, schema, RebaseErrorHandlingOption::ThrowError) - 
.map(|val| val.map(Into::into)), - }?; - result.ok_or_else(|| { - FederationError::internal(format!( - "Cannot rebase operation element {} on {}", - self, parent_type - )) - }) + match self { + OpPathElement::Field(field) => Ok(field.rebase_on(parent_type, schema)?.into()), + OpPathElement::InlineFragment(inline) => { + Ok(inline.rebase_on(parent_type, schema)?.into()) + } + } } } @@ -590,17 +577,19 @@ pub(crate) struct SimultaneousPathsWithLazyIndirectPaths { /// 2-3 max; even in completely unrealistic cases, it's hard bounded by the number of subgraphs), so /// a `Vec` is going to perform a lot better than `IndexSet` in practice. #[derive(Debug, Clone)] -pub(crate) struct ExcludedDestinations(Arc>); +pub(crate) struct ExcludedDestinations(Arc>>); impl ExcludedDestinations { - fn is_excluded(&self, destination: &NodeStr) -> bool { - self.0.contains(destination) + fn is_excluded(&self, destination: &str) -> bool { + self.0 + .iter() + .any(|excluded| excluded.as_ref() == destination) } - fn add_excluded(&self, destination: NodeStr) -> Self { - if !self.is_excluded(&destination) { + fn add_excluded(&self, destination: &Arc) -> Self { + if !self.is_excluded(destination) { let mut new = self.0.as_ref().clone(); - new.push(destination); + new.push(destination.clone()); Self(Arc::new(new)) } else { self.clone() @@ -693,7 +682,7 @@ impl OpIndirectPaths { field: &Field, ) -> Result { // We only handle leaves; Things are more complex for non-leaves. - if !field.data().is_leaf()? { + if !field.is_leaf()? 
{ return Ok(self.clone()); } @@ -746,8 +735,8 @@ impl Display for Unadvanceables { #[derive(Debug, Clone)] struct Unadvanceable { reason: UnadvanceableReason, - from_subgraph: NodeStr, - to_subgraph: NodeStr, + from_subgraph: Arc, + to_subgraph: Arc, details: String, } @@ -942,7 +931,7 @@ where last_operation_element, )) = last_operation_element.as_ref() { - if last_operation_element.data().directives.is_empty() { + if last_operation_element.directives.is_empty() { // This mean we have 2 typecasts back-to-back, and that means the // previous operation element might not be useful on this path. More // precisely, the previous typecast was only useful if it restricted the @@ -1431,7 +1420,7 @@ where // be found). type BestPathInfo = Option<(Arc>, QueryPlanCost)>; - let mut best_path_by_source: IndexMap> = + let mut best_path_by_source: IndexMap, BestPathInfo> = IndexMap::new(); let dead_ends = vec![]; // Note that through `excluded` we avoid taking the same edge from multiple options. But @@ -1518,7 +1507,7 @@ where edge, condition_resolver, context, - &excluded_destinations.add_excluded(edge_tail_weight.source.clone()), + &excluded_destinations.add_excluded(&edge_tail_weight.source), excluded_conditions, )?; if let ConditionResolution::Satisfied { path_tree, cost } = condition_resolution { @@ -2218,7 +2207,7 @@ impl OpGraphPath { // PORT_NOTE: In the JS code, this method was a free-standing function called "anImplementationIsEntityWithFieldShareable". fn has_an_entity_implementation_with_shareable_field( &self, - source: &NodeStr, + source: &Arc, interface_field_pos: InterfaceFieldDefinitionPosition, ) -> Result { let fed_schema = self.graph.schema_by_source(source)?; @@ -2367,11 +2356,11 @@ impl OpGraphPath { // on triggers being valid within a subgraph. let mut operation_field = operation_field.clone(); if self.tail_is_interface_object()? 
- && *operation_field.data().field_position.type_name() + && *operation_field.field_position.type_name() != tail_type_pos.type_name { let field_on_tail_type = tail_type_pos - .field(operation_field.data().field_position.field_name().clone()); + .field(operation_field.field_position.field_name().clone()); if field_on_tail_type .try_get(self.graph.schema_by_source(&tail_weight.source)?.schema()) .is_none() @@ -2385,10 +2374,10 @@ impl OpGraphPath { operation_field = Field::new(FieldData { schema: self.graph.schema_by_source(&tail_weight.source)?.clone(), field_position: field_on_tail_type.into(), - alias: operation_field.data().alias.clone(), - arguments: operation_field.data().arguments.clone(), - directives: operation_field.data().directives.clone(), - sibling_typename: operation_field.data().sibling_typename.clone(), + alias: operation_field.alias.clone(), + arguments: operation_field.arguments.clone(), + directives: operation_field.directives.clone(), + sibling_typename: operation_field.sibling_typename.clone(), }) } @@ -2408,8 +2397,7 @@ impl OpGraphPath { // for a direct interface edge and simply cast into that implementation // below. let field_is_of_an_implementation = - *operation_field.data().field_position.type_name() - != tail_type_pos.type_name; + *operation_field.field_position.type_name() != tail_type_pos.type_name; // First, we check if there is a direct edge from the interface (which only // happens if we're in a subgraph that knows all of the implementations of @@ -2467,18 +2455,13 @@ impl OpGraphPath { // `direct_path_overrides_type_explosion` indicates that we're in // the 2nd case above, not the 1st one. operation_field - .data() .field_position .is_introspection_typename_field() || (!self.graph.is_provides_edge(*interface_edge)? 
&& !self.graph.has_an_implementation_with_provides( &tail_weight.source, tail_type_pos.field( - operation_field - .data() - .field_position - .field_name() - .clone(), + operation_field.field_position.field_name().clone(), ), )?) } else { @@ -2498,14 +2481,12 @@ impl OpGraphPath { // if the direct edge cannot be satisfied? Probably depends on the exact // semantics of `@requires` on interface fields). let operation_field_type_name = operation_field - .data() .field_position - .get(operation_field.data().schema.schema())? + .get(operation_field.schema.schema())? .ty .inner_named_type(); let is_operation_field_type_leaf = matches!( operation_field - .data() .schema .get_type(operation_field_type_name.clone())?, TypeDefinitionPosition::Scalar(_) | TypeDefinitionPosition::Enum(_) @@ -2513,9 +2494,8 @@ impl OpGraphPath { if is_operation_field_type_leaf || !self.has_an_entity_implementation_with_shareable_field( &tail_weight.source, - tail_type_pos.field( - operation_field.data().field_position.field_name().clone(), - ), + tail_type_pos + .field(operation_field.field_position.field_name().clone()), )? { let Some(interface_path) = interface_path else { @@ -2537,14 +2517,14 @@ impl OpGraphPath { // that case, we only want to consider that one implementation. 
let implementations = if field_is_of_an_implementation { let CompositeTypeDefinitionPosition::Object(field_parent_pos) = - &operation_field.data().field_position.parent() + &operation_field.field_position.parent() else { return Err(FederationError::internal( format!( "{} requested on {}, but field's parent {} is not an object type", - operation_field.data().field_position, + operation_field.field_position, tail_type_pos, - operation_field.data().field_position.type_name() + operation_field.field_position.type_name() ) )); }; @@ -2552,9 +2532,9 @@ impl OpGraphPath { return Err(FederationError::internal( format!( "{} requested on {}, but field's parent {} is not an implementation type", - operation_field.data().field_position, + operation_field.field_position, tail_type_pos, - operation_field.data().field_position.type_name() + operation_field.field_position.type_name() ) )); } @@ -2693,7 +2673,6 @@ impl OpGraphPath { } OpPathElement::InlineFragment(operation_inline_fragment) => { let type_condition_name = operation_inline_fragment - .data() .type_condition_position .as_ref() .map(|pos| pos.type_name()) @@ -2704,16 +2683,14 @@ impl OpGraphPath { // on), it means we're essentially just applying some directives (could be a // `@skip`/`@include` for instance). This doesn't make us take any edge, but if // the operation element does has directives, we record it. - let fragment_path = if operation_inline_fragment.data().directives.is_empty() { + let fragment_path = if operation_inline_fragment.directives.is_empty() { self.clone() } else { self.add( operation_inline_fragment.clone().into(), None, ConditionResolution::no_conditions(), - operation_inline_fragment - .data() - .defer_directive_arguments()?, + operation_inline_fragment.defer_directive_arguments()?, )? 
}; return Ok((Some(vec![fragment_path.into()]), None)); @@ -2738,9 +2715,7 @@ impl OpGraphPath { operation_inline_fragment.clone().into(), Some(edge), ConditionResolution::no_conditions(), - operation_inline_fragment - .data() - .defer_directive_arguments()?, + operation_inline_fragment.defer_directive_arguments()?, )?; return Ok((Some(vec![fragment_path.into()]), None)); } @@ -2767,7 +2742,7 @@ impl OpGraphPath { type_condition_position: Some( implementation_type_pos.clone().into(), ), - directives: operation_inline_fragment.data().directives.clone(), + directives: operation_inline_fragment.directives.clone(), selection_id: SelectionId::new(), }); let implementation_options = @@ -2833,7 +2808,7 @@ impl OpGraphPath { // If the operation element has applied directives we need to // convert it to an inline fragment without type condition, // otherwise we ignore the fragment altogether. - if operation_inline_fragment.data().directives.is_empty() { + if operation_inline_fragment.directives.is_empty() { return Ok((Some(vec![self.clone().into()]), None)); } let operation_inline_fragment = @@ -2844,15 +2819,11 @@ impl OpGraphPath { .clone(), parent_type_position: tail_type_pos.clone().into(), type_condition_position: None, - directives: operation_inline_fragment - .data() - .directives - .clone(), + directives: operation_inline_fragment.directives.clone(), selection_id: SelectionId::new(), }); - let defer_directive_arguments = operation_inline_fragment - .data() - .defer_directive_arguments()?; + let defer_directive_arguments = + operation_inline_fragment.defer_directive_arguments()?; let fragment_path = self.add( operation_inline_fragment.into(), None, @@ -2897,9 +2868,7 @@ impl OpGraphPath { operation_inline_fragment.clone().into(), Some(fake_downcast_edge), condition_resolution, - operation_inline_fragment - .data() - .defer_directive_arguments()?, + operation_inline_fragment.defer_directive_arguments()?, )?; return Ok((Some(vec![fragment_path.into()]), None)); } @@ 
-3050,7 +3019,7 @@ impl Display for OpGraphPath { } None => write!(f, " ({}) ", self.edge_triggers[i].as_ref()), })?; - if let Some(label) = self.defer_on_tail.as_ref().and_then(|d| d.label()) { + if let Some(label) = self.defer_on_tail.as_ref().and_then(|d| d.label.as_ref()) { write!(f, "")?; } if !self.runtime_types_of_tail.is_empty() { @@ -3576,7 +3545,7 @@ impl OpPath { for element in &self.0 { match element.as_ref() { OpPathElement::InlineFragment(fragment) => { - if let Some(type_condition) = &fragment.data().type_condition_position { + if let Some(type_condition) = &fragment.type_condition_position { if schema.get_type(type_condition.type_name().clone()).is_err() { if element.directives().is_empty() { continue; // skip this element @@ -3677,8 +3646,8 @@ fn is_useless_followup_element( conditionals: &DirectiveList, ) -> Result { let type_of_first: Option = match first { - OpPathElement::Field(field) => Some(field.data().output_base_type()?.try_into()?), - OpPathElement::InlineFragment(fragment) => fragment.data().type_condition_position.clone(), + OpPathElement::Field(field) => Some(field.output_base_type()?.try_into()?), + OpPathElement::InlineFragment(fragment) => fragment.type_condition_position.clone(), }; let Some(type_of_first) = type_of_first else { @@ -3690,13 +3659,12 @@ fn is_useless_followup_element( return match followup { OpPathElement::Field(_) => Ok(false), OpPathElement::InlineFragment(fragment) => { - let Some(type_of_second) = fragment.data().type_condition_position.clone() else { + let Some(type_of_second) = fragment.type_condition_position.clone() else { return Ok(false); }; - let are_useless_directives = fragment.data().directives.is_empty() + let are_useless_directives = fragment.directives.is_empty() || fragment - .data() .directives .iter() .any(|d| !conditionals.contains(d)); @@ -3715,8 +3683,7 @@ mod tests { use std::sync::Arc; use apollo_compiler::executable::DirectiveList; - use apollo_compiler::schema::Name; - use 
apollo_compiler::NodeStr; + use apollo_compiler::Name; use apollo_compiler::Schema; use petgraph::stable_graph::EdgeIndex; use petgraph::stable_graph::NodeIndex; @@ -3748,7 +3715,7 @@ mod tests { "#; let schema = Schema::parse_and_validate(src, "./").unwrap(); let schema = ValidFederationSchema::new(schema).unwrap(); - let name = NodeStr::new("S1"); + let name = "S1".into(); let graph = build_query_graph(name, schema.clone()).unwrap(); let path = OpGraphPath::new(Arc::new(graph), NodeIndex::new(0)).unwrap(); // NOTE: in general GraphPath would be used against a federated supergraph which would have diff --git a/apollo-federation/src/query_graph/mod.rs b/apollo-federation/src/query_graph/mod.rs index 41efac1b67..29db17c129 100644 --- a/apollo-federation/src/query_graph/mod.rs +++ b/apollo-federation/src/query_graph/mod.rs @@ -3,9 +3,8 @@ use std::fmt::Formatter; use std::hash::Hash; use std::sync::Arc; -use apollo_compiler::schema::Name; use apollo_compiler::schema::NamedType; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use indexmap::IndexMap; use indexmap::IndexSet; use petgraph::graph::DiGraph; @@ -53,7 +52,7 @@ pub(crate) struct QueryGraphNode { pub(crate) type_: QueryGraphNodeType, /// An identifier of the underlying schema containing the `type_` this node points to. This is /// mainly used in federated query graphs, where the `source` is a subgraph name. - pub(crate) source: NodeStr, + pub(crate) source: Arc, /// True if there is a cross-subgraph edge that is reachable from this node. 
pub(crate) has_reachable_cross_subgraph_edges: bool, /// @provides works by creating duplicates of the node/type involved in the provides and adding @@ -109,7 +108,7 @@ impl TryFrom for CompositeTypeDefinitionPosition { fn try_from(value: QueryGraphNodeType) -> Result { match value { - QueryGraphNodeType::SchemaType(ty) => ty.try_into(), + QueryGraphNodeType::SchemaType(ty) => Ok(ty.try_into()?), QueryGraphNodeType::FederatedRootType(_) => Err(FederationError::internal(format!( r#"Type "{value}" was unexpectedly not a composite type"# ))), @@ -122,7 +121,7 @@ impl TryFrom for ObjectTypeDefinitionPosition { fn try_from(value: QueryGraphNodeType) -> Result { match value { - QueryGraphNodeType::SchemaType(ty) => ty.try_into(), + QueryGraphNodeType::SchemaType(ty) => Ok(ty.try_into()?), QueryGraphNodeType::FederatedRootType(_) => Err(FederationError::internal(format!( r#"Type "{value}" was unexpectedly not an object type"# ))), @@ -175,7 +174,7 @@ pub(crate) enum QueryGraphEdgeTransition { /// A field edge, going from (a node for) the field parent type to the field's (base) type. FieldCollection { /// The name of the schema containing the field. - source: NodeStr, + source: Arc, /// The object/interface field being collected. field_definition_position: FieldDefinitionPosition, /// Whether this field is part of an @provides. @@ -186,7 +185,7 @@ pub(crate) enum QueryGraphEdgeTransition { /// in common with it). Downcast { /// The name of the schema containing the from/to types. - source: NodeStr, + source: Arc, /// The parent type of the type condition, i.e. the type of the selection set containing /// the type condition. from_type_position: CompositeTypeDefinitionPosition, @@ -218,7 +217,7 @@ pub(crate) enum QueryGraphEdgeTransition { /// in which the corresponding edge will be found). InterfaceObjectFakeDownCast { /// The name of the schema containing the from type. - source: NodeStr, + source: Arc, /// The parent type of the type condition, i.e. 
the type of the selection set containing /// the type condition. from_type_position: CompositeTypeDefinitionPosition, @@ -276,24 +275,25 @@ pub struct QueryGraph { /// graph, this will only ever be one value, but it will change for "federated" query graphs /// while they're being built (and after construction, will become FEDERATED_GRAPH_ROOT_SOURCE, /// which is a reserved placeholder value). - current_source: NodeStr, + current_source: Arc, /// The nodes/edges of the query graph. Note that nodes/edges should never be removed, so /// indexes are immutable when a node/edge is created. graph: DiGraph, /// The sources on which the query graph was built, which is a set (potentially of size 1) of /// GraphQL schema keyed by the name identifying them. Note that the `source` strings in the /// nodes/edges of a query graph are guaranteed to be valid key in this map. - sources: IndexMap, + sources: IndexMap, ValidFederationSchema>, /// For federated query graphs, this is a map from subgraph names to their schemas. This is the /// same as `sources`, but is missing the dummy source FEDERATED_GRAPH_ROOT_SOURCE which isn't /// really a subgraph. - subgraphs_by_name: IndexMap, + subgraphs_by_name: IndexMap, ValidFederationSchema>, /// A map (keyed by source) that associates type names of the underlying schema on which this /// query graph was built to each of the nodes that points to a type of that name. Note that for /// a "federated" query graph source, each type name will only map to a single node. - types_to_nodes_by_source: IndexMap>>, + types_to_nodes_by_source: IndexMap, IndexMap>>, /// A map (keyed by source) that associates schema root kinds to root nodes. - root_kinds_to_nodes_by_source: IndexMap>, + root_kinds_to_nodes_by_source: + IndexMap, IndexMap>, /// Maps an edge to the possible edges that can follow it "productively", that is without /// creating a trivially inefficient path. 
/// @@ -320,7 +320,7 @@ pub struct QueryGraph { } impl QueryGraph { - pub(crate) fn name(&self) -> &str { + pub(crate) fn name(&self) -> &Arc { &self.current_source } @@ -400,11 +400,11 @@ impl QueryGraph { }) } - pub(crate) fn subgraph_schemas(&self) -> &IndexMap { + pub(crate) fn subgraph_schemas(&self) -> &IndexMap, ValidFederationSchema> { &self.subgraphs_by_name } - pub(crate) fn subgraphs(&self) -> impl Iterator { + pub(crate) fn subgraphs(&self) -> impl Iterator, &ValidFederationSchema)> { self.subgraphs_by_name.iter() } @@ -572,7 +572,7 @@ impl QueryGraph { let tail = edge_ref.target(); let tail_weight = self.node_weight(tail)?; - if tail_weight.source != to_subgraph { + if tail_weight.source.as_ref() != to_subgraph { continue; } @@ -628,7 +628,7 @@ impl QueryGraph { let selection = parse_field_set( subgraph_schema, composite_type_position.type_name().clone(), - &key_value.fields, + key_value.fields, )?; if !external_metadata.selects_any_external_field(&selection)? { return Ok(Some(selection)); @@ -649,7 +649,7 @@ impl QueryGraph { }; // We explicitly avoid comparing parent type's here, to allow interface object // fields to match operation fields with the same name but differing types. - if field.data().field_position.field_name() == field_definition_position.field_name() { + if field.field_position.field_name() == field_definition_position.field_name() { Some(edge_ref.id()) } else { None @@ -674,7 +674,7 @@ impl QueryGraph { node: NodeIndex, inline_fragment: &InlineFragment, ) -> Option { - let Some(type_condition_pos) = &inline_fragment.data().type_condition_position else { + let Some(type_condition_pos) = &inline_fragment.type_condition_position else { // No type condition means the type hasn't changed, meaning there is no edge to take. 
return None; }; @@ -720,7 +720,7 @@ impl QueryGraph { match op_path_element { OpPathElement::Field(field) => self.edge_for_field(node, field).map(Some), OpPathElement::InlineFragment(inline_fragment) => { - if inline_fragment.data().type_condition_position.is_some() { + if inline_fragment.type_condition_position.is_some() { self.edge_for_inline_fragment(node, inline_fragment) .map(Some) } else { @@ -851,14 +851,10 @@ impl QueryGraph { let ty = type_name.get(schema.schema())?; for key in ty.directives().get_all(&key_directive_definition.name) { - let Some(value) = key - .argument_by_name("fields") - .and_then(|arg| arg.as_node_str()) - .cloned() - else { + let Some(value) = key.argument_by_name("fields").and_then(|arg| arg.as_str()) else { continue; }; - let selection = parse_field_set(schema, ty.name().clone(), &value)?; + let selection = parse_field_set(schema, ty.name().clone(), value)?; let has_external = metadata .external_metadata() .selects_any_external_field(&selection)?; @@ -891,7 +887,7 @@ impl QueryGraph { pub(crate) fn has_an_implementation_with_provides( &self, - source: &NodeStr, + source: &Arc, interface_field_definition_position: InterfaceFieldDefinitionPosition, ) -> Result { let schema = self.schema_by_source(source)?; diff --git a/apollo-federation/src/query_graph/output.rs b/apollo-federation/src/query_graph/output.rs index 515979441c..b17390dd9d 100644 --- a/apollo-federation/src/query_graph/output.rs +++ b/apollo-federation/src/query_graph/output.rs @@ -2,8 +2,8 @@ // - Corresponds to the `graphviz` and `mermaid` modules from the JS federation. 
use std::fmt::Write; +use std::sync::Arc; -use apollo_compiler::NodeStr; use petgraph::dot::Config; use petgraph::dot::Dot; use petgraph::graph::DiGraph; @@ -53,7 +53,7 @@ pub fn to_dot(graph: &QueryGraph) -> String { fn to_dot_federated(graph: &QueryGraph) -> Result { fn edge_within_cluster( graph: &StableInnerGraph, - cluster_name: &NodeStr, + cluster_name: &Arc, edge_index: EdgeIndex, ) -> bool { graph.edge_endpoints(edge_index).is_some_and(|(n1, n2)| { @@ -127,7 +127,7 @@ fn to_dot_federated(graph: &QueryGraph) -> Result { // Supergraph nodes for i in stable_graph.node_indices() { let node = &stable_graph[i]; - if node.source == graph.name() { + if node.source == *graph.name() { writeln!(dot_str, " {} [{}]", i.index(), label_node(node))?; } } diff --git a/apollo-federation/src/query_graph/path_tree.rs b/apollo-federation/src/query_graph/path_tree.rs index 6eb2c9f675..02fbcf0ca7 100644 --- a/apollo-federation/src/query_graph/path_tree.rs +++ b/apollo-federation/src/query_graph/path_tree.rs @@ -3,7 +3,6 @@ use std::fmt::Formatter; use std::hash::Hash; use std::sync::Arc; -use apollo_compiler::NodeStr; use indexmap::map::Entry; use indexmap::IndexMap; use petgraph::graph::EdgeIndex; @@ -104,7 +103,7 @@ impl OpPathTree { self.is_all_in_same_subgraph_internal(&node_weight.source) } - fn is_all_in_same_subgraph_internal(&self, target: &NodeStr) -> Result { + fn is_all_in_same_subgraph_internal(&self, target: &Arc) -> Result { let node_weight = self.graph.node_weight(self.node)?; if node_weight.source != *target { return Ok(false); diff --git a/apollo-federation/src/query_plan/conditions.rs b/apollo-federation/src/query_plan/conditions.rs index 04e4d1a3ac..00b4b44543 100644 --- a/apollo-federation/src/query_plan/conditions.rs +++ b/apollo-federation/src/query_plan/conditions.rs @@ -2,8 +2,8 @@ use std::sync::Arc; use apollo_compiler::ast::Directive; use apollo_compiler::executable::DirectiveList; -use apollo_compiler::executable::Name; use 
apollo_compiler::executable::Value; +use apollo_compiler::Name; use apollo_compiler::Node; use indexmap::map::Entry; use indexmap::IndexMap; @@ -256,7 +256,7 @@ pub(crate) fn remove_unneeded_top_level_fragment_directives( selection_map.insert(selection.clone()); } Selection::InlineFragment(inline_fragment) => { - let fragment = inline_fragment.inline_fragment.data(); + let fragment = &inline_fragment.inline_fragment; if fragment.type_condition_position.is_none() { // if there is no type condition we should preserve the directive info selection_map.insert(selection.clone()); diff --git a/apollo-federation/src/query_plan/fetch_dependency_graph.rs b/apollo-federation/src/query_plan/fetch_dependency_graph.rs index 58110701c8..afb34c8b75 100644 --- a/apollo-federation/src/query_plan/fetch_dependency_graph.rs +++ b/apollo-federation/src/query_plan/fetch_dependency_graph.rs @@ -11,13 +11,12 @@ use apollo_compiler::ast::Argument; use apollo_compiler::ast::Directive; use apollo_compiler::ast::OperationType; use apollo_compiler::ast::Type; +use apollo_compiler::executable; use apollo_compiler::executable::VariableDefinition; -use apollo_compiler::executable::{self}; use apollo_compiler::name; -use apollo_compiler::schema::Name; -use apollo_compiler::schema::{self}; +use apollo_compiler::schema; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use indexmap::IndexMap; use indexmap::IndexSet; use itertools::Itertools; @@ -76,7 +75,7 @@ use crate::subgraph::spec::ANY_SCALAR_NAME; use crate::subgraph::spec::ENTITIES_QUERY; /// Represents the value of a `@defer(label:)` argument. -type DeferRef = NodeStr; +type DeferRef = String; /// Map of defer labels to nodes of the fetch dependency graph. type DeferredNodes = multimap::MultiMap>; @@ -90,7 +89,7 @@ type DeferredNodes = multimap::MultiMap>; #[derive(Debug, Clone)] pub(crate) struct FetchDependencyGraphNode { /// The subgraph this fetch is queried against. 
- pub(crate) subgraph_name: NodeStr, + pub(crate) subgraph_name: Arc, /// Which root operation kind the fetch should have. root_kind: SchemaRootDefinitionKind, /// The parent type of the fetch's selection set. For fetches against the root, this is the @@ -207,7 +206,7 @@ pub(crate) struct FetchDependencyGraph { graph: FetchDependencyGraphPetgraph, /// The root nodes by subgraph name, representing the fetches against root operation types of /// the subgraphs. - root_nodes_by_subgraph: IndexMap, + root_nodes_by_subgraph: IndexMap, NodeIndex>, /// Tracks metadata about deferred blocks and their dependencies on one another. pub(crate) defer_tracking: DeferTracking, /// The initial fetch ID generation (used when handling `@defer`). @@ -458,15 +457,9 @@ impl FetchDependencyGraphNodePath { ) -> Result, FederationError> { let mut new_path = self.response_path.clone(); if let OpPathElement::Field(field) = element { - new_path.push(FetchDataPathElement::Key( - field.data().response_name().into(), - )); + new_path.push(FetchDataPathElement::Key(field.response_name())); // TODO: is there a simpler we to find a field’s type from `&Field`? - let mut type_ = &field - .data() - .field_position - .get(field.data().schema.schema())? - .ty; + let mut type_ = &field.field_position.get(field.schema.schema())?.ty; loop { match type_ { schema::Type::Named(_) | schema::Type::NonNullNamed(_) => break, @@ -481,6 +474,16 @@ impl FetchDependencyGraphNodePath { } } +/// If the `iter` yields a single element, return it. Else return `None`. 
+fn iter_into_single_item(mut iter: impl Iterator) -> Option { + let item = iter.next()?; + if iter.next().is_none() { + Some(item) + } else { + None + } +} + impl FetchDependencyGraph { pub(crate) fn new( supergraph_schema: ValidFederationSchema, @@ -507,7 +510,7 @@ impl FetchDependencyGraph { pub(crate) fn root_node_by_subgraph_iter( &self, - ) -> impl Iterator { + ) -> impl Iterator, &NodeIndex)> { self.root_nodes_by_subgraph.iter() } @@ -520,7 +523,7 @@ impl FetchDependencyGraph { pub(crate) fn get_or_create_root_node( &mut self, - subgraph_name: &NodeStr, + subgraph_name: &Arc, root_kind: SchemaRootDefinitionKind, parent_type: CompositeTypeDefinitionPosition, ) -> Result { @@ -542,7 +545,7 @@ impl FetchDependencyGraph { fn new_root_type_node( &mut self, - subgraph_name: NodeStr, + subgraph_name: Arc, root_kind: SchemaRootDefinitionKind, parent_type: &ObjectTypeDefinitionPosition, merge_at: Option>, @@ -561,7 +564,7 @@ impl FetchDependencyGraph { pub(crate) fn new_node( &mut self, - subgraph_name: NodeStr, + subgraph_name: Arc, parent_type: CompositeTypeDefinitionPosition, has_inputs: bool, root_kind: SchemaRootDefinitionKind, @@ -631,7 +634,7 @@ impl FetchDependencyGraph { fn get_or_create_key_node( &mut self, - subgraph_name: &NodeStr, + subgraph_name: &Arc, merge_at: &[FetchDataPathElement], type_: &CompositeTypeDefinitionPosition, parent: ParentRelation, @@ -687,7 +690,7 @@ impl FetchDependencyGraph { fn new_key_node( &mut self, - subgraph_name: &NodeStr, + subgraph_name: &Arc, merge_at: Vec, defer_ref: Option, ) -> Result { @@ -883,9 +886,10 @@ impl FetchDependencyGraph { &self, type_name: &Name, ) -> Result { - self.supergraph_schema + Ok(self + .supergraph_schema .get_type(type_name.clone())? - .try_into() + .try_into()?) } /// Find redundant edges coming out of a node. See `remove_redundant_edges`. 
@@ -1138,19 +1142,17 @@ impl FetchDependencyGraph { let try_get_type_condition = |selection: &Selection| match selection { Selection::FragmentSpread(fragment) => { - Some(fragment.spread.data().type_condition_position.clone()) + Some(fragment.spread.type_condition_position.clone()) } - Selection::InlineFragment(inline) => inline - .inline_fragment - .data() - .type_condition_position - .clone(), + Selection::InlineFragment(inline) => { + inline.inline_fragment.type_condition_position.clone() + } _ => None, }; - let get_subgraph_schema = |subgraph_name: &NodeStr| { + let get_subgraph_schema = |subgraph_name: &Arc| { self.federated_query_graph .schema_by_source(subgraph_name) .map(|schema| schema.clone()) @@ -1503,7 +1505,7 @@ impl FetchDependencyGraph { if !node.selection_set.selection_set.selections.is_empty() { let id = *node.id.get_or_init(|| self.fetch_id_generation.next_id()); - defer_dependencies.push((child_defer_ref.clone(), format!("{id}").into())); + defer_dependencies.push((child_defer_ref.clone(), format!("{id}"))); } deferred_nodes.insert(child_defer_ref.clone(), child_index); } @@ -1820,16 +1822,29 @@ impl FetchDependencyGraph { sibling_id: NodeIndex, ) -> Result { let node = self.node_weight(node_id)?; - let own_parents: Vec = self.parents_relations_of(node_id).collect(); - let sibling = self.node_weight(sibling_id)?; - let sibling_parents: Vec = self.parents_relations_of(sibling_id).collect(); + + let own_parents_iter = self + .graph + .edges_directed(node_id, petgraph::Direction::Incoming); + let Some(own_parent_id) = iter_into_single_item(own_parents_iter).map(|node| node.source()) + else { + return Ok(false); + }; + + let sibling_parents_iter = self + .graph + .edges_directed(sibling_id, petgraph::Direction::Incoming); + let Some(sibling_parent_id) = + iter_into_single_item(sibling_parents_iter).map(|node| node.source()) + else { + return Ok(false); + }; + Ok(node.defer_ref == sibling.defer_ref && node.subgraph_name == sibling.subgraph_name && 
node.merge_at == sibling.merge_at - && own_parents.len() == 1 - && sibling_parents.len() == 1 - && own_parents[0].parent_node_id == sibling_parents[0].parent_node_id) + && own_parent_id == sibling_parent_id) } fn can_merge_grand_child_in( @@ -2124,7 +2139,7 @@ impl FetchDependencyGraph { for element in path.0.iter() { match &**element { OpPathElement::Field(field) => { - let field_position = type_.field(field.data().name().clone())?; + let field_position = type_.field(field.name().clone())?; let field_definition = field_position.get(schema.schema())?; let field_type = field_definition.ty.inner_named_type(); type_ = schema @@ -2141,8 +2156,7 @@ impl FetchDependencyGraph { )?; } OpPathElement::InlineFragment(fragment) => { - if let Some(type_condition_position) = &fragment.data().type_condition_position - { + if let Some(type_condition_position) = &fragment.type_condition_position { type_ = schema .get_type(type_condition_position.type_name().clone())? .try_into() @@ -2298,7 +2312,7 @@ impl FetchDependencyGraphNode { handled_conditions: &Conditions, variable_definitions: &[Node], fragments: Option<&mut RebasedFragments>, - operation_name: Option, + operation_name: Option, ) -> Result, FederationError> { if self.selection_set.selection_set.selections.is_empty() { return Ok(None); @@ -2337,7 +2351,7 @@ impl FetchDependencyGraphNode { if let Some(fragments) = fragments .map(|rebased| rebased.for_subgraph(self.subgraph_name.clone(), subgraph_schema)) { - operation.optimize(fragments)?; + operation.reuse_fragments(fragments)?; } let operation_document = operation.try_into()?; @@ -2501,7 +2515,7 @@ fn operation_for_entities_fetch( subgraph_schema: &ValidFederationSchema, selection_set: SelectionSet, all_variable_definitions: &[Node], - operation_name: &Option, + operation_name: &Option, ) -> Result { let mut variable_definitions: Vec> = Vec::with_capacity(all_variable_definitions.len() + 1); @@ -2575,7 +2589,7 @@ fn operation_for_entities_fetch( Ok(Operation { schema: 
subgraph_schema.clone(), root_kind: SchemaRootDefinitionKind::Query, - name: operation_name.clone().map(|n| n.try_into()).transpose()?, + name: operation_name.clone(), variables: Arc::new(variable_definitions), directives: Default::default(), selection_set, @@ -2588,7 +2602,7 @@ fn operation_for_query_fetch( root_kind: SchemaRootDefinitionKind, selection_set: SelectionSet, variable_definitions: &[Node], - operation_name: &Option, + operation_name: &Option, ) -> Result { let mut used_variables = HashSet::new(); selection_set.collect_variables(&mut used_variables)?; @@ -2601,7 +2615,7 @@ fn operation_for_query_fetch( Ok(Operation { schema: subgraph_schema.clone(), root_kind, - name: operation_name.clone().map(|n| n.try_into()).transpose()?, + name: operation_name.clone(), variables: Arc::new(variable_definitions), directives: Default::default(), selection_set, @@ -2829,7 +2843,8 @@ impl DeferTracking { }; let label = defer_args - .label() + .label + .as_ref() .expect("All @defer should have been labeled at this point"); let _deferred_block = self.deferred.entry(label.clone()).or_insert_with(|| { DeferredInfo::empty( @@ -3388,7 +3403,7 @@ fn compute_nodes_for_op_path_element<'a>( updated.node_path = require_path; } if let OpPathElement::Field(field) = &updated_operation { - if *field.data().name() == TYPENAME_FIELD { + if *field.name() == TYPENAME_FIELD { // Because of the optimization done in `QueryPlanner.optimizeSiblingTypenames`, // we will rarely get an explicit `__typename` edge here. // But one case where it can happen is where an @interfaceObject was involved, @@ -3438,7 +3453,7 @@ fn compute_nodes_for_op_path_element<'a>( "Unexpected operation {updated_operation} for edge {edge}" ))); }; - if !inline.data().directives.is_empty() { + if !inline.directives.is_empty() { // We want to keep the directives, but we clear the condition // since it's to a type that doesn't exists in the subgraph we're currently in. 
updated.node_path = updated @@ -3531,7 +3546,7 @@ fn wrap_input_selections( } } */ - let parent_type_position = fragment.data().parent_type_position.clone(); + let parent_type_position = fragment.parent_type_position.clone(); let selection = InlineFragmentSelection::new(fragment, sub_selections); SelectionSet::from_selection(parent_type_position, selection.into()) }, @@ -3550,8 +3565,8 @@ fn create_fetch_initial_path( // supergraph). Doing this make sure we can rely on things like checking subtyping between // the types of a given path. let rebased_type: CompositeTypeDefinitionPosition = supergraph_schema - .get_type(dest_type.type_name().clone()) - .and_then(|res| res.try_into())?; + .get_type(dest_type.type_name().clone())? + .try_into()?; Ok(Arc::new(wrap_selection_with_type_and_conditions( supergraph_schema, &rebased_type, @@ -3566,7 +3581,7 @@ fn create_fetch_initial_path( } fn compute_input_rewrites_on_key_fetch( - input_type_name: &NodeStr, + input_type_name: &Name, dest_type: &CompositeTypeDefinitionPosition, dest_schema: &ValidFederationSchema, ) -> Result>>, FederationError> { @@ -3581,7 +3596,7 @@ fn compute_input_rewrites_on_key_fetch( { // rewrite path: [ ... on , __typename ] let type_cond = FetchDataPathElement::TypenameEquals(input_type_name.clone()); - let typename_field_elem = FetchDataPathElement::Key(TYPENAME_FIELD.into()); + let typename_field_elem = FetchDataPathElement::Key(TYPENAME_FIELD); let rewrite = FetchDataRewrite::ValueSetter(FetchDataValueSetter { path: vec![type_cond, typename_field_elem], set_value_to: dest_type.type_name().to_string().into(), @@ -3615,7 +3630,7 @@ fn extract_defer_from_operation( return Ok((Some(operation.clone()), updated_context)); }; - let updated_defer_ref = defer_args.label().ok_or_else(|| + let updated_defer_ref = defer_args.label.as_ref().ok_or_else(|| // PORT_NOTE: The original TypeScript code has an assertion here. 
FederationError::internal( "All defers should have a label at this point", @@ -3680,9 +3695,7 @@ fn handle_requires( // the edge `0 --- 2` is removed (since the dependency of 2 on 0 is already provide transitively through 1). dependency_graph.reduce(); - let parents: Vec = dependency_graph - .parents_relations_of(fetch_node_id) - .collect(); + let single_parent = iter_into_single_item(dependency_graph.parents_relations_of(fetch_node_id)); // In general, we should do like for an edge, and create a new node _for the current subgraph_ // that depends on the created_nodes and have the created nodes depend on the current one. // However, we can be more efficient in general (and this is expected by the user) because @@ -3692,8 +3705,9 @@ fn handle_requires( // node we're coming from is our "direct parent", we can merge it to said direct parent (which // effectively means that the parent node will collect the provides before taking the edge // to our current node). - if parents.len() == 1 && fetch_node_path.path_in_node.has_only_fragments() { - let parent = &parents[0]; + if single_parent.is_some() && fetch_node_path.path_in_node.has_only_fragments() { + // Should do `if let` but it requires extra indentation. + let parent = single_parent.unwrap(); // We start by computing the nodes for the conditions. We do this using a copy of the current // node (with only the inputs) as that allows to modify this copy without modifying `node`. @@ -3747,7 +3761,7 @@ fn handle_requires( // Note: it is to be sure this test is not polluted by other things in `node` that we created `new_node`. 
dependency_graph.remove_inputs_from_selection(new_node_id)?; - let new_node_is_not_needed = dependency_graph.is_node_unneeded(new_node_id, parent)?; + let new_node_is_not_needed = dependency_graph.is_node_unneeded(new_node_id, &parent)?; let mut unmerged_node_ids: Vec = Vec::new(); if new_node_is_not_needed { // Up to this point, `new_node` had no parent, so let's first merge `new_node` to the parent, thus "rooting" diff --git a/apollo-federation/src/query_plan/fetch_dependency_graph_processor.rs b/apollo-federation/src/query_plan/fetch_dependency_graph_processor.rs index 1c45288f76..4fa0250f46 100644 --- a/apollo-federation/src/query_plan/fetch_dependency_graph_processor.rs +++ b/apollo-federation/src/query_plan/fetch_dependency_graph_processor.rs @@ -1,9 +1,8 @@ use std::collections::HashSet; -use apollo_compiler::ast::Name; use apollo_compiler::executable::VariableDefinition; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use crate::error::FederationError; use crate::operation::RebasedFragments; @@ -49,7 +48,7 @@ pub(crate) struct FetchDependencyGraphToQueryPlanProcessor { variable_definitions: Vec>, fragments: Option, operation_name: Option, - assigned_defer_labels: Option>, + assigned_defer_labels: Option>, counter: u32, } @@ -246,7 +245,7 @@ impl FetchDependencyGraphToQueryPlanProcessor { variable_definitions: Vec>, fragments: Option, operation_name: Option, - assigned_defer_labels: Option>, + assigned_defer_labels: Option>, ) -> Self { Self { variable_definitions, @@ -271,7 +270,8 @@ impl FetchDependencyGraphProcessor, DeferredDeferBlock> let counter = self.counter; self.counter += 1; let subgraph = to_valid_graphql_name(&node.subgraph_name).unwrap_or("".into()); - format!("{name}__{subgraph}__{counter}").into() + // `name` was already a valid name so this concatenation should be too + Name::new(&format!("{name}__{subgraph}__{counter}")).unwrap() }); node.to_plan_node( query_graph, diff --git 
a/apollo-federation/src/query_plan/mod.rs b/apollo-federation/src/query_plan/mod.rs index 9633d7cb19..162e563b3d 100644 --- a/apollo-federation/src/query_plan/mod.rs +++ b/apollo-federation/src/query_plan/mod.rs @@ -1,10 +1,9 @@ use std::sync::Arc; use apollo_compiler::executable; -use apollo_compiler::executable::Name; use apollo_compiler::validation::Valid; use apollo_compiler::ExecutableDocument; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use crate::query_plan::query_planner::QueryPlanningStatistics; @@ -58,7 +57,7 @@ pub enum PlanNode { #[derive(Debug, Clone, PartialEq)] pub struct FetchNode { - pub subgraph_name: NodeStr, + pub subgraph_name: Arc, /// Optional identifier for the fetch for defer support. All fetches of a given plan will be /// guaranteed to have a unique `id`. pub id: Option, @@ -73,7 +72,7 @@ pub struct FetchNode { // nodes are meant for direct consumption by router (without any serdes), so we leave the // question of whether it needs to be serialized to router. pub operation_document: Valid, - pub operation_name: Option, + pub operation_name: Option, pub operation_kind: executable::OperationType, /// Optionally describe a number of "rewrites" that query plan executors should apply to the /// data that is sent as the input of this fetch. Note that such rewrites should only impact the @@ -155,7 +154,7 @@ pub struct DeferredDeferBlock { /// this deferred part should not be started until all such fetches return. pub depends: Vec, /// The optional defer label. - pub label: Option, + pub label: Option, /// Path, in the query, to the `@defer` application this corresponds to. The `sub_selection` /// starts at this `query_path`. pub query_path: Vec, @@ -174,7 +173,7 @@ pub struct DeferredDeferBlock { #[derive(Debug, Clone, PartialEq)] pub struct DeferredDependency { /// A `FetchNode` ID. 
- pub id: NodeStr, + pub id: String, } #[derive(Debug, Clone, PartialEq)] @@ -210,7 +209,7 @@ pub struct FetchDataKeyRenamer { /// Path to the key that is renamed by this "rewrite". pub path: Vec, /// The key to rename to at `path`. - pub rename_key_to: NodeStr, + pub rename_key_to: Name, } /// Vectors of this element match path(s) to a value in fetch data. Each element is (1) a key in @@ -230,9 +229,9 @@ pub struct FetchDataKeyRenamer { /// elements. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FetchDataPathElement { - Key(NodeStr), + Key(Name), AnyIndex, - TypenameEquals(NodeStr), + TypenameEquals(Name), } /// Vectors of this element match a path in a query. Each element is (1) a field in a query, or (2) diff --git a/apollo-federation/src/query_plan/query_planner.rs b/apollo-federation/src/query_plan/query_planner.rs index 45102bcc62..f0df9fab42 100644 --- a/apollo-federation/src/query_plan/query_planner.rs +++ b/apollo-federation/src/query_plan/query_planner.rs @@ -2,10 +2,9 @@ use std::cell::Cell; use std::num::NonZeroU32; use std::sync::Arc; -use apollo_compiler::schema::Name; use apollo_compiler::validation::Valid; use apollo_compiler::ExecutableDocument; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use indexmap::IndexMap; use indexmap::IndexSet; use itertools::Itertools; @@ -187,7 +186,8 @@ pub struct QueryPlanner { federated_query_graph: Arc, supergraph_schema: ValidFederationSchema, api_schema: ValidFederationSchema, - subgraph_federation_spec_definitions: Arc>, + subgraph_federation_spec_definitions: + Arc, &'static FederationSpecDefinition>>, /// A set of the names of interface types for which at least one subgraph use an /// @interfaceObject to abstract that interface. 
interface_types_with_interface_objects: IndexSet, @@ -306,7 +306,7 @@ impl QueryPlanner { }) } - pub fn subgraph_schemas(&self) -> &IndexMap { + pub fn subgraph_schemas(&self) -> &IndexMap, ValidFederationSchema> { self.federated_query_graph.subgraph_schemas() } @@ -341,7 +341,7 @@ impl QueryPlanner { let node = FetchNode { subgraph_name: subgraph_name.clone(), operation_document: document.clone(), - operation_name: operation.name.as_deref().cloned(), + operation_name: operation.name.clone(), operation_kind: operation.operation_type, id: None, variable_usages: operation @@ -1313,4 +1313,51 @@ type User } "###); } + + #[test] + fn drop_operation_root_level_typename() { + let subgraph1 = Subgraph::parse_and_expand( + "Subgraph1", + "https://Subgraph1", + r#" + type Query { + t: T + } + + type T @key(fields: "id") { + id: ID! + x: Int + } + "#, + ) + .unwrap(); + let subgraphs = vec![&subgraph1]; + let supergraph = Supergraph::compose(subgraphs).unwrap(); + let planner = QueryPlanner::new(&supergraph, Default::default()).unwrap(); + let document = ExecutableDocument::parse_and_validate( + planner.api_schema().schema(), + r#" + query { + __typename + t { + x + } + } + "#, + "operation.graphql", + ) + .unwrap(); + let plan = planner.build_query_plan(&document, None).unwrap(); + insta::assert_snapshot!(plan, @r###" + QueryPlan { + Fetch(service: "Subgraph1") { + { + t { + x + } + } + }, + } + "###); + } } diff --git a/apollo-federation/src/query_plan/query_planning_traversal.rs b/apollo-federation/src/query_plan/query_planning_traversal.rs index 582d416701..07072a94c4 100644 --- a/apollo-federation/src/query_plan/query_planning_traversal.rs +++ b/apollo-federation/src/query_plan/query_planning_traversal.rs @@ -480,7 +480,7 @@ impl<'a: 'b, 'b> QueryPlanningTraversal<'a, 'b> { None => { let check_result = selection.any_element(&mut |element| match element { OpPathElement::InlineFragment(inline_fragment) => { - match &inline_fragment.data().type_condition_position { + match 
&inline_fragment.type_condition_position { Some(type_condition) => Ok(self .parameters .abstract_types_with_inconsistent_runtime_types diff --git a/apollo-federation/src/schema/definitions.rs b/apollo-federation/src/schema/definitions.rs index d8ed8014b6..7df65bfc2f 100644 --- a/apollo-federation/src/schema/definitions.rs +++ b/apollo-federation/src/schema/definitions.rs @@ -4,35 +4,8 @@ use apollo_compiler::Schema; use crate::error::FederationError; use crate::error::SingleFederationError; -use crate::schema::position::CompositeTypeDefinitionPosition; -use crate::schema::position::InterfaceTypeDefinitionPosition; -use crate::schema::position::TypeDefinitionPosition; -use crate::schema::position::UnionTypeDefinitionPosition; -#[derive(derive_more::From)] -pub(crate) enum AbstractType { - Interface(InterfaceTypeDefinitionPosition), - Union(UnionTypeDefinitionPosition), -} - -impl From for CompositeTypeDefinitionPosition { - fn from(value: AbstractType) -> Self { - match value { - AbstractType::Interface(x) => Self::Interface(x), - AbstractType::Union(x) => Self::Union(x), - } - } -} - -pub(crate) fn is_abstract_type(ty: TypeDefinitionPosition) -> bool { - matches!( - ty, - crate::schema::position::TypeDefinitionPosition::Interface(_) - | crate::schema::position::TypeDefinitionPosition::Union(_) - ) -} - -pub(crate) fn is_composite_type(ty: &NamedType, schema: &Schema) -> Result { +fn is_composite_type(ty: &NamedType, schema: &Schema) -> Result { Ok(matches!( schema .types diff --git a/apollo-federation/src/schema/field_set.rs b/apollo-federation/src/schema/field_set.rs index bd451999b0..bdc9f5d299 100644 --- a/apollo-federation/src/schema/field_set.rs +++ b/apollo-federation/src/schema/field_set.rs @@ -3,7 +3,6 @@ use apollo_compiler::executable::FieldSet; use apollo_compiler::schema::ExtendedType; use apollo_compiler::schema::NamedType; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; use apollo_compiler::Schema; use indexmap::IndexMap; @@ 
-33,7 +32,7 @@ fn check_absence_of_aliases( let OpPathElement::Field(field) = elem else { return Ok(()); }; - let Some(alias) = &field.data().alias else { + let Some(alias) = &field.alias else { return Ok(()); }; alias_errors.push(SingleFederationError::UnsupportedFeature { @@ -99,16 +98,12 @@ pub(crate) fn parse_field_set_without_normalization( pub(crate) fn collect_target_fields_from_field_set( schema: &Valid, parent_type_name: NamedType, - value: NodeStr, + value: &str, ) -> Result, FederationError> { // Note this parsing takes care of adding curly braces ("{" and "}") if they aren't in the // string. - let field_set = FieldSet::parse_and_validate( - schema, - parent_type_name, - value.as_str(), - "field_set.graphql", - )?; + let field_set = + FieldSet::parse_and_validate(schema, parent_type_name, value, "field_set.graphql")?; let mut stack = vec![&field_set.selection_set]; let mut fields = vec![]; while let Some(selection_set) = stack.pop() { @@ -193,7 +188,7 @@ pub(crate) fn add_interface_field_implementations( #[cfg(test)] mod tests { - use apollo_compiler::schema::Name; + use apollo_compiler::Name; use crate::error::FederationError; use crate::query_graph::build_federated_query_graph; @@ -243,9 +238,7 @@ mod tests { assert_eq!( err.to_string(), r#"The following errors occurred: - - Cannot use alias "r1" in "r1: r s q1: q": aliases are not currently supported in the used directive - - Cannot use alias "q1" in "r1: r s q1: q": aliases are not currently supported in the used directive"# ); Ok(()) diff --git a/apollo-federation/src/schema/mod.rs b/apollo-federation/src/schema/mod.rs index 585f4e99aa..70f51dd335 100644 --- a/apollo-federation/src/schema/mod.rs +++ b/apollo-federation/src/schema/mod.rs @@ -4,8 +4,8 @@ use std::ops::Deref; use std::sync::Arc; use apollo_compiler::schema::ExtendedType; -use apollo_compiler::schema::Name; use apollo_compiler::validation::Valid; +use apollo_compiler::Name; use apollo_compiler::Schema; use indexmap::IndexSet; use 
referencer::Referencers; @@ -135,6 +135,12 @@ impl FederationSchema { self.get_type(type_name).ok() } + /// Return the possible runtime types for a definition. + /// + /// For a union, the possible runtime types are its members. + /// For an interface, the possible runtime types are its implementers. + /// + /// Note this always allocates a set for the result. Avoid calling it frequently. pub(crate) fn possible_runtime_types( &self, composite_type_definition_position: CompositeTypeDefinitionPosition, diff --git a/apollo-federation/src/schema/position.rs b/apollo-federation/src/schema/position.rs index 47ac1d2c5e..5ede64c640 100644 --- a/apollo-federation/src/schema/position.rs +++ b/apollo-federation/src/schema/position.rs @@ -16,11 +16,11 @@ use apollo_compiler::schema::FieldDefinition; use apollo_compiler::schema::InputObjectType; use apollo_compiler::schema::InputValueDefinition; use apollo_compiler::schema::InterfaceType; -use apollo_compiler::schema::Name; use apollo_compiler::schema::ObjectType; use apollo_compiler::schema::ScalarType; use apollo_compiler::schema::SchemaDefinition; use apollo_compiler::schema::UnionType; +use apollo_compiler::Name; use apollo_compiler::Node; use apollo_compiler::Schema; use indexmap::IndexSet; @@ -46,6 +46,114 @@ use crate::schema::FederationSchema; pub(crate) trait Captures {} impl Captures for T {} +/// A zero-allocation error representation for position lookups, +/// because many of these errors are actually immediately discarded. +/// +/// This type does still incur a few atomic refcount increments/decrements. +/// Maybe that could be improved in the future by borrowing from the position values, +/// if necessary. 
+#[derive(Debug, thiserror::Error)] +pub(crate) enum PositionLookupError { + #[error("Schema has no directive `{0}`")] + DirectiveMissing(DirectiveDefinitionPosition), + #[error("Schema has no type `{0}`")] + TypeMissing(Name), + #[error("Schema type `{0}` is not {1}")] + TypeWrongKind(Name, &'static str), + #[error("{0} type `{1}` has no field `{2}`")] + MissingField(&'static str, Name, Name), + #[error("Directive `{}` has no argument `{}`", .0.directive_name, .0.argument_name)] + MissingDirectiveArgument(DirectiveArgumentDefinitionPosition), + #[error("{0} `{1}.{2}` has no argument `{3}`")] + MissingFieldArgument(&'static str, Name, Name, Name), + #[error("Enum type `{}` has no value `{}`", .0.type_name, .0.value_name)] + MissingValue(EnumValueDefinitionPosition), + #[error("Cannot mutate reserved {0} `{1}.{2}`")] + MutateReservedField(&'static str, Name, Name), +} + +impl From for FederationError { + fn from(value: PositionLookupError) -> Self { + FederationError::internal(value.to_string()) + } +} + +/// The error type returned when a position conversion fails. +#[derive(Debug, thiserror::Error)] +#[error("Type `{actual}` was unexpectedly not {expected}")] +pub(crate) struct PositionConvertError { + actual: T, + expected: &'static str, +} + +impl From> for FederationError { + fn from(value: PositionConvertError) -> Self { + FederationError::internal(value.to_string()) + } +} + +/// To declare a conversion for a `Position::Branch(T) -> T`: +/// ```no_compile +/// fallible_conversions!(TypeDefinition::Scalar -> ScalarTypeDefinition); +/// ``` +/// +/// To declare a conversion from one enum to another, with a different set of branches: +/// ```no_compile +/// fallible_conversions!(TypeDefinition::{Scalar, Enum, InputObject} -> InputObjectTypeDefinition) +/// ``` +macro_rules! 
fallible_conversions { + ( $from:ident :: $branch:ident -> $to:ident ) => { + impl TryFrom<$from> for $to { + type Error = PositionConvertError<$from>; + + fn try_from(value: $from) -> Result { + match value { + $from::$branch(value) => Ok(value), + _ => Err(PositionConvertError { + actual: value, + expected: $to::EXPECTED, + }), + } + } + } + }; + ( $from:ident :: { $($branch:ident),+ } -> $to:ident ) => { + impl TryFrom<$from> for $to { + type Error = PositionConvertError<$from>; + + fn try_from(value: $from) -> Result { + match value { + $( + $from::$branch(value) => Ok($to::$branch(value)), + )+ + _ => Err(PositionConvertError { + actual: value, + expected: $to::EXPECTED, + }), + } + } + } + } +} + +/// To declare a conversion from a type to a superset type: +/// ```no_compile +/// infallible_conversions!(InputObjectTypeDefinition::{Scalar, Enum, InputObject} -> TypeDefinition) +/// ``` +macro_rules! infallible_conversions { + ( $from:ident :: { $($branch:ident),+ } -> $to:ident ) => { + impl From<$from> for $to { + fn from(value: $from) -> Self { + match value { + $( + $from::$branch(value) => $to::$branch(value) + ),+ + } + } + } + } +} + #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum TypeDefinitionPosition { Scalar(ScalarTypeDefinitionPosition), @@ -70,6 +178,15 @@ impl Debug for TypeDefinitionPosition { } impl TypeDefinitionPosition { + pub(crate) fn is_composite_type(&self) -> bool { + matches!( + self, + TypeDefinitionPosition::Object(_) + | TypeDefinitionPosition::Interface(_) + | TypeDefinitionPosition::Union(_) + ) + } + pub(crate) fn type_name(&self) -> &Name { match self { TypeDefinitionPosition::Scalar(type_) => &type_.type_name, @@ -81,14 +198,26 @@ impl TypeDefinitionPosition { } } + fn describe(&self) -> &'static str { + match self { + TypeDefinitionPosition::Scalar(_) => ScalarTypeDefinitionPosition::EXPECTED, + TypeDefinitionPosition::Object(_) => ObjectTypeDefinitionPosition::EXPECTED, + 
TypeDefinitionPosition::Interface(_) => InterfaceTypeDefinitionPosition::EXPECTED, + TypeDefinitionPosition::Union(_) => UnionTypeDefinitionPosition::EXPECTED, + TypeDefinitionPosition::Enum(_) => EnumTypeDefinitionPosition::EXPECTED, + TypeDefinitionPosition::InputObject(_) => InputObjectTypeDefinitionPosition::EXPECTED, + } + } + pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema ExtendedType, FederationError> { + ) -> Result<&'schema ExtendedType, PositionLookupError> { + let name = self.type_name(); let ty = schema .types - .get(self.type_name()) - .ok_or_else(|| FederationError::internal(format!(r#"Schema has no type "{self}""#)))?; + .get(name) + .ok_or_else(|| PositionLookupError::TypeMissing(name.clone()))?; match (ty, self) { (ExtendedType::Scalar(_), TypeDefinitionPosition::Scalar(_)) | (ExtendedType::Object(_), TypeDefinitionPosition::Object(_)) @@ -96,9 +225,10 @@ impl TypeDefinitionPosition { | (ExtendedType::Union(_), TypeDefinitionPosition::Union(_)) | (ExtendedType::Enum(_), TypeDefinitionPosition::Enum(_)) | (ExtendedType::InputObject(_), TypeDefinitionPosition::InputObject(_)) => Ok(ty), - _ => Err(FederationError::internal(format!( - r#"Schema type "{self}" is the wrong kind"# - ))), + _ => Err(PositionLookupError::TypeWrongKind( + name.clone(), + self.describe(), + )), } } @@ -110,123 +240,17 @@ impl TypeDefinitionPosition { } } -impl TryFrom for ScalarTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Scalar(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a scalar type"# - ))), - } - } -} - -impl TryFrom for ObjectTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Object(value) => Ok(value), - _ => Err(FederationError::internal(format!( - 
r#"Type "{value}" was unexpectedly not an object type"# - ))), - } - } -} - -impl TryFrom for InterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Interface(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an interface type"# - ))), - } - } -} - -impl TryFrom for UnionTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Union(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a union type"# - ))), - } - } -} - -impl TryFrom for EnumTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Enum(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an enum type"# - ))), - } - } -} - -impl TryFrom for InputObjectTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::InputObject(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an input object type"# - ))), - } - } -} - -impl From for TypeDefinitionPosition { - fn from(value: OutputTypeDefinitionPosition) -> Self { - match value { - OutputTypeDefinitionPosition::Scalar(value) => value.into(), - OutputTypeDefinitionPosition::Object(value) => value.into(), - OutputTypeDefinitionPosition::Interface(value) => value.into(), - OutputTypeDefinitionPosition::Union(value) => value.into(), - OutputTypeDefinitionPosition::Enum(value) => value.into(), - } - } -} - -impl From for TypeDefinitionPosition { - fn from(value: CompositeTypeDefinitionPosition) -> Self { - match value { - 
CompositeTypeDefinitionPosition::Object(value) => value.into(), - CompositeTypeDefinitionPosition::Interface(value) => value.into(), - CompositeTypeDefinitionPosition::Union(value) => value.into(), - } - } -} +fallible_conversions!(TypeDefinitionPosition::Scalar -> ScalarTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::Object -> ObjectTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::Interface -> InterfaceTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::Union -> UnionTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::Enum -> EnumTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::InputObject -> InputObjectTypeDefinitionPosition); -impl From for TypeDefinitionPosition { - fn from(value: AbstractTypeDefinitionPosition) -> Self { - match value { - AbstractTypeDefinitionPosition::Interface(value) => value.into(), - AbstractTypeDefinitionPosition::Union(value) => value.into(), - } - } -} - -impl From for TypeDefinitionPosition { - fn from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Self { - match value { - ObjectOrInterfaceTypeDefinitionPosition::Object(value) => value.into(), - ObjectOrInterfaceTypeDefinitionPosition::Interface(value) => value.into(), - } - } -} +infallible_conversions!(OutputTypeDefinitionPosition::{Scalar, Object, Interface, Union, Enum} -> TypeDefinitionPosition); +infallible_conversions!(CompositeTypeDefinitionPosition::{Object, Interface, Union} -> TypeDefinitionPosition); +infallible_conversions!(AbstractTypeDefinitionPosition::{Interface, Union} -> TypeDefinitionPosition); +infallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::{Object, Interface} -> TypeDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum OutputTypeDefinitionPosition { @@ -250,6 +274,8 @@ impl Debug for OutputTypeDefinitionPosition { } impl OutputTypeDefinitionPosition { + const EXPECTED: 
&'static str = "an output type"; + pub(crate) fn type_name(&self) -> &Name { match self { OutputTypeDefinitionPosition::Scalar(type_) => &type_.type_name, @@ -260,23 +286,35 @@ impl OutputTypeDefinitionPosition { } } + fn describe(&self) -> &'static str { + match self { + OutputTypeDefinitionPosition::Scalar(_) => ScalarTypeDefinitionPosition::EXPECTED, + OutputTypeDefinitionPosition::Object(_) => ObjectTypeDefinitionPosition::EXPECTED, + OutputTypeDefinitionPosition::Interface(_) => InterfaceTypeDefinitionPosition::EXPECTED, + OutputTypeDefinitionPosition::Union(_) => UnionTypeDefinitionPosition::EXPECTED, + OutputTypeDefinitionPosition::Enum(_) => EnumTypeDefinitionPosition::EXPECTED, + } + } + pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema ExtendedType, FederationError> { + ) -> Result<&'schema ExtendedType, PositionLookupError> { + let name = self.type_name(); let ty = schema .types - .get(self.type_name()) - .ok_or_else(|| FederationError::internal(format!(r#"Schema has no type "{self}""#)))?; + .get(name) + .ok_or_else(|| PositionLookupError::TypeMissing(name.clone()))?; match (ty, self) { (ExtendedType::Scalar(_), OutputTypeDefinitionPosition::Scalar(_)) | (ExtendedType::Object(_), OutputTypeDefinitionPosition::Object(_)) | (ExtendedType::Interface(_), OutputTypeDefinitionPosition::Interface(_)) | (ExtendedType::Union(_), OutputTypeDefinitionPosition::Union(_)) | (ExtendedType::Enum(_), OutputTypeDefinitionPosition::Enum(_)) => Ok(ty), - _ => Err(FederationError::internal(format!( - r#"Schema type "{self}" is the wrong kind"# - ))), + _ => Err(PositionLookupError::TypeWrongKind( + name.clone(), + self.describe(), + )), } } @@ -288,115 +326,16 @@ impl OutputTypeDefinitionPosition { } } -impl TryFrom for ScalarTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Scalar(value) => Ok(value), - _ => 
Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a scalar type"# - ))), - } - } -} +fallible_conversions!(OutputTypeDefinitionPosition::Scalar -> ScalarTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::Object -> ObjectTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::Interface -> InterfaceTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::Union -> UnionTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::Enum -> EnumTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::{Scalar, Object, Interface, Enum, Union} -> OutputTypeDefinitionPosition); -impl TryFrom for ObjectTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Object(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object type"# - ))), - } - } -} - -impl TryFrom for InterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Interface(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an interface type"# - ))), - } - } -} - -impl TryFrom for UnionTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Union(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a union type"# - ))), - } - } -} - -impl TryFrom for EnumTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Enum(value) => Ok(value), - _ => 
Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an enum type"# - ))), - } - } -} - -impl TryFrom for OutputTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Scalar(value) => Ok(value.into()), - TypeDefinitionPosition::Object(value) => Ok(value.into()), - TypeDefinitionPosition::Interface(value) => Ok(value.into()), - TypeDefinitionPosition::Enum(value) => Ok(value.into()), - TypeDefinitionPosition::Union(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an output type"# - ))), - } - } -} - -impl From for OutputTypeDefinitionPosition { - fn from(value: CompositeTypeDefinitionPosition) -> Self { - match value { - CompositeTypeDefinitionPosition::Object(value) => value.into(), - CompositeTypeDefinitionPosition::Interface(value) => value.into(), - CompositeTypeDefinitionPosition::Union(value) => value.into(), - } - } -} - -impl From for OutputTypeDefinitionPosition { - fn from(value: AbstractTypeDefinitionPosition) -> Self { - match value { - AbstractTypeDefinitionPosition::Interface(value) => value.into(), - AbstractTypeDefinitionPosition::Union(value) => value.into(), - } - } -} - -impl From for OutputTypeDefinitionPosition { - fn from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Self { - match value { - ObjectOrInterfaceTypeDefinitionPosition::Object(value) => value.into(), - ObjectOrInterfaceTypeDefinitionPosition::Interface(value) => value.into(), - } - } -} +infallible_conversions!(CompositeTypeDefinitionPosition::{Object, Interface, Union} -> OutputTypeDefinitionPosition); +infallible_conversions!(AbstractTypeDefinitionPosition::{Interface, Union} -> OutputTypeDefinitionPosition); +infallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::{Object, Interface} -> OutputTypeDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, 
derive_more::Display)] pub(crate) enum CompositeTypeDefinitionPosition { @@ -416,6 +355,8 @@ impl Debug for CompositeTypeDefinitionPosition { } impl CompositeTypeDefinitionPosition { + const EXPECTED: &'static str = "a composite type"; + pub(crate) fn is_object_type(&self) -> bool { matches!(self, CompositeTypeDefinitionPosition::Object(_)) } @@ -440,6 +381,16 @@ impl CompositeTypeDefinitionPosition { } } + fn describe(&self) -> &'static str { + match self { + CompositeTypeDefinitionPosition::Object(_) => ObjectTypeDefinitionPosition::EXPECTED, + CompositeTypeDefinitionPosition::Interface(_) => { + InterfaceTypeDefinitionPosition::EXPECTED + } + CompositeTypeDefinitionPosition::Union(_) => UnionTypeDefinitionPosition::EXPECTED, + } + } + pub(crate) fn field( &self, field_name: Name, @@ -479,18 +430,20 @@ impl CompositeTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema ExtendedType, FederationError> { + ) -> Result<&'schema ExtendedType, PositionLookupError> { + let name = self.type_name(); let ty = schema .types - .get(self.type_name()) - .ok_or_else(|| FederationError::internal(format!(r#"Schema has no type "{self}""#)))?; + .get(name) + .ok_or_else(|| PositionLookupError::TypeMissing(name.clone()))?; match (ty, self) { (ExtendedType::Object(_), CompositeTypeDefinitionPosition::Object(_)) | (ExtendedType::Interface(_), CompositeTypeDefinitionPosition::Interface(_)) | (ExtendedType::Union(_), CompositeTypeDefinitionPosition::Union(_)) => Ok(ty), - _ => Err(FederationError::internal(format!( - r#"Schema type "{self}" is the wrong kind"# - ))), + _ => Err(PositionLookupError::TypeWrongKind( + name.clone(), + self.describe(), + )), } } @@ -502,92 +455,14 @@ impl CompositeTypeDefinitionPosition { } } -impl TryFrom for ObjectTypeDefinitionPosition { - type Error = FederationError; +fallible_conversions!(CompositeTypeDefinitionPosition::Object -> ObjectTypeDefinitionPosition); 
+fallible_conversions!(CompositeTypeDefinitionPosition::Interface -> InterfaceTypeDefinitionPosition); +fallible_conversions!(CompositeTypeDefinitionPosition::Union -> UnionTypeDefinitionPosition); - fn try_from(value: CompositeTypeDefinitionPosition) -> Result { - match value { - CompositeTypeDefinitionPosition::Object(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object type"# - ))), - } - } -} - -impl TryFrom for InterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: CompositeTypeDefinitionPosition) -> Result { - match value { - CompositeTypeDefinitionPosition::Interface(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an interface type"# - ))), - } - } -} - -impl TryFrom for UnionTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: CompositeTypeDefinitionPosition) -> Result { - match value { - CompositeTypeDefinitionPosition::Union(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a union type"# - ))), - } - } -} - -impl TryFrom for CompositeTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Object(value) => Ok(value.into()), - TypeDefinitionPosition::Interface(value) => Ok(value.into()), - TypeDefinitionPosition::Union(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a composite type"# - ))), - } - } -} - -impl TryFrom for CompositeTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Object(value) => Ok(value.into()), - OutputTypeDefinitionPosition::Interface(value) => Ok(value.into()), - OutputTypeDefinitionPosition::Union(value) => 
Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a composite type"# - ))), - } - } -} - -impl From for CompositeTypeDefinitionPosition { - fn from(value: AbstractTypeDefinitionPosition) -> Self { - match value { - AbstractTypeDefinitionPosition::Interface(value) => value.into(), - AbstractTypeDefinitionPosition::Union(value) => value.into(), - } - } -} - -impl From for CompositeTypeDefinitionPosition { - fn from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Self { - match value { - ObjectOrInterfaceTypeDefinitionPosition::Object(value) => value.into(), - ObjectOrInterfaceTypeDefinitionPosition::Interface(value) => value.into(), - } - } -} +fallible_conversions!(TypeDefinitionPosition::{Object, Interface, Union} -> CompositeTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::{Object, Interface, Union} -> CompositeTypeDefinitionPosition); +infallible_conversions!(AbstractTypeDefinitionPosition::{Interface, Union} -> CompositeTypeDefinitionPosition); +infallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::{Object, Interface} -> CompositeTypeDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum AbstractTypeDefinitionPosition { @@ -605,6 +480,8 @@ impl Debug for AbstractTypeDefinitionPosition { } impl AbstractTypeDefinitionPosition { + const EXPECTED: &'static str = "an abstract type"; + pub(crate) fn type_name(&self) -> &Name { match self { AbstractTypeDefinitionPosition::Interface(type_) => &type_.type_name, @@ -612,6 +489,15 @@ impl AbstractTypeDefinitionPosition { } } + fn describe(&self) -> &'static str { + match self { + AbstractTypeDefinitionPosition::Interface(_) => { + InterfaceTypeDefinitionPosition::EXPECTED + } + AbstractTypeDefinitionPosition::Union(_) => UnionTypeDefinitionPosition::EXPECTED, + } + } + pub(crate) fn field( &self, field_name: Name, @@ -647,17 +533,19 @@ impl 
AbstractTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema ExtendedType, FederationError> { + ) -> Result<&'schema ExtendedType, PositionLookupError> { + let name = self.type_name(); let ty = schema .types - .get(self.type_name()) - .ok_or_else(|| FederationError::internal(format!(r#"Schema has no type "{self}""#)))?; + .get(name) + .ok_or_else(|| PositionLookupError::TypeMissing(name.clone()))?; match (ty, self) { (ExtendedType::Interface(_), AbstractTypeDefinitionPosition::Interface(_)) | (ExtendedType::Union(_), AbstractTypeDefinitionPosition::Union(_)) => Ok(ty), - _ => Err(FederationError::internal(format!( - r#"Schema type "{self}" is the wrong kind"# - ))), + _ => Err(PositionLookupError::TypeWrongKind( + name.clone(), + self.describe(), + )), } } @@ -669,86 +557,12 @@ impl AbstractTypeDefinitionPosition { } } -impl TryFrom for InterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: AbstractTypeDefinitionPosition) -> Result { - match value { - AbstractTypeDefinitionPosition::Interface(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an interface type"# - ))), - } - } -} - -impl TryFrom for UnionTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: AbstractTypeDefinitionPosition) -> Result { - match value { - AbstractTypeDefinitionPosition::Union(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not a union type"# - ))), - } - } -} - -impl TryFrom for AbstractTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Interface(value) => Ok(value.into()), - TypeDefinitionPosition::Union(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an abstract type"# - ))), - } - } -} - 
-impl TryFrom for AbstractTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Interface(value) => Ok(value.into()), - OutputTypeDefinitionPosition::Union(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an abstract type"# - ))), - } - } -} - -impl TryFrom for AbstractTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: CompositeTypeDefinitionPosition) -> Result { - match value { - CompositeTypeDefinitionPosition::Interface(value) => Ok(value.into()), - CompositeTypeDefinitionPosition::Union(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an abstract type"# - ))), - } - } -} - -impl TryFrom for AbstractTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Result { - match value { - ObjectOrInterfaceTypeDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an abstract type"# - ))), - } - } -} +fallible_conversions!(AbstractTypeDefinitionPosition::Interface -> InterfaceTypeDefinitionPosition); +fallible_conversions!(AbstractTypeDefinitionPosition::Union -> UnionTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::{Interface, Union} -> AbstractTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::{Interface, Union} -> AbstractTypeDefinitionPosition); +fallible_conversions!(CompositeTypeDefinitionPosition::{Interface, Union} -> AbstractTypeDefinitionPosition); +fallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::{Interface} -> AbstractTypeDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum 
ObjectOrInterfaceTypeDefinitionPosition { @@ -766,6 +580,8 @@ impl Debug for ObjectOrInterfaceTypeDefinitionPosition { } impl ObjectOrInterfaceTypeDefinitionPosition { + const EXPECTED: &'static str = "an object/interface type"; + pub(crate) fn type_name(&self) -> &Name { match self { ObjectOrInterfaceTypeDefinitionPosition::Object(type_) => &type_.type_name, @@ -773,6 +589,17 @@ impl ObjectOrInterfaceTypeDefinitionPosition { } } + fn describe(&self) -> &'static str { + match self { + ObjectOrInterfaceTypeDefinitionPosition::Object(_) => { + ObjectTypeDefinitionPosition::EXPECTED + } + ObjectOrInterfaceTypeDefinitionPosition::Interface(_) => { + InterfaceTypeDefinitionPosition::EXPECTED + } + } + } + pub(crate) fn field(&self, field_name: Name) -> ObjectOrInterfaceFieldDefinitionPosition { match self { ObjectOrInterfaceTypeDefinitionPosition::Object(type_) => { @@ -815,19 +642,21 @@ impl ObjectOrInterfaceTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema ExtendedType, FederationError> { + ) -> Result<&'schema ExtendedType, PositionLookupError> { + let name = self.type_name(); let ty = schema .types - .get(self.type_name()) - .ok_or_else(|| FederationError::internal(format!(r#"Schema has no type "{self}""#)))?; + .get(name) + .ok_or_else(|| PositionLookupError::TypeMissing(name.clone()))?; match (ty, self) { (ExtendedType::Object(_), ObjectOrInterfaceTypeDefinitionPosition::Object(_)) | (ExtendedType::Interface(_), ObjectOrInterfaceTypeDefinitionPosition::Interface(_)) => { Ok(ty) } - _ => Err(FederationError::internal(format!( - r#"Schema type "{self}" is the wrong kind"# - ))), + _ => Err(PositionLookupError::TypeWrongKind( + name.clone(), + self.describe(), + )), } } @@ -839,86 +668,12 @@ impl ObjectOrInterfaceTypeDefinitionPosition { } } -impl TryFrom for ObjectTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Result { - match value 
{ - ObjectOrInterfaceTypeDefinitionPosition::Object(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object type"# - ))), - } - } -} - -impl TryFrom for InterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: ObjectOrInterfaceTypeDefinitionPosition) -> Result { - match value { - ObjectOrInterfaceTypeDefinitionPosition::Interface(value) => Ok(value), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an interface type"# - ))), - } - } -} - -impl TryFrom for ObjectOrInterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: TypeDefinitionPosition) -> Result { - match value { - TypeDefinitionPosition::Object(value) => Ok(value.into()), - TypeDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object/interface type"# - ))), - } - } -} - -impl TryFrom for ObjectOrInterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: OutputTypeDefinitionPosition) -> Result { - match value { - OutputTypeDefinitionPosition::Object(value) => Ok(value.into()), - OutputTypeDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object/interface type"# - ))), - } - } -} - -impl TryFrom for ObjectOrInterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn try_from(value: CompositeTypeDefinitionPosition) -> Result { - match value { - CompositeTypeDefinitionPosition::Object(value) => Ok(value.into()), - CompositeTypeDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object/interface type"# - ))), - } - } -} - -impl TryFrom for ObjectOrInterfaceTypeDefinitionPosition { - type Error = FederationError; - - fn 
try_from(value: AbstractTypeDefinitionPosition) -> Result { - match value { - AbstractTypeDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object/interface type"# - ))), - } - } -} +fallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::Object -> ObjectTypeDefinitionPosition); +fallible_conversions!(ObjectOrInterfaceTypeDefinitionPosition::Interface -> InterfaceTypeDefinitionPosition); +fallible_conversions!(TypeDefinitionPosition::{Object, Interface} -> ObjectOrInterfaceTypeDefinitionPosition); +fallible_conversions!(OutputTypeDefinitionPosition::{Object, Interface} -> ObjectOrInterfaceTypeDefinitionPosition); +fallible_conversions!(CompositeTypeDefinitionPosition::{Object, Interface} -> ObjectOrInterfaceTypeDefinitionPosition); +fallible_conversions!(AbstractTypeDefinitionPosition::{Interface} -> ObjectOrInterfaceTypeDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum FieldDefinitionPosition { @@ -969,7 +724,7 @@ impl FieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { match self { FieldDefinitionPosition::Object(field) => field.get(schema), FieldDefinitionPosition::Interface(field) => field.get(schema), @@ -985,14 +740,7 @@ impl FieldDefinitionPosition { } } -impl From for FieldDefinitionPosition { - fn from(value: ObjectOrInterfaceFieldDefinitionPosition) -> Self { - match value { - ObjectOrInterfaceFieldDefinitionPosition::Object(value) => value.into(), - ObjectOrInterfaceFieldDefinitionPosition::Interface(value) => value.into(), - } - } -} +infallible_conversions!(ObjectOrInterfaceFieldDefinitionPosition::{Object, Interface} -> FieldDefinitionPosition); #[derive(Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::Display)] pub(crate) enum 
ObjectOrInterfaceFieldDefinitionPosition { @@ -1010,6 +758,8 @@ impl Debug for ObjectOrInterfaceFieldDefinitionPosition { } impl ObjectOrInterfaceFieldDefinitionPosition { + const EXPECTED: &'static str = "an object/interface field"; + pub(crate) fn type_name(&self) -> &Name { match self { ObjectOrInterfaceFieldDefinitionPosition::Object(field) => &field.type_name, @@ -1038,7 +788,7 @@ impl ObjectOrInterfaceFieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { match self { ObjectOrInterfaceFieldDefinitionPosition::Object(field) => field.get(schema), ObjectOrInterfaceFieldDefinitionPosition::Interface(field) => field.get(schema), @@ -1084,19 +834,7 @@ impl ObjectOrInterfaceFieldDefinitionPosition { } } -impl TryFrom for ObjectOrInterfaceFieldDefinitionPosition { - type Error = FederationError; - - fn try_from(value: FieldDefinitionPosition) -> Result { - match value { - FieldDefinitionPosition::Object(value) => Ok(value.into()), - FieldDefinitionPosition::Interface(value) => Ok(value.into()), - _ => Err(FederationError::internal(format!( - r#"Type "{value}" was unexpectedly not an object/interface field"# - ))), - } - } -} +fallible_conversions!(FieldDefinitionPosition::{Object, Interface} -> ObjectOrInterfaceFieldDefinitionPosition); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct SchemaDefinitionPosition; @@ -1487,27 +1225,24 @@ pub(crate) struct ScalarTypeDefinitionPosition { } impl ScalarTypeDefinitionPosition { + const EXPECTED: &'static str = "a scalar type"; + pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| 
PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Scalar(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not a scalar", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -1522,24 +1257,19 @@ impl ScalarTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Scalar(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not a scalar", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -1808,6 +1538,8 @@ pub(crate) struct ObjectTypeDefinitionPosition { } impl ObjectTypeDefinitionPosition { + const EXPECTED: &'static str = "an object type"; + pub(crate) fn new(type_name: Name) -> Self { Self { type_name } } @@ -1851,24 +1583,19 @@ impl ObjectTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Object(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an 
object", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -1883,24 +1610,19 @@ impl ObjectTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Object(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an object", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -2341,20 +2063,18 @@ impl ObjectFieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { let parent = self.parent(); parent.get(schema)?; schema .type_field(&self.type_name, &self.field_name) .map_err(|_| { - SingleFederationError::Internal { - message: format!( - "Object type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Object", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -2368,24 +2088,23 @@ impl ObjectFieldDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Component, FederationError> { + ) -> Result<&'schema mut Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); if is_graphql_reserved_name(&self.field_name) { - return Err(SingleFederationError::Internal { - message: format!("Cannot mutate reserved object field \"{}\"", self), - } - 
.into()); + return Err(PositionLookupError::MutateReservedField( + "object field", + self.type_name.clone(), + self.field_name.clone(), + )); } type_.fields.get_mut(&self.field_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Object type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Object", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -2695,23 +2414,17 @@ impl ObjectFieldArgumentDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { - let parent = self.parent(); - let type_ = parent.get(schema)?; - - type_ - .arguments - .iter() - .find(|a| a.name == self.argument_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Object field \"{}\" has no argument \"{}\"", - parent, self.argument_name - ), - } - .into() - }) + ) -> Result<&'schema Node, PositionLookupError> { + let field = self.parent().get(schema)?; + + field.argument_by_name(&self.argument_name).ok_or_else(|| { + PositionLookupError::MissingFieldArgument( + "Object field", + self.type_name.clone(), + self.field_name.clone(), + self.argument_name.clone(), + ) + }) } pub(crate) fn try_get<'schema>( @@ -2724,7 +2437,7 @@ impl ObjectFieldArgumentDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); @@ -2733,13 +2446,12 @@ impl ObjectFieldArgumentDefinitionPosition { .iter_mut() .find(|a| a.name == self.argument_name) .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Object field \"{}\" has no argument \"{}\"", - parent, self.argument_name - ), - } - .into() + PositionLookupError::MissingFieldArgument( + "Object field", + self.type_name.clone(), + 
self.field_name.clone(), + self.argument_name.clone(), + ) }) } @@ -3007,6 +2719,8 @@ pub(crate) struct InterfaceTypeDefinitionPosition { } impl InterfaceTypeDefinitionPosition { + const EXPECTED: &'static str = "an interface type"; + pub(crate) fn new(type_name: Name) -> Self { Self { type_name } } @@ -3042,24 +2756,19 @@ impl InterfaceTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Interface(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an interface", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -3074,24 +2783,19 @@ impl InterfaceTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Interface(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an interface", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -3462,20 +3166,18 @@ impl InterfaceFieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> 
Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { let parent = self.parent(); parent.get(schema)?; schema .type_field(&self.type_name, &self.field_name) .map_err(|_| { - SingleFederationError::Internal { - message: format!( - "Interface type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Interface", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -3489,24 +3191,23 @@ impl InterfaceFieldDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Component, FederationError> { + ) -> Result<&'schema mut Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); if is_graphql_reserved_name(&self.field_name) { - return Err(SingleFederationError::Internal { - message: format!("Cannot mutate reserved interface field \"{}\"", self), - } - .into()); + return Err(PositionLookupError::MutateReservedField( + "interface field", + self.type_name.clone(), + self.field_name.clone(), + )); } type_.fields.get_mut(&self.field_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Interface type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Interface", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -3814,23 +3515,17 @@ impl InterfaceFieldArgumentDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { - let parent = self.parent(); - let type_ = parent.get(schema)?; - - type_ - .arguments - .iter() - .find(|a| a.name == self.argument_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Interface field \"{}\" has no argument \"{}\"", - parent, self.argument_name - ), - } - .into() - }) + ) -> Result<&'schema Node, PositionLookupError> { + let 
field = self.parent().get(schema)?; + + field.argument_by_name(&self.argument_name).ok_or_else(|| { + PositionLookupError::MissingFieldArgument( + "Interface field", + self.type_name.clone(), + self.field_name.clone(), + self.argument_name.clone(), + ) + }) } pub(crate) fn try_get<'schema>( @@ -3843,7 +3538,7 @@ impl InterfaceFieldArgumentDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); @@ -3852,13 +3547,12 @@ impl InterfaceFieldArgumentDefinitionPosition { .iter_mut() .find(|a| a.name == self.argument_name) .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Interface field \"{}\" has no argument \"{}\"", - parent, self.argument_name - ), - } - .into() + PositionLookupError::MissingFieldArgument( + "Interface field", + self.type_name.clone(), + self.field_name.clone(), + self.argument_name.clone(), + ) }) } @@ -4131,6 +3825,8 @@ pub(crate) struct UnionTypeDefinitionPosition { } impl UnionTypeDefinitionPosition { + const EXPECTED: &'static str = "a union type"; + pub(crate) fn new(type_name: Name) -> Self { Self { type_name } } @@ -4144,24 +3840,19 @@ impl UnionTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Union(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an union", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + 
self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -4176,24 +3867,19 @@ impl UnionTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Union(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an union", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -4515,21 +4201,18 @@ impl UnionTypenameFieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { let parent = self.parent(); parent.get(schema)?; schema .type_field(&self.type_name, self.field_name()) .map_err(|_| { - SingleFederationError::Internal { - message: format!( - "Union type \"{}\" has no field \"{}\"", - parent, - self.field_name() - ), - } - .into() + PositionLookupError::MissingField( + "Union", + self.type_name.clone(), + name!("__typename"), + ) }) } @@ -4586,6 +4269,8 @@ pub(crate) struct EnumTypeDefinitionPosition { } impl EnumTypeDefinitionPosition { + const EXPECTED: &'static str = "an enum type"; + pub(crate) fn value(&self, value_name: Name) -> EnumValueDefinitionPosition { EnumValueDefinitionPosition { type_name: self.type_name.clone(), @@ -4596,24 +4281,19 @@ impl EnumTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - 
.ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Enum(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an enum", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -4628,24 +4308,19 @@ impl EnumTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::Enum(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an enum", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -4929,19 +4604,14 @@ impl EnumValueDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.get(schema)?; - type_.values.get(&self.value_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Enum type \"{}\" has no value \"{}\"", - parent, self.value_name - ), - } - .into() - }) + type_ + .values + .get(&self.value_name) + .ok_or_else(|| PositionLookupError::MissingValue(self.clone())) } pub(crate) fn try_get<'schema>( @@ -4954,19 +4624,14 @@ impl EnumValueDefinitionPosition { fn make_mut<'schema>( &self, schema: 
&'schema mut Schema, - ) -> Result<&'schema mut Component, FederationError> { + ) -> Result<&'schema mut Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); - type_.values.get_mut(&self.value_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Enum type \"{}\" has no value \"{}\"", - parent, self.value_name - ), - } - .into() - }) + type_ + .values + .get_mut(&self.value_name) + .ok_or_else(|| PositionLookupError::MissingValue(self.clone())) } fn try_make_mut<'schema>( @@ -5171,6 +4836,8 @@ pub(crate) struct InputObjectTypeDefinitionPosition { } impl InputObjectTypeDefinitionPosition { + const EXPECTED: &'static str = "an input object type"; + pub(crate) fn field(&self, field_name: Name) -> InputObjectFieldDefinitionPosition { InputObjectFieldDefinitionPosition { type_name: self.type_name.clone(), @@ -5181,24 +4848,19 @@ impl InputObjectTypeDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .types .get(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::InputObject(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an input object", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -5213,24 +4875,19 @@ impl InputObjectTypeDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .types .get_mut(&self.type_name) - .ok_or_else(|| { - SingleFederationError::Internal 
{ - message: format!("Schema has no type \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::TypeMissing(self.type_name.clone())) .and_then(|type_| { if let ExtendedType::InputObject(type_) = type_ { Ok(type_) } else { - Err(SingleFederationError::Internal { - message: format!("Schema type \"{}\" was not an input object", self), - } - .into()) + Err(PositionLookupError::TypeWrongKind( + self.type_name.clone(), + Self::EXPECTED, + )) } }) } @@ -5512,18 +5169,16 @@ impl InputObjectFieldDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Component, FederationError> { + ) -> Result<&'schema Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.get(schema)?; type_.fields.get(&self.field_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Input object type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Input object", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -5537,18 +5192,16 @@ impl InputObjectFieldDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Component, FederationError> { + ) -> Result<&'schema mut Component, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); type_.fields.get_mut(&self.field_name).ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Input object type \"{}\" has no field \"{}\"", - parent, self.field_name - ), - } - .into() + PositionLookupError::MissingField( + "Input object", + self.type_name.clone(), + self.field_name.clone(), + ) }) } @@ -5828,16 +5481,11 @@ impl DirectiveDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { schema .directive_definitions .get(&self.directive_name) 
- .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no directive \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::DirectiveMissing(self.clone())) } pub(crate) fn try_get<'schema>( @@ -5850,16 +5498,11 @@ impl DirectiveDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { schema .directive_definitions .get_mut(&self.directive_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!("Schema has no directive \"{}\"", self), - } - .into() - }) + .ok_or_else(|| PositionLookupError::DirectiveMissing(self.clone())) } fn try_make_mut<'schema>( @@ -6058,7 +5701,7 @@ impl DirectiveArgumentDefinitionPosition { pub(crate) fn get<'schema>( &self, schema: &'schema Schema, - ) -> Result<&'schema Node, FederationError> { + ) -> Result<&'schema Node, PositionLookupError> { let parent = self.parent(); let type_ = parent.get(schema)?; @@ -6066,15 +5709,7 @@ impl DirectiveArgumentDefinitionPosition { .arguments .iter() .find(|a| a.name == self.argument_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Directive \"{}\" has no argument \"{}\"", - parent, self.argument_name - ), - } - .into() - }) + .ok_or_else(|| PositionLookupError::MissingDirectiveArgument(self.clone())) } pub(crate) fn try_get<'schema>( @@ -6087,7 +5722,7 @@ impl DirectiveArgumentDefinitionPosition { fn make_mut<'schema>( &self, schema: &'schema mut Schema, - ) -> Result<&'schema mut Node, FederationError> { + ) -> Result<&'schema mut Node, PositionLookupError> { let parent = self.parent(); let type_ = parent.make_mut(schema)?.make_mut(); @@ -6095,15 +5730,7 @@ impl DirectiveArgumentDefinitionPosition { .arguments .iter_mut() .find(|a| a.name == self.argument_name) - .ok_or_else(|| { - SingleFederationError::Internal { - message: format!( - "Directive \"{}\" has no argument 
\"{}\"", - parent, self.argument_name - ), - } - .into() - }) + .ok_or_else(|| PositionLookupError::MissingDirectiveArgument(self.clone())) } fn try_make_mut<'schema>( diff --git a/apollo-federation/src/schema/referencer.rs b/apollo-federation/src/schema/referencer.rs index a0855eec8e..eee5c395ba 100644 --- a/apollo-federation/src/schema/referencer.rs +++ b/apollo-federation/src/schema/referencer.rs @@ -1,4 +1,4 @@ -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use indexmap::IndexMap; use indexmap::IndexSet; diff --git a/apollo-federation/src/schema/subgraph_metadata.rs b/apollo-federation/src/schema/subgraph_metadata.rs index 3bca3855ca..a0352cac48 100644 --- a/apollo-federation/src/schema/subgraph_metadata.rs +++ b/apollo-federation/src/schema/subgraph_metadata.rs @@ -281,7 +281,7 @@ impl ExternalMetadata { ) -> Result { for selection in selection_set.selections.values() { if let Selection::Field(field_selection) = selection { - if self.is_external(&field_selection.field.data().field_position)? { + if self.is_external(&field_selection.field.field_position)? 
{ return Ok(true); } } diff --git a/apollo-federation/src/schema/type_and_directive_specification.rs b/apollo-federation/src/schema/type_and_directive_specification.rs index e0629c101f..397e713aed 100644 --- a/apollo-federation/src/schema/type_and_directive_specification.rs +++ b/apollo-federation/src/schema/type_and_directive_specification.rs @@ -8,11 +8,11 @@ use apollo_compiler::schema::EnumType; use apollo_compiler::schema::EnumValueDefinition; use apollo_compiler::schema::ExtendedType; use apollo_compiler::schema::InputValueDefinition; -use apollo_compiler::schema::Name; use apollo_compiler::schema::ObjectType; use apollo_compiler::schema::ScalarType; use apollo_compiler::schema::Type; use apollo_compiler::schema::UnionType; +use apollo_compiler::Name; use apollo_compiler::Node; use indexmap::IndexMap; use indexmap::IndexSet; diff --git a/apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap b/apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap new file mode 100644 index 0000000000..1b63df9134 --- /dev/null +++ b/apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap @@ -0,0 +1,81 @@ +--- +source: apollo-federation/src/merge.rs +expression: schema.serialize() +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +enum join__Graph { + BASIC_1 @join__graph(name: "basic_1", url: "") + BASIC_2 @join__graph(name: "basic_2", url: "") +} + +type Query @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + i: I @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + u: U @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + f(x: ID, y: YInput): T @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) +} + +interface I @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + id: ID! +} + +type A implements I @join__type(graph: BASIC_1) @join__implements(graph: BASIC_1, interface: "I") @join__type(graph: BASIC_2) @join__implements(graph: BASIC_2, interface: "I") { + id: ID! @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + a: S @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) +} + +type B implements I @join__type(graph: BASIC_1) @join__implements(graph: BASIC_1, interface: "I") @join__type(graph: BASIC_2) @join__implements(graph: BASIC_2, interface: "I") { + id: ID! 
@join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + b: E @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) +} + +union U @join__type(graph: BASIC_1) @join__unionMember(graph: BASIC_1, member: "A") @join__unionMember(graph: BASIC_1, member: "B") @join__type(graph: BASIC_2) @join__unionMember(graph: BASIC_2, member: "A") @join__unionMember(graph: BASIC_2, member: "B") = A | B + +scalar S @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) + +enum E @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + A @join__enumValue(graph: BASIC_1) @join__enumValue(graph: BASIC_2) + B @join__enumValue(graph: BASIC_1) @join__enumValue(graph: BASIC_2) +} + +type T @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + x: ID @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + y: Y @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) +} + +type Y @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + z: ID @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) +} + +input YInput @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + z: ID +} diff --git a/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap b/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap index 823e0e8ebe..c2efe7b3f4 100644 --- a/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap +++ b/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap @@ -12,7 +12,7 @@ directive @join__graph(name: String!, url: String!) on ENUM_VALUE directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION -directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT diff --git a/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql b/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql new file mode 100644 index 0000000000..e9a3099331 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql @@ -0,0 +1,41 @@ +type Query { + i: I + u: U + f(x: ID, y: YInput): T +} + +interface I { + id: ID! +} + +type A implements I { + id: ID! + a: S +} + +type B implements I { + id: ID! + b: E +} + +union U = A | B + +scalar S + +enum E { + A + B +} + +type T { + x: ID + y: Y +} + +type Y { + z: ID +} + +input YInput { + z: ID +} diff --git a/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql b/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql new file mode 100644 index 0000000000..e9a3099331 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql @@ -0,0 +1,41 @@ +type Query { + i: I + u: U + f(x: ID, y: YInput): T +} + +interface I { + id: ID! +} + +type A implements I { + id: ID! + a: S +} + +type B implements I { + id: ID! 
+ b: E +} + +union U = A | B + +scalar S + +enum E { + A + B +} + +type T { + x: ID + y: Y +} + +type Y { + z: ID +} + +input YInput { + z: ID +} diff --git a/apollo-federation/src/sources/connect/json_selection/graphql.rs b/apollo-federation/src/sources/connect/json_selection/graphql.rs index e61cb3a7ce..b67935c05f 100644 --- a/apollo-federation/src/sources/connect/json_selection/graphql.rs +++ b/apollo-federation/src/sources/connect/json_selection/graphql.rs @@ -15,6 +15,7 @@ use apollo_compiler::ast; use apollo_compiler::ast::Selection as GraphQLSelection; +use apollo_compiler::Name; use super::parser::JSONSelection; use super::parser::NamedSelection; @@ -52,7 +53,7 @@ fn new_field(name: String, selection: Option) -> GraphQLSelec GraphQLSelection::Field( apollo_compiler::ast::Field { alias: None, - name: ast::Name::new_unchecked(name.into()), + name: Name::new_unchecked(&name), arguments: Default::default(), directives: Default::default(), selection_set: selection diff --git a/apollo-federation/src/subgraph/database.rs b/apollo-federation/src/subgraph/database.rs index 2745c1e8c9..e4f8776fd6 100644 --- a/apollo-federation/src/subgraph/database.rs +++ b/apollo-federation/src/subgraph/database.rs @@ -16,7 +16,7 @@ use std::sync::Arc; use apollo_compiler::executable::Directive; use apollo_compiler::executable::SelectionSet; use apollo_compiler::name; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Schema; use crate::link::database::links_metadata; diff --git a/apollo-federation/src/subgraph/spec.rs b/apollo-federation/src/subgraph/spec.rs index 84651379d5..75dc226a4b 100644 --- a/apollo-federation/src/subgraph/spec.rs +++ b/apollo-federation/src/subgraph/spec.rs @@ -7,8 +7,6 @@ use apollo_compiler::ast::DirectiveLocation; use apollo_compiler::ast::EnumValueDefinition; use apollo_compiler::ast::FieldDefinition; use apollo_compiler::ast::InputValueDefinition; -use apollo_compiler::ast::InvalidNameError; -use 
apollo_compiler::ast::Name; use apollo_compiler::ast::Type; use apollo_compiler::ast::Value; use apollo_compiler::name; @@ -20,8 +18,9 @@ use apollo_compiler::schema::ObjectType; use apollo_compiler::schema::ScalarType; use apollo_compiler::schema::UnionType; use apollo_compiler::ty; +use apollo_compiler::InvalidNameError; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use indexmap::IndexMap; use indexmap::IndexSet; use lazy_static::lazy_static; @@ -142,7 +141,7 @@ pub enum FederationSpecError { impl From for FederationSpecError { fn from(err: InvalidNameError) -> Self { - FederationSpecError::InvalidGraphQLName(format!("Invalid GraphQL name \"{}\"", err.0)) + FederationSpecError::InvalidGraphQLName(format!("Invalid GraphQL name \"{}\"", err.name)) } } @@ -201,8 +200,7 @@ macro_rules! applied_specification { if let Some(spec_alias) = &self.link.spec_alias { applied_link_directive.arguments.push(Argument { name: name!("as"), - // TODO `spec_alias.into()` when https://github.com/apollographql/apollo-rs/pull/773 is released - value: Value::String(>::as_ref(&spec_alias).clone()).into(), + value: spec_alias.as_str().into(), }.into()) } if let Some(purpose) = &self.link.purpose { @@ -276,7 +274,7 @@ impl FederationSpecDefinitions { name: &Name, alias: &Option, ) -> Result { - // TODO: NodeStr is not annotated with #[derive(PartialEq, Eq)], so Clippy warns it should + // TODO: `Name` has custom `PartialEq` and `Eq` impl so Clippy warns it should // not be used in pattern matching (as some future Rust version will likely turn this into // a hard error). We resort instead to indexing into a static IndexMap to get an enum, which // can be used in a match. 
diff --git a/apollo-federation/tests/api_schema.rs b/apollo-federation/tests/api_schema.rs index 7dffa92fe5..57d6961cfb 100644 --- a/apollo-federation/tests/api_schema.rs +++ b/apollo-federation/tests/api_schema.rs @@ -74,13 +74,9 @@ fn inaccessible_types_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Query` is @inaccessible but is the query root type, which must be in the API schema. - - Type `Object` is @inaccessible but is referenced by `Referencer1.someField`, which is in the API schema. - - Type `Object` is @inaccessible but is referenced by `Referencer2.someField`, which is in the API schema. - - Type `Referencer3` is in the API schema but all of its members are @inaccessible. "###); } @@ -202,9 +198,7 @@ fn inaccessible_interface_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Interface` is @inaccessible but is referenced by `Referencer1.someField`, which is in the API schema. - - Type `Interface` is @inaccessible but is referenced by `Referencer2.someField`, which is in the API schema. "###); } @@ -319,9 +313,7 @@ fn inaccessible_union_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Union` is @inaccessible but is referenced by `Referencer1.someField`, which is in the API schema. - - Type `Union` is @inaccessible but is referenced by `Referencer2.someField`, which is in the API schema. "###); } @@ -422,13 +414,9 @@ fn inaccessible_input_object_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `InputObject` is @inaccessible but is referenced by `Referencer3.someField`, which is in the API schema. - - Type `InputObject` is @inaccessible but is referenced by `Referencer1.someField(someArg:)`, which is in the API schema. 
- - Type `InputObject` is @inaccessible but is referenced by `Referencer2.someField(someArg:)`, which is in the API schema. - - Type `InputObject` is @inaccessible but is referenced by `@referencer4(someArg:)`, which is in the API schema. "###); } @@ -597,17 +585,11 @@ fn inaccessible_enum_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Enum` is @inaccessible but is referenced by `Referencer1.somefield`, which is in the API schema. - - Type `Enum` is @inaccessible but is referenced by `Referencer2.somefield`, which is in the API schema. - - Type `Enum` is @inaccessible but is referenced by `Referencer5.someField`, which is in the API schema. - - Type `Enum` is @inaccessible but is referenced by `Referencer3.someField(someArg:)`, which is in the API schema. - - Type `Enum` is @inaccessible but is referenced by `Referencer4.someField(someArg:)`, which is in the API schema. - - Type `Enum` is @inaccessible but is referenced by `@referencer6(someArg:)`, which is in the API schema. "###); } @@ -805,17 +787,11 @@ fn inaccessible_scalar_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Scalar` is @inaccessible but is referenced by `Referencer1.somefield`, which is in the API schema. - - Type `Scalar` is @inaccessible but is referenced by `Referencer2.somefield`, which is in the API schema. - - Type `Scalar` is @inaccessible but is referenced by `Referencer5.someField`, which is in the API schema. - - Type `Scalar` is @inaccessible but is referenced by `Referencer3.someField(someArg:)`, which is in the API schema. - - Type `Scalar` is @inaccessible but is referenced by `Referencer4.someField(someArg:)`, which is in the API schema. - - Type `Scalar` is @inaccessible but is referenced by `@referencer6(someArg:)`, which is in the API schema. 
"###); } @@ -1010,15 +986,10 @@ fn inaccessible_object_field_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Type `Query` is in the API schema but all of its members are @inaccessible. - - Type `Mutation` is in the API schema but all of its members are @inaccessible. - - Type `Subscription` is in the API schema but all of its members are @inaccessible. - - Field `Object.privateField` is @inaccessible but implements the interface field `Referencer1.privateField`, which is in the API schema. - - Type `Referencer2` is in the API schema but all of its members are @inaccessible. "###); } @@ -1133,9 +1104,7 @@ fn inaccessible_interface_field_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Field `Interface.privateField` is @inaccessible but implements the interface field `Referencer1.privateField`, which is in the API schema. - - Type `Referencer2` is in the API schema but all of its members are @inaccessible. "###); } @@ -1220,9 +1189,7 @@ fn inaccessible_object_field_arguments_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Argument `Object.someField(privateArg:)` is @inaccessible but implements the interface argument `Referencer1.someField(privateArg:)` which is in the API schema. - - Argument `ObjectRequired.someField(privateArg:)` is @inaccessible but is a required argument of its field. "###); } @@ -1369,13 +1336,9 @@ fn inaccessible_interface_field_arguments_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Argument `Interface.someField(privateArg:)` is @inaccessible but implements the interface argument `Referencer1.someField(privateArg:)` which is in the API schema. - - Argument `InterfaceRequired.someField(privateArg:)` is @inaccessible but is a required argument of its field. 
- - Argument `Interface.someField(privateArg:)` is @inaccessible but is implemented by the argument `Referencer2.someField(privateArg:)` which is in the API schema. - - Argument `Interface.someField(privateArg:)` is @inaccessible but is implemented by the argument `Referencer3.someField(privateArg:)` which is in the API schema. "###); } @@ -1547,17 +1510,11 @@ fn inaccessible_input_object_fields_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Input field `InputObject.privateField` is @inaccessible but is used in the default value of `Referencer1.someField(someArg:)`, which is in the API schema. - - Input field `InputObject.privateField` is @inaccessible but is used in the default value of `Referencer2.someField(someArg:)`, which is in the API schema. - - Input field `InputObject.privateField` is @inaccessible but is used in the default value of `Referencer3.someField`, which is in the API schema. - - Type `Referencer5` is in the API schema but all of its input fields are @inaccessible. - - Input field `InputObjectRequired` is @inaccessible but is a required input field of its type. - - Input field `InputObject.privateField` is @inaccessible but is used in the default value of `@referencer4(someArg:)`, which is in the API schema. "###); } @@ -1756,15 +1713,10 @@ fn inaccessible_enum_values_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Referencer1.someField(someArg:)`, which is in the API schema. - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Referencer2.someField(someArg:)`, which is in the API schema. - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Referencer3.someField`, which is in the API schema. - - Type `Referencer5` is in the API schema but all of its members are @inaccessible. 
- - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `@referencer4(someArg:)`, which is in the API schema. "###); } @@ -1953,11 +1905,8 @@ fn inaccessible_complex_default_values() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Input field `NestedInputObject.privateField` is @inaccessible but is used in the default value of `Query.someField(arg1:)`, which is in the API schema. - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Query.someField(arg1:)`, which is in the API schema. - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Query.someField(arg1:)`, which is in the API schema. "###); } @@ -1983,7 +1932,6 @@ fn inaccessible_enum_value_as_string() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Enum value `Enum.PRIVATE_VALUE` is @inaccessible but is used in the default value of `Query.someField(arg1:)`, which is in the API schema. "###); } @@ -2010,7 +1958,6 @@ fn inaccessible_directive_arguments_with_accessible_references() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Argument `@directiveRequired(privateArg:)` is @inaccessible but is a required argument of its directive. 
"###); } @@ -2065,9 +2012,7 @@ fn inaccessible_directive_on_schema_elements() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Directive `@foo` cannot use @inaccessible because it may be applied to these type-system locations: OBJECT - - Directive `@bar` cannot use @inaccessible because it may be applied to these type-system locations: SCHEMA "###); } @@ -2094,7 +2039,6 @@ fn inaccessible_on_builtins() { // Note this is different from the JS implementation insta::assert_snapshot!(errors, @r###" The following errors occurred: - - built-in scalar definitions must be omitted "###); } @@ -2197,35 +2141,20 @@ fn inaccessible_on_imported_elements() { insta::assert_snapshot!(errors, @r###" The following errors occurred: - - Core feature type `link__Purpose` cannot use @inaccessible. - - Core feature type `foo__Object1` cannot use @inaccessible. - - Core feature type `foo__Object2` cannot use @inaccessible. - - Core feature type `foo__Object3` cannot use @inaccessible. - - Core feature type `foo__Interface1` cannot use @inaccessible. - - Core feature type `foo__Interface2` cannot use @inaccessible. - - Core feature type `foo__Interface3` cannot use @inaccessible. - - Core feature type `foo__Union` cannot use @inaccessible. - - Core feature type `foo__InputObject1` cannot use @inaccessible. - - Core feature type `foo__InputObject2` cannot use @inaccessible. - - Core feature type `foo__Enum1` cannot use @inaccessible. - - Core feature type `foo__Enum2` cannot use @inaccessible. - - Core feature type `foo__Scalar` cannot use @inaccessible. - - Core feature directive `@link` cannot use @inaccessible. - - Core feature directive `@foo` cannot use @inaccessible. 
"###); } diff --git a/apollo-federation/tests/query_plan/build_query_plan_support.rs b/apollo-federation/tests/query_plan/build_query_plan_support.rs index d0d2a60d63..8f594ef271 100644 --- a/apollo-federation/tests/query_plan/build_query_plan_support.rs +++ b/apollo-federation/tests/query_plan/build_query_plan_support.rs @@ -211,12 +211,12 @@ pub(crate) fn find_fetch_nodes_for_subgraph<'plan>( if let Some(node) = &plan.node { match node { TopLevelPlanNode::Fetch(inner) => { - if inner.subgraph_name == subgraph_name { + if inner.subgraph_name.as_ref() == subgraph_name { fetch_nodes.push(&**inner) } } TopLevelPlanNode::Subscription(inner) => { - if inner.primary.subgraph_name == subgraph_name { + if inner.primary.subgraph_name.as_ref() == subgraph_name { fetch_nodes.push(&inner.primary); } visit_node(subgraph_name, &mut fetch_nodes, inner.rest.as_deref()) @@ -261,7 +261,7 @@ pub(crate) fn find_fetch_nodes_for_subgraph<'plan>( let Some(node) = node else { return }; match node { PlanNode::Fetch(inner) => { - if inner.subgraph_name == subgraph_name { + if inner.subgraph_name.as_ref() == subgraph_name { fetch_nodes.push(&**inner) } } diff --git a/apollo-federation/tests/query_plan/build_query_plan_tests/fetch_operation_names.rs b/apollo-federation/tests/query_plan/build_query_plan_tests/fetch_operation_names.rs index 76cccfe5a2..5ef9d53de5 100644 --- a/apollo-federation/tests/query_plan/build_query_plan_tests/fetch_operation_names.rs +++ b/apollo-federation/tests/query_plan/build_query_plan_tests/fetch_operation_names.rs @@ -262,7 +262,7 @@ fn correctly_handle_case_where_there_is_too_many_plans_to_consider() { let Some(TopLevelPlanNode::Fetch(fetch)) = &plan.node else { panic!() }; - assert_eq!(fetch.subgraph_name, "S1"); + assert_eq!(fetch.subgraph_name.as_ref(), "S1"); assert!(fetch.requires.is_none()); assert!(fetch.operation_document.fragments.is_empty()); let mut operations = fetch.operation_document.all_operations(); diff --git 
a/apollo-federation/tests/query_plan/build_query_plan_tests/interface_object.rs b/apollo-federation/tests/query_plan/build_query_plan_tests/interface_object.rs index e0fbca7cdd..5a99022d4a 100644 --- a/apollo-federation/tests/query_plan/build_query_plan_tests/interface_object.rs +++ b/apollo-federation/tests/query_plan/build_query_plan_tests/interface_object.rs @@ -1,6 +1,5 @@ use std::ops::Deref; -use apollo_compiler::NodeStr; use apollo_federation::query_plan::FetchDataPathElement; use apollo_federation::query_plan::FetchDataRewrite; @@ -358,7 +357,7 @@ fn can_use_a_key_on_an_interface_object_type_even_for_a_concrete_implementation( assert_eq!(v.path.len(), 1); match &v.path[0] { FetchDataPathElement::TypenameEquals(typename) => { - assert_eq!(typename, &NodeStr::new("A")) + assert_eq!(*typename, apollo_compiler::name!("A")) } _ => unreachable!("Expected FetchDataPathElement::TypenameEquals path"), } diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph-2.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph-2.snap index 25157aa3ec..346b319428 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph-2.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph-2.snap @@ -1,12 +1,14 @@ --- source: apollo-federation/tests/composition_tests.rs -expression: print_sdl(&supergraph.to_api_schema()) +expression: "print_sdl(supergraph.to_api_schema(Default::default()).unwrap().schema())" --- enum E { V1 V2 } +scalar Import + type Query { t: T } @@ -22,4 +24,3 @@ type T { } union U = S | T - diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap index 6ae57f2c0a..e4dcf8530b 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap +++ 
b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap @@ -8,7 +8,7 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE -directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION directive @join__graph(name: String!, url: String!) on ENUM_VALUE @@ -25,6 +25,8 @@ enum E @join__type(graph: SUBGRAPH2) { V2 @join__enumValue(graph: SUBGRAPH2) } +scalar Import @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) + type Query @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) { t: T @join__field(graph: SUBGRAPH1) } diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs-2.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs-2.snap index a29eec9523..9880a0ce1f 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs-2.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs-2.snap @@ -1,7 +1,9 @@ --- source: apollo-federation/tests/composition_tests.rs -expression: print_sdl(&supergraph.to_api_schema()) +expression: "print_sdl(supergraph.to_api_schema(Default::default()).unwrap().schema())" --- +scalar Import + type Product { sku: String! name: String! @@ -15,4 +17,3 @@ type User { name: String email: String! 
} - diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap index 5ad9aed39e..dfe41eb6ab 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap @@ -8,7 +8,7 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE -directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION directive @join__graph(name: String!, url: String!) on ENUM_VALUE @@ -20,6 +20,8 @@ directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA +scalar Import @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) + type Product @join__type(graph: SUBGRAPHA) { sku: String! @join__field(graph: SUBGRAPHA) name: String! 
@join__field(graph: SUBGRAPHA) diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions-2.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions-2.snap index 4f16ebe955..c9bf048f25 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions-2.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions-2.snap @@ -1,6 +1,6 @@ --- -source: tests/composition_tests.rs -expression: print_sdl(&supergraph.to_api_schema()) +source: apollo-federation/tests/composition_tests.rs +expression: "print_sdl(supergraph.to_api_schema(Default::default()).unwrap().schema())" --- """A cool schema""" schema { @@ -18,6 +18,8 @@ enum E { B } +scalar Import + """ Available queries Not much yet @@ -29,4 +31,3 @@ type Query { x: String!, ): String } - diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap index 5f5b33458d..59ae3be771 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap @@ -1,6 +1,6 @@ --- -source: tests/composition_tests.rs -expression: print_sdl(&supergraph.schema) +source: apollo-federation/tests/composition_tests.rs +expression: print_sdl(supergraph.schema.schema()) --- """A cool schema""" schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { @@ -12,7 +12,7 @@ directive @foo(url: String) on FIELD directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE -directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION directive @join__graph(name: String!, url: String!) on ENUM_VALUE @@ -32,6 +32,8 @@ enum E @join__type(graph: SUBGRAPH2) { B @join__enumValue(graph: SUBGRAPH2) } +scalar Import @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) + """ Available queries Not much yet @@ -61,4 +63,3 @@ enum link__Purpose { """EXECUTION features provide metadata necessary for operation execution.""" EXECUTION } - diff --git a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives-2.snap b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives-2.snap index 02e107db45..107a139852 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives-2.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives-2.snap @@ -1,7 +1,9 @@ --- source: apollo-federation/tests/composition_tests.rs -expression: print_sdl(&supergraph.to_api_schema()) +expression: "print_sdl(supergraph.to_api_schema(Default::default()).unwrap().schema())" --- +scalar Import + type Product { sku: String! name: String! @@ -10,4 +12,3 @@ type Product { type Query { products: [Product!] 
} - diff --git a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap index 6709dd6b84..443ee630a4 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap @@ -8,7 +8,7 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE -directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION directive @join__graph(name: String!, url: String!) on ENUM_VALUE @@ -20,6 +20,8 @@ directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA +scalar Import @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) + type Product @join__type(graph: SUBGRAPHA, key: "sku") @join__type(graph: SUBGRAPHB, key: "sku") { sku: String! @join__field(graph: SUBGRAPHA) @join__field(graph: SUBGRAPHB) name: String! 
@join__field(graph: SUBGRAPHA, external: true) @join__field(graph: SUBGRAPHB) diff --git a/apollo-federation/tests/snapshots/main__extract_subgraphs__can_extract_subgraph.snap b/apollo-federation/tests/snapshots/main__extract_subgraphs__can_extract_subgraph.snap index 97609b3e52..f036c14999 100644 --- a/apollo-federation/tests/snapshots/main__extract_subgraphs__can_extract_subgraph.snap +++ b/apollo-federation/tests/snapshots/main__extract_subgraphs__can_extract_subgraph.snap @@ -1,5 +1,5 @@ --- -source: tests/extract_subgraphs.rs +source: apollo-federation/tests/extract_subgraphs.rs expression: snapshot --- Subgraph1: https://Subgraph1 @@ -28,7 +28,7 @@ directive @federation__shareable on OBJECT | FIELD_DEFINITION directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION -directive @federation__override(from: String!) on FIELD_DEFINITION +directive @federation__override(from: String!, label: String) on FIELD_DEFINITION directive @federation__composeDirective(name: String) repeatable on SCHEMA @@ -105,7 +105,7 @@ directive @federation__shareable on OBJECT | FIELD_DEFINITION directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION -directive @federation__override(from: String!) on FIELD_DEFINITION +directive @federation__override(from: String!, label: String) on FIELD_DEFINITION directive @federation__composeDirective(name: String) repeatable on SCHEMA diff --git a/apollo-router-benchmarks/Cargo.toml b/apollo-router-benchmarks/Cargo.toml index 9a01145dce..c2acf8a5c8 100644 --- a/apollo-router-benchmarks/Cargo.toml +++ b/apollo-router-benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-router-benchmarks" -version = "1.50.0" +version = "1.51.0" authors = ["Apollo Graph, Inc. 
"] edition = "2021" license = "Elastic-2.0" diff --git a/apollo-router-scaffold/Cargo.toml b/apollo-router-scaffold/Cargo.toml index c4514c7244..aa62f1b73d 100644 --- a/apollo-router-scaffold/Cargo.toml +++ b/apollo-router-scaffold/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-router-scaffold" -version = "1.50.0" +version = "1.51.0" authors = ["Apollo Graph, Inc. "] edition = "2021" license = "Elastic-2.0" diff --git a/apollo-router-scaffold/templates/base/Cargo.template.toml b/apollo-router-scaffold/templates/base/Cargo.template.toml index c4841a9859..4733b6c87d 100644 --- a/apollo-router-scaffold/templates/base/Cargo.template.toml +++ b/apollo-router-scaffold/templates/base/Cargo.template.toml @@ -22,7 +22,7 @@ apollo-router = { path ="{{integration_test}}apollo-router" } apollo-router = { git="https://github.com/apollographql/router.git", branch="{{branch}}" } {{else}} # Note if you update these dependencies then also update xtask/Cargo.toml -apollo-router = "1.50.0" +apollo-router = "1.51.0" {{/if}} {{/if}} async-trait = "0.1.52" diff --git a/apollo-router-scaffold/templates/base/rust-toolchain.toml b/apollo-router-scaffold/templates/base/rust-toolchain.toml new file mode 100644 index 0000000000..65542fa528 --- /dev/null +++ b/apollo-router-scaffold/templates/base/rust-toolchain.toml @@ -0,0 +1,6 @@ +# Note that the contents should be same as https://github.com/apollographql/router/blob/main/rust-toolchain.toml + +[toolchain] +# renovate-automation: rustc version +channel = "1.76.0" +components = [ "rustfmt", "clippy" ] diff --git a/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml b/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml index e29afe044f..d5c86be60b 100644 --- a/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml +++ b/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml @@ -13,7 +13,7 @@ apollo-router-scaffold = { path ="{{integration_test}}apollo-router-scaffold" } {{#if branch}} 
apollo-router-scaffold = { git="https://github.com/apollographql/router.git", branch="{{branch}}" } {{else}} -apollo-router-scaffold = { git = "https://github.com/apollographql/router.git", tag = "v1.50.0" } +apollo-router-scaffold = { git = "https://github.com/apollographql/router.git", tag = "v1.51.0" } {{/if}} {{/if}} anyhow = "1.0.58" diff --git a/apollo-router/Cargo.toml b/apollo-router/Cargo.toml index 1f1b0983f4..c0875539eb 100644 --- a/apollo-router/Cargo.toml +++ b/apollo-router/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-router" -version = "1.50.0" +version = "1.51.0" authors = ["Apollo Graph, Inc. "] repository = "https://github.com/apollographql/router/" documentation = "https://docs.rs/apollo-router" @@ -66,9 +66,9 @@ features = ["docs_rs"] [dependencies] askama = "0.12.1" access-json = "0.1.0" -anyhow = "1.0.80" +anyhow = "1.0.86" apollo-compiler.workspace = true -apollo-federation = { path = "../apollo-federation", version = "=1.50.0" } +apollo-federation = { path = "../apollo-federation", version = "=1.51.0" } arc-swap = "1.6.0" async-channel = "1.9.0" async-compression = { version = "0.4.6", features = [ @@ -83,7 +83,7 @@ base64 = "0.21.7" bloomfilter = "1.0.13" buildstructor = "0.5.4" bytes = "1.6.0" -clap = { version = "4.5.1", default-features = false, features = [ +clap = { version = "4.5.8", default-features = false, features = [ "env", "derive", "std", @@ -103,7 +103,7 @@ dhat = { version = "0.3.3", optional = true } diff = "0.1.13" directories = "5.0.1" displaydoc = "0.2" -flate2 = "1.0.28" +flate2 = "1.0.30" fred = { version = "7.1.2", features = ["enable-rustls"] } futures = { version = "0.3.30", features = ["thread-pool"] } graphql_client = "0.13.0" @@ -115,16 +115,16 @@ humantime = "2.1.0" humantime-serde = "1.1.1" hyper = { version = "0.14.28", features = ["server", "client", "stream"] } hyper-rustls = { version = "0.24.2", features = ["http1", "http2"] } -indexmap = { version = "2.2.3", features = ["serde"] } +indexmap = { 
version = "2.2.6", features = ["serde"] } itertools = "0.12.1" jsonpath_lib = "0.3.0" jsonpath-rust = "0.3.5" jsonschema = { version = "0.17.1", default-features = false } -jsonwebtoken = "9.2.0" +jsonwebtoken = "9.3.0" lazy_static = "1.4.0" -libc = "0.2.153" -linkme = "0.3.23" -lru = "0.12.2" +libc = "0.2.155" +linkme = "0.3.27" +lru = "0.12.3" maplit = "1.0.2" mediatype = "0.19.18" mockall = "0.11.4" @@ -179,28 +179,28 @@ opentelemetry-zipkin = { version = "0.18.0", default-features = false, features "reqwest-rustls", ] } opentelemetry-prometheus = "0.13.0" -paste = "1.0.14" -pin-project-lite = "0.2.13" +paste = "1.0.15" +pin-project-lite = "0.2.14" prometheus = "0.13" -prost = "0.12.3" -prost-types = "0.12.3" +prost = "0.12.6" +prost-types = "0.12.6" proteus = "0.5.0" rand = "0.8.5" rhai = { version = "=1.17.1", features = ["sync", "serde", "internals"] } -regex = "1.10.3" +regex = "1.10.5" reqwest.workspace = true # note: this dependency should _always_ be pinned, prefix the version with an `=` router-bridge = "=0.5.27+v2.8.1" -rust-embed = { version = "8.2.0", features = ["include-exclude"] } -rustls = "0.21.11" +rust-embed = { version = "8.4.0", features = ["include-exclude"] } +rustls = "0.21.12" rustls-native-certs = "0.6.3" rustls-pemfile = "1.0.4" schemars.workspace = true shellexpand = "3.1.0" sha2 = "0.10.8" -semver = "1.0.22" +semver = "1.0.23" serde.workspace = true serde_derive_default = "0.1" serde_json_bytes.workspace = true @@ -210,10 +210,10 @@ serde_yaml = "0.8.26" static_assertions = "1.1.0" strum_macros = "0.25.3" sys-info = "0.9.1" -thiserror = "1.0.57" +thiserror = "1.0.61" tokio.workspace = true -tokio-stream = { version = "0.1.14", features = ["sync", "net"] } -tokio-util = { version = "0.7.10", features = ["net", "codec", "time"] } +tokio-stream = { version = "0.1.15", features = ["sync", "net"] } +tokio-util = { version = "0.7.11", features = ["net", "codec", "time"] } tonic = { version = "0.9.2", features = [ "transport", "tls", @@ 
-234,14 +234,14 @@ tower-http = { version = "0.4.4", features = [ "timeout", ] } tower-service = "0.3.2" -tracing = "0.1.37" -tracing-core = "0.1.31" +tracing = "0.1.40" +tracing-core = "0.1.32" tracing-futures = { version = "0.2.5", features = ["futures-03"] } tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json"] } trust-dns-resolver = "0.23.2" -url = { version = "2.5.0", features = ["serde"] } +url = { version = "2.5.2", features = ["serde"] } urlencoding = "2.1.3" -uuid = { version = "1.7.0", features = ["serde", "v4"] } +uuid = { version = "1.9.1", features = ["serde", "v4"] } yaml-rust = "0.4.5" wiremock = "0.5.22" wsl = "0.1.0" @@ -251,11 +251,11 @@ tokio-tungstenite = { version = "0.20.1", features = [ tokio-rustls = "0.24.1" http-serde = "1.1.3" hmac = "0.12.1" -parking_lot = { version = "0.12.1", features = ["serde"] } -memchr = "2.7.1" -brotli = "3.4.0" -zstd = "0.13.0" -zstd-safe = "7.0.0" +parking_lot = { version = "0.12.3", features = ["serde"] } +memchr = "2.7.4" +brotli = "3.5.0" +zstd = "0.13.1" +zstd-safe = "7.1.0" # note: AWS dependencies should always use the same version aws-sigv4 = "1.1.6" aws-credential-types = "1.1.6" @@ -264,8 +264,8 @@ aws-types = "1.1.6" aws-smithy-runtime-api = { version = "1.1.6", features = ["client"] } sha1.workspace = true tracing-serde = "0.1.3" -time = { version = "0.3.34", features = ["serde"] } -similar = { version = "2.4.0", features = ["inline"] } +time = { version = "0.3.36", features = ["serde"] } +similar = { version = "2.5.0", features = ["inline"] } console = "0.15.8" bytesize = { version = "1.3.0", features = ["serde"] } @@ -279,7 +279,7 @@ hyperlocal = { version = "0.8.0", default-features = false, features = [ ] } [target.'cfg(target_os = "linux")'.dependencies] -tikv-jemallocator = "0.5" +tikv-jemallocator = "0.5.4" [dev-dependencies] axum = { version = "0.6.20", features = [ @@ -293,9 +293,9 @@ fred = { version = "7.1.2", features = ["enable-rustls", "mocks"] } futures-test = 
"0.3.30" insta.workspace = true maplit = "1.0.2" -memchr = { version = "2.7.1", default-features = false } +memchr = { version = "2.7.4", default-features = false } mockall = "0.11.4" -num-traits = "0.2.18" +num-traits = "0.2.19" once_cell.workspace = true opentelemetry-stdout = { version = "0.1.0", features = ["trace"] } opentelemetry = { version = "0.20.0", features = ["testing"] } @@ -307,7 +307,7 @@ opentelemetry-proto = { version = "0.5.0", features = [ ] } p256 = "0.13.2" rand_core = "0.6.4" -reqwest = { version = "0.11.24", default-features = false, features = [ +reqwest = { version = "0.11.27", default-features = false, features = [ "json", "multipart", "stream", @@ -318,26 +318,26 @@ rhai = { version = "1.17.1", features = [ "internals", "testing-environ", ] } -serial_test = { version = "3.0.0" } +serial_test = { version = "3.1.1" } tempfile.workspace = true -test-log = { version = "0.2.14", default-features = false, features = [ +test-log = { version = "0.2.16", default-features = false, features = [ "trace", ] } -test-span = "0.7" -basic-toml = "0.1" +test-span = "0.7.0" +basic-toml = "0.1.9" tower-test = "0.4.0" # See note above in this file about `^tracing` packages which also applies to # these dev dependencies. 
-tracing-subscriber = { version = "0.3", default-features = false, features = [ +tracing-subscriber = { version = "0.3.18", default-features = false, features = [ "env-filter", "fmt", ] } tracing-opentelemetry = "0.21.0" tracing-test = "0.2.5" -walkdir = "2.4.0" +walkdir = "2.5.0" wiremock = "0.5.22" -libtest-mimic = "0.7.2" +libtest-mimic = "0.7.3" [target.'cfg(target_os = "linux")'.dev-dependencies] rstack = { version = "0.3.3", features = ["dw"], default-features = false } @@ -350,7 +350,7 @@ hyperlocal = { version = "0.8.0", default-features = false, features = [ [build-dependencies] tonic-build = "0.9.2" -basic-toml = "0.1" +basic-toml = "0.1.9" serde_json.workspace = true [[test]] diff --git a/apollo-router/src/apollo_studio_interop/mod.rs b/apollo-router/src/apollo_studio_interop/mod.rs index 89a97eabfb..4909a7af42 100644 --- a/apollo-router/src/apollo_studio_interop/mod.rs +++ b/apollo-router/src/apollo_studio_interop/mod.rs @@ -10,7 +10,6 @@ use std::sync::Arc; use apollo_compiler::ast::Argument; use apollo_compiler::ast::DirectiveList; -use apollo_compiler::ast::Name; use apollo_compiler::ast::OperationType; use apollo_compiler::ast::Value; use apollo_compiler::ast::VariableDefinition; @@ -24,6 +23,7 @@ use apollo_compiler::executable::SelectionSet; use apollo_compiler::schema::ExtendedType; use apollo_compiler::validation::Valid; use apollo_compiler::ExecutableDocument; +use apollo_compiler::Name; use apollo_compiler::Node; use apollo_compiler::Schema; use router_bridge::planner::ReferencedFieldsForType; diff --git a/apollo-router/src/axum_factory/axum_http_server_factory.rs b/apollo-router/src/axum_factory/axum_http_server_factory.rs index 8bd0d3e0e8..f687440f4c 100644 --- a/apollo-router/src/axum_factory/axum_http_server_factory.rs +++ b/apollo-router/src/axum_factory/axum_http_server_factory.rs @@ -59,8 +59,6 @@ use crate::http_server_factory::HttpServerFactory; use crate::http_server_factory::HttpServerHandle; use 
crate::http_server_factory::Listener; use crate::plugins::telemetry::SpanMode; -use crate::plugins::traffic_shaping::Elapsed; -use crate::plugins::traffic_shaping::RateLimited; use crate::router::ApolloRouterError; use crate::router_factory::Endpoint; use crate::router_factory::RouterFactory; @@ -663,24 +661,7 @@ async fn handle_graphql( ); match res { - Err(err) => { - if let Some(source_err) = err.source() { - if source_err.is::() { - return RateLimited::new().into_response(); - } - if source_err.is::() { - return Elapsed::new().into_response(); - } - } - if err.is::() { - return RateLimited::new().into_response(); - } - if err.is::() { - return Elapsed::new().into_response(); - } - - internal_server_error(err) - } + Err(err) => internal_server_error(err), Ok(response) => { let (mut parts, body) = response.response.into_parts(); diff --git a/apollo-router/src/axum_factory/tests.rs b/apollo-router/src/axum_factory/tests.rs index 4d668c6dd4..b5946284ac 100644 --- a/apollo-router/src/axum_factory/tests.rs +++ b/apollo-router/src/axum_factory/tests.rs @@ -2384,6 +2384,18 @@ async fn test_supergraph_timeout() { .unwrap(); assert_eq!(response.status(), StatusCode::GATEWAY_TIMEOUT); + let body = response.bytes().await.unwrap(); - assert_eq!(std::str::from_utf8(&body).unwrap(), "request timed out"); + let body: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_eq!( + body, + json!({ + "errors": [{ + "message": "Request timed out", + "extensions": { + "code": "REQUEST_TIMEOUT" + } + }] + }) + ); } diff --git a/apollo-router/src/axum_factory/utils.rs b/apollo-router/src/axum_factory/utils.rs index e685dec4b3..1e208fdb00 100644 --- a/apollo-router/src/axum_factory/utils.rs +++ b/apollo-router/src/axum_factory/utils.rs @@ -8,14 +8,12 @@ use tower_http::trace::MakeSpan; use tower_service::Service; use tracing::Span; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_ERROR; use 
crate::plugins::telemetry::SpanMode; -use crate::plugins::telemetry::OTEL_STATUS_CODE; -use crate::plugins::telemetry::OTEL_STATUS_CODE_ERROR; use crate::uplink::license_enforcement::LicenseState; use crate::uplink::license_enforcement::LICENSE_EXPIRED_SHORT_MESSAGE; -pub(crate) const REQUEST_SPAN_NAME: &str = "request"; - #[derive(Clone, Default)] pub(crate) struct PropagatingMakeSpan { pub(crate) license: LicenseState, diff --git a/apollo-router/src/cache/redis.rs b/apollo-router/src/cache/redis.rs index a3a1ed4033..f0973551f4 100644 --- a/apollo-router/src/cache/redis.rs +++ b/apollo-router/src/cache/redis.rs @@ -1,5 +1,6 @@ use std::collections::HashMap; use std::fmt; +use std::pin::Pin; use std::sync::Arc; use std::time::Duration; @@ -17,9 +18,11 @@ use fred::types::FromRedis; use fred::types::PerformanceConfig; use fred::types::ReconnectPolicy; use fred::types::RedisConfig; +use fred::types::ScanResult; use fred::types::TlsConfig; use fred::types::TlsHostMapping; use futures::FutureExt; +use futures::Stream; use tower::BoxError; use url::Url; @@ -557,6 +560,31 @@ impl RedisCacheStorage { }; tracing::trace!("insert result {:?}", r); } + + pub(crate) async fn delete(&self, keys: Vec>) -> Option { + self.inner + .del(keys) + .await + .map_err(|e| { + if !e.is_not_found() { + tracing::error!(error = %e, "redis del error"); + } + e + }) + .ok() + } + + pub(crate) fn scan( + &self, + pattern: String, + count: Option, + ) -> Pin> + Send>> { + if self.is_cluster { + Box::pin(self.inner.scan_cluster(pattern, count, None)) + } else { + Box::pin(self.inner.scan(pattern, count, None)) + } + } } #[cfg(test)] diff --git a/apollo-router/src/configuration/mod.rs b/apollo-router/src/configuration/mod.rs index 5d74f7eee4..a9e6d3155c 100644 --- a/apollo-router/src/configuration/mod.rs +++ b/apollo-router/src/configuration/mod.rs @@ -1038,7 +1038,7 @@ impl Default for Apq { } /// Query planning cache configuration -#[derive(Debug, Clone, Default, Deserialize, Serialize, 
JsonSchema)] +#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] #[serde(deny_unknown_fields, default)] pub(crate) struct QueryPlanning { /// Cache configuration @@ -1047,7 +1047,6 @@ pub(crate) struct QueryPlanning { /// a list of the most used queries (from the in memory cache) /// Configures the number of queries warmed up. Defaults to 1/3 of /// the in memory cache - #[serde(default)] pub(crate) warmed_up_queries: Option, /// Sets a limit to the number of generated query plans. @@ -1080,6 +1079,32 @@ pub(crate) struct QueryPlanning { /// Set the size of a pool of workers to enable query planning parallelism. /// Default: 1. pub(crate) experimental_parallelism: AvailableParallelism, + + /// Activates introspection response caching + /// Historically, the Router has executed introspection queries in the query planner, and cached their + /// response in its cache because they were expensive. This will change soon as introspection will be + /// removed from the query planner. In the meantime, since storing introspection responses can fill up + /// the cache, this option can be used to deactivate it. 
+ /// Default: true + pub(crate) legacy_introspection_caching: bool, +} + +impl Default for QueryPlanning { + fn default() -> Self { + Self { + cache: QueryPlanCache::default(), + warmed_up_queries: Default::default(), + experimental_plans_limit: Default::default(), + experimental_parallelism: Default::default(), + experimental_paths_limit: Default::default(), + experimental_reuse_query_plans: Default::default(), + legacy_introspection_caching: default_legacy_introspection_caching(), + } + } +} + +const fn default_legacy_introspection_caching() -> bool { + true } impl QueryPlanning { diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap index 109993d720..cfd488b7c6 100644 --- a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap @@ -1435,7 +1435,7 @@ expression: "&schema" "description": "#/definitions/BatchProcessorConfig" }, "enable_span_mapping": { - "default": false, + "default": true, "description": "Enable datadog span mapping for span name and resource name.", "type": "boolean" }, @@ -1446,6 +1446,19 @@ expression: "&schema" "endpoint": { "$ref": "#/definitions/UriEndpoint", "description": "#/definitions/UriEndpoint" + }, + "fixed_span_names": { + "default": true, + "description": "Fixes the span names, this means that the APM view will show the original span names in the operation dropdown.", + "type": "boolean" + }, + "resource_mapping": { + "additionalProperties": { + "type": "string" + }, + "default": {}, + "description": "Custom mapping to be used as the resource field in spans, defaults to: router -> http.route supergraph -> graphql.operation.name query_planning -> graphql.operation.name subgraph -> subgraph.name subgraph_request -> 
subgraph.name http_request -> http.route", + "type": "object" } }, "required": [ @@ -2453,8 +2466,8 @@ expression: "&schema" "type": "boolean" }, "format": { - "$ref": "#/definitions/TraceIdFormat", - "description": "#/definitions/TraceIdFormat" + "$ref": "#/definitions/TraceIdFormat2", + "description": "#/definitions/TraceIdFormat2" }, "header_name": { "description": "Choose the header name to expose trace_id (default: apollo-trace-id)", @@ -4298,6 +4311,11 @@ expression: "&schema" "description": "If cache warm up is configured, this will allow the router to keep a query plan created with the old schema, if it determines that the schema update does not affect the corresponding query", "type": "boolean" }, + "legacy_introspection_caching": { + "default": true, + "description": "Activates introspection response caching Historically, the Router has executed introspection queries in the query planner, and cached their response in its cache because they were expensive. This will change soon as introspection will be removed from the query planner. In the meantime, since storing introspection responses can fill up the cache, this option can be used to deactivate it. Default: true", + "type": "boolean" + }, "warmed_up_queries": { "default": null, "description": "Warms up the cache on reloads by running the query plan over a list of the most used queries (from the in memory cache) Configures the number of queries warmed up. 
Defaults to 1/3 of the in memory cache", @@ -4746,6 +4764,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_RouterSelector", + "description": "#/definitions/Condition_for_RouterSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -4783,6 +4806,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_RouterSelector", + "description": "#/definitions/Condition_for_RouterSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -4879,8 +4907,8 @@ expression: "&schema" "description": "The trace ID of the request.", "properties": { "trace_id": { - "$ref": "#/definitions/TraceIdFormat2", - "description": "#/definitions/TraceIdFormat2" + "$ref": "#/definitions/TraceIdFormat", + "description": "#/definitions/TraceIdFormat" } }, "required": [ @@ -5690,6 +5718,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_SubgraphSelector", + "description": "#/definitions/Condition_for_SubgraphSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -5727,6 +5760,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_SubgraphSelector", + "description": "#/definitions/Condition_for_SubgraphSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -6044,6 +6082,19 @@ expression: "&schema" ], "type": "object" }, + { + "additionalProperties": false, + "properties": { + "subgraph_on_graphql_error": { + "description": "Boolean set to true if the response body contains graphql error", + "type": "boolean" + } + }, + "required": [ + "subgraph_on_graphql_error" + ], + "type": 
"object" + }, { "additionalProperties": false, "properties": { @@ -6395,6 +6446,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_SupergraphSelector", + "description": "#/definitions/Condition_for_SupergraphSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -6427,6 +6483,11 @@ expression: "&schema" "description": "Send the body", "type": "boolean" }, + "condition": { + "$ref": "#/definitions/Condition_for_SupergraphSelector", + "description": "#/definitions/Condition_for_SupergraphSelector", + "nullable": true + }, "context": { "default": false, "description": "Send the context", @@ -6733,6 +6794,20 @@ expression: "&schema" "cost" ], "type": "object" + }, + { + "additionalProperties": false, + "description": "Boolean returning true if it's the primary response and not events like subscription events or deferred responses", + "properties": { + "is_primary_response": { + "description": "Boolean returning true if it's the primary response and not events like subscription events or deferred responses", + "type": "boolean" + } + }, + "required": [ + "is_primary_response" + ], + "type": "object" } ] }, @@ -6894,16 +6969,16 @@ expression: "&schema" "TraceIdFormat": { "oneOf": [ { - "description": "Format the Trace ID as a hexadecimal number\n\n(e.g. Trace ID 16 -> 00000000000000000000000000000010)", + "description": "Open Telemetry trace ID, a hex string.", "enum": [ - "hexadecimal" + "open_telemetry" ], "type": "string" }, { - "description": "Format the Trace ID as a decimal number\n\n(e.g. Trace ID 16 -> 16)", + "description": "Datadog trace ID, a u64.", "enum": [ - "decimal" + "datadog" ], "type": "string" } @@ -6912,14 +6987,21 @@ expression: "&schema" "TraceIdFormat2": { "oneOf": [ { - "description": "Open Telemetry trace ID, a hex string.", + "description": "Format the Trace ID as a hexadecimal number\n\n(e.g. 
Trace ID 16 -> 00000000000000000000000000000010)", "enum": [ - "open_telemetry" + "hexadecimal" ], "type": "string" }, { - "description": "Datadog trace ID, a u64.", + "description": "Format the Trace ID as a decimal number\n\n(e.g. Trace ID 16 -> 16)", + "enum": [ + "decimal" + ], + "type": "string" + }, + { + "description": "Datadog", "enum": [ "datadog" ], diff --git a/apollo-router/src/graphql/mod.rs b/apollo-router/src/graphql/mod.rs index bbe96a2b03..a205648f4a 100644 --- a/apollo-router/src/graphql/mod.rs +++ b/apollo-router/src/graphql/mod.rs @@ -123,29 +123,30 @@ impl Error { let mut object = ensure_object!(value).map_err(|error| FetchError::SubrequestMalformedResponse { service: service_name.to_string(), - reason: error.to_string(), + reason: format!("invalid error within `errors`: {}", error), })?; let extensions = extract_key_value_from_object!(object, "extensions", Value::Object(o) => o) .map_err(|err| FetchError::SubrequestMalformedResponse { service: service_name.to_string(), - reason: err.to_string(), + reason: format!("invalid `extensions` within error: {}", err), })? .unwrap_or_default(); let message = extract_key_value_from_object!(object, "message", Value::String(s) => s) .map_err(|err| FetchError::SubrequestMalformedResponse { service: service_name.to_string(), - reason: err.to_string(), + reason: format!("invalid `message` within error: {}", err), })? .map(|s| s.as_str().to_string()) .unwrap_or_default(); let locations = extract_key_value_from_object!(object, "locations") + .map(skip_invalid_locations) .map(serde_json_bytes::from_value) .transpose() .map_err(|err| FetchError::SubrequestMalformedResponse { service: service_name.to_string(), - reason: err.to_string(), + reason: format!("invalid `locations` within error: {}", err), })? 
.unwrap_or_default(); let path = extract_key_value_from_object!(object, "path") @@ -153,7 +154,7 @@ impl Error { .transpose() .map_err(|err| FetchError::SubrequestMalformedResponse { service: service_name.to_string(), - reason: err.to_string(), + reason: format!("invalid `path` within error: {}", err), })?; Ok(Error { @@ -165,6 +166,20 @@ impl Error { } } +/// GraphQL spec require that both "line" and "column" are positive numbers. +/// However GraphQL Java and GraphQL Kotlin return `{ "line": -1, "column": -1 }` +/// if they can't determine error location inside query. +/// This function removes such locations from suplied value. +fn skip_invalid_locations(mut value: Value) -> Value { + if let Some(array) = value.as_array_mut() { + array.retain(|location| { + location.get("line") != Some(&Value::from(-1)) + || location.get("column") != Some(&Value::from(-1)) + }) + } + value +} + /// Displays (only) the error message. impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/apollo-router/src/layers/map_future_with_request_data.rs b/apollo-router/src/layers/map_future_with_request_data.rs index 53faf6a299..0eeff03363 100644 --- a/apollo-router/src/layers/map_future_with_request_data.rs +++ b/apollo-router/src/layers/map_future_with_request_data.rs @@ -36,6 +36,7 @@ where } /// [`Service`] for mapping futures with request data. See [`ServiceBuilderExt::map_future_with_request_data()`](crate::layers::ServiceBuilderExt::map_future_with_request_data()). 
+#[derive(Clone)] pub struct MapFutureWithRequestDataService { inner: S, req_fn: RF, diff --git a/apollo-router/src/plugins/authorization/authenticated.rs b/apollo-router/src/plugins/authorization/authenticated.rs index f88796ce4d..a4324b5548 100644 --- a/apollo-router/src/plugins/authorization/authenticated.rs +++ b/apollo-router/src/plugins/authorization/authenticated.rs @@ -6,7 +6,7 @@ use apollo_compiler::ast; use apollo_compiler::executable; use apollo_compiler::schema; use apollo_compiler::schema::Implementers; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; use tower::BoxError; @@ -23,7 +23,7 @@ pub(crate) const AUTHENTICATED_SPEC_VERSION_RANGE: &str = ">=0.1.0, <=0.1.0"; pub(crate) struct AuthenticatedCheckVisitor<'a> { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a Node>, + fragments: HashMap<&'a Name, &'a Node>, pub(crate) found: bool, authenticated_directive_name: String, entity_query: bool, @@ -175,13 +175,13 @@ impl<'a> traverse::Visitor for AuthenticatedCheckVisitor<'a> { pub(crate) struct AuthenticatedVisitor<'a> { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a ast::FragmentDefinition>, + fragments: HashMap<&'a Name, &'a ast::FragmentDefinition>, implementers_map: &'a HashMap, pub(crate) query_requires_authentication: bool, pub(crate) unauthorized_paths: Vec, // store the error paths from fragments so we can add them at // the point of application - fragments_unauthorized_paths: HashMap<&'a ast::Name, Vec>, + fragments_unauthorized_paths: HashMap<&'a Name, Vec>, current_path: Path, authenticated_directive_name: String, dry_run: bool, diff --git a/apollo-router/src/plugins/authorization/policy.rs b/apollo-router/src/plugins/authorization/policy.rs index 18d68ce5aa..002c98592c 100644 --- a/apollo-router/src/plugins/authorization/policy.rs +++ b/apollo-router/src/plugins/authorization/policy.rs @@ -13,7 +13,7 @@ use apollo_compiler::ast; use apollo_compiler::executable; 
use apollo_compiler::schema; use apollo_compiler::schema::Implementers; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; use tower::BoxError; @@ -26,7 +26,7 @@ use crate::spec::TYPENAME; pub(crate) struct PolicyExtractionVisitor<'a> { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a Node>, + fragments: HashMap<&'a Name, &'a Node>, pub(crate) extracted_policies: HashSet, policy_directive_name: String, entity_query: bool, @@ -187,7 +187,7 @@ impl<'a> traverse::Visitor for PolicyExtractionVisitor<'a> { pub(crate) struct PolicyFilteringVisitor<'a> { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a ast::FragmentDefinition>, + fragments: HashMap<&'a Name, &'a ast::FragmentDefinition>, implementers_map: &'a HashMap, dry_run: bool, request_policies: HashSet, @@ -195,7 +195,7 @@ pub(crate) struct PolicyFilteringVisitor<'a> { pub(crate) unauthorized_paths: Vec, // store the error paths from fragments so we can add them at // the point of application - fragments_unauthorized_paths: HashMap<&'a ast::Name, Vec>, + fragments_unauthorized_paths: HashMap<&'a Name, Vec>, current_path: Path, policy_directive_name: String, } diff --git a/apollo-router/src/plugins/authorization/scopes.rs b/apollo-router/src/plugins/authorization/scopes.rs index d83988e220..a566be56d8 100644 --- a/apollo-router/src/plugins/authorization/scopes.rs +++ b/apollo-router/src/plugins/authorization/scopes.rs @@ -13,7 +13,7 @@ use apollo_compiler::ast; use apollo_compiler::executable; use apollo_compiler::schema; use apollo_compiler::schema::Implementers; -use apollo_compiler::schema::Name; +use apollo_compiler::Name; use apollo_compiler::Node; use tower::BoxError; @@ -26,7 +26,7 @@ use crate::spec::TYPENAME; pub(crate) struct ScopeExtractionVisitor<'a> { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a Node>, + fragments: HashMap<&'a Name, &'a Node>, pub(crate) extracted_scopes: HashSet, 
requires_scopes_directive_name: String, entity_query: bool, @@ -204,14 +204,14 @@ fn scopes_sets_argument(directive: &ast::Directive) -> impl Iterator { schema: &'a schema::Schema, - fragments: HashMap<&'a ast::Name, &'a ast::FragmentDefinition>, + fragments: HashMap<&'a Name, &'a ast::FragmentDefinition>, implementers_map: &'a HashMap, request_scopes: HashSet, pub(crate) query_requires_scopes: bool, pub(crate) unauthorized_paths: Vec, // store the error paths from fragments so we can add them at // the point of application - fragments_unauthorized_paths: HashMap<&'a ast::Name, Vec>, + fragments_unauthorized_paths: HashMap<&'a Name, Vec>, current_path: Path, requires_scopes_directive_name: String, dry_run: bool, diff --git a/apollo-router/src/plugins/cache/entity.rs b/apollo-router/src/plugins/cache/entity.rs index 3d29af663e..ebc6772008 100644 --- a/apollo-router/src/plugins/cache/entity.rs +++ b/apollo-router/src/plugins/cache/entity.rs @@ -10,6 +10,7 @@ use http::header::CACHE_CONTROL; use schemars::JsonSchema; use serde::Deserialize; use serde::Serialize; +use serde_json_bytes::from_value; use serde_json_bytes::ByteString; use serde_json_bytes::Value; use sha2::Digest; @@ -23,7 +24,9 @@ use tracing::Instrument; use tracing::Level; use super::cache_control::CacheControl; +use super::invalidation::Invalidation; use super::metrics::CacheMetricsService; +use crate::batching::BatchQuery; use crate::cache::redis::RedisCacheStorage; use crate::cache::redis::RedisKey; use crate::cache::redis::RedisValue; @@ -55,9 +58,11 @@ register_plugin!("apollo", "preview_entity_cache", EntityCache); pub(crate) struct EntityCache { storage: Option, subgraphs: Arc>, + entity_type: Option, enabled: bool, metrics: Metrics, private_queries: Arc>>, + pub(crate) invalidation: Invalidation, } /// Configuration for entity caching @@ -121,6 +126,12 @@ impl Plugin for EntityCache { where Self: Sized, { + let entity_type = init + .supergraph_schema + .schema_definition + .query + .as_ref() + 
.map(|q| q.name.to_string()); let required_to_start = init.config.redis.required_to_start; // we need to explicitely disable TTL reset because it is managed directly by this plugin let mut redis_config = init.config.redis.clone(); @@ -153,12 +164,16 @@ impl Plugin for EntityCache { .into()); } + let invalidation = Invalidation::new(storage.clone()).await?; + Ok(Self { storage, + entity_type, enabled: init.config.enabled, subgraphs: Arc::new(init.config.subgraph), metrics: init.config.metrics, private_queries: Arc::new(RwLock::new(HashSet::new())), + invalidation, }) } @@ -245,11 +260,13 @@ impl Plugin for EntityCache { }) .service(CacheService(Some(InnerCacheService { service, + entity_type: self.entity_type.clone(), name: name.to_string(), storage, subgraph_ttl, private_queries, private_id, + invalidation: self.invalidation.clone(), }))); tower::util::BoxService::new(inner) } else { @@ -279,8 +296,10 @@ impl EntityCache { where Self: Sized, { + let invalidation = Invalidation::new(Some(storage.clone())).await?; Ok(Self { storage: Some(storage), + entity_type: None, enabled: true, subgraphs: Arc::new(SubgraphConfiguration { all: Subgraph::default(), @@ -288,6 +307,7 @@ impl EntityCache { }), metrics: Metrics::default(), private_queries: Default::default(), + invalidation, }) } } @@ -296,10 +316,12 @@ struct CacheService(Option); struct InnerCacheService { service: subgraph::BoxService, name: String, + entity_type: Option, storage: RedisCacheStorage, subgraph_ttl: Option, private_queries: Arc>>, private_id: Option, + invalidation: Invalidation, } impl Service for CacheService { @@ -330,6 +352,17 @@ impl InnerCacheService { mut self, request: subgraph::Request, ) -> Result { + // Check if the request is part of a batch. If it is, completely bypass entity caching since it + // will break any request batches which this request is part of. 
+ // This check is what enables Batching and entity caching to work together, so be very careful + // before making any changes to it. + if request + .context + .extensions() + .with_lock(|lock| lock.contains_key::()) + { + return self.service.call(request).await; + } let query = request .subgraph_request .body() @@ -353,7 +386,8 @@ impl InnerCacheService { { if request.operation_kind == OperationKind::Query { match cache_lookup_root( - self.name, + self.name.clone(), + self.entity_type.as_deref(), self.storage.clone(), is_known_private, private_id.as_deref(), @@ -364,7 +398,7 @@ impl InnerCacheService { { ControlFlow::Break(response) => Ok(response), ControlFlow::Continue((request, mut root_cache_key)) => { - let response = self.service.call(request).await?; + let mut response = self.service.call(request).await?; let cache_control = if response.response.headers().contains_key(CACHE_CONTROL) { @@ -389,6 +423,15 @@ impl InnerCacheService { } } + if let Some(invalidation_extensions) = response + .response + .body_mut() + .extensions + .remove("invalidation") + { + self.handle_invalidation(invalidation_extensions).await; + } + if cache_control.should_store() { cache_store_root_from_response( self.storage, @@ -404,11 +447,21 @@ impl InnerCacheService { } } } else { - self.service.call(request).await + let mut response = self.service.call(request).await?; + if let Some(invalidation_extensions) = response + .response + .body_mut() + .extensions + .remove("invalidation") + { + self.handle_invalidation(invalidation_extensions).await; + } + + Ok(response) } } else { match cache_lookup_entities( - self.name, + self.name.clone(), self.storage.clone(), is_known_private, private_id.as_deref(), @@ -436,6 +489,15 @@ impl InnerCacheService { self.private_queries.write().await.insert(query.to_string()); } + if let Some(invalidation_extensions) = response + .response + .body_mut() + .extensions + .remove("invalidation") + { + self.handle_invalidation(invalidation_extensions).await; 
+ } + cache_store_entities_from_response( self.storage, self.subgraph_ttl, @@ -466,10 +528,21 @@ impl InnerCacheService { }) }) } + + async fn handle_invalidation(&mut self, invalidation_extensions: Value) { + if let Ok(requests) = from_value(invalidation_extensions) { + if let Err(e) = self.invalidation.invalidate(requests).await { + tracing::error!(error = %e, + message = "could not invalidate entity cache entries", + ); + } + } + } } async fn cache_lookup_root( name: String, + entity_type_opt: Option<&str>, cache: RedisCacheStorage, is_known_private: bool, private_id: Option<&str>, @@ -479,6 +552,7 @@ async fn cache_lookup_root( let key = extract_cache_key_root( &name, + entity_type_opt, &request.query_hash, body, &request.context, @@ -768,8 +842,10 @@ pub(crate) fn hash_additional_data( } // build a cache key for the root operation +#[allow(clippy::too_many_arguments)] fn extract_cache_key_root( subgraph_name: &str, + entity_type_opt: Option<&str>, query_hash: &QueryHash, body: &mut graphql::Request, context: &Context, @@ -782,14 +858,17 @@ fn extract_cache_key_root( // hash more data like variables and authorization status let additional_data_hash = hash_additional_data(body, context, cache_key); + let entity_type = entity_type_opt.unwrap_or("Query"); + // the cache key is written to easily find keys matching a prefix for deletion: - // - subgraph name: caching is done per subgraph + // - subgraph name: subgraph name + // - entity type: entity type // - query hash: invalidate the entry for a specific query and operation name // - additional data: separate cache entries depending on info like authorization status let mut key = String::new(); let _ = write!( &mut key, - "subgraph:{subgraph_name}:Query:{query_hash}:{additional_data_hash}" + "subgraph:{subgraph_name}:type:{entity_type}:hash:{query_hash}:data:{additional_data_hash}" ); if is_known_private { diff --git a/apollo-router/src/plugins/cache/invalidation.rs 
b/apollo-router/src/plugins/cache/invalidation.rs new file mode 100644 index 0000000000..6293df2ab9 --- /dev/null +++ b/apollo-router/src/plugins/cache/invalidation.rs @@ -0,0 +1,154 @@ +use std::time::Instant; + +use fred::types::Scanner; +use futures::SinkExt; +use futures::StreamExt; +use serde::Deserialize; +use serde::Serialize; +use serde_json_bytes::Value; +use tower::BoxError; +use tracing::Instrument; + +use crate::cache::redis::RedisCacheStorage; +use crate::cache::redis::RedisKey; +use crate::notification::Handle; +use crate::notification::HandleStream; +use crate::Notify; + +#[derive(Clone)] +pub(crate) struct Invalidation { + enabled: bool, + handle: Handle>, +} + +#[derive(Copy, Clone, Hash, PartialEq, Eq)] +pub(crate) struct InvalidationTopic; + +impl Invalidation { + pub(crate) async fn new(storage: Option) -> Result { + let mut notify = Notify::new(None, None, None); + let (handle, _b) = notify.create_or_subscribe(InvalidationTopic, false).await?; + let enabled = storage.is_some(); + if let Some(storage) = storage { + let h = handle.clone(); + + tokio::task::spawn(async move { start(storage, h.into_stream()).await }); + } + Ok(Self { enabled, handle }) + } + + pub(crate) async fn invalidate( + &mut self, + requests: Vec, + ) -> Result<(), BoxError> { + if self.enabled { + let mut sink = self.handle.clone().into_sink(); + sink.send(requests).await.map_err(|e| e.message)?; + } + + Ok(()) + } +} + +async fn start( + storage: RedisCacheStorage, + mut handle: HandleStream>, +) { + while let Some(requests) = handle.next().await { + handle_request_batch(&storage, requests) + .instrument(tracing::info_span!("cache.invalidation.batch")) + .await + } +} + +async fn handle_request_batch(storage: &RedisCacheStorage, requests: Vec) { + for request in requests { + let start = Instant::now(); + handle_request(storage, &request) + .instrument(tracing::info_span!("cache.invalidation.request")) + .await; + f64_histogram!( + 
"apollo.router.cache.invalidation.duration", + "Duration of the invalidation event execution.", + start.elapsed().as_secs_f64() + ); + } +} + +async fn handle_request(storage: &RedisCacheStorage, request: &InvalidationRequest) { + let key_prefix = request.key_prefix(); + tracing::debug!( + "got invalidation request: {request:?}, will scan for: {}", + key_prefix + ); + + // FIXME: configurable batch size + let mut stream = storage.scan(key_prefix.clone(), Some(10)); + let mut count = 0u64; + + while let Some(res) = stream.next().await { + match res { + Err(e) => { + tracing::error!( + pattern = key_prefix, + error = %e, + message = "error scanning for key", + ); + break; + } + Ok(scan_res) => { + if let Some(keys) = scan_res.results() { + let keys = keys + .iter() + .filter_map(|k| k.as_str()) + .map(|k| RedisKey(k.to_string())) + .collect::>(); + if !keys.is_empty() { + tracing::debug!("deleting keys: {keys:?}"); + count += keys.len() as u64; + storage.delete(keys).await; + } + } + } + } + } + + u64_histogram!( + "apollo.router.cache.invalidation.keys", + "Number of invalidated keys.", + count + ); +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "lowercase")] +pub(crate) enum InvalidationRequest { + Subgraph { + subgraph: String, + }, + Type { + subgraph: String, + r#type: String, + }, + Entity { + subgraph: String, + r#type: String, + key: Value, + }, +} + +impl InvalidationRequest { + fn key_prefix(&self) -> String { + match self { + InvalidationRequest::Subgraph { subgraph } => { + format!("subgraph:{subgraph}*",) + } + InvalidationRequest::Type { subgraph, r#type } => { + format!("subgraph:{subgraph}:type:{type}*",) + } + _ => { + todo!() + } + } + } +} diff --git a/apollo-router/src/plugins/cache/mod.rs b/apollo-router/src/plugins/cache/mod.rs index 084578434f..dded2f9586 100644 --- a/apollo-router/src/plugins/cache/mod.rs +++ b/apollo-router/src/plugins/cache/mod.rs @@ -1,5 +1,6 @@ pub(crate) mod cache_control; 
pub(crate) mod entity; +pub(crate) mod invalidation; pub(crate) mod metrics; #[cfg(test)] pub(crate) mod tests; diff --git a/apollo-router/src/plugins/cache/tests.rs b/apollo-router/src/plugins/cache/tests.rs index a7a59a2335..3d0bb21169 100644 --- a/apollo-router/src/plugins/cache/tests.rs +++ b/apollo-router/src/plugins/cache/tests.rs @@ -110,8 +110,67 @@ impl Mocks for MockStore { } return Ok(RedisValue::Null); } + //FIXME: this is not working because fred's mock never sends the response to SCAN to the client + /*"SCAN" => { + let mut args_it = command.args.iter(); + if let ( + Some(RedisValue::String(cursor)), + Some(RedisValue::String(_match)), + Some(RedisValue::String(pattern)), + Some(RedisValue::String(_count)), + Some(RedisValue::Integer(max_count)), + ) = ( + args_it.next(), + args_it.next(), + args_it.next(), + args_it.next(), + args_it.next(), + ) { + let cursor: usize = cursor.parse().unwrap(); + + if cursor > self.map.lock().len() { + let res = RedisValue::Array(vec![ + RedisValue::String(0.to_string().into()), + RedisValue::Array(Vec::new()), + ]); + println!("result: {res:?}"); + + return Ok(res); + } - _ => {} + let regex = Regex::new(pattern).unwrap(); + let mut count = 0; + let res: Vec<_> = self + .map + .lock() + .keys() + .enumerate() + .skip(cursor) + .map(|(i, key)| { + println!("seen key at index {i}"); + count = i + 1; + key + }) + .filter(|key| regex.is_match(&*key)) + .map(|key| RedisValue::Bytes(key.clone())) + .take(*max_count as usize) + .collect(); + + println!("scan returns cursor {count}, for {} values", res.len()); + let res = RedisValue::Array(vec![ + RedisValue::String(count.to_string().into()), + RedisValue::Array(res), + ]); + println!("result: {res:?}"); + + return Ok(res); + } else { + panic!() + } + }*/ + _ => { + panic!() + } } Err(RedisError::new(RedisErrorKind::NotFound, "mock not found")) } @@ -437,3 +496,123 @@ async fn private() { insta::assert_json_snapshot!(response); } + +/*FIXME: reactivate test if we manage to 
make fred return the response to SCAN in mocks +#[tokio::test(flavor = "multi_thread")] +async fn invalidate() { + let query = "query { currentUser { activeOrganization { id creatorUser { __typename id } } } }"; + + let subgraphs = MockedSubgraphs([ + ("user", MockSubgraph::builder().with_json( + serde_json::json!{{"query":"{currentUser{activeOrganization{__typename id}}}"}}, + serde_json::json!{{"data": {"currentUser": { "activeOrganization": { + "__typename": "Organization", + "id": "1" + } }}}} + ).with_header(CACHE_CONTROL, HeaderValue::from_static("public")).build()), + ("orga", MockSubgraph::builder().with_json( + serde_json::json!{{ + "query": "query($representations:[_Any!]!){_entities(representations:$representations){...on Organization{creatorUser{__typename id}}}}", + "variables": { + "representations": [ + { + "id": "1", + "__typename": "Organization", + } + ] + }}}, + serde_json::json!{{"data": { + "_entities": [{ + "creatorUser": { + "__typename": "User", + "id": 2 + } + }] + }}} + ).with_header(CACHE_CONTROL, HeaderValue::from_static("public")).build()) + ].into_iter().collect()); + + let redis_cache = RedisCacheStorage::from_mocks(Arc::new(MockStore::new())) + .await + .unwrap(); + let entity_cache = EntityCache::with_mocks(redis_cache.clone(), HashMap::new()) + .await + .unwrap(); + let mut invalidation = entity_cache.invalidation.clone(); + + let service = TestHarness::builder() + .configuration_json(serde_json::json!({"include_subgraph_errors": { "all": true } })) + .unwrap() + .schema(SCHEMA) + .extra_plugin(entity_cache.clone()) + .extra_plugin(subgraphs) + .build_supergraph() + .await + .unwrap(); + + let request = supergraph::Request::fake_builder() + .query(query) + .context(Context::new()) + .build() + .unwrap(); + let mut response = service.oneshot(request).await.unwrap(); + + insta::assert_debug_snapshot!(response.response.headers().get(CACHE_CONTROL)); + let response = response.next_response().await.unwrap(); + + 
insta::assert_json_snapshot!(response); + + // Now testing without any mock subgraphs, all the data should come from the cache + let service = TestHarness::builder() + .configuration_json(serde_json::json!({"include_subgraph_errors": { "all": true } })) + .unwrap() + .schema(SCHEMA) + .extra_plugin(entity_cache.clone()) + .build_supergraph() + .await + .unwrap(); + + let request = supergraph::Request::fake_builder() + .query(query) + .context(Context::new()) + .build() + .unwrap(); + let mut response = service.clone().oneshot(request).await.unwrap(); + + insta::assert_debug_snapshot!(response.response.headers().get(CACHE_CONTROL)); + let response = response.next_response().await.unwrap(); + + insta::assert_json_snapshot!(response); + + // now we invalidate data + invalidation + .invalidate(vec![InvalidationRequest::Subgraph { + subgraph: "orga".to_string(), + }]) + .await + .unwrap(); + tokio::time::sleep(Duration::from_millis(2000)).await; + + panic!(); + let service = TestHarness::builder() + .configuration_json(serde_json::json!({"include_subgraph_errors": { "all": true } })) + .unwrap() + .schema(SCHEMA) + .extra_plugin(entity_cache) + .build_supergraph() + .await + .unwrap(); + + let request = supergraph::Request::fake_builder() + .query(query) + .context(Context::new()) + .build() + .unwrap(); + let mut response = service.clone().oneshot(request).await.unwrap(); + + insta::assert_debug_snapshot!(response.response.headers().get(CACHE_CONTROL)); + let response = response.next_response().await.unwrap(); + + insta::assert_json_snapshot!(response); + panic!() +}*/ diff --git a/apollo-router/src/plugins/coprocessor/mod.rs b/apollo-router/src/plugins/coprocessor/mod.rs index a21d4107e4..cba03ba5c6 100644 --- a/apollo-router/src/plugins/coprocessor/mod.rs +++ b/apollo-router/src/plugins/coprocessor/mod.rs @@ -36,6 +36,9 @@ use crate::layers::async_checkpoint::OneShotAsyncCheckpointLayer; use crate::layers::ServiceBuilderExt; use crate::plugin::Plugin; use 
crate::plugin::PluginInit; +use crate::plugins::telemetry::config_new::conditions::Condition; +use crate::plugins::telemetry::config_new::selectors::RouterSelector; +use crate::plugins::telemetry::config_new::selectors::SubgraphSelector; use crate::plugins::traffic_shaping::Http2Config; use crate::register_plugin; use crate::services; @@ -234,6 +237,9 @@ where #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct RouterRequestConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -252,6 +258,9 @@ pub(super) struct RouterRequestConf { #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct RouterResponseConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -267,6 +276,9 @@ pub(super) struct RouterResponseConf { #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct SubgraphRequestConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -285,6 +297,9 @@ pub(super) struct SubgraphRequestConf { #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct SubgraphResponseConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -598,7 +613,7 @@ async fn process_router_request_stage( coprocessor_url: String, sdl: Arc, mut request: router::Request, - request_config: 
RouterRequestConf, + mut request_config: RouterRequestConf, ) -> Result, BoxError> where C: Service, Response = http::Response, Error = BoxError> @@ -608,6 +623,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = request_config + .condition + .as_mut() + .map(|c| c.evaluate_request(&request) == Some(true)) + .unwrap_or(true); + if !should_be_executed { + return Ok(ControlFlow::Continue(request)); + } // Call into our out of process processor with a body of our body // First, extract the data we need from our request and prepare our // external call. Use our configuration to figure out which data to send. @@ -761,6 +784,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = response_config + .condition + .as_ref() + .map(|c| c.evaluate_response(&response)) + .unwrap_or(true); + if !should_be_executed { + return Ok(response); + } // split the response into parts + body let (parts, body) = response.response.into_parts(); @@ -945,7 +976,7 @@ async fn process_subgraph_request_stage( coprocessor_url: String, service_name: String, mut request: subgraph::Request, - request_config: SubgraphRequestConf, + mut request_config: SubgraphRequestConf, ) -> Result, BoxError> where C: Service, Response = http::Response, Error = BoxError> @@ -955,6 +986,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = request_config + .condition + .as_mut() + .map(|c| c.evaluate_request(&request) == Some(true)) + .unwrap_or(true); + if !should_be_executed { + return Ok(ControlFlow::Continue(request)); + } // Call into our out of process processor with a body of our body // First, extract the data we need from our request and prepare our // external call. Use our configuration to figure out which data to send. 
@@ -1100,6 +1139,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = response_config + .condition + .as_ref() + .map(|c| c.evaluate_response(&response)) + .unwrap_or(true); + if !should_be_executed { + return Ok(response); + } // Call into our out of process processor with a body of our body // First, extract the data we need from our response and prepare our // external call. Use our configuration to figure out which data to send. diff --git a/apollo-router/src/plugins/coprocessor/supergraph.rs b/apollo-router/src/plugins/coprocessor/supergraph.rs index 31175882f7..d5aff80b7e 100644 --- a/apollo-router/src/plugins/coprocessor/supergraph.rs +++ b/apollo-router/src/plugins/coprocessor/supergraph.rs @@ -16,12 +16,17 @@ use crate::graphql; use crate::layers::async_checkpoint::OneShotAsyncCheckpointLayer; use crate::layers::ServiceBuilderExt; use crate::plugins::coprocessor::EXTERNAL_SPAN_NAME; +use crate::plugins::telemetry::config_new::conditions::Condition; +use crate::plugins::telemetry::config_new::selectors::SupergraphSelector; use crate::services::supergraph; /// What information is passed to a router request/response stage #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct SupergraphRequestConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -38,6 +43,9 @@ pub(super) struct SupergraphRequestConf { #[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize, JsonSchema)] #[serde(default, deny_unknown_fields)] pub(super) struct SupergraphResponseConf { + /// Condition to trigger this stage + #[serde(skip_serializing)] + pub(super) condition: Option>, /// Send the headers pub(super) headers: bool, /// Send the context @@ -183,7 +191,7 @@ async fn process_supergraph_request_stage( coprocessor_url: String, sdl: Arc, mut 
request: supergraph::Request, - request_config: SupergraphRequestConf, + mut request_config: SupergraphRequestConf, ) -> Result, BoxError> where C: Service, Response = http::Response, Error = BoxError> @@ -193,6 +201,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = request_config + .condition + .as_mut() + .map(|c| c.evaluate_request(&request) == Some(true)) + .unwrap_or(true); + if !should_be_executed { + return Ok(ControlFlow::Continue(request)); + } // Call into our out of process processor with a body of our body // First, extract the data we need from our request and prepare our // external call. Use our configuration to figure out which data to send. @@ -331,6 +347,14 @@ where + 'static, >>::Future: Send + 'static, { + let should_be_executed = response_config + .condition + .as_ref() + .map(|c| c.evaluate_response(&response)) + .unwrap_or(true); + if !should_be_executed { + return Ok(response); + } // split the response into parts + body let (mut parts, body) = response.response.into_parts(); @@ -419,8 +443,16 @@ where let generator_map_context = map_context.clone(); let generator_sdl_to_send = sdl_to_send.clone(); let generator_id = map_context.id.clone(); + let should_be_executed = response_config + .condition + .as_ref() + .map(|c| c.evaluate_event_response(&deferred_response, &map_context)) + .unwrap_or(true); async move { + if !should_be_executed { + return Ok(deferred_response); + } let body_to_send = response_config.body.then(|| { serde_json::to_value(&deferred_response).expect("serialization will not fail") }); @@ -512,6 +544,7 @@ mod tests { use super::*; use crate::plugin::test::MockInternalHttpClientService; use crate::plugin::test::MockSupergraphService; + use crate::plugins::telemetry::config_new::conditions::SelectorOrValue; use crate::services::router::body::get_body_bytes; use crate::services::supergraph; @@ -564,6 +597,7 @@ mod tests { async fn external_plugin_supergraph_request() { let supergraph_stage = 
SupergraphStage { request: SupergraphRequestConf { + condition: Default::default(), headers: false, context: false, body: true, @@ -697,6 +731,15 @@ mod tests { async fn external_plugin_supergraph_request_controlflow_break() { let supergraph_stage = SupergraphStage { request: SupergraphRequestConf { + condition: Condition::Eq([ + SelectorOrValue::Selector(SupergraphSelector::RequestHeader { + request_header: String::from("another_header"), + redact: None, + default: None, + }), + SelectorOrValue::Value("value".to_string().into()), + ]) + .into(), headers: false, context: false, body: true, @@ -728,6 +771,7 @@ mod tests { } }, "headers": { + "another_header": ["another value"], "aheader": ["a value"] } }"#, @@ -736,14 +780,17 @@ mod tests { }) }); - let service = supergraph_stage.as_service( + let service = supergraph_stage.clone().as_service( mock_http_client, mock_supergraph_service.boxed(), "http://test".to_string(), Arc::new("".to_string()), ); - let request = supergraph::Request::fake_builder().build().unwrap(); + let request = supergraph::Request::fake_builder() + .header("another_header", "value") + .build() + .unwrap(); let crate::services::supergraph::Response { mut response, @@ -753,21 +800,81 @@ mod tests { assert!(context.get::<_, bool>("testKey").unwrap().unwrap()); let value = response.headers().get("aheader").unwrap(); - assert_eq!(value, "a value"); + let value = response.headers().get("another_header").unwrap(); + assert_eq!(value, "another value"); + assert_eq!( response.body_mut().next().await.unwrap().errors[0] .message .as_str(), "my error message" ); + + let mut mock_supergraph_service = MockSupergraphService::new(); + mock_supergraph_service + .expect_call() + .returning(|req: supergraph::Request| { + Ok(supergraph::Response::builder() + .data(json!({ "test": 1234_u32 })) + .errors(Vec::new()) + .extensions(crate::json_ext::Object::new()) + .context(req.context) + .build() + .unwrap()) + }); + + // This should not trigger the supergraph 
response stage because of the condition + let request = supergraph::Request::fake_builder().build().unwrap(); + // let mut mock_http_client = MockInternalHttpClientService::new(); + // mock_http_client.expect_clone().; + let mock_http_client = mock_with_callback(move |_: http::Request| { + Box::pin(async { + Ok(http::Response::builder() + .body(RouterBody::from( + r#"{ + "version": 1, + "stage": "SupergraphRequest", + "control": { + "break": 200 + }, + "body": { + "errors": [{ "message": "my error message" }] + }, + "context": { + "entries": { + "testKey": true + } + }, + "headers": { + "another_header": ["another value"], + "aheader": ["a value"] + } + }"#, + )) + .unwrap()) + }) + }); + + let service = supergraph_stage.as_service( + mock_http_client, + mock_supergraph_service.boxed(), + "http://test".to_string(), + Arc::new("".to_string()), + ); + + let crate::services::supergraph::Response { context, .. } = + service.oneshot(request).await.unwrap(); + + assert!(context.get::<_, bool>("testKey").ok().flatten().is_none()); } #[tokio::test] async fn external_plugin_supergraph_response() { let supergraph_stage = SupergraphStage { response: SupergraphResponseConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -899,6 +1006,7 @@ mod tests { async fn multi_part() { let supergraph_stage = SupergraphStage { response: SupergraphResponseConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -1005,4 +1113,122 @@ mod tests { json!({ "data": { "test": 3, "has_next": false }, "hasNext": false }), ); } + + #[tokio::test] + async fn multi_part_only_primary() { + let supergraph_stage = SupergraphStage { + response: SupergraphResponseConf { + condition: Condition::Eq([ + SelectorOrValue::Selector(SupergraphSelector::IsPrimaryResponse { + is_primary_response: true, + }), + SelectorOrValue::Value(true.into()), + ]) + .into(), + headers: true, + context: true, + body: true, + sdl: true, + status_code: false, + }, + 
request: Default::default(), + }; + + let mut mock_supergraph_service = MockSupergraphService::new(); + + mock_supergraph_service + .expect_call() + .returning(|req: supergraph::Request| { + Ok(supergraph::Response::fake_stream_builder() + .response( + graphql::Response::builder() + .data(json!({ "test": 1 })) + .has_next(true) + .build(), + ) + .response( + graphql::Response::builder() + .data(json!({ "test": 2 })) + .has_next(true) + .build(), + ) + .response( + graphql::Response::builder() + .data(json!({ "test": 3 })) + .has_next(false) + .build(), + ) + .context(req.context) + .build() + .unwrap()) + }); + + let mock_http_client = + mock_with_deferred_callback(move |res: http::Request| { + Box::pin(async { + let mut deserialized_response: Externalizable = + serde_json::from_slice(&get_body_bytes(res.into_body()).await.unwrap()) + .unwrap(); + assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); + assert_eq!( + PipelineStep::SupergraphResponse.to_string(), + deserialized_response.stage + ); + + // Copy the has_next from the body into the data for checking later + deserialized_response + .body + .as_mut() + .unwrap() + .as_object_mut() + .unwrap() + .get_mut("data") + .unwrap() + .as_object_mut() + .unwrap() + .insert( + "has_next".to_string(), + serde_json::Value::from( + deserialized_response.has_next.unwrap_or_default(), + ), + ); + + Ok(http::Response::builder() + .body(RouterBody::from( + serde_json::to_string(&deserialized_response).unwrap_or_default(), + )) + .unwrap()) + }) + }); + + let service = supergraph_stage.as_service( + mock_http_client, + mock_supergraph_service.boxed(), + "http://test".to_string(), + Arc::new("".to_string()), + ); + + let request = supergraph::Request::canned_builder() + .query("foo") + .build() + .unwrap(); + + let mut res = service.oneshot(request).await.unwrap(); + + let body = res.response.body_mut().next().await.unwrap(); + assert_eq!( + serde_json::to_value(&body).unwrap(), + json!({ "data": { "test": 1, 
"has_next": true }, "hasNext": true }), + ); + let body = res.response.body_mut().next().await.unwrap(); + assert_eq!( + serde_json::to_value(&body).unwrap(), + json!({ "data": { "test": 2 }, "hasNext": true }), + ); + let body = res.response.body_mut().next().await.unwrap(); + assert_eq!( + serde_json::to_value(&body).unwrap(), + json!({ "data": { "test": 3 }, "hasNext": false }), + ); + } } diff --git a/apollo-router/src/plugins/coprocessor/test.rs b/apollo-router/src/plugins/coprocessor/test.rs index 27fd309382..c5d99e7abd 100644 --- a/apollo-router/src/plugins/coprocessor/test.rs +++ b/apollo-router/src/plugins/coprocessor/test.rs @@ -25,6 +25,7 @@ mod tests { use crate::plugin::test::MockSupergraphService; use crate::plugins::coprocessor::supergraph::SupergraphResponseConf; use crate::plugins::coprocessor::supergraph::SupergraphStage; + use crate::plugins::telemetry::config_new::conditions::SelectorOrValue; use crate::services::external::Externalizable; use crate::services::external::PipelineStep; use crate::services::external::EXTERNALIZABLE_VERSION; @@ -98,6 +99,7 @@ mod tests { async fn coprocessor_returning_the_wrong_version_should_fail() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -157,6 +159,7 @@ mod tests { async fn coprocessor_returning_the_wrong_stage_should_fail() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -216,6 +219,7 @@ mod tests { async fn coprocessor_missing_request_control_should_fail() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -274,6 +278,7 @@ mod tests { async fn coprocessor_subgraph_with_invalid_response_body_should_fail() { let subgraph_stage = SubgraphStage { request: SubgraphRequestConf { + condition: Default::default(), headers: false, context: 
false, body: true, @@ -336,6 +341,7 @@ mod tests { async fn external_plugin_subgraph_request() { let subgraph_stage = SubgraphStage { request: SubgraphRequestConf { + condition: Default::default(), headers: false, context: false, body: true, @@ -461,10 +467,93 @@ mod tests { ); } + #[tokio::test] + async fn external_plugin_subgraph_request_with_condition() { + let subgraph_stage = SubgraphStage { + request: SubgraphRequestConf { + condition: Condition::Eq([ + SelectorOrValue::Selector(SubgraphSelector::SubgraphRequestHeader { + subgraph_request_header: String::from("another_header"), + redact: None, + default: None, + }), + SelectorOrValue::Value("value".to_string().into()), + ]) + .into(), + headers: false, + context: false, + body: true, + uri: false, + method: false, + service_name: false, + }, + response: Default::default(), + }; + + // This will never be called because we will fail at the coprocessor. + let mut mock_subgraph_service = MockSubgraphService::new(); + + mock_subgraph_service + .expect_call() + .returning(|req: subgraph::Request| { + assert_eq!("/", req.subgraph_request.uri().to_string()); + + Ok(subgraph::Response::builder() + .data(json!({ "test": 1234_u32 })) + .errors(Vec::new()) + .extensions(crate::json_ext::Object::new()) + .context(req.context) + .build()) + }); + + let mock_http_client = mock_with_callback(move |_: http::Request| { + Box::pin(async { + Ok(http::Response::builder() + .body(RouterBody::from( + r#"{ + "version": 1, + "stage": "SubgraphRequest", + "control": "continue", + "body": { + "query": "query Long {\n me {\n name\n}\n}" + }, + "context": { + }, + "serviceName": "service name shouldn't change", + "uri": "http://thisurihaschanged" + }"#, + )) + .unwrap()) + }) + }); + + let service = subgraph_stage.as_service( + mock_http_client, + mock_subgraph_service.boxed(), + "http://test".to_string(), + "my_subgraph_service_name".to_string(), + ); + + let request = subgraph::Request::fake_builder().build(); + + assert_eq!( + 
serde_json_bytes::json!({ "test": 1234_u32 }), + service + .oneshot(request) + .await + .unwrap() + .response + .into_body() + .data + .unwrap() + ); + } + #[tokio::test] async fn external_plugin_subgraph_request_controlflow_break() { let subgraph_stage = SubgraphStage { request: SubgraphRequestConf { + condition: Default::default(), headers: false, context: false, body: true, @@ -533,6 +622,7 @@ mod tests { async fn external_plugin_subgraph_request_controlflow_break_with_message_string() { let subgraph_stage = SubgraphStage { request: SubgraphRequestConf { + condition: Default::default(), headers: false, context: false, body: true, @@ -597,6 +687,125 @@ mod tests { let subgraph_stage = SubgraphStage { request: Default::default(), response: SubgraphResponseConf { + condition: Default::default(), + headers: false, + context: false, + body: true, + service_name: false, + status_code: false, + }, + }; + + // This will never be called because we will fail at the coprocessor. + let mut mock_subgraph_service = MockSubgraphService::new(); + + mock_subgraph_service + .expect_call() + .returning(|req: subgraph::Request| { + Ok(subgraph::Response::builder() + .data(json!({ "test": 1234_u32 })) + .errors(Vec::new()) + .extensions(crate::json_ext::Object::new()) + .context(req.context) + .build()) + }); + + let mock_http_client = mock_with_callback(move |_: http::Request| { + Box::pin(async { + Ok(http::Response::builder() + .body(RouterBody::from( + r#"{ + "version": 1, + "stage": "SubgraphResponse", + "headers": { + "cookie": [ + "tasty_cookie=strawberry" + ], + "content-type": [ + "application/json" + ], + "host": [ + "127.0.0.1:4000" + ], + "apollo-federation-include-trace": [ + "ftv1" + ], + "apollographql-client-name": [ + "manual" + ], + "accept": [ + "*/*" + ], + "user-agent": [ + "curl/7.79.1" + ], + "content-length": [ + "46" + ] + }, + "body": { + "data": { + "test": 5678 + } + }, + "context": { + "entries": { + "accepts-json": false, + "accepts-wildcard": true, + 
"accepts-multipart": false, + "this-is-a-test-context": 42 + } + } + }"#, + )) + .unwrap()) + }) + }); + + let service = subgraph_stage.as_service( + mock_http_client, + mock_subgraph_service.boxed(), + "http://test".to_string(), + "my_subgraph_service_name".to_string(), + ); + + let request = subgraph::Request::fake_builder().build(); + + let response = service.oneshot(request).await.unwrap(); + + // Let's assert that the subgraph response has been transformed as it should have. + assert_eq!( + response.response.headers().get("cookie").unwrap(), + "tasty_cookie=strawberry" + ); + + assert_eq!( + response + .context + .get::<&str, u8>("this-is-a-test-context") + .unwrap() + .unwrap(), + 42 + ); + + assert_eq!( + serde_json_bytes::json!({ "test": 5678_u32 }), + response.response.into_body().data.unwrap() + ); + } + + #[tokio::test] + async fn external_plugin_subgraph_response_with_condition() { + let subgraph_stage = SubgraphStage { + request: Default::default(), + response: SubgraphResponseConf { + // Will be satisfied + condition: Condition::Exists(SubgraphSelector::ResponseContext { + response_context: String::from("context_value"), + redact: None, + default: None, + }) + .into(), headers: false, context: false, body: true, @@ -611,6 +820,9 @@ mod tests { mock_subgraph_service .expect_call() .returning(|req: subgraph::Request| { + req.context + .insert("context_value", "content".to_string()) + .unwrap(); Ok(subgraph::Response::builder() .data(json!({ "test": 1234_u32 })) .errors(Vec::new()) @@ -708,6 +920,7 @@ mod tests { let supergraph_stage = SupergraphStage { request: Default::default(), response: SupergraphResponseConf { + condition: Default::default(), headers: false, context: false, body: true, @@ -768,6 +981,7 @@ mod tests { async fn external_plugin_router_request() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -882,10 +1096,123 @@ mod tests { 
service.oneshot(request.try_into().unwrap()).await.unwrap(); } + #[tokio::test] + async fn external_plugin_router_request_with_condition() { + let router_stage = RouterStage { + request: RouterRequestConf { + // Won't be satisfied + condition: Condition::Eq([ + SelectorOrValue::Selector(RouterSelector::RequestMethod { + request_method: true, + }), + SelectorOrValue::Value("GET".to_string().into()), + ]) + .into(), + headers: true, + context: true, + body: true, + sdl: true, + path: true, + method: true, + }, + response: Default::default(), + }; + + let mock_router_service = router::service::from_supergraph_mock_callback(move |req| { + assert!(req + .context + .get::<&str, u8>("this-is-a-test-context") + .ok() + .flatten() + .is_none()); + Ok(supergraph::Response::builder() + .data(json!({ "test": 1234_u32 })) + .context(req.context) + .build() + .unwrap()) + }) + .await; + + let mock_http_client = mock_with_callback(move |req: http::Request| { + Box::pin(async { + let deserialized_request: Externalizable = + serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) + .unwrap(); + + assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); + assert_eq!( + PipelineStep::RouterRequest.to_string(), + deserialized_request.stage + ); + + let input = json!( + { + "version": 1, + "stage": "RouterRequest", + "control": "continue", + "id": "1b19c05fdafc521016df33148ad63c1b", + "headers": { + "cookie": [ + "tasty_cookie=strawberry" + ], + "content-type": [ + "application/json" + ], + "host": [ + "127.0.0.1:4000" + ], + "apollo-federation-include-trace": [ + "ftv1" + ], + "apollographql-client-name": [ + "manual" + ], + "accept": [ + "*/*" + ], + "user-agent": [ + "curl/7.79.1" + ], + "content-length": [ + "46" + ] + }, + "body": "{ + \"query\": \"query Long {\n me {\n name\n}\n}\" + }", + "context": { + "entries": { + "accepts-json": false, + "accepts-wildcard": true, + "accepts-multipart": false, + "this-is-a-test-context": 42 + } + }, + "sdl": 
"the sdl shouldnt change" + }); + Ok(http::Response::builder() + .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .unwrap()) + }) + }); + + let service = router_stage.as_service( + mock_http_client, + mock_router_service.boxed(), + "http://test".to_string(), + Arc::new("".to_string()), + ); + + let request = supergraph::Request::canned_builder().build().unwrap(); + + service.oneshot(request.try_into().unwrap()).await.unwrap(); + } + #[tokio::test] async fn external_plugin_router_request_http_get() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -1014,6 +1341,7 @@ mod tests { async fn external_plugin_router_request_controlflow_break() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -1102,6 +1430,7 @@ mod tests { async fn external_plugin_router_request_controlflow_break_with_message_string() { let router_stage = RouterStage { request: RouterRequestConf { + condition: Default::default(), headers: true, context: true, body: true, @@ -1181,6 +1510,7 @@ mod tests { async fn external_plugin_router_response() { let router_stage = RouterStage { response: RouterResponseConf { + condition: Default::default(), headers: true, context: true, body: true, diff --git a/apollo-router/src/plugins/file_uploads/rearrange_query_plan.rs b/apollo-router/src/plugins/file_uploads/rearrange_query_plan.rs index c73432d8bd..075e3950e2 100644 --- a/apollo-router/src/plugins/file_uploads/rearrange_query_plan.rs +++ b/apollo-router/src/plugins/file_uploads/rearrange_query_plan.rs @@ -81,7 +81,7 @@ fn rearrange_plan_node<'a>( PlanNode::Fetch(fetch) => { // Extract variables used in this node. 
for variable in fetch.variable_usages.iter() { - if let Some((name, range)) = variable_ranges.get_key_value(variable.as_str()) { + if let Some((name, range)) = variable_ranges.get_key_value(variable.as_ref()) { acc_variables.entry(name).or_insert(range); } } @@ -90,7 +90,7 @@ fn rearrange_plan_node<'a>( PlanNode::Subscription { primary, rest } => { // Extract variables used in this node for variable in primary.variable_usages.iter() { - if let Some((name, range)) = variable_ranges.get_key_value(variable.as_str()) { + if let Some((name, range)) = variable_ranges.get_key_value(variable.as_ref()) { acc_variables.entry(name).or_insert(range); } } diff --git a/apollo-router/src/plugins/record_replay/record.rs b/apollo-router/src/plugins/record_replay/record.rs index 16fb7f89e3..4e6563eafe 100644 --- a/apollo-router/src/plugins/record_replay/record.rs +++ b/apollo-router/src/plugins/record_replay/record.rs @@ -63,14 +63,16 @@ impl Plugin for Record { .storage_path .unwrap_or_else(default_storage_path); + let schema_config = Default::default(); + let schema = Schema::parse(init.supergraph_sdl.clone().as_str(), &schema_config)?; + let api_schema = Schema::parse_compiler_schema(&schema.create_api_schema(&schema_config)?)?; + let schema = schema.with_api_schema(api_schema); + let plugin = Self { enabled: init.config.enabled, supergraph_sdl: init.supergraph_sdl.clone(), storage_path: storage_path.clone().into(), - schema: Arc::new(Schema::parse( - init.supergraph_sdl.clone().as_str(), - &Default::default(), - )?), + schema: Arc::new(schema), }; if init.config.enabled { diff --git a/apollo-router/src/plugins/rhai/engine.rs b/apollo-router/src/plugins/rhai/engine.rs index 75cac2c75f..da87655c17 100644 --- a/apollo-router/src/plugins/rhai/engine.rs +++ b/apollo-router/src/plugins/rhai/engine.rs @@ -1200,7 +1200,9 @@ mod router_plugin { // TraceId support #[rhai_fn(return_raw)] pub(crate) fn traceid() -> Result> { - TraceId::maybe_new().ok_or_else(|| "trace unavailable".into()) + 
TraceId::maybe_new() + .or_else(TraceId::current) + .ok_or_else(|| "trace unavailable".into()) } #[rhai_fn(name = "to_string")] diff --git a/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs b/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs index 46a349bdc8..dd745ef48d 100644 --- a/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs +++ b/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs @@ -38,11 +38,11 @@ use crate::plugins::telemetry::apollo::router_id; use crate::plugins::telemetry::apollo_exporter::get_uname; use crate::plugins::telemetry::apollo_exporter::ROUTER_REPORT_TYPE_TRACES; use crate::plugins::telemetry::apollo_exporter::ROUTER_TRACING_PROTOCOL_OTLP; +use crate::plugins::telemetry::consts::SUBGRAPH_SPAN_NAME; +use crate::plugins::telemetry::consts::SUPERGRAPH_SPAN_NAME; use crate::plugins::telemetry::tracing::apollo_telemetry::APOLLO_PRIVATE_OPERATION_SIGNATURE; use crate::plugins::telemetry::tracing::BatchProcessorConfig; use crate::plugins::telemetry::GLOBAL_TRACER_NAME; -use crate::plugins::telemetry::SUBGRAPH_SPAN_NAME; -use crate::plugins::telemetry::SUPERGRAPH_SPAN_NAME; /// The Apollo Otlp exporter is a thin wrapper around the OTLP SpanExporter. #[derive(Clone, Derivative)] diff --git a/apollo-router/src/plugins/telemetry/config.rs b/apollo-router/src/plugins/telemetry/config.rs index 23279db16d..1f36eb4c34 100644 --- a/apollo-router/src/plugins/telemetry/config.rs +++ b/apollo-router/src/plugins/telemetry/config.rs @@ -244,6 +244,9 @@ pub(crate) enum TraceIdFormat { /// /// (e.g. 
Trace ID 16 -> 16) Decimal, + + /// Datadog + Datadog, } /// Apollo usage report signature normalization algorithm diff --git a/apollo-router/src/plugins/telemetry/config_new/conditions.rs b/apollo-router/src/plugins/telemetry/config_new/conditions.rs index 34662e8f97..f4782a3c13 100644 --- a/apollo-router/src/plugins/telemetry/config_new/conditions.rs +++ b/apollo-router/src/plugins/telemetry/config_new/conditions.rs @@ -7,8 +7,7 @@ use crate::plugins::telemetry::config::AttributeValue; use crate::plugins::telemetry::config_new::Selector; use crate::Context; -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum Condition { /// A condition to check a selection against a value. @@ -43,8 +42,7 @@ impl Condition<()> { } } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case", untagged)] pub(crate) enum SelectorOrValue { /// A constant value. 
@@ -848,9 +846,11 @@ where { } fn field(&mut self, value: Option) -> bool { match value { - None => self.evaluate_response_field(ty(), field(), &json!(false), &Context::new()), + None => { + self.evaluate_response_field(&ty(), field(), &json!(false), &Context::new()) + } Some(value) => { - self.evaluate_response_field(ty(), field(), &json!(value), &Context::new()) + self.evaluate_response_field(&ty(), field(), &json!(value), &Context::new()) } } } diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/metrics.snap index 1fdbf7a1f7..5ae2d31ff6 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/metrics.snap @@ -16,6 +16,8 @@ info: attributes: on.graphql.error: on_graphql_error: true + operation: + response_context: operation_name --- - name: custom_counter description: count of requests @@ -25,3 +27,4 @@ info: - value: 1 attributes: on.graphql.error: true + operation: Test diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/router.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/router.yaml index 4f4ef57519..4c53074d16 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/router.yaml +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/router.yaml @@ -11,3 +11,5 @@ telemetry: attributes: on.graphql.error: on_graphql_error: true + operation: + response_context: "operation_name" diff --git 
a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/test.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/test.yaml index f3ca06a5a7..3be827d00e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/test.yaml +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/event.attributes.on_graphql_error/test.yaml @@ -8,7 +8,10 @@ events: - supergraph_request: uri: "/hello" method: GET - query: "query { hello }" + query: "query Test { hello }" + - context: + map: + "operation_name": "Test" - context: map: "apollo::telemetry::contains_graphql_error": true diff --git a/apollo-router/src/plugins/telemetry/config_new/graphql/attributes.rs b/apollo-router/src/plugins/telemetry/config_new/graphql/attributes.rs index 1719a17664..e0267f1482 100644 --- a/apollo-router/src/plugins/telemetry/config_new/graphql/attributes.rs +++ b/apollo-router/src/plugins/telemetry/config_new/graphql/attributes.rs @@ -167,7 +167,7 @@ mod test { let ctx = Context::default(); let _ = ctx.insert(OPERATION_NAME, "operation_name".to_string()); let mut result = Default::default(); - attributes.on_response_field(&mut result, ty(), field(), &json!(true), &ctx); + attributes.on_response_field(&mut result, &ty(), field(), &json!(true), &ctx); assert_eq!(result.len(), 4); assert_eq!(result[0].key.as_str(), "graphql.field.name"); assert_eq!(result[0].value.as_str(), "field_name"); @@ -193,7 +193,7 @@ mod test { let mut result = Default::default(); attributes.on_response_field( &mut result, - ty(), + &ty(), field(), &json!(vec![true, true, true]), &ctx, diff --git a/apollo-router/src/plugins/telemetry/config_new/graphql/selectors.rs b/apollo-router/src/plugins/telemetry/config_new/graphql/selectors.rs index 16e718030b..20a648d465 100644 --- a/apollo-router/src/plugins/telemetry/config_new/graphql/selectors.rs +++ 
b/apollo-router/src/plugins/telemetry/config_new/graphql/selectors.rs @@ -134,13 +134,13 @@ impl Selector for GraphQLSelector { }, GraphQLSelector::FieldName { .. } => match value { Value::Null => None, - _ => Some(field.name.to_string().into()), + _ => Some(name_to_otel_string(&field.name).into()), }, GraphQLSelector::FieldType { field_type: FieldType::Name, } => match value { Value::Null => None, - _ => Some(field.definition.ty.inner_named_type().to_string().into()), + _ => Some(name_to_otel_string(field.definition.ty.inner_named_type()).into()), }, GraphQLSelector::FieldType { field_type: FieldType::Type, @@ -152,7 +152,7 @@ impl Selector for GraphQLSelector { }, GraphQLSelector::TypeName { .. } => match value { Value::Null => None, - _ => Some(ty.to_string().into()), + _ => Some(name_to_otel_string(ty).into()), }, GraphQLSelector::StaticField { r#static } => Some(r#static.clone().into()), GraphQLSelector::OperationName { @@ -175,6 +175,19 @@ impl Selector for GraphQLSelector { } } +fn name_to_otel_string(name: &apollo_compiler::Name) -> opentelemetry::StringValue { + if let Some(static_str) = name.as_static_str() { + static_str.into() + } else { + name.to_cloned_arc() + .expect( + "expected `apollo_compiler::Name` to always contain \ + either `&'static str` or `Arc` but both conversions failed", + ) + .into() + } +} + #[cfg(test)] mod tests { use opentelemetry::Value; @@ -190,7 +203,7 @@ mod tests { list_length: ListLength::Value, }; let result = selector.on_response_field( - ty(), + &ty(), field(), &json!(vec![true, true, true]), &Context::default(), @@ -203,7 +216,7 @@ mod tests { let selector = GraphQLSelector::FieldName { field_name: FieldName::String, }; - let result = selector.on_response_field(ty(), field(), &json!(true), &Context::default()); + let result = selector.on_response_field(&ty(), field(), &json!(true), &Context::default()); assert_eq!(result, Some(Value::String("field_name".into()))); } @@ -212,14 +225,14 @@ mod tests { let selector = 
GraphQLSelector::FieldType { field_type: FieldType::Name, }; - let result = selector.on_response_field(ty(), field(), &json!(true), &Context::default()); + let result = selector.on_response_field(&ty(), field(), &json!(true), &Context::default()); assert_eq!(result, Some(Value::String("field_type".into()))); } #[test] fn field_type_scalar_type() { - assert_scalar(ty(), field(), &json!("value")); - assert_scalar(ty(), field(), &json!(1)); + assert_scalar(&ty(), field(), &json!("value")); + assert_scalar(&ty(), field(), &json!(1)); } fn assert_scalar(ty: &NamedType, field: &Field, value: &serde_json_bytes::Value) { @@ -235,7 +248,7 @@ mod tests { let selector = GraphQLSelector::FieldType { field_type: FieldType::Type, }; - let result = selector.on_response_field(ty(), field(), &json!({}), &Context::default()); + let result = selector.on_response_field(&ty(), field(), &json!({}), &Context::default()); assert_eq!(result, Some(Value::String("object".into()))); } @@ -245,7 +258,7 @@ mod tests { field_type: FieldType::Type, }; let result = - selector.on_response_field(ty(), field(), &json!(vec![true]), &Context::default()); + selector.on_response_field(&ty(), field(), &json!(vec![true]), &Context::default()); assert_eq!(result, Some(Value::String("list".into()))); } @@ -254,7 +267,8 @@ mod tests { let selector = GraphQLSelector::TypeName { type_name: TypeName::String, }; - let result = selector.on_response_field(ty(), field(), &json!("true"), &Context::default()); + let result = + selector.on_response_field(&ty(), field(), &json!("true"), &Context::default()); assert_eq!(result, Some(Value::String("type_name".into()))); } @@ -263,7 +277,7 @@ mod tests { let selector = GraphQLSelector::StaticField { r#static: "static_value".into(), }; - let result = selector.on_response_field(ty(), field(), &json!(true), &Context::default()); + let result = selector.on_response_field(&ty(), field(), &json!(true), &Context::default()); assert_eq!(result, 
Some(Value::String("static_value".into()))); } @@ -275,7 +289,7 @@ mod tests { }; let ctx = Context::default(); let _ = ctx.insert(OPERATION_NAME, "some-operation".to_string()); - let result = selector.on_response_field(ty(), field(), &json!(true), &ctx); + let result = selector.on_response_field(&ty(), field(), &json!(true), &ctx); assert_eq!(result, Some(Value::String("some-operation".into()))); } @@ -287,7 +301,7 @@ mod tests { }; let ctx = Context::default(); let _ = ctx.insert(OPERATION_NAME, "some-operation".to_string()); - let result = selector.on_response_field(ty(), field(), &json!(true), &ctx); + let result = selector.on_response_field(&ty(), field(), &json!(true), &ctx); assert_eq!( result, Some(Value::String( @@ -302,7 +316,7 @@ mod tests { operation_name: OperationName::String, default: Some("no-operation".to_string()), }; - let result = selector.on_response_field(ty(), field(), &json!(true), &Context::default()); + let result = selector.on_response_field(&ty(), field(), &json!(true), &Context::default()); assert_eq!(result, Some(Value::String("no-operation".into()))); } } diff --git a/apollo-router/src/plugins/telemetry/config_new/instruments.rs b/apollo-router/src/plugins/telemetry/config_new/instruments.rs index b33f38101a..2d615d2950 100644 --- a/apollo-router/src/plugins/telemetry/config_new/instruments.rs +++ b/apollo-router/src/plugins/telemetry/config_new/instruments.rs @@ -1934,9 +1934,9 @@ mod tests { use std::path::PathBuf; use std::str::FromStr; - use apollo_compiler::ast::Name; use apollo_compiler::ast::NamedType; use apollo_compiler::executable::SelectionSet; + use apollo_compiler::Name; use http::HeaderMap; use http::HeaderName; use http::Method; @@ -2194,26 +2194,26 @@ mod tests { apollo_compiler::executable::Field { definition: apollo_compiler::schema::FieldDefinition { description: None, - name: NamedType::new(field_name.clone()).expect("valid field name"), + name: NamedType::new(&field_name).expect("valid field name"), arguments: 
vec![], ty: apollo_compiler::schema::Type::Named( - NamedType::new(field_type.clone()).expect("valid type name"), + NamedType::new(&field_type).expect("valid type name"), ), directives: Default::default(), } .into(), alias: None, - name: NamedType::new(field_name.clone()).expect("valid field name"), + name: NamedType::new(&field_name).expect("valid field name"), arguments: vec![], directives: Default::default(), selection_set: SelectionSet::new( - NamedType::new(field_name).expect("valid field name"), + NamedType::new(&field_name).expect("valid field name"), ), } } fn create_type_name(type_name: String) -> Name { - NamedType::new(type_name).expect("valid type name") + NamedType::new(&type_name).expect("valid type name") } } diff --git a/apollo-router/src/plugins/telemetry/config_new/mod.rs b/apollo-router/src/plugins/telemetry/config_new/mod.rs index e7bbe6319c..b493e11eb4 100644 --- a/apollo-router/src/plugins/telemetry/config_new/mod.rs +++ b/apollo-router/src/plugins/telemetry/config_new/mod.rs @@ -214,10 +214,9 @@ mod test { use apollo_compiler::ast::FieldDefinition; use apollo_compiler::ast::NamedType; - use apollo_compiler::ast::Type; use apollo_compiler::executable::Field; + use apollo_compiler::name; use apollo_compiler::Node; - use apollo_compiler::NodeStr; use opentelemetry::trace::SpanContext; use opentelemetry::trace::SpanId; use opentelemetry::trace::TraceContextExt; @@ -239,22 +238,19 @@ mod test { static FIELD: OnceLock = OnceLock::new(); FIELD.get_or_init(|| { Field::new( - NamedType::new_unchecked(NodeStr::from_static(&"field_name")), + name!("field_name"), Node::new(FieldDefinition { description: None, - name: NamedType::new_unchecked(NodeStr::from_static(&"field_name")), + name: name!("field_name"), arguments: vec![], - ty: Type::Named(NamedType::new_unchecked(NodeStr::from_static( - &"field_type", - ))), + ty: apollo_compiler::ty!(field_type), directives: Default::default(), }), ) }) } - pub(crate) fn ty() -> &'static NamedType { - static TYPE: 
NamedType = NamedType::new_unchecked(NodeStr::from_static(&"type_name")); - &TYPE + pub(crate) fn ty() -> NamedType { + name!("type_name") } #[test] diff --git a/apollo-router/src/plugins/telemetry/config_new/selectors.rs b/apollo-router/src/plugins/telemetry/config_new/selectors.rs index 05503134b0..b7e05124fe 100644 --- a/apollo-router/src/plugins/telemetry/config_new/selectors.rs +++ b/apollo-router/src/plugins/telemetry/config_new/selectors.rs @@ -27,11 +27,11 @@ use crate::query_planner::APOLLO_OPERATION_ID; use crate::services::router; use crate::services::subgraph; use crate::services::supergraph; +use crate::services::FIRST_EVENT_CONTEXT_KEY; use crate::spec::operation_limits::OperationLimits; use crate::Context; -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum TraceIdFormat { /// Open Telemetry trace ID, a hex string. @@ -40,8 +40,7 @@ pub(crate) enum TraceIdFormat { Datadog, } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum OperationName { /// The raw operation name. @@ -51,8 +50,7 @@ pub(crate) enum OperationName { } #[allow(dead_code)] -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum ErrorRepr { // /// The error code if available @@ -61,8 +59,7 @@ pub(crate) enum ErrorRepr { Reason, } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum Query { /// The raw query kind. 
@@ -77,16 +74,14 @@ pub(crate) enum Query { RootFields, } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum SubgraphQuery { /// The raw query kind. String, } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum ResponseStatus { /// The http status code. @@ -95,8 +90,7 @@ pub(crate) enum ResponseStatus { Reason, } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum OperationKind { /// The raw operation kind. @@ -119,8 +113,7 @@ impl From<&RouterValue> for InstrumentValue { } } -#[derive(Deserialize, JsonSchema, Clone, Debug)] -#[cfg_attr(test, derive(PartialEq))] +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] #[serde(deny_unknown_fields, untagged)] pub(crate) enum RouterSelector { /// A header from the request @@ -251,9 +244,8 @@ impl From<&SupergraphValue> for InstrumentValue { } #[derive(Deserialize, JsonSchema, Clone, Derivative)] -#[cfg_attr(test, derivative(PartialEq))] #[serde(deny_unknown_fields, untagged)] -#[derivative(Debug)] +#[derivative(Debug, PartialEq)] pub(crate) enum SupergraphSelector { OperationName { /// The operation name from the query. @@ -402,6 +394,11 @@ pub(crate) enum SupergraphSelector { /// The cost value to select, one of: estimated, actual, delta. 
cost: CostValue, }, + /// Boolean returning true if it's the primary response and not events like subscription events or deferred responses + IsPrimaryResponse { + /// Boolean returning true if it's the primary response and not events like subscription events or deferred responses + is_primary_response: bool, + }, } #[derive(Deserialize, JsonSchema, Clone, Debug)] @@ -421,9 +418,8 @@ impl From<&SubgraphValue> for InstrumentValue { } #[derive(Deserialize, JsonSchema, Clone, Derivative)] -#[cfg_attr(test, derivative(PartialEq))] #[serde(deny_unknown_fields, rename_all = "snake_case", untagged)] -#[derivative(Debug)] +#[derivative(Debug, PartialEq)] pub(crate) enum SubgraphSelector { SubgraphOperationName { /// The operation name from the subgraph query. @@ -590,6 +586,10 @@ pub(crate) enum SubgraphSelector { /// Optional default value. default: Option, }, + OnGraphQLError { + /// Boolean set to true if the response body contains graphql error + subgraph_on_graphql_error: bool, + }, Baggage { /// The name of the baggage item. baggage: String, @@ -934,6 +934,32 @@ impl Selector for SupergraphSelector { None } } + SupergraphSelector::OperationName { + operation_name, + default, + .. + } => { + let op_name = response.context.get(OPERATION_NAME).ok().flatten(); + match operation_name { + OperationName::String => op_name.or_else(|| default.clone()), + OperationName::Hash => op_name.or_else(|| default.clone()).map(|op_name| { + let mut hasher = sha2::Sha256::new(); + hasher.update(op_name.as_bytes()); + let result = hasher.finalize(); + hex::encode(result) + }), + } + .map(opentelemetry::Value::from) + } + SupergraphSelector::OperationKind { .. 
} => response + .context + .get::<_, String>(OPERATION_KIND) + .ok() + .flatten() + .map(opentelemetry::Value::from), + SupergraphSelector::IsPrimaryResponse { + is_primary_response: is_primary, + } if *is_primary => Some(true.into()), SupergraphSelector::Static(val) => Some(val.clone().into()), SupergraphSelector::StaticField { r#static } => Some(r#static.clone().into()), // For request @@ -987,6 +1013,43 @@ impl Selector for SupergraphSelector { None } } + SupergraphSelector::OperationName { + operation_name, + default, + .. + } => { + let op_name = ctx.get(OPERATION_NAME).ok().flatten(); + match operation_name { + OperationName::String => op_name.or_else(|| default.clone()), + OperationName::Hash => op_name.or_else(|| default.clone()).map(|op_name| { + let mut hasher = sha2::Sha256::new(); + hasher.update(op_name.as_bytes()); + let result = hasher.finalize(); + hex::encode(result) + }), + } + .map(opentelemetry::Value::from) + } + SupergraphSelector::OperationKind { .. } => ctx + .get::<_, String>(OPERATION_KIND) + .ok() + .flatten() + .map(opentelemetry::Value::from), + SupergraphSelector::IsPrimaryResponse { + is_primary_response: is_primary, + } if *is_primary => Some(opentelemetry::Value::Bool( + ctx.get_json_value(FIRST_EVENT_CONTEXT_KEY) + == Some(serde_json_bytes::Value::Bool(true)), + )), + SupergraphSelector::ResponseContext { + response_context, + default, + .. + } => ctx + .get_json_value(response_context) + .as_ref() + .and_then(|v| v.maybe_to_otel_value()) + .or_else(|| default.maybe_to_otel_value()), SupergraphSelector::Static(val) => Some(val.clone().into()), SupergraphSelector::StaticField { r#static } => Some(r#static.clone().into()), _ => None, @@ -995,6 +1058,28 @@ impl Selector for SupergraphSelector { fn on_error(&self, error: &tower::BoxError, ctx: &Context) -> Option { match self { + SupergraphSelector::OperationName { + operation_name, + default, + .. 
+ } => { + let op_name = ctx.get(OPERATION_NAME).ok().flatten(); + match operation_name { + OperationName::String => op_name.or_else(|| default.clone()), + OperationName::Hash => op_name.or_else(|| default.clone()).map(|op_name| { + let mut hasher = sha2::Sha256::new(); + hasher.update(op_name.as_bytes()); + let result = hasher.finalize(); + hex::encode(result) + }), + } + .map(opentelemetry::Value::from) + } + SupergraphSelector::OperationKind { .. } => ctx + .get::<_, String>(OPERATION_KIND) + .ok() + .flatten() + .map(opentelemetry::Value::from), SupergraphSelector::Query { query, .. } => { let limits_opt = ctx .extensions() @@ -1026,6 +1111,12 @@ impl Selector for SupergraphSelector { .as_ref() .and_then(|v| v.maybe_to_otel_value()) .or_else(|| default.maybe_to_otel_value()), + SupergraphSelector::IsPrimaryResponse { + is_primary_response: is_primary, + } if *is_primary => Some(opentelemetry::Value::Bool( + ctx.get_json_value(FIRST_EVENT_CONTEXT_KEY) + == Some(serde_json_bytes::Value::Bool(true)), + )), _ => None, } } @@ -1276,6 +1367,9 @@ impl Selector for SubgraphSelector { .as_ref() .and_then(|v| v.maybe_to_otel_value()) .or_else(|| default.maybe_to_otel_value()), + SubgraphSelector::OnGraphQLError { + subgraph_on_graphql_error: on_graphql_error, + } if *on_graphql_error => Some((!response.response.body().errors.is_empty()).into()), SubgraphSelector::Static(val) => Some(val.clone().into()), SubgraphSelector::StaticField { r#static } => Some(r#static.clone().into()), // For request @@ -1349,6 +1443,7 @@ mod test { use crate::plugins::telemetry::config_new::Selector; use crate::plugins::telemetry::otel; use crate::query_planner::APOLLO_OPERATION_ID; + use crate::services::FIRST_EVENT_CONTEXT_KEY; use crate::spec::operation_limits::OperationLimits; #[test] @@ -1844,6 +1939,32 @@ mod test { ); } + #[test] + fn supergraph_is_primary() { + let selector = SupergraphSelector::IsPrimaryResponse { + is_primary_response: true, + }; + let context = 
crate::context::Context::new(); + let _ = context.insert(FIRST_EVENT_CONTEXT_KEY, true); + assert_eq!( + selector + .on_response( + &crate::services::SupergraphResponse::fake_builder() + .context(context.clone()) + .build() + .unwrap() + ) + .unwrap(), + true.into() + ); + assert_eq!( + selector + .on_response_event(&crate::graphql::Response::builder().build(), &context) + .unwrap(), + true.into() + ); + } + #[test] fn supergraph_response_context() { let selector = SupergraphSelector::ResponseContext { @@ -2807,6 +2928,41 @@ mod test { ); } + #[test] + fn subgraph_on_graphql_error() { + let selector = SubgraphSelector::OnGraphQLError { + subgraph_on_graphql_error: true, + }; + assert_eq!( + selector + .on_response( + &crate::services::SubgraphResponse::fake_builder() + .error( + graphql::Error::builder() + .message("not found") + .extension_code("NOT_FOUND") + .build() + ) + .build() + ) + .unwrap(), + opentelemetry::Value::Bool(true) + ); + + assert_eq!( + selector + .on_response( + &crate::services::SubgraphResponse::fake_builder() + .data(serde_json_bytes::json!({ + "hello": ["bonjour", "hello", "ciao"] + })) + .build() + ) + .unwrap(), + opentelemetry::Value::Bool(false) + ); + } + #[test] fn router_response_status_reason() { let selector = RouterSelector::ResponseStatus { diff --git a/apollo-router/src/plugins/telemetry/config_new/spans.rs b/apollo-router/src/plugins/telemetry/config_new/spans.rs index 090bb13073..ff4a3b00a0 100644 --- a/apollo-router/src/plugins/telemetry/config_new/spans.rs +++ b/apollo-router/src/plugins/telemetry/config_new/spans.rs @@ -137,6 +137,7 @@ mod test { use crate::plugins::telemetry::config_new::DefaultForLevel; use crate::plugins::telemetry::config_new::Selectors; use crate::plugins::telemetry::otlp::TelemetryDataKind; + use crate::plugins::telemetry::OTEL_NAME; use crate::services::router; use crate::services::subgraph; use crate::services::supergraph; @@ -548,7 +549,7 @@ mod test { }, ); spans.attributes.custom.insert( - 
"otel.name".to_string(), + OTEL_NAME.to_string(), Conditional { selector: RouterSelector::StaticField { r#static: String::from("new_name").into(), @@ -568,7 +569,7 @@ mod test { .any(|key_val| key_val.key == opentelemetry::Key::from_static_str("test"))); assert!(values.iter().any(|key_val| key_val.key - == opentelemetry::Key::from_static_str("otel.name") + == opentelemetry::Key::from_static_str(OTEL_NAME) && key_val.value == opentelemetry::Value::String(String::from("new_name").into()))); } diff --git a/apollo-router/src/plugins/telemetry/consts.rs b/apollo-router/src/plugins/telemetry/consts.rs new file mode 100644 index 0000000000..e1d84c937b --- /dev/null +++ b/apollo-router/src/plugins/telemetry/consts.rs @@ -0,0 +1,31 @@ +pub(crate) const OTEL_NAME: &str = "otel.name"; +pub(crate) const OTEL_ORIGINAL_NAME: &str = "otel.original_name"; +pub(crate) const OTEL_KIND: &str = "otel.kind"; +pub(crate) const OTEL_STATUS_CODE: &str = "otel.status_code"; +pub(crate) const OTEL_STATUS_MESSAGE: &str = "otel.status_message"; +#[allow(dead_code)] +pub(crate) const OTEL_STATUS_DESCRIPTION: &str = "otel.status_description"; +pub(crate) const OTEL_STATUS_CODE_OK: &str = "OK"; +pub(crate) const OTEL_STATUS_CODE_ERROR: &str = "ERROR"; + +pub(crate) const FIELD_EXCEPTION_MESSAGE: &str = "exception.message"; +pub(crate) const FIELD_EXCEPTION_STACKTRACE: &str = "exception.stacktrace"; +pub(crate) const SUPERGRAPH_SPAN_NAME: &str = "supergraph"; +pub(crate) const SUBGRAPH_SPAN_NAME: &str = "subgraph"; +pub(crate) const ROUTER_SPAN_NAME: &str = "router"; +pub(crate) const EXECUTION_SPAN_NAME: &str = "execution"; +pub(crate) const REQUEST_SPAN_NAME: &str = "request"; +pub(crate) const QUERY_PLANNING_SPAN_NAME: &str = "query_planning"; +pub(crate) const HTTP_REQUEST_SPAN_NAME: &str = "http_request"; +pub(crate) const SUBGRAPH_REQUEST_SPAN_NAME: &str = "subgraph_request"; + +pub(crate) const BUILT_IN_SPAN_NAMES: [&str; 8] = [ + REQUEST_SPAN_NAME, + ROUTER_SPAN_NAME, + 
SUPERGRAPH_SPAN_NAME, + SUBGRAPH_SPAN_NAME, + SUBGRAPH_REQUEST_SPAN_NAME, + HTTP_REQUEST_SPAN_NAME, + QUERY_PLANNING_SPAN_NAME, + EXECUTION_SPAN_NAME, +]; diff --git a/apollo-router/src/plugins/telemetry/dynamic_attribute.rs b/apollo-router/src/plugins/telemetry/dynamic_attribute.rs index 6e0f8df595..9d04a022ba 100644 --- a/apollo-router/src/plugins/telemetry/dynamic_attribute.rs +++ b/apollo-router/src/plugins/telemetry/dynamic_attribute.rs @@ -6,13 +6,13 @@ use tracing_subscriber::registry::LookupSpan; use tracing_subscriber::Layer; use tracing_subscriber::Registry; +use super::consts::OTEL_KIND; +use super::consts::OTEL_NAME; +use super::consts::OTEL_STATUS_CODE; +use super::consts::OTEL_STATUS_MESSAGE; use super::formatters::APOLLO_PRIVATE_PREFIX; use super::otel::layer::str_to_span_kind; use super::otel::layer::str_to_status; -use super::otel::layer::SPAN_KIND_FIELD; -use super::otel::layer::SPAN_NAME_FIELD; -use super::otel::layer::SPAN_STATUS_CODE_FIELD; -use super::otel::layer::SPAN_STATUS_MESSAGE_FIELD; use super::otel::OtelData; use super::reload::IsSampled; @@ -197,10 +197,12 @@ impl SpanDynAttribute for ::tracing::Span { fn update_otel_data(otel_data: &mut OtelData, key: &Key, value: &opentelemetry::Value) { match key.as_str() { - SPAN_NAME_FIELD => otel_data.forced_span_name = Some(value.to_string()), - SPAN_KIND_FIELD => otel_data.builder.span_kind = str_to_span_kind(&value.as_str()), - SPAN_STATUS_CODE_FIELD => otel_data.forced_status = str_to_status(&value.as_str()).into(), - SPAN_STATUS_MESSAGE_FIELD => { + OTEL_NAME if otel_data.forced_span_name.is_none() => { + otel_data.forced_span_name = Some(value.to_string()) + } + OTEL_KIND => otel_data.builder.span_kind = str_to_span_kind(&value.as_str()), + OTEL_STATUS_CODE => otel_data.forced_status = str_to_status(&value.as_str()).into(), + OTEL_STATUS_MESSAGE => { otel_data.builder.status = opentelemetry::trace::Status::error(value.as_str().to_string()) } diff --git 
a/apollo-router/src/plugins/telemetry/metrics/span_metrics_exporter.rs b/apollo-router/src/plugins/telemetry/metrics/span_metrics_exporter.rs index 5e6778ab74..a5869daad5 100644 --- a/apollo-router/src/plugins/telemetry/metrics/span_metrics_exporter.rs +++ b/apollo-router/src/plugins/telemetry/metrics/span_metrics_exporter.rs @@ -9,11 +9,11 @@ use tracing_subscriber::layer::Context; use tracing_subscriber::registry::LookupSpan; use tracing_subscriber::Layer; -use crate::axum_factory::utils::REQUEST_SPAN_NAME; -use crate::plugins::telemetry::EXECUTION_SPAN_NAME; -use crate::plugins::telemetry::SUBGRAPH_SPAN_NAME; -use crate::plugins::telemetry::SUPERGRAPH_SPAN_NAME; -use crate::services::QUERY_PLANNING_SPAN_NAME; +use crate::plugins::telemetry::consts::EXECUTION_SPAN_NAME; +use crate::plugins::telemetry::consts::QUERY_PLANNING_SPAN_NAME; +use crate::plugins::telemetry::consts::REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::SUBGRAPH_SPAN_NAME; +use crate::plugins::telemetry::consts::SUPERGRAPH_SPAN_NAME; const SUBGRAPH_ATTRIBUTE_NAME: &str = "apollo.subgraph.name"; diff --git a/apollo-router/src/plugins/telemetry/mod.rs b/apollo-router/src/plugins/telemetry/mod.rs index 09da428fd7..14dd43407c 100644 --- a/apollo-router/src/plugins/telemetry/mod.rs +++ b/apollo-router/src/plugins/telemetry/mod.rs @@ -39,6 +39,7 @@ use opentelemetry::trace::TraceState; use opentelemetry::trace::TracerProvider; use opentelemetry::Key; use opentelemetry::KeyValue; +use opentelemetry_api::trace::TraceId; use opentelemetry_semantic_conventions::trace::HTTP_REQUEST_METHOD; use parking_lot::Mutex; use rand::Rng; @@ -78,7 +79,6 @@ use self::tracing::apollo_telemetry::CLIENT_NAME_KEY; use self::tracing::apollo_telemetry::CLIENT_VERSION_KEY; use crate::apollo_studio_interop::ExtendedReferenceStats; use crate::apollo_studio_interop::ReferencedEnums; -use crate::axum_factory::utils::REQUEST_SPAN_NAME; use crate::context::CONTAINS_GRAPHQL_ERROR; use crate::context::OPERATION_KIND; use 
crate::context::OPERATION_NAME; @@ -100,6 +100,15 @@ use crate::plugins::telemetry::config::TracingCommon; use crate::plugins::telemetry::config_new::cost::add_cost_attributes; use crate::plugins::telemetry::config_new::graphql::GraphQLInstruments; use crate::plugins::telemetry::config_new::instruments::SupergraphInstruments; +use crate::plugins::telemetry::config_new::trace_id; +use crate::plugins::telemetry::config_new::DatadogId; +use crate::plugins::telemetry::consts::EXECUTION_SPAN_NAME; +use crate::plugins::telemetry::consts::OTEL_NAME; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_ERROR; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_OK; +use crate::plugins::telemetry::consts::REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::ROUTER_SPAN_NAME; use crate::plugins::telemetry::dynamic_attribute::SpanDynAttribute; use crate::plugins::telemetry::fmt_layer::create_fmt_layer; use crate::plugins::telemetry::metrics::apollo::histogram::ListLengthHistogram; @@ -134,7 +143,6 @@ use crate::services::SubgraphResponse; use crate::services::SupergraphRequest; use crate::services::SupergraphResponse; use crate::spec::operation_limits::OperationLimits; -use crate::tracer::TraceId; use crate::Context; use crate::ListenAddr; @@ -143,6 +151,7 @@ pub(crate) mod apollo_exporter; pub(crate) mod apollo_otlp_exporter; pub(crate) mod config; pub(crate) mod config_new; +pub(crate) mod consts; pub(crate) mod dynamic_attribute; mod endpoint; mod fmt_layer; @@ -159,22 +168,12 @@ pub(crate) mod tracing; pub(crate) mod utils; // Tracing consts -pub(crate) const SUPERGRAPH_SPAN_NAME: &str = "supergraph"; -pub(crate) const SUBGRAPH_SPAN_NAME: &str = "subgraph"; -pub(crate) const ROUTER_SPAN_NAME: &str = "router"; -pub(crate) const EXECUTION_SPAN_NAME: &str = "execution"; const CLIENT_NAME: &str = "apollo_telemetry::client_name"; const CLIENT_VERSION: &str = "apollo_telemetry::client_version"; const 
SUBGRAPH_FTV1: &str = "apollo_telemetry::subgraph_ftv1"; pub(crate) const STUDIO_EXCLUDE: &str = "apollo_telemetry::studio::exclude"; pub(crate) const LOGGING_DISPLAY_HEADERS: &str = "apollo_telemetry::logging::display_headers"; pub(crate) const LOGGING_DISPLAY_BODY: &str = "apollo_telemetry::logging::display_body"; - -pub(crate) const OTEL_STATUS_CODE: &str = "otel.status_code"; -#[allow(dead_code)] -pub(crate) const OTEL_STATUS_DESCRIPTION: &str = "otel.status_description"; -pub(crate) const OTEL_STATUS_CODE_OK: &str = "OK"; -pub(crate) const OTEL_STATUS_CODE_ERROR: &str = "ERROR"; const GLOBAL_TRACER_NAME: &str = "apollo-router"; const DEFAULT_EXPOSE_TRACE_ID_HEADER: &str = "apollo-trace-id"; static DEFAULT_EXPOSE_TRACE_ID_HEADER_NAME: HeaderName = @@ -337,11 +336,17 @@ impl Plugin for Telemetry { span.record("graphql.operation.name", operation_name); } match (&operation_kind, &operation_name) { - (Ok(Some(kind)), Ok(Some(name))) => { - span.record("otel.name", format!("{kind} {name}")) + (Ok(Some(kind)), Ok(Some(name))) => span.set_span_dyn_attribute( + OTEL_NAME.into(), + format!("{kind} {name}").into(), + ), + (Ok(Some(kind)), _) => { + span.set_span_dyn_attribute(OTEL_NAME.into(), kind.clone().into()) } - (Ok(Some(kind)), _) => span.record("otel.name", kind), - _ => span.record("otel.name", "GraphQL Operation"), + _ => span.set_span_dyn_attribute( + OTEL_NAME.into(), + "GraphQL Operation".into(), + ), }; } } @@ -556,17 +561,18 @@ impl Plugin for Telemetry { }); // Append the trace ID with the right format, based on the config - let format_id = |trace: TraceId| { + let format_id = |trace_id: TraceId| { let id = match config.exporters.tracing.response_trace_id.format { - TraceIdFormat::Hexadecimal => format!("{:032x}", trace.to_u128()), - TraceIdFormat::Decimal => format!("{}", trace.to_u128()), + TraceIdFormat::Hexadecimal => format!("{:032x}", trace_id), + TraceIdFormat::Decimal => format!("{}", u128::from_be_bytes(trace_id.to_bytes())), + 
TraceIdFormat::Datadog => trace_id.to_datadog() }; HeaderValue::from_str(&id).ok() }; if let (Some(header_name), Some(trace_id)) = ( expose_trace_id_header, - TraceId::current().and_then(format_id), + trace_id().and_then(format_id), ) { resp.response.headers_mut().append(header_name, trace_id); } @@ -859,7 +865,7 @@ impl Telemetry { if propagation.zipkin || tracing.zipkin.enabled { propagators.push(Box::::default()); } - if propagation.datadog || tracing.datadog.enabled { + if propagation.datadog || tracing.datadog.enabled() { propagators.push(Box::::default()); } if propagation.aws_xray { diff --git a/apollo-router/src/plugins/telemetry/otel/layer.rs b/apollo-router/src/plugins/telemetry/otel/layer.rs index b1a43b1d5d..51922f4833 100644 --- a/apollo-router/src/plugins/telemetry/otel/layer.rs +++ b/apollo-router/src/plugins/telemetry/otel/layer.rs @@ -31,24 +31,23 @@ use tracing_subscriber::Layer; use super::OtelData; use super::PreSampledTracer; -use crate::axum_factory::utils::REQUEST_SPAN_NAME; use crate::plugins::telemetry::config::Sampler; use crate::plugins::telemetry::config::SamplerOption; +use crate::plugins::telemetry::consts::FIELD_EXCEPTION_MESSAGE; +use crate::plugins::telemetry::consts::FIELD_EXCEPTION_STACKTRACE; +use crate::plugins::telemetry::consts::OTEL_KIND; +use crate::plugins::telemetry::consts::OTEL_NAME; +use crate::plugins::telemetry::consts::OTEL_ORIGINAL_NAME; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; +use crate::plugins::telemetry::consts::OTEL_STATUS_MESSAGE; +use crate::plugins::telemetry::consts::REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::ROUTER_SPAN_NAME; use crate::plugins::telemetry::reload::IsSampled; use crate::plugins::telemetry::reload::SampledSpan; use crate::plugins::telemetry::reload::SPAN_SAMPLING_RATE; -use crate::plugins::telemetry::ROUTER_SPAN_NAME; use crate::query_planner::subscription::SUBSCRIPTION_EVENT_SPAN_NAME; use crate::router_factory::STARTING_SPAN_NAME; -pub(crate) const 
SPAN_NAME_FIELD: &str = "otel.name"; -pub(crate) const SPAN_KIND_FIELD: &str = "otel.kind"; -pub(crate) const SPAN_STATUS_CODE_FIELD: &str = "otel.status_code"; -pub(crate) const SPAN_STATUS_MESSAGE_FIELD: &str = "otel.status_message"; - -const FIELD_EXCEPTION_MESSAGE: &str = "exception.message"; -const FIELD_EXCEPTION_STACKTRACE: &str = "exception.stacktrace"; - /// An [OpenTelemetry] propagation layer for use in a project that uses /// [tracing]. /// @@ -353,12 +352,12 @@ impl<'a> field::Visit for SpanAttributeVisitor<'a> { /// [`Span`]: opentelemetry::trace::Span fn record_str(&mut self, field: &field::Field, value: &str) { match field.name() { - SPAN_NAME_FIELD => self.span_builder.name = value.to_string().into(), - SPAN_KIND_FIELD => self.span_builder.span_kind = str_to_span_kind(value), - SPAN_STATUS_CODE_FIELD => { + OTEL_NAME => self.span_builder.name = value.to_string().into(), + OTEL_KIND => self.span_builder.span_kind = str_to_span_kind(value), + OTEL_STATUS_CODE => { self.span_builder.status = str_to_status(value); } - SPAN_STATUS_MESSAGE_FIELD => { + OTEL_STATUS_MESSAGE => { self.span_builder.status = otel::Status::error(value.to_string()) } _ => self.record(KeyValue::new(field.name(), value.to_string())), @@ -371,14 +370,10 @@ impl<'a> field::Visit for SpanAttributeVisitor<'a> { /// [`Span`]: opentelemetry::trace::Span fn record_debug(&mut self, field: &field::Field, value: &dyn fmt::Debug) { match field.name() { - SPAN_NAME_FIELD => self.span_builder.name = format!("{:?}", value).into(), - SPAN_KIND_FIELD => { - self.span_builder.span_kind = str_to_span_kind(&format!("{:?}", value)) - } - SPAN_STATUS_CODE_FIELD => { - self.span_builder.status = str_to_status(&format!("{:?}", value)) - } - SPAN_STATUS_MESSAGE_FIELD => { + OTEL_NAME => self.span_builder.name = format!("{:?}", value).into(), + OTEL_KIND => self.span_builder.span_kind = str_to_span_kind(&format!("{:?}", value)), + OTEL_STATUS_CODE => self.span_builder.status = 
str_to_status(&format!("{:?}", value)), + OTEL_STATUS_MESSAGE => { self.span_builder.status = otel::Status::error(format!("{:?}", value)) } _ => self.record(Key::new(field.name()).string(format!("{:?}", value))), @@ -1083,7 +1078,7 @@ where let attributes = builder .attributes - .get_or_insert_with(|| OrderMap::with_capacity(2)); + .get_or_insert_with(|| OrderMap::with_capacity(3)); attributes.insert(busy_ns, timings.busy.into()); attributes.insert(idle_ns, timings.idle.into()); } @@ -1092,6 +1087,11 @@ where builder.status = forced_status; } if let Some(forced_span_name) = forced_span_name { + // Insert the original span name as an attribute so that we can map it later + let attributes = builder + .attributes + .get_or_insert_with(|| OrderMap::with_capacity(1)); + attributes.insert(OTEL_ORIGINAL_NAME.into(), builder.name.into()); builder.name = forced_span_name.into(); } @@ -1158,6 +1158,7 @@ mod tests { use super::*; use crate::plugins::telemetry::dynamic_attribute::SpanDynAttribute; + use crate::plugins::telemetry::OTEL_NAME; #[derive(Debug, Clone)] struct TestTracer(Arc>>); @@ -1295,7 +1296,7 @@ mod tests { let span = tracing::debug_span!("static_name", otel.name = dynamic_name.as_str()); let _entered = span.enter(); span.set_span_dyn_attribute( - Key::from_static_str("otel.name"), + Key::from_static_str(OTEL_NAME), opentelemetry::Value::String(forced_dynamic_name.clone().into()), ); }); diff --git a/apollo-router/src/plugins/telemetry/span_factory.rs b/apollo-router/src/plugins/telemetry/span_factory.rs index dc4edb01da..a13b2da7fd 100644 --- a/apollo-router/src/plugins/telemetry/span_factory.rs +++ b/apollo-router/src/plugins/telemetry/span_factory.rs @@ -3,12 +3,12 @@ use serde::Deserialize; use tracing::error_span; use tracing::info_span; -use crate::axum_factory::utils::REQUEST_SPAN_NAME; use crate::context::OPERATION_NAME; +use crate::plugins::telemetry::consts::REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::ROUTER_SPAN_NAME; +use 
crate::plugins::telemetry::consts::SUBGRAPH_SPAN_NAME; +use crate::plugins::telemetry::consts::SUPERGRAPH_SPAN_NAME; use crate::plugins::telemetry::Telemetry; -use crate::plugins::telemetry::ROUTER_SPAN_NAME; -use crate::plugins::telemetry::SUBGRAPH_SPAN_NAME; -use crate::plugins::telemetry::SUPERGRAPH_SPAN_NAME; use crate::services::SubgraphRequest; use crate::services::SupergraphRequest; use crate::tracer::TraceId; @@ -97,7 +97,7 @@ impl SpanMode { } SpanMode::SpecCompliant => { info_span!(ROUTER_SPAN_NAME, - // Needed for apollo_telemetry + // Needed for apollo_telemetry and datadog span mapping "http.route" = %request.uri(), "http.request.method" = %request.method(), "otel.name" = ::tracing::field::Empty, diff --git a/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs b/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs index be8b8fad63..fc343c6b4d 100644 --- a/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs +++ b/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs @@ -66,6 +66,10 @@ use crate::plugins::telemetry::config_new::cost::APOLLO_PRIVATE_COST_ACTUAL; use crate::plugins::telemetry::config_new::cost::APOLLO_PRIVATE_COST_ESTIMATED; use crate::plugins::telemetry::config_new::cost::APOLLO_PRIVATE_COST_RESULT; use crate::plugins::telemetry::config_new::cost::APOLLO_PRIVATE_COST_STRATEGY; +use crate::plugins::telemetry::consts::EXECUTION_SPAN_NAME; +use crate::plugins::telemetry::consts::ROUTER_SPAN_NAME; +use crate::plugins::telemetry::consts::SUBGRAPH_SPAN_NAME; +use crate::plugins::telemetry::consts::SUPERGRAPH_SPAN_NAME; use crate::plugins::telemetry::otlp::Protocol; use crate::plugins::telemetry::tracing::apollo::TracesReport; use crate::plugins::telemetry::tracing::BatchProcessorConfig; @@ -74,10 +78,6 @@ use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_ALIASES; use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_DEPTH; use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_HEIGHT; use 
crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_ROOT_FIELDS; -use crate::plugins::telemetry::EXECUTION_SPAN_NAME; -use crate::plugins::telemetry::ROUTER_SPAN_NAME; -use crate::plugins::telemetry::SUBGRAPH_SPAN_NAME; -use crate::plugins::telemetry::SUPERGRAPH_SPAN_NAME; use crate::query_planner::subscription::SUBSCRIPTION_EVENT_SPAN_NAME; use crate::query_planner::OperationKind; use crate::query_planner::CONDITION_ELSE_SPAN_NAME; diff --git a/apollo-router/src/plugins/telemetry/tracing/datadog.rs b/apollo-router/src/plugins/telemetry/tracing/datadog.rs index cd5f4351a2..345c54dae9 100644 --- a/apollo-router/src/plugins/telemetry/tracing/datadog.rs +++ b/apollo-router/src/plugins/telemetry/tracing/datadog.rs @@ -3,7 +3,6 @@ use std::collections::HashMap; use http::Uri; -use lazy_static::lazy_static; use opentelemetry::sdk; use opentelemetry::sdk::trace::BatchSpanProcessor; use opentelemetry::sdk::trace::Builder; @@ -18,41 +17,72 @@ use tower::BoxError; use crate::plugins::telemetry::config::GenericWith; use crate::plugins::telemetry::config::TracingCommon; use crate::plugins::telemetry::config_new::spans::Spans; +use crate::plugins::telemetry::consts::BUILT_IN_SPAN_NAMES; +use crate::plugins::telemetry::consts::HTTP_REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::OTEL_ORIGINAL_NAME; +use crate::plugins::telemetry::consts::QUERY_PLANNING_SPAN_NAME; +use crate::plugins::telemetry::consts::REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::ROUTER_SPAN_NAME; +use crate::plugins::telemetry::consts::SUBGRAPH_REQUEST_SPAN_NAME; +use crate::plugins::telemetry::consts::SUBGRAPH_SPAN_NAME; +use crate::plugins::telemetry::consts::SUPERGRAPH_SPAN_NAME; use crate::plugins::telemetry::endpoint::UriEndpoint; use crate::plugins::telemetry::tracing::BatchProcessorConfig; use crate::plugins::telemetry::tracing::SpanProcessorExt; use crate::plugins::telemetry::tracing::TracingConfigurator; -lazy_static! 
{ - static ref SPAN_RESOURCE_NAME_ATTRIBUTE_MAPPING: HashMap<&'static str, &'static str> = { - let mut map = HashMap::new(); - map.insert("request", "http.route"); - map.insert("supergraph", "graphql.operation.name"); - map.insert("query_planning", "graphql.operation.name"); - map.insert("subgraph", "subgraph.name"); - map.insert("subgraph_request", "graphql.operation.name"); - map - }; - static ref DEFAULT_ENDPOINT: Uri = Uri::from_static("http://127.0.0.1:8126"); +fn default_resource_mappings() -> HashMap { + let mut map = HashMap::with_capacity(7); + map.insert(REQUEST_SPAN_NAME, "http.route"); + map.insert(ROUTER_SPAN_NAME, "http.route"); + map.insert(SUPERGRAPH_SPAN_NAME, "graphql.operation.name"); + map.insert(QUERY_PLANNING_SPAN_NAME, "graphql.operation.name"); + map.insert(SUBGRAPH_SPAN_NAME, "subgraph.name"); + map.insert(SUBGRAPH_REQUEST_SPAN_NAME, "graphql.operation.name"); + map.insert(HTTP_REQUEST_SPAN_NAME, "http.route"); + map.iter() + .map(|(k, v)| (k.to_string(), v.to_string())) + .collect() } -#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] +const ENV_KEY: Key = Key::from_static_str("env"); +const DEFAULT_ENDPOINT: &str = "http://127.0.0.1:8126"; + +#[derive(Debug, Clone, Deserialize, JsonSchema, serde_derive_default::Default)] #[serde(deny_unknown_fields)] pub(crate) struct Config { /// Enable datadog - pub(crate) enabled: bool, + enabled: bool, /// The endpoint to send to #[serde(default)] - pub(crate) endpoint: UriEndpoint, + endpoint: UriEndpoint, /// batch processor configuration #[serde(default)] - pub(crate) batch_processor: BatchProcessorConfig, + batch_processor: BatchProcessorConfig, /// Enable datadog span mapping for span name and resource name. + #[serde(default = "default_true")] + enable_span_mapping: bool, + + /// Fixes the span names, this means that the APM view will show the original span names in the operation dropdown. 
+ #[serde(default = "default_true")] + fixed_span_names: bool, + + /// Custom mapping to be used as the resource field in spans, defaults to: + /// router -> http.route + /// supergraph -> graphql.operation.name + /// query_planning -> graphql.operation.name + /// subgraph -> subgraph.name + /// subgraph_request -> subgraph.name + /// http_request -> http.route #[serde(default)] - pub(crate) enable_span_mapping: bool, + resource_mapping: HashMap, +} + +fn default_true() -> bool { + true } impl TracingConfigurator for Config { @@ -67,25 +97,60 @@ impl TracingConfigurator for Config { _spans_config: &Spans, ) -> Result { tracing::info!("Configuring Datadog tracing: {}", self.batch_processor); - let enable_span_mapping = self.enable_span_mapping.then_some(true); let common: sdk::trace::Config = trace.into(); + + // Precompute representation otel Keys for the mappings so that we don't do heap allocation for each span + let resource_mappings = self.enable_span_mapping.then(|| { + let mut resource_mappings = default_resource_mappings(); + resource_mappings.extend(self.resource_mapping.clone()); + resource_mappings + .iter() + .map(|(k, v)| (k.clone(), opentelemetry::Key::from(v.clone()))) + .collect::>() + }); + + let fixed_span_names = self.fixed_span_names; + let exporter = opentelemetry_datadog::new_pipeline() - .with(&self.endpoint.to_uri(&DEFAULT_ENDPOINT), |builder, e| { - builder.with_agent_endpoint(e.to_string().trim_end_matches('/')) + .with( + &self.endpoint.to_uri(&Uri::from_static(DEFAULT_ENDPOINT)), + |builder, e| builder.with_agent_endpoint(e.to_string().trim_end_matches('/')), + ) + .with(&resource_mappings, |builder, resource_mappings| { + let resource_mappings = resource_mappings.clone(); + builder.with_resource_mapping(move |span, _model_config| { + let span_name = if let Some(original) = span + .attributes + .get(&Key::from_static_str(OTEL_ORIGINAL_NAME)) + { + original.as_str() + } else { + span.name.clone() + }; + if let Some(mapping) = 
resource_mappings.get(span_name.as_ref()) { + if let Some(Value::String(value)) = span.attributes.get(mapping) { + return value.as_str(); + } + } + return span.name.as_ref(); + }) }) - .with(&enable_span_mapping, |builder, _e| { - builder - .with_name_mapping(|span, _model_config| span.name.as_ref()) - .with_resource_mapping(|span, _model_config| { - SPAN_RESOURCE_NAME_ATTRIBUTE_MAPPING - .get(span.name.as_ref()) - .and_then(|key| span.attributes.get(&Key::from_static_str(key))) - .and_then(|value| match value { - Value::String(value) => Some(value.as_str()), - _ => None, - }) - .unwrap_or(span.name.as_ref()) - }) + .with_name_mapping(move |span, _model_config| { + if fixed_span_names { + if let Some(original) = span + .attributes + .get(&Key::from_static_str(OTEL_ORIGINAL_NAME)) + { + // Datadog expects static span names, not the ones in the otel spec. + // Remap the span name to the original name if it was remapped. + for name in BUILT_IN_SPAN_NAMES { + if name == original.as_str() { + return name; + } + } + } + } + &span.name }) .with( &common.resource.get(SERVICE_NAME), @@ -95,6 +160,9 @@ impl TracingConfigurator for Config { builder.with_service_name(service_name.as_str()) }, ) + .with(&common.resource.get(ENV_KEY), |builder, env| { + builder.with_env(env.as_str()) + }) .with_version( common .resource diff --git a/apollo-router/src/plugins/traffic_shaping/mod.rs b/apollo-router/src/plugins/traffic_shaping/mod.rs index 6c4dd43a0b..a3ddda0e6d 100644 --- a/apollo-router/src/plugins/traffic_shaping/mod.rs +++ b/apollo-router/src/plugins/traffic_shaping/mod.rs @@ -17,13 +17,13 @@ use std::sync::Mutex; use std::time::Duration; use futures::future::BoxFuture; +use futures::FutureExt; use http::header::CONTENT_ENCODING; use http::HeaderValue; +use http::StatusCode; use schemars::JsonSchema; use serde::Deserialize; -use tower::retry::Retry; use tower::util::Either; -use tower::util::Oneshot; use tower::BoxError; use tower::Service; use tower::ServiceBuilder; @@ -31,11 
+31,13 @@ use tower::ServiceExt; use self::deduplication::QueryDeduplicationLayer; use self::rate::RateLimitLayer; -pub(crate) use self::rate::RateLimited; +use self::rate::RateLimited; pub(crate) use self::retry::RetryPolicy; -pub(crate) use self::timeout::Elapsed; +use self::timeout::Elapsed; use self::timeout::TimeoutLayer; use crate::error::ConfigurationError; +use crate::graphql; +use crate::layers::ServiceBuilderExt; use crate::plugin::Plugin; use crate::plugin::PluginInit; use crate::register_plugin; @@ -266,15 +268,7 @@ impl Plugin for TrafficShaping { pub(crate) type TrafficShapingSubgraphFuture = Either< Either< BoxFuture<'static, Result>, - timeout::future::ResponseFuture< - Oneshot< - Either< - Retry, S>>, - Either, S>, - >, - subgraph::Request, - >, - >, + BoxFuture<'static, Result>, >, >::Future, >; @@ -295,9 +289,7 @@ impl TrafficShaping { supergraph::Request, Response = supergraph::Response, Error = BoxError, - Future = timeout::future::ResponseFuture< - Oneshot, S>, supergraph::Request>, - >, + Future = BoxFuture<'static, Result>, > + Clone + Send + Sync @@ -311,6 +303,32 @@ impl TrafficShaping { >::Future: std::marker::Send, { ServiceBuilder::new() + .map_future_with_request_data( + |req: &supergraph::Request| req.context.clone(), + move |ctx, future| { + async { + let response: Result = future.await; + match response { + Err(error) if error.is::() => { + supergraph::Response::error_builder() + .status_code(StatusCode::GATEWAY_TIMEOUT) + .error::(Elapsed::new().into()) + .context(ctx) + .build() + } + Err(error) if error.is::() => { + supergraph::Response::error_builder() + .status_code(StatusCode::TOO_MANY_REQUESTS) + .error::(RateLimited::new().into()) + .context(ctx) + .build() + } + _ => response, + } + } + .boxed() + }, + ) .layer(TimeoutLayer::new( self.config .router @@ -380,6 +398,31 @@ impl TrafficShaping { .option_layer(config.shaping.deduplicate_query.unwrap_or_default().then( QueryDeduplicationLayer::default )) + 
.map_future_with_request_data( + |req: &subgraph::Request| req.context.clone(), + move |ctx, future| { + async { + let response: Result = future.await; + match response { + Err(error) if error.is::() => { + subgraph::Response::error_builder() + .status_code(StatusCode::GATEWAY_TIMEOUT) + .error::(Elapsed::new().into()) + .context(ctx) + .build() + } + Err(error) if error.is::() => { + subgraph::Response::error_builder() + .status_code(StatusCode::TOO_MANY_REQUESTS) + .error::(RateLimited::new().into()) + .context(ctx) + .build() + } + _ => response, + } + }.boxed() + }, + ) .layer(TimeoutLayer::new( config.shaping .timeout @@ -419,6 +462,7 @@ mod test { use std::sync::Arc; use bytes::Bytes; + use maplit::hashmap; use once_cell::sync::Lazy; use serde_json_bytes::json; use serde_json_bytes::ByteString; @@ -744,41 +788,64 @@ mod test { let plugin = get_traffic_shaping_plugin(&config).await; - let test_service = MockSubgraph::new(HashMap::new()); + let test_service = MockSubgraph::new(hashmap! 
{ + graphql::Request::default() => graphql::Response::default() + }); - let _response = plugin + assert!(&plugin .as_any() .downcast_ref::() .unwrap() .subgraph_service_internal("test", test_service.clone()) .oneshot(SubgraphRequest::fake_builder().build()) .await - .unwrap(); - let _response = plugin - .as_any() - .downcast_ref::() .unwrap() - .subgraph_service_internal("test", test_service.clone()) - .oneshot(SubgraphRequest::fake_builder().build()) - .await - .expect_err("should be in error due to a timeout and rate limit"); - let _response = plugin + .response + .body() + .errors + .is_empty()); + assert_eq!( + plugin + .as_any() + .downcast_ref::() + .unwrap() + .subgraph_service_internal("test", test_service.clone()) + .oneshot(SubgraphRequest::fake_builder().build()) + .await + .unwrap() + .response + .body() + .errors[0] + .extensions + .get("code") + .unwrap(), + "REQUEST_RATE_LIMITED" + ); + assert!(plugin .as_any() .downcast_ref::() .unwrap() .subgraph_service_internal("another", test_service.clone()) .oneshot(SubgraphRequest::fake_builder().build()) .await - .unwrap(); + .unwrap() + .response + .body() + .errors + .is_empty()); tokio::time::sleep(Duration::from_millis(300)).await; - let _response = plugin + assert!(plugin .as_any() .downcast_ref::() .unwrap() .subgraph_service_internal("test", test_service.clone()) .oneshot(SubgraphRequest::fake_builder().build()) .await - .unwrap(); + .unwrap() + .response + .body() + .errors + .is_empty()); } #[tokio::test(flavor = "multi_thread")] @@ -812,7 +879,7 @@ mod test { mock_service }); - let _response = plugin + assert!(plugin .as_any() .downcast_ref::() .unwrap() @@ -822,18 +889,30 @@ mod test { .unwrap() .next_response() .await - .unwrap(); - - assert!(plugin - .as_any() - .downcast_ref::() .unwrap() - .supergraph_service_internal(mock_service.clone()) - .oneshot(SupergraphRequest::fake_builder().build().unwrap()) - .await - .is_err()); + .errors + .is_empty()); + + assert_eq!( + plugin + .as_any() + 
.downcast_ref::() + .unwrap() + .supergraph_service_internal(mock_service.clone()) + .oneshot(SupergraphRequest::fake_builder().build().unwrap()) + .await + .unwrap() + .next_response() + .await + .unwrap() + .errors[0] + .extensions + .get("code") + .unwrap(), + "REQUEST_RATE_LIMITED" + ); tokio::time::sleep(Duration::from_millis(300)).await; - let _response = plugin + assert!(plugin .as_any() .downcast_ref::() .unwrap() @@ -843,6 +922,8 @@ mod test { .unwrap() .next_response() .await - .unwrap(); + .unwrap() + .errors + .is_empty()); } } diff --git a/apollo-router/src/plugins/traffic_shaping/rate/error.rs b/apollo-router/src/plugins/traffic_shaping/rate/error.rs index 6e06c5823a..d1c7ef09e3 100644 --- a/apollo-router/src/plugins/traffic_shaping/rate/error.rs +++ b/apollo-router/src/plugins/traffic_shaping/rate/error.rs @@ -3,8 +3,7 @@ use std::error; use std::fmt; -use axum::response::IntoResponse; -use http::StatusCode; +use crate::graphql; /// The rate limit error. #[derive(Debug, Default)] @@ -23,9 +22,12 @@ impl fmt::Display for RateLimited { } } -impl IntoResponse for RateLimited { - fn into_response(self) -> axum::response::Response { - (StatusCode::TOO_MANY_REQUESTS, self.to_string()).into_response() +impl From for graphql::Error { + fn from(_: RateLimited) -> Self { + graphql::Error::builder() + .message(String::from("Your request has been rate limited")) + .extension_code("REQUEST_RATE_LIMITED") + .build() } } diff --git a/apollo-router/src/plugins/traffic_shaping/timeout/error.rs b/apollo-router/src/plugins/traffic_shaping/timeout/error.rs index 66ac450b4a..38e36dc8ad 100644 --- a/apollo-router/src/plugins/traffic_shaping/timeout/error.rs +++ b/apollo-router/src/plugins/traffic_shaping/timeout/error.rs @@ -3,8 +3,7 @@ use std::error; use std::fmt; -use axum::response::IntoResponse; -use http::StatusCode; +use crate::graphql; /// The timeout elapsed. 
#[derive(Debug, Default)] @@ -23,9 +22,12 @@ impl fmt::Display for Elapsed { } } -impl IntoResponse for Elapsed { - fn into_response(self) -> axum::response::Response { - (StatusCode::GATEWAY_TIMEOUT, self.to_string()).into_response() +impl From for graphql::Error { + fn from(_: Elapsed) -> Self { + graphql::Error::builder() + .message(String::from("Request timed out")) + .extension_code("REQUEST_TIMEOUT") + .build() } } diff --git a/apollo-router/src/query_planner/bridge_query_planner.rs b/apollo-router/src/query_planner/bridge_query_planner.rs index 5c51a2c863..85ee2d15b7 100644 --- a/apollo-router/src/query_planner/bridge_query_planner.rs +++ b/apollo-router/src/query_planner/bridge_query_planner.rs @@ -7,9 +7,8 @@ use std::sync::Arc; use std::time::Instant; use apollo_compiler::ast; -use apollo_compiler::ast::Name; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use apollo_federation::error::FederationError; use apollo_federation::query_plan::query_planner::QueryPlanner; use futures::future::BoxFuture; @@ -258,11 +257,11 @@ impl PlannerMode { }) } PlannerMode::Both { js, rust } => { - let operation_name = operation.as_deref().map(NodeStr::from); - let start = Instant::now(); - let result = js.plan(filtered_query, operation, plan_options).await; + let result = js + .plan(filtered_query, operation.clone(), plan_options) + .await; metric_query_planning_plan_duration(JS_QP_MODE, start); @@ -282,7 +281,7 @@ impl PlannerMode { BothModeComparisonJob { rust_planner: rust.clone(), document: doc.executable.clone(), - operation_name, + operation_name: operation, // Exclude usage reporting from the Result sent for comparison js_result: js_result .as_ref() @@ -1505,7 +1504,7 @@ mod tests { if let Some(node) = &deferred.node { check_query_plan_coverage( node, - deferred.label.as_ref().map(|l| l.as_str()), + deferred.label.as_deref(), subselections, ) } diff --git a/apollo-router/src/query_planner/caching_query_planner.rs 
b/apollo-router/src/query_planner/caching_query_planner.rs index d306ac0bad..ff28cbc4c5 100644 --- a/apollo-router/src/query_planner/caching_query_planner.rs +++ b/apollo-router/src/query_planner/caching_query_planner.rs @@ -78,6 +78,7 @@ pub(crate) struct CachingQueryPlanner { enable_authorization_directives: bool, config_mode: ConfigMode, introspection: bool, + legacy_introspection_caching: bool, } fn init_query_plan_from_redis( @@ -142,6 +143,10 @@ where enable_authorization_directives, config_mode, introspection: configuration.supergraph.introspection, + legacy_introspection_caching: configuration + .supergraph + .query_planning + .legacy_introspection_caching, }) } @@ -515,9 +520,16 @@ where errors, }) => { if let Some(content) = content.clone() { - tokio::spawn(async move { - entry.insert(Ok(content)).await; - }); + let can_cache = match &content { + QueryPlannerContent::Plan { .. } => true, + _ => self.legacy_introspection_caching, + }; + + if can_cache { + tokio::spawn(async move { + entry.insert(Ok(content)).await; + }); + } } // This will be overridden when running in ApolloMetricsGenerationMode::New mode @@ -678,6 +690,7 @@ mod tests { use super::*; use crate::error::PlanErrors; + use crate::json_ext::Object; use crate::query_planner::QueryPlan; use crate::spec::Query; use crate::spec::Schema; @@ -881,4 +894,108 @@ mod tests { stats_report_key_hash("# IgnitionMeQuery\nquery IgnitionMeQuery{me{id}}") ); } + + #[test(tokio::test)] + async fn test_introspection_cache() { + let mut delegate = MockMyQueryPlanner::new(); + delegate + .expect_clone() + // This is the main point of the test: if introspection queries are not cached, then the delegate + // will be called twice when we send the same request twice + .times(2) + .returning(|| { + let mut planner = MockMyQueryPlanner::new(); + planner.expect_sync_call().returning(|_| { + let qp_content = QueryPlannerContent::Response { + response: Box::new( + crate::graphql::Response::builder() + .data(Object::new()) 
+ .build(), + ), + }; + + Ok(QueryPlannerResponse::builder() + .content(qp_content) + .context(Context::new()) + .build()) + }); + planner + }); + + let configuration = Arc::new(crate::Configuration { + supergraph: crate::configuration::Supergraph { + query_planning: crate::configuration::QueryPlanning { + legacy_introspection_caching: false, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }); + let schema = include_str!("testdata/schema.graphql"); + let schema = Arc::new(Schema::parse_test(schema, &configuration).unwrap()); + + let mut planner = CachingQueryPlanner::new( + delegate, + schema.clone(), + Default::default(), + &configuration, + IndexMap::new(), + ) + .await + .unwrap(); + + let configuration = Configuration::default(); + + let doc1 = Query::parse_document( + "{ + __schema { + types { + name + } + } + }", + None, + &schema, + &configuration, + ) + .unwrap(); + + let context = Context::new(); + context + .extensions() + .with_lock(|mut lock| lock.insert::(doc1)); + + assert!(planner + .call(query_planner::CachingRequest::new( + "{ + __schema { + types { + name + } + } + }" + .to_string(), + Some("".into()), + context.clone(), + )) + .await + .is_ok()); + + assert!(planner + .call(query_planner::CachingRequest::new( + "{ + __schema { + types { + name + } + } + }" + .to_string(), + Some("".into()), + context.clone(), + )) + .await + .is_ok()); + } } diff --git a/apollo-router/src/query_planner/convert.rs b/apollo-router/src/query_planner/convert.rs index 7e9a8acb8e..bf59d1861e 100644 --- a/apollo-router/src/query_planner/convert.rs +++ b/apollo-router/src/query_planner/convert.rs @@ -79,9 +79,9 @@ impl From<&'_ Box> for plan::PlanNode { variable_usages: variable_usages.iter().map(|v| v.clone().into()).collect(), // TODO: use Arc in apollo_federation to avoid this clone operation: SubgraphOperation::from_parsed(Arc::new(operation_document.clone())), - operation_name: operation_name.clone(), + operation_name: 
operation_name.clone().map(|n| n.into()), operation_kind: (*operation_kind).into(), - id: id.map(|id| id.to_string().into()), + id: id.map(|id| id.to_string()), input_rewrites: option_vec(input_rewrites), output_rewrites: option_vec(output_rewrites), context_rewrites: option_vec(context_rewrites), @@ -159,7 +159,7 @@ impl From<&'_ next::FetchNode> for subscription::SubscriptionNode { variable_usages: variable_usages.iter().map(|v| v.clone().into()).collect(), // TODO: use Arc in apollo_federation to avoid this clone operation: SubgraphOperation::from_parsed(Arc::new(operation_document.clone())), - operation_name: operation_name.clone(), + operation_name: operation_name.clone().map(|n| n.into()), operation_kind: (*operation_kind).into(), input_rewrites: option_vec(input_rewrites), output_rewrites: option_vec(output_rewrites), diff --git a/apollo-router/src/query_planner/dual_query_planner.rs b/apollo-router/src/query_planner/dual_query_planner.rs index 89e110b186..db860cd43b 100644 --- a/apollo-router/src/query_planner/dual_query_planner.rs +++ b/apollo-router/src/query_planner/dual_query_planner.rs @@ -9,10 +9,9 @@ use std::sync::OnceLock; use std::time::Instant; use apollo_compiler::ast; -use apollo_compiler::ast::Name; use apollo_compiler::validation::Valid; use apollo_compiler::ExecutableDocument; -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use apollo_federation::query_plan::query_planner::QueryPlanner; use apollo_federation::query_plan::QueryPlan; use apollo_federation::subgraph::spec::ENTITIES_QUERY; @@ -39,7 +38,7 @@ const WORKER_THREAD_COUNT: usize = 1; pub(crate) struct BothModeComparisonJob { pub(crate) rust_planner: Arc, pub(crate) document: Arc>, - pub(crate) operation_name: Option, + pub(crate) operation_name: Option, pub(crate) js_result: Result>>, } @@ -76,7 +75,11 @@ impl BothModeComparisonJob { // TODO: once the Rust query planner does not use `todo!()` anymore, // remove `USING_CATCH_UNWIND` and this use of `catch_unwind`. 
let rust_result = std::panic::catch_unwind(|| { - let name = self.operation_name.clone().map(Name::new).transpose()?; + let name = self + .operation_name + .clone() + .map(Name::try_from) + .transpose()?; USING_CATCH_UNWIND.set(true); let start = Instant::now(); diff --git a/apollo-router/src/query_planner/execution.rs b/apollo-router/src/query_planner/execution.rs index 0b658b7bb3..c6bd24e43e 100644 --- a/apollo-router/src/query_planner/execution.rs +++ b/apollo-router/src/query_planner/execution.rs @@ -2,7 +2,6 @@ use std::collections::HashMap; use std::sync::Arc; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; use futures::future::join_all; use futures::prelude::*; use tokio::sync::broadcast; @@ -105,7 +104,7 @@ pub(crate) struct ExecutionParameters<'a> { pub(crate) schema: &'a Arc, pub(crate) subgraph_schemas: &'a Arc>>>, pub(crate) supergraph_request: &'a Arc>, - pub(crate) deferred_fetches: &'a HashMap)>>, + pub(crate) deferred_fetches: &'a HashMap)>>, pub(crate) query: &'a Arc, pub(crate) root_node: &'a PlanNode, pub(crate) subscription_handle: &'a Option, @@ -207,7 +206,7 @@ impl PlanNode { .instrument(tracing::info_span!( SUBSCRIBE_SPAN_NAME, "otel.kind" = "INTERNAL", - "apollo.subgraph.name" = primary.service_name.as_str(), + "apollo.subgraph.name" = primary.service_name.as_ref(), "apollo_private.sent_time_offset" = fetch_time_offset )) .await; @@ -240,7 +239,7 @@ impl PlanNode { .instrument(tracing::info_span!( FETCH_SPAN_NAME, "otel.kind" = "INTERNAL", - "apollo.subgraph.name" = fetch_node.service_name.as_str(), + "apollo.subgraph.name" = fetch_node.service_name.as_ref(), "apollo_private.sent_time_offset" = fetch_time_offset )) .await; @@ -256,7 +255,7 @@ impl PlanNode { errors = Vec::new(); async { let mut deferred_fetches: HashMap< - NodeStr, + String, broadcast::Sender<(Value, Vec)>, > = HashMap::new(); let mut futures = Vec::new(); @@ -406,7 +405,7 @@ impl DeferredNode { parent_value: &Value, sender: mpsc::Sender, 
primary_sender: &broadcast::Sender<(Value, Vec)>, - deferred_fetches: &mut HashMap)>>, + deferred_fetches: &mut HashMap)>>, ) -> impl Future { let mut deferred_receivers = Vec::new(); diff --git a/apollo-router/src/query_planner/fetch.rs b/apollo-router/src/query_planner/fetch.rs index 11631250c8..1d40ea8be2 100644 --- a/apollo-router/src/query_planner/fetch.rs +++ b/apollo-router/src/query_planner/fetch.rs @@ -5,7 +5,6 @@ use std::sync::Arc; use apollo_compiler::ast; use apollo_compiler::validation::Valid; use apollo_compiler::ExecutableDocument; -use apollo_compiler::NodeStr; use indexmap::IndexSet; use serde::Deserialize; use serde::Serialize; @@ -101,7 +100,7 @@ pub(crate) type SubgraphSchemas = HashMap, /// The data that is required for the subgraph fetch. #[serde(skip_serializing_if = "Vec::is_empty")] @@ -109,19 +108,19 @@ pub(crate) struct FetchNode { pub(crate) requires: Vec, /// The variables that are used for the subgraph fetch. - pub(crate) variable_usages: Vec, + pub(crate) variable_usages: Vec>, /// The GraphQL subquery that is used for the fetch. pub(crate) operation: SubgraphOperation, /// The GraphQL subquery operation name. - pub(crate) operation_name: Option, + pub(crate) operation_name: Option>, /// The GraphQL operation kind that is used for the fetch. pub(crate) operation_kind: OperationKind, /// Optional id used by Deferred nodes - pub(crate) id: Option, + pub(crate) id: Option, // Optionally describes a number of "rewrites" that query plan executors should apply to the data that is sent as input of this fetch. 
pub(crate) input_rewrites: Option>, @@ -275,7 +274,7 @@ impl Variables { #[allow(clippy::too_many_arguments)] pub(super) fn new( requires: &[Selection], - variable_usages: &[NodeStr], + variable_usages: &[Arc], data: &Value, current_dir: &Path, request: &Arc>, @@ -290,7 +289,7 @@ impl Variables { variables.extend(variable_usages.iter().filter_map(|key| { body.variables - .get_key_value(key.as_str()) + .get_key_value(key.as_ref()) .map(|(variable_key, value)| (variable_key.clone(), value.clone())) })); @@ -354,7 +353,7 @@ impl Variables { .iter() .filter_map(|key| { body.variables - .get_key_value(key.as_str()) + .get_key_value(key.as_ref()) .map(|(variable_key, value)| (variable_key.clone(), value.clone())) }) .collect::(), @@ -671,7 +670,7 @@ impl FetchNode { &mut self, subgraph_schemas: &SubgraphSchemas, ) -> Result<(), ValidationErrors> { - let schema = &subgraph_schemas[self.service_name.as_str()]; + let schema = &subgraph_schemas[self.service_name.as_ref()]; self.operation.init_parsed(schema)?; Ok(()) } @@ -681,7 +680,7 @@ impl FetchNode { subgraph_schemas: &SubgraphSchemas, supergraph_schema_hash: &str, ) -> Result<(), ValidationErrors> { - let schema = &subgraph_schemas[self.service_name.as_str()]; + let schema = &subgraph_schemas[self.service_name.as_ref()]; let doc = self.operation.init_parsed(schema)?; if let Ok(hash) = QueryHashVisitor::hash_query( diff --git a/apollo-router/src/query_planner/labeler.rs b/apollo-router/src/query_planner/labeler.rs index 2b4781d57b..a993e1dbd3 100644 --- a/apollo-router/src/query_planner/labeler.rs +++ b/apollo-router/src/query_planner/labeler.rs @@ -3,6 +3,7 @@ use apollo_compiler::ast; use apollo_compiler::name; +use apollo_compiler::Name; use apollo_compiler::Node; use apollo_compiler::Schema; use tower::BoxError; @@ -12,7 +13,7 @@ use crate::spec::query::transform; use crate::spec::query::transform::document; use crate::spec::query::transform::Visitor; -const LABEL_NAME: ast::Name = name!("label"); +const LABEL_NAME: 
Name = name!("label"); /// go through the query and adds labels to defer fragments that do not have any /// @@ -81,7 +82,7 @@ fn directives( has_label = true; if let ast::Value::String(label) = arg.make_mut().value.make_mut() { // Add a prefix to existing labels - *label = format!("_{label}").into(); + *label = format!("_{label}"); } else { return Err("@defer with a non-string label".into()); } diff --git a/apollo-router/src/query_planner/plan.rs b/apollo-router/src/query_planner/plan.rs index 5124d7e21b..bf4471e23b 100644 --- a/apollo-router/src/query_planner/plan.rs +++ b/apollo-router/src/query_planner/plan.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use apollo_compiler::validation::Valid; -use apollo_compiler::NodeStr; use router_bridge::planner::PlanOptions; use router_bridge::planner::UsageReporting; use serde::Deserialize; @@ -466,9 +465,9 @@ impl PlanNode { Self::Subscription { primary, rest } => match rest { Some(rest) => Box::new( rest.service_usage() - .chain(Some(primary.service_name.as_str())), + .chain(Some(primary.service_name.as_ref())), ) as Box + 'a>, - None => Box::new(Some(primary.service_name.as_str()).into_iter()), + None => Box::new(Some(primary.service_name.as_ref()).into_iter()), }, Self::Flatten(flatten) => flatten.node.service_usage(), Self::Defer { primary, deferred } => primary @@ -591,7 +590,7 @@ pub(crate) struct DeferredNode { pub(crate) depends: Vec, /// The optional defer label. - pub(crate) label: Option, + pub(crate) label: Option, /// Path to the @defer this correspond to. `subselection` start at that `path`. 
pub(crate) query_path: Path, /// The part of the original query that "selects" the data to send @@ -606,5 +605,5 @@ pub(crate) struct DeferredNode { #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Depends { - pub(crate) id: NodeStr, + pub(crate) id: String, } diff --git a/apollo-router/src/query_planner/rewrites.rs b/apollo-router/src/query_planner/rewrites.rs index f3b10a7fa4..6c941fc88d 100644 --- a/apollo-router/src/query_planner/rewrites.rs +++ b/apollo-router/src/query_planner/rewrites.rs @@ -8,7 +8,7 @@ //! every appear on the input side, while other will only appear on outputs, but it does not hurt //! to be future-proof by supporting all types of rewrites on both "sides". -use apollo_compiler::NodeStr; +use apollo_compiler::Name; use serde::Deserialize; use serde::Serialize; @@ -43,7 +43,7 @@ pub(crate) struct DataValueSetter { #[serde(rename_all = "camelCase")] pub(crate) struct DataKeyRenamer { pub(crate) path: Path, - pub(crate) rename_key_to: NodeStr, + pub(crate) rename_key_to: Name, } impl DataRewrite { @@ -109,6 +109,7 @@ pub(crate) fn apply_rewrites( #[cfg(test)] mod tests { + use apollo_compiler::name; use serde_json_bytes::json; use super::*; @@ -160,7 +161,7 @@ mod tests { let dr = DataRewrite::KeyRenamer(DataKeyRenamer { path: "data/testField__alias_0".into(), - rename_key_to: "testField".into(), + rename_key_to: name!("testField"), }); dr.maybe_apply( @@ -198,7 +199,7 @@ mod tests { let dr = DataRewrite::KeyRenamer(DataKeyRenamer { path: "data/testField__alias_0".into(), - rename_key_to: "testField".into(), + rename_key_to: name!("testField"), }); dr.maybe_apply( diff --git a/apollo-router/src/query_planner/selection.rs b/apollo-router/src/query_planner/selection.rs index 80c5ed3902..6e40dc4a0e 100644 --- a/apollo-router/src/query_planner/selection.rs +++ b/apollo-router/src/query_planner/selection.rs @@ -1,5 +1,5 @@ -use apollo_compiler::ast::Name; use 
apollo_compiler::schema::ExtendedType; +use apollo_compiler::Name; use serde::Deserialize; use serde::Serialize; use serde_json_bytes::ByteString; diff --git a/apollo-router/src/query_planner/subgraph_context.rs b/apollo-router/src/query_planner/subgraph_context.rs index 9e87f38703..e0bd6e06a3 100644 --- a/apollo-router/src/query_planner/subgraph_context.rs +++ b/apollo-router/src/query_planner/subgraph_context.rs @@ -2,7 +2,6 @@ use std::collections::HashMap; use std::collections::HashSet; use apollo_compiler::ast; -use apollo_compiler::ast::Name; use apollo_compiler::ast::VariableDefinition; use apollo_compiler::executable; use apollo_compiler::executable::Operation; @@ -11,6 +10,7 @@ use apollo_compiler::executable::SelectionSet; use apollo_compiler::validation::Valid; use apollo_compiler::validation::WithErrors; use apollo_compiler::ExecutableDocument; +use apollo_compiler::Name; use apollo_compiler::Node; use serde_json_bytes::ByteString; use serde_json_bytes::Map; @@ -245,7 +245,7 @@ fn transform_operation( if arguments.contains(v.name.as_str()) { for i in 0..*count { new_variables.push(Node::new(VariableDefinition { - name: Name::new_unchecked(format!("{}_{}", v.name.as_str(), i).into()), + name: Name::new_unchecked(&format!("{}_{}", v.name.as_str(), i)), ty: v.ty.clone(), default_value: v.default_value.clone(), directives: v.directives.clone(), @@ -284,7 +284,7 @@ fn transform_operation( // it is a field selection for _entities, so it's ok to reach in and give it an alias let mut cloned = field_selection.clone(); let cfs = cloned.make_mut(); - cfs.alias = Some(Name::new_unchecked(format!("_{}", i).into())); + cfs.alias = Some(Name::new_unchecked(&format!("_{}", i))); transform_field_arguments(&mut cfs.arguments, arguments, i); transform_selection_set(&mut cfs.selection_set, arguments, i); @@ -334,9 +334,11 @@ fn transform_field_arguments( let arg = arg.make_mut(); if let Some(v) = arg.value.as_variable() { if arguments.contains(v.as_str()) { - arg.value = 
Node::new(ast::Value::Variable(Name::new_unchecked( - format!("{}_{}", v.as_str(), index).into(), - ))); + arg.value = Node::new(ast::Value::Variable(Name::new_unchecked(&format!( + "{}_{}", + v.as_str(), + index + )))); } } }); @@ -382,34 +384,27 @@ mod subgraph_context_unit_tests { #[test] fn test_transform_selection_set() { - let type_name = executable::Name::new("Hello").unwrap(); - let field_name = executable::Name::new("f").unwrap(); + let type_name = Name::new("Hello").unwrap(); + let field_name = Name::new("f").unwrap(); let field_definition = ast::FieldDefinition { description: None, name: field_name.clone(), arguments: vec![Node::new(ast::InputValueDefinition { description: None, - name: executable::Name::new("param").unwrap(), - ty: Node::new(ast::Type::Named( - executable::Name::new("ParamType").unwrap(), - )), + name: Name::new("param").unwrap(), + ty: Node::new(ast::Type::Named(Name::new("ParamType").unwrap())), default_value: None, directives: ast::DirectiveList(vec![]), })], - ty: ast::Type::Named(executable::Name::new("FieldType").unwrap()), + ty: ast::Type::Named(Name::new("FieldType").unwrap()), directives: ast::DirectiveList(vec![]), }; let mut selection_set = SelectionSet::new(type_name); - let field = executable::Field::new( - executable::Name::new("f").unwrap(), - Node::new(field_definition), - ) - .with_argument( - executable::Name::new("param").unwrap(), - Node::new(ast::Value::Variable( - executable::Name::new("variable").unwrap(), - )), - ); + let field = executable::Field::new(Name::new("f").unwrap(), Node::new(field_definition)) + .with_argument( + Name::new("param").unwrap(), + Node::new(ast::Value::Variable(Name::new("variable").unwrap())), + ); selection_set.push(Selection::Field(Node::new(field))); diff --git a/apollo-router/src/query_planner/subscription.rs b/apollo-router/src/query_planner/subscription.rs index 6a327fdc60..31f4a37319 100644 --- a/apollo-router/src/query_planner/subscription.rs +++ 
b/apollo-router/src/query_planner/subscription.rs @@ -1,7 +1,7 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; +use std::sync::Arc; -use apollo_compiler::NodeStr; use futures::future; use serde::Deserialize; use serde::Serialize; @@ -57,16 +57,16 @@ impl SubscriptionHandle { #[serde(rename_all = "camelCase")] pub(crate) struct SubscriptionNode { /// The name of the service or subgraph that the subscription is querying. - pub(crate) service_name: NodeStr, + pub(crate) service_name: Arc, /// The variables that are used for the subgraph subscription. - pub(crate) variable_usages: Vec, + pub(crate) variable_usages: Vec>, /// The GraphQL subquery that is used for the subscription. pub(crate) operation: super::fetch::SubgraphOperation, /// The GraphQL subquery operation name. - pub(crate) operation_name: Option, + pub(crate) operation_name: Option>, /// The GraphQL operation kind that is used for the fetch. pub(crate) operation_kind: OperationKind, diff --git a/apollo-router/src/services/http/service.rs b/apollo-router/src/services/http/service.rs index 87d4265be7..cc37fa9083 100644 --- a/apollo-router/src/services/http/service.rs +++ b/apollo-router/src/services/http/service.rs @@ -37,6 +37,7 @@ use crate::axum_factory::compression::Compressor; use crate::configuration::TlsClientAuth; use crate::error::FetchError; use crate::plugins::authentication::subgraph::SigningParamsConfig; +use crate::plugins::telemetry::consts::HTTP_REQUEST_SPAN_NAME; use crate::plugins::telemetry::otel::OpenTelemetrySpanExt; use crate::plugins::telemetry::reload::prepare_context; use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; @@ -57,8 +58,6 @@ type MixedClient = Either; #[cfg(not(unix))] type MixedClient = HTTPClient; -pub(crate) const HTTP_REQUEST_SPAN_NAME: &str = "http_request"; - // interior mutability is not a concern here, the value is never modified #[allow(clippy::declare_interior_mutable_const)] static ACCEPTED_ENCODINGS: HeaderValue = 
HeaderValue::from_static("gzip, br, deflate"); diff --git a/apollo-router/src/services/subgraph_service.rs b/apollo-router/src/services/subgraph_service.rs index d316165726..48627109af 100644 --- a/apollo-router/src/services/subgraph_service.rs +++ b/apollo-router/src/services/subgraph_service.rs @@ -65,6 +65,7 @@ use crate::plugins::subscription::SUBSCRIPTION_WS_CUSTOM_CONNECTION_PARAMS; use crate::plugins::telemetry::config_new::events::log_event; use crate::plugins::telemetry::config_new::events::SubgraphEventRequest; use crate::plugins::telemetry::config_new::events::SubgraphEventResponse; +use crate::plugins::telemetry::consts::SUBGRAPH_REQUEST_SPAN_NAME; use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; use crate::plugins::telemetry::LOGGING_DISPLAY_HEADERS; use crate::protocols::websocket::convert_websocket_stream; @@ -77,7 +78,6 @@ use crate::Configuration; use crate::Context; use crate::Notify; -pub(crate) const SUBGRAPH_REQUEST_SPAN_NAME: &str = "subgraph_request"; const PERSISTED_QUERY_NOT_FOUND_EXTENSION_CODE: &str = "PERSISTED_QUERY_NOT_FOUND"; const PERSISTED_QUERY_NOT_SUPPORTED_EXTENSION_CODE: &str = "PERSISTED_QUERY_NOT_SUPPORTED"; const PERSISTED_QUERY_NOT_FOUND_MESSAGE: &str = "PersistedQueryNotFound"; diff --git a/apollo-router/src/services/supergraph/service.rs b/apollo-router/src/services/supergraph/service.rs index 51f7592e1a..2bb7daf14e 100644 --- a/apollo-router/src/services/supergraph/service.rs +++ b/apollo-router/src/services/supergraph/service.rs @@ -37,6 +37,7 @@ use crate::plugin::DynPlugin; use crate::plugins::subscription::SubscriptionConfig; use crate::plugins::telemetry::config_new::events::log_event; use crate::plugins::telemetry::config_new::events::SupergraphEventResponse; +use crate::plugins::telemetry::consts::QUERY_PLANNING_SPAN_NAME; use crate::plugins::telemetry::tracing::apollo_telemetry::APOLLO_PRIVATE_DURATION_NS; use crate::plugins::telemetry::Telemetry; use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; @@ -76,7 
+77,7 @@ use crate::Configuration; use crate::Context; use crate::Notify; -pub(crate) const QUERY_PLANNING_SPAN_NAME: &str = "query_planning"; +pub(crate) const FIRST_EVENT_CONTEXT_KEY: &str = "apollo_router::supergraph::first_event"; /// An [`IndexMap`] of available plugins. pub(crate) type Plugins = IndexMap>; @@ -349,6 +350,20 @@ async fn service_call( let supergraph_response_event = context .extensions() .with_lock(|lock| lock.get::().cloned()); + let mut first_event = true; + let mut inserted = false; + let ctx = context.clone(); + let response_stream = response_stream.inspect(move |_| { + if first_event { + first_event = false; + } else if !inserted { + ctx.insert_json_value( + FIRST_EVENT_CONTEXT_KEY, + serde_json_bytes::Value::Bool(false), + ); + inserted = true; + } + }); match supergraph_response_event { Some(supergraph_response_event) => { let mut attrs = Vec::with_capacity(4); diff --git a/apollo-router/src/spec/field_type.rs b/apollo-router/src/spec/field_type.rs index 9e40b47a86..b160aff214 100644 --- a/apollo-router/src/spec/field_type.rs +++ b/apollo-router/src/spec/field_type.rs @@ -1,5 +1,5 @@ -use apollo_compiler::ast; use apollo_compiler::schema; +use apollo_compiler::Name; use serde::de::Error as _; use serde::Deserialize; use serde::Serialize; @@ -160,7 +160,7 @@ fn validate_input_value( } impl FieldType { - pub(crate) fn new_named(name: ast::Name) -> Self { + pub(crate) fn new_named(name: Name) -> Self { Self(schema::Type::Named(name)) } diff --git a/apollo-router/src/spec/operation_limits.rs b/apollo-router/src/spec/operation_limits.rs index 3311f32634..4b325ad840 100644 --- a/apollo-router/src/spec/operation_limits.rs +++ b/apollo-router/src/spec/operation_limits.rs @@ -3,6 +3,7 @@ use std::collections::HashSet; use apollo_compiler::executable; use apollo_compiler::ExecutableDocument; +use apollo_compiler::Name; use serde::Deserialize; use serde::Serialize; @@ -124,7 +125,7 @@ enum Computation { /// Recursively measure the given selection 
set against each limit fn count<'a>( document: &'a executable::ExecutableDocument, - fragment_cache: &mut HashMap<&'a executable::Name, Computation>>, + fragment_cache: &mut HashMap<&'a Name, Computation>>, selection_set: &'a executable::SelectionSet, ) -> OperationLimits { let mut counts = OperationLimits { diff --git a/apollo-router/src/spec/query.rs b/apollo-router/src/spec/query.rs index b8d158cce4..147f1e6805 100644 --- a/apollo-router/src/spec/query.rs +++ b/apollo-router/src/spec/query.rs @@ -567,14 +567,8 @@ impl Query { let typename = input_object .get(TYPENAME) .and_then(|val| val.as_str()) - .and_then(|s| { - Some(apollo_compiler::ast::Type::Named( - apollo_compiler::ast::NamedType::new( - apollo_compiler::NodeStr::new(s), - ) - .ok()?, - )) - }); + .and_then(|s| apollo_compiler::ast::NamedType::new(s).ok()) + .map(apollo_compiler::ast::Type::Named); let current_type = if parameters .schema diff --git a/apollo-router/src/spec/query/change.rs b/apollo-router/src/spec/query/change.rs index eeb29efa37..bbc39b2232 100644 --- a/apollo-router/src/spec/query/change.rs +++ b/apollo-router/src/spec/query/change.rs @@ -6,14 +6,13 @@ use std::hash::Hasher; use apollo_compiler::ast; use apollo_compiler::ast::Argument; use apollo_compiler::ast::FieldDefinition; -use apollo_compiler::ast::Name; use apollo_compiler::executable; use apollo_compiler::schema; use apollo_compiler::schema::DirectiveList; use apollo_compiler::schema::ExtendedType; use apollo_compiler::validation::Valid; +use apollo_compiler::Name; use apollo_compiler::Node; -use apollo_compiler::NodeStr; use apollo_compiler::Parser; use sha2::Digest; use sha2::Sha256; @@ -38,7 +37,7 @@ pub(crate) struct QueryHashVisitor<'a> { // introspection query is hashed, it should take the whole schema into account schema_str: &'a str, hasher: Sha256, - fragments: HashMap<&'a ast::Name, &'a Node>, + fragments: HashMap<&'a Name, &'a Node>, hashed_types: HashSet, // name, field hashed_fields: HashSet<(String, String)>, @@ 
-319,7 +318,7 @@ impl<'a> QueryHashVisitor<'a> { .argument_by_name("requires") .and_then(|arg| arg.as_str()) { - if let Ok(parent_type) = Name::new(NodeStr::new(parent_type)) { + if let Ok(parent_type) = Name::new(parent_type) { let mut parser = Parser::new(); if let Ok(field_set) = parser.parse_field_set( diff --git a/apollo-router/src/spec/query/transform.rs b/apollo-router/src/spec/query/transform.rs index b71cb735a5..b307d9a2d8 100644 --- a/apollo-router/src/spec/query/transform.rs +++ b/apollo-router/src/spec/query/transform.rs @@ -2,6 +2,7 @@ use std::collections::HashMap; use apollo_compiler::ast; use apollo_compiler::schema::FieldLookupError; +use apollo_compiler::Name; use tower::BoxError; /// Transform a document with the given visitor. @@ -243,7 +244,7 @@ pub(crate) fn selection_set( pub(crate) fn collect_fragments( executable: &ast::Document, -) -> HashMap<&ast::Name, &ast::FragmentDefinition> { +) -> HashMap<&Name, &ast::FragmentDefinition> { executable .definitions .iter() diff --git a/apollo-router/src/spec/schema.rs b/apollo-router/src/spec/schema.rs index 6298c8d1f3..f07ed436eb 100644 --- a/apollo-router/src/spec/schema.rs +++ b/apollo-router/src/spec/schema.rs @@ -8,6 +8,7 @@ use std::time::Instant; use apollo_compiler::ast; use apollo_compiler::schema::Implementers; use apollo_compiler::validation::Valid; +use apollo_compiler::Name; use http::Uri; use semver::Version; use semver::VersionReq; @@ -26,7 +27,7 @@ pub(crate) struct Schema { pub(crate) raw_sdl: Arc, supergraph: Supergraph, subgraphs: HashMap, - pub(crate) implementers_map: HashMap, + pub(crate) implementers_map: HashMap, api_schema: Option, pub(crate) schema_id: Arc, } @@ -631,7 +632,6 @@ mod tests { assert_eq!( s, r#"The supergraph schema failed to produce a valid API schema: The following errors occurred: - - Input field `InputObject.privateField` is @inaccessible but is used in the default value of `@foo(someArg:)`, which is in the API schema."# ); } diff --git 
a/apollo-router/tests/common.rs b/apollo-router/tests/common.rs index 22e40d5385..32f4888304 100644 --- a/apollo-router/tests/common.rs +++ b/apollo-router/tests/common.rs @@ -34,6 +34,7 @@ use opentelemetry::sdk::trace::TracerProvider; use opentelemetry::sdk::Resource; use opentelemetry::testing::trace::NoopSpanExporter; use opentelemetry::trace::TraceContextExt; +use opentelemetry_api::trace::TraceId; use opentelemetry_api::trace::TracerProvider as OtherTracerProvider; use opentelemetry_api::Context; use opentelemetry_api::KeyValue; @@ -81,7 +82,6 @@ pub struct IntegrationTest { _subgraphs: wiremock::MockServer, telemetry: Telemetry, - // Don't remove these, there is a weak reference to the tracer provider from a tracer and if the provider is dropped then no export will happen. pub _tracer_provider_client: TracerProvider, pub _tracer_provider_subgraph: TracerProvider, subscriber_client: Dispatch, @@ -175,6 +175,7 @@ impl Telemetry { .with_span_processor( BatchSpanProcessor::builder( opentelemetry_datadog::new_pipeline() + .with_service_name(service_name) .build_exporter() .expect("datadog pipeline failed"), opentelemetry::runtime::Tokio, @@ -488,7 +489,7 @@ impl IntegrationTest { #[allow(dead_code)] pub fn execute_default_query( &self, - ) -> impl std::future::Future { + ) -> impl std::future::Future { self.execute_query_internal( &json!({"query":"query {topProducts{name}}","variables":{}}), None, @@ -499,28 +500,28 @@ impl IntegrationTest { pub fn execute_query( &self, query: &Value, - ) -> impl std::future::Future { + ) -> impl std::future::Future { self.execute_query_internal(query, None) } #[allow(dead_code)] pub fn execute_bad_query( &self, - ) -> impl std::future::Future { + ) -> impl std::future::Future { self.execute_query_internal(&json!({"garbage":{}}), None) } #[allow(dead_code)] pub fn execute_huge_query( &self, - ) -> impl std::future::Future { + ) -> impl std::future::Future { self.execute_query_internal(&json!({"query":"query {topProducts{name, 
name, name, name, name, name, name, name, name, name}}","variables":{}}), None) } #[allow(dead_code)] pub fn execute_bad_content_type( &self, - ) -> impl std::future::Future { + ) -> impl std::future::Future { self.execute_query_internal(&json!({"garbage":{}}), Some("garbage")) } @@ -528,7 +529,7 @@ impl IntegrationTest { &self, query: &Value, content_type: Option<&'static str>, - ) -> impl std::future::Future { + ) -> impl std::future::Future { assert!( self.router.is_some(), "router was not started, call `router.start().await; router.assert_started().await`" @@ -540,7 +541,7 @@ impl IntegrationTest { async move { let span = info_span!("client_request"); - let span_id = span.context().span().span_context().trace_id().to_string(); + let span_id = span.context().span().span_context().trace_id(); async move { let client = reqwest::Client::new(); @@ -577,7 +578,7 @@ impl IntegrationTest { pub fn execute_untraced_query( &self, query: &Value, - ) -> impl std::future::Future { + ) -> impl std::future::Future { assert!( self.router.is_some(), "router was not started, call `router.start().await; router.assert_started().await`" @@ -600,14 +601,16 @@ impl IntegrationTest { request.headers_mut().remove(ACCEPT); match client.execute(request).await { Ok(response) => ( - response - .headers() - .get("apollo-custom-trace-id") - .cloned() - .unwrap_or(HeaderValue::from_static("no-trace-id")) - .to_str() - .unwrap_or_default() - .to_string(), + TraceId::from_hex( + response + .headers() + .get("apollo-custom-trace-id") + .cloned() + .unwrap_or(HeaderValue::from_static("no-trace-id")) + .to_str() + .unwrap_or_default(), + ) + .unwrap_or(TraceId::INVALID), response, ), Err(err) => { diff --git a/apollo-router/tests/integration/batching.rs b/apollo-router/tests/integration/batching.rs index a9e8e3234d..09281ea677 100644 --- a/apollo-router/tests/integration/batching.rs +++ b/apollo-router/tests/integration/batching.rs @@ -244,28 +244,22 @@ async fn it_handles_short_timeouts() -> 
Result<(), BoxError> { if test_is_enabled() { assert_yaml_snapshot!(responses, @r###" - --- - - data: - entryA: - index: 0 - - errors: - - message: "HTTP fetch failed from 'b': request timed out" - path: [] - extensions: - code: SUBREQUEST_HTTP_ERROR - service: b - reason: request timed out - - data: - entryA: - index: 1 - - errors: - - message: "HTTP fetch failed from 'b': request timed out" - path: [] - extensions: - code: SUBREQUEST_HTTP_ERROR - service: b - reason: request timed out - "###); + --- + - data: + entryA: + index: 0 + - errors: + - message: Request timed out + extensions: + code: REQUEST_TIMEOUT + - data: + entryA: + index: 1 + - errors: + - message: Request timed out + extensions: + code: REQUEST_TIMEOUT + "###); } Ok(()) @@ -317,38 +311,29 @@ async fn it_handles_indefinite_timeouts() -> Result<(), BoxError> { let responses = [results_a, results_b].concat(); if test_is_enabled() { assert_yaml_snapshot!(responses, @r###" - --- - - data: - entryA: - index: 0 - - data: - entryA: - index: 1 - - data: - entryA: - index: 2 - - errors: - - message: "HTTP fetch failed from 'b': request timed out" - path: [] - extensions: - code: SUBREQUEST_HTTP_ERROR - service: b - reason: request timed out - - errors: - - message: "HTTP fetch failed from 'b': request timed out" - path: [] - extensions: - code: SUBREQUEST_HTTP_ERROR - service: b - reason: request timed out - - errors: - - message: "HTTP fetch failed from 'b': request timed out" - path: [] - extensions: - code: SUBREQUEST_HTTP_ERROR - service: b - reason: request timed out - "###); + --- + - data: + entryA: + index: 0 + - data: + entryA: + index: 1 + - data: + entryA: + index: 2 + - errors: + - message: Request timed out + extensions: + code: REQUEST_TIMEOUT + - errors: + - message: Request timed out + extensions: + code: REQUEST_TIMEOUT + - errors: + - message: Request timed out + extensions: + code: REQUEST_TIMEOUT + "###); } Ok(()) diff --git a/apollo-router/tests/integration/mod.rs 
b/apollo-router/tests/integration/mod.rs index f3432b12c4..55c95c0d06 100644 --- a/apollo-router/tests/integration/mod.rs +++ b/apollo-router/tests/integration/mod.rs @@ -8,6 +8,8 @@ mod docs; mod file_upload; mod lifecycle; mod operation_limits; +mod subgraph_response; +mod traffic_shaping; #[cfg(any(not(feature = "ci"), all(target_arch = "x86_64", target_os = "linux")))] mod redis; diff --git a/apollo-router/tests/integration/redis.rs b/apollo-router/tests/integration/redis.rs index 5480b2c23a..9df887825f 100644 --- a/apollo-router/tests/integration/redis.rs +++ b/apollo-router/tests/integration/redis.rs @@ -416,7 +416,7 @@ async fn entity_cache() -> Result<(), BoxError> { insta::assert_json_snapshot!(response); let s:String = client - .get("subgraph:products:Query:0df945dc1bc08f7fc02e8905b4c72aa9112f29bb7a214e4a38d199f0aa635b48:d9d84a3c7ffc27b0190a671212f3740e5b8478e84e23825830e97822e25cf05c") + .get("subgraph:products:type:Query:hash:0df945dc1bc08f7fc02e8905b4c72aa9112f29bb7a214e4a38d199f0aa635b48:data:d9d84a3c7ffc27b0190a671212f3740e5b8478e84e23825830e97822e25cf05c") .await .unwrap(); let v: Value = serde_json::from_str(&s).unwrap(); @@ -743,7 +743,7 @@ async fn entity_cache_authorization() -> Result<(), BoxError> { insta::assert_json_snapshot!(response); let s:String = client - .get("subgraph:products:Query:0df945dc1bc08f7fc02e8905b4c72aa9112f29bb7a214e4a38d199f0aa635b48:d9d84a3c7ffc27b0190a671212f3740e5b8478e84e23825830e97822e25cf05c") + .get("subgraph:products:type:Query:hash:0df945dc1bc08f7fc02e8905b4c72aa9112f29bb7a214e4a38d199f0aa635b48:data:d9d84a3c7ffc27b0190a671212f3740e5b8478e84e23825830e97822e25cf05c") .await .unwrap(); let v: Value = serde_json::from_str(&s).unwrap(); diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit-2.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit-2.snap new file mode 100644 index 
0000000000..4f2f8fe321 --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit-2.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response +--- +"{\"errors\":[{\"message\":\"Your request has been rate limited\",\"extensions\":{\"code\":\"REQUEST_RATE_LIMITED\"}}]}" diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit.snap new file mode 100644 index 0000000000..ba9702ddeb --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_rate_limit.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response +--- +"{\"data\":{\"topProducts\":[{\"name\":\"Table\"},{\"name\":\"Couch\"},{\"name\":\"Chair\"}]}}" diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_timeout.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_timeout.snap new file mode 100644 index 0000000000..d09e20a31d --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__router_timeout.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response.text().await? 
+--- +"{\"errors\":[{\"message\":\"Request timed out\",\"extensions\":{\"code\":\"REQUEST_TIMEOUT\"}}]}" diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit-2.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit-2.snap new file mode 100644 index 0000000000..584b125252 --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit-2.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response +--- +"{\"data\":null,\"errors\":[{\"message\":\"Your request has been rate limited\",\"extensions\":{\"code\":\"REQUEST_RATE_LIMITED\"}}]}" diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit.snap new file mode 100644 index 0000000000..72c6aac169 --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_rate_limit.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response.text().await? +--- +"{\"data\":{\"topProducts\":[{\"name\":\"Table\"},{\"name\":\"Couch\"},{\"name\":\"Chair\"}]}}" diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_timeout.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_timeout.snap new file mode 100644 index 0000000000..671e207784 --- /dev/null +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__traffic_shaping__subgraph_timeout.snap @@ -0,0 +1,5 @@ +--- +source: apollo-router/tests/integration/traffic_shaping.rs +expression: response.text().await? 
+--- +"{\"data\":null,\"errors\":[{\"message\":\"Request timed out\",\"extensions\":{\"code\":\"REQUEST_TIMEOUT\"}}]}" diff --git a/apollo-router/tests/integration/subgraph_response.rs b/apollo-router/tests/integration/subgraph_response.rs new file mode 100644 index 0000000000..5e6e831d3c --- /dev/null +++ b/apollo-router/tests/integration/subgraph_response.rs @@ -0,0 +1,214 @@ +use serde_json::json; +use tower::BoxError; +use wiremock::ResponseTemplate; + +use crate::integration::IntegrationTest; + +const CONFIG: &str = r#" +include_subgraph_errors: + all: true +"#; + +#[tokio::test(flavor = "multi_thread")] +async fn test_valid_error_locations() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(CONFIG) + .responder(ResponseTemplate::new(200).set_body_json(json!({ + "data": { "topProducts": null }, + "errors": [{ + "message": "Some error on subgraph", + "locations": [ + { "line": 1, "column": 2 }, + { "line": 3, "column": 4 }, + ], + "path": ["topProducts"] + }] + }))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ "query": "{ topProducts { name } }" })) + .await; + assert_eq!(response.status(), 200); + assert_eq!( + serde_json::from_str::(&response.text().await?)?, + json!({ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "locations": [ + { "line": 1, "column": 2 }, + { "line": 3, "column": 4 }, + ], + "path":["topProducts"] + }] + }) + ); + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_empty_error_locations() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(CONFIG) + .responder(ResponseTemplate::new(200).set_body_json(json!({ + "data": { "topProducts": null }, + "errors": [{ + "message": "Some error on subgraph", + "locations": [], + "path": ["topProducts"] + }] + }))) + .build() + .await; + + 
router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ "query": "{ topProducts { name } }" })) + .await; + assert_eq!(response.status(), 200); + assert_eq!( + serde_json::from_str::(&response.text().await?)?, + json!({ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "path":["topProducts"] + }] + }) + ); + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_invalid_error_locations() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(CONFIG) + .responder(ResponseTemplate::new(200).set_body_json(json!({ + "data": { "topProducts": null }, + "errors": [{ + "message": "Some error on subgraph", + "locations": [{ "line": true }], + "path": ["topProducts"] + }] + }))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ "query": "{ topProducts { name } }" })) + .await; + assert_eq!(response.status(), 200); + assert_eq!( + serde_json::from_str::(&response.text().await?)?, + json!({ + "data": null, + "errors": [{ + "message":"service 'products' response was malformed: invalid `locations` within error: invalid type: boolean `true`, expected u32", + "extensions": { + "service": "products", + "reason": "invalid `locations` within error: invalid type: boolean `true`, expected u32", + "code": "SUBREQUEST_MALFORMED_RESPONSE", + } + }] + }) + ); + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_invalid_error_locations_with_single_negative_one_location() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(CONFIG) + .responder(ResponseTemplate::new(200).set_body_json(json!({ + "data": { "topProducts": null }, + "errors": [{ + "message": "Some error on subgraph", + "locations": [{ "line": -1, "column": -1 }], + "path": 
["topProducts"] + }] + }))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ "query": "{ topProducts { name } }" })) + .await; + assert_eq!(response.status(), 200); + assert_eq!( + serde_json::from_str::(&response.text().await?)?, + json!({ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "path":["topProducts"] + }] + }) + ); + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_invalid_error_locations_contains_negative_one_location() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(CONFIG) + .responder(ResponseTemplate::new(200).set_body_json(json!({ + "data": { "topProducts": null }, + "errors": [{ + "message": "Some error on subgraph", + "locations": [ + { "line": 1, "column": 2 }, + { "line": -1, "column": -1 }, + { "line": 3, "column": 4 }, + ], + "path": ["topProducts"] + }] + }))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ "query": "{ topProducts { name } }" })) + .await; + assert_eq!(response.status(), 200); + assert_eq!( + serde_json::from_str::(&response.text().await?)?, + json!({ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "locations": [ + { "line": 1, "column": 2 }, + { "line": 3, "column": 4 }, + ], + "path":["topProducts"] + }] + }) + ); + + router.graceful_shutdown().await; + Ok(()) +} diff --git a/apollo-router/tests/integration/telemetry/datadog.rs b/apollo-router/tests/integration/telemetry/datadog.rs new file mode 100644 index 0000000000..7ae6bdc5f5 --- /dev/null +++ b/apollo-router/tests/integration/telemetry/datadog.rs @@ -0,0 +1,436 @@ +extern crate core; + +use std::collections::HashSet; +use std::time::Duration; + +use anyhow::anyhow; +use opentelemetry_api::trace::TraceId; 
+use serde_json::json; +use serde_json::Value; +use tower::BoxError; + +use crate::integration::common::graph_os_enabled; +use crate::integration::common::Telemetry; +use crate::integration::common::ValueExt; +use crate::integration::IntegrationTest; + +#[tokio::test(flavor = "multi_thread")] +async fn test_default_span_names() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!( + "fixtures/datadog_default_span_names.router.yaml" + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert_eq!( + result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .to_str() + .unwrap(), + id.to_datadog() + ); + validate_trace( + id, + &query, + Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "query_planning", + "client_request", + "subgraph_request", + "subgraph", + "fetch", + "supergraph", + "execution", + "query ExampleQuery", + "subgraph server", + "http_request", + "parse_query", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_override_span_names() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!( + "fixtures/datadog_override_span_names.router.yaml" + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert_eq!( + result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .to_str() + .unwrap(), + id.to_datadog() + ); + validate_trace( + id, + &query, 
+ Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "query_planning", + "client_request", + "subgraph_request", + "subgraph", + "fetch", + "supergraph", + "execution", + "overridden", + "subgraph server", + "http_request", + "parse_query", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_override_span_names_late() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!( + "fixtures/datadog_override_span_names_late.router.yaml" + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert_eq!( + result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .to_str() + .unwrap(), + id.to_datadog() + ); + validate_trace( + id, + &query, + Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "query_planning", + "client_request", + "subgraph_request", + "subgraph", + "fetch", + "supergraph", + "execution", + "ExampleQuery", + "subgraph server", + "http_request", + "parse_query", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_basic() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!("fixtures/datadog.router.yaml")) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert_eq!( + result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .to_str() + .unwrap(), + 
id.to_datadog() + ); + validate_trace( + id, + &query, + Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "query_planning", + "client_request", + "ExampleQuery__products__0", + "products", + "fetch", + "/", + "execution", + "ExampleQuery", + "subgraph server", + "parse_query", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_resource_mapping_default() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!( + "fixtures/datadog_resource_mapping_default.router.yaml" + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert!(!result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .is_empty()); + validate_trace( + id, + &query, + Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "parse_query", + "ExampleQuery", + "client_request", + "execution", + "query_planning", + "products", + "fetch", + "subgraph server", + "ExampleQuery__products__0", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_resource_mapping_override() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Datadog) + .config(include_str!( + "fixtures/datadog_resource_mapping_override.router.yaml" + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let query = json!({"query":"query ExampleQuery {topProducts{name}}","variables":{}}); + let (id, result) = router.execute_query(&query).await; + assert!(!result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + 
.is_empty()); + validate_trace( + id, + &query, + Some("ExampleQuery"), + &["client", "router", "subgraph"], + false, + &[ + "parse_query", + "ExampleQuery", + "client_request", + "execution", + "query_planning", + "products", + "fetch", + "subgraph server", + "overridden", + "ExampleQuery__products__0", + ], + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + +async fn validate_trace( + id: TraceId, + query: &Value, + operation_name: Option<&str>, + services: &[&'static str], + custom_span_instrumentation: bool, + expected_span_names: &[&'static str], +) -> Result<(), BoxError> { + let datadog_id = id.to_datadog(); + let url = format!("http://localhost:8126/test/traces?trace_ids={datadog_id}"); + for _ in 0..10 { + if find_valid_trace( + &url, + query, + operation_name, + services, + custom_span_instrumentation, + expected_span_names, + ) + .await + .is_ok() + { + return Ok(()); + } + tokio::time::sleep(Duration::from_millis(100)).await; + } + find_valid_trace( + &url, + query, + operation_name, + services, + custom_span_instrumentation, + expected_span_names, + ) + .await?; + Ok(()) +} + +async fn find_valid_trace( + url: &str, + _query: &Value, + operation_name: Option<&str>, + services: &[&'static str], + _custom_span_instrumentation: bool, + expected_span_names: &[&'static str], +) -> Result<(), BoxError> { + // A valid trace has: + // * All three services + // * The correct spans + // * All spans are parented + // * Required attributes of 'router' span has been set + + // For now just validate service name. + let trace: Value = reqwest::get(url) + .await + .map_err(|e| anyhow!("failed to contact datadog; {}", e))? 
+ .json() + .await?; + tracing::debug!("{}", serde_json::to_string_pretty(&trace)?); + verify_trace_participants(&trace, services)?; + verify_spans_present(&trace, operation_name, services, expected_span_names)?; + Ok(()) +} + +fn verify_trace_participants(trace: &Value, services: &[&'static str]) -> Result<(), BoxError> { + let actual_services: HashSet = trace + .select_path("$..service")? + .into_iter() + .filter_map(|service| service.as_string()) + .collect(); + tracing::debug!("found services {:?}", actual_services); + + let expected_services = services + .iter() + .map(|s| s.to_string()) + .collect::>(); + if actual_services != expected_services { + return Err(BoxError::from(format!( + "incomplete traces, got {actual_services:?} expected {expected_services:?}" + ))); + } + Ok(()) +} + +fn verify_spans_present( + trace: &Value, + _operation_name: Option<&str>, + services: &[&'static str], + expected_span_names: &[&'static str], +) -> Result<(), BoxError> { + let operation_names: HashSet = trace + .select_path("$..resource")? 
+ .into_iter() + .filter_map(|span_name| span_name.as_string()) + .collect(); + let mut expected_span_names: HashSet = + expected_span_names.iter().map(|s| s.to_string()).collect(); + if services.contains(&"client") { + expected_span_names.insert("client_request".into()); + } + tracing::debug!("found spans {:?}", operation_names); + let missing_operation_names: Vec<_> = expected_span_names + .iter() + .filter(|o| !operation_names.contains(*o)) + .collect(); + if !missing_operation_names.is_empty() { + return Err(BoxError::from(format!( + "spans did not match, got {operation_names:?}, missing {missing_operation_names:?}" + ))); + } + Ok(()) +} + +pub(crate) trait DatadogId { + fn to_datadog(&self) -> String; +} +impl DatadogId for TraceId { + fn to_datadog(&self) -> String { + let bytes = &self.to_bytes()[std::mem::size_of::()..std::mem::size_of::()]; + u64::from_be_bytes(bytes.try_into().unwrap()).to_string() + } +} diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog.router.yaml index 8ca0fdfa75..18294a77ff 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/datadog.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/datadog.router.yaml @@ -1,8 +1,20 @@ telemetry: exporters: tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog common: service_name: router datadog: enabled: true - endpoint: default + batch_processor: + scheduled_delay: 100ms + instrumentation: + spans: + mode: spec_compliant + supergraph: + attributes: + graphql.operation.name: true + diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog_default_span_names.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog_default_span_names.router.yaml new file mode 100644 index 0000000000..67c2c070e6 --- /dev/null +++ 
b/apollo-router/tests/integration/telemetry/fixtures/datadog_default_span_names.router.yaml @@ -0,0 +1,20 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog + common: + service_name: router + datadog: + enabled: true + batch_processor: + scheduled_delay: 100ms + fixed_span_names: false + enable_span_mapping: false + instrumentation: + spans: + mode: spec_compliant + + diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names.router.yaml new file mode 100644 index 0000000000..7d5e1ff2e1 --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names.router.yaml @@ -0,0 +1,23 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog + common: + service_name: router + datadog: + enabled: true + # Span mapping will always override the span name as far as the test agent is concerned + enable_span_mapping: false + batch_processor: + scheduled_delay: 100ms + instrumentation: + spans: + mode: spec_compliant + router: + attributes: + otel.name: overridden + + diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names_late.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names_late.router.yaml new file mode 100644 index 0000000000..dda383a784 --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/datadog_override_span_names_late.router.yaml @@ -0,0 +1,24 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog + common: + service_name: router + datadog: + enabled: true + # Span mapping will always override the span name as far as the test agent is concerned + 
enable_span_mapping: false + batch_processor: + scheduled_delay: 100ms + instrumentation: + spans: + mode: spec_compliant + router: + attributes: + otel.name: + operation_name: string + + diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_default.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_default.router.yaml new file mode 100644 index 0000000000..396a60fa5d --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_default.router.yaml @@ -0,0 +1,17 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog + common: + service_name: router + datadog: + enabled: true + enable_span_mapping: true + batch_processor: + scheduled_delay: 100ms + + + diff --git a/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_override.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_override.router.yaml new file mode 100644 index 0000000000..a01c44fc61 --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/datadog_resource_mapping_override.router.yaml @@ -0,0 +1,32 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + format: datadog + common: + service_name: router + datadog: + enabled: true + enable_span_mapping: true + batch_processor: + scheduled_delay: 100ms + resource_mapping: + router: graphql.operation.name + supergraph: override.name + + instrumentation: + spans: + mode: spec_compliant + router: + attributes: + graphql.operation.name: + operation_name: string + supergraph: + attributes: + override.name: overridden + + + + diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml index 
19efbc0c47..6bd0fad86c 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml @@ -13,8 +13,8 @@ telemetry: enabled: true batch_processor: scheduled_delay: 100ms - agent: - endpoint: default + collector: + endpoint: http://127.0.0.1:14268/api/traces logging: experimental_when_header: - name: apollo-router-log-request diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml index e4c3ee0d57..bb377026d7 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml @@ -13,8 +13,8 @@ telemetry: enabled: true batch_processor: scheduled_delay: 100ms - agent: - endpoint: default + collector: + endpoint: http://127.0.0.1:14268/api/traces logging: experimental_when_header: - name: apollo-router-log-request diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml index 2259d9ab73..ffa63c772d 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml @@ -16,8 +16,8 @@ telemetry: enabled: true batch_processor: scheduled_delay: 100ms - agent: - endpoint: default + collector: + endpoint: http://127.0.0.1:14268/api/traces logging: experimental_when_header: - name: apollo-router-log-request diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml index e764dfef5b..11d6dad4ba 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml +++ 
b/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml @@ -13,8 +13,8 @@ telemetry: enabled: true batch_processor: scheduled_delay: 100ms - agent: - endpoint: default + collector: + endpoint: http://127.0.0.1:14268/api/traces logging: experimental_when_header: - name: apollo-router-log-request @@ -25,7 +25,7 @@ telemetry: - name: custom-header match: ^foo.* headers: true - + override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml new file mode 100644 index 0000000000..e7c92e3599 --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml @@ -0,0 +1,33 @@ +telemetry: + exporters: + tracing: + experimental_response_trace_id: + enabled: true + format: decimal + header_name: apollo-custom-trace-id + propagation: + jaeger: true + common: + service_name: router + sampler: always_on + jaeger: + enabled: true + batch_processor: + scheduled_delay: 100ms + collector: + endpoint: http://127.0.0.1:14268/api/traces + logging: + experimental_when_header: + - name: apollo-router-log-request + value: test + headers: true # default: false + body: true # default: false + # log request for all requests coming from Iphones + - name: custom-header + match: ^foo.* + headers: true + +override_subgraph_url: + products: http://localhost:4005 +include_subgraph_errors: + all: true diff --git a/apollo-router/tests/integration/telemetry/jaeger.rs b/apollo-router/tests/integration/telemetry/jaeger.rs index bb6494f74b..6003efea04 100644 --- a/apollo-router/tests/integration/telemetry/jaeger.rs +++ b/apollo-router/tests/integration/telemetry/jaeger.rs @@ -1,10 +1,10 @@ -#![cfg(all(target_os = "linux", target_arch = "x86_64", test))] extern crate core; use std::collections::HashSet; use std::time::Duration; use 
anyhow::anyhow; +use opentelemetry_api::trace::TraceId; use serde_json::json; use serde_json::Value; use tower::BoxError; @@ -295,8 +295,43 @@ async fn test_span_customization() -> Result<(), BoxError> { Ok(()) } +#[tokio::test(flavor = "multi_thread")] +async fn test_decimal_trace_id() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Jaeger) + .config(include_str!("fixtures/jaeger_decimal_trace_id.router.yaml")) + .build() + .await; + + router.start().await; + router.assert_started().await; + let query = json!({"query":"query ExampleQuery1 {topProducts{name}}","variables":{}}); + + let (id, result) = router.execute_query(&query).await; + let id_from_router: u128 = result + .headers() + .get("apollo-custom-trace-id") + .unwrap() + .to_str() + .unwrap_or_default() + .parse() + .expect("expected decimal trace ID"); + assert_eq!(format!("{:x}", id_from_router), id.to_string()); + + validate_trace( + id, + &query, + Some("ExampleQuery1"), + &["client", "router", "subgraph"], + false, + ) + .await?; + router.graceful_shutdown().await; + Ok(()) +} + async fn validate_trace( - id: String, + id: TraceId, query: &Value, operation_name: Option<&str>, services: &[&'static str], @@ -306,6 +341,8 @@ async fn validate_trace( .append_pair("service", services.first().expect("expected root service")) .finish(); + let id = id.to_string(); + println!("trace id: {}", id); let url = format!("http://localhost:16686/api/traces/{id}?{params}"); for _ in 0..10 { if find_valid_trace( @@ -320,7 +357,7 @@ async fn validate_trace( { return Ok(()); } - tokio::time::sleep(Duration::from_millis(100)).await; + tokio::time::sleep(Duration::from_millis(1000)).await; } find_valid_trace( &url, diff --git a/apollo-router/tests/integration/telemetry/mod.rs b/apollo-router/tests/integration/telemetry/mod.rs index 8ef8d071f2..0a31187c58 100644 --- a/apollo-router/tests/integration/telemetry/mod.rs +++ b/apollo-router/tests/integration/telemetry/mod.rs @@ -1,5 
+1,9 @@ +#[cfg(any(not(feature = "ci"), all(target_arch = "x86_64", target_os = "linux")))] +mod datadog; +#[cfg(any(not(feature = "ci"), all(target_arch = "x86_64", target_os = "linux")))] mod jaeger; mod logging; mod metrics; mod otlp; +#[cfg(any(not(feature = "ci"), all(target_arch = "x86_64", target_os = "linux")))] mod zipkin; diff --git a/apollo-router/tests/integration/telemetry/otlp.rs b/apollo-router/tests/integration/telemetry/otlp.rs index 87d32dae95..48d5f08e37 100644 --- a/apollo-router/tests/integration/telemetry/otlp.rs +++ b/apollo-router/tests/integration/telemetry/otlp.rs @@ -1,4 +1,3 @@ -#![cfg(all(target_os = "linux", target_arch = "x86_64", test))] extern crate core; use std::collections::HashSet; @@ -6,6 +5,7 @@ use std::time::Duration; use anyhow::anyhow; use itertools::Itertools; +use opentelemetry_api::trace::TraceId; use opentelemetry_proto::tonic::collector::metrics::v1::ExportMetricsServiceResponse; use opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceResponse; use prost::Message; @@ -83,7 +83,7 @@ async fn test_basic() -> Result<(), BoxError> { async fn validate_telemetry( mock_server: &MockServer, - _id: String, + _id: TraceId, query: &Value, operation_name: Option<&str>, services: &[&'static str], diff --git a/apollo-router/tests/integration/telemetry/zipkin.rs b/apollo-router/tests/integration/telemetry/zipkin.rs index 426b4f93b3..792a637a88 100644 --- a/apollo-router/tests/integration/telemetry/zipkin.rs +++ b/apollo-router/tests/integration/telemetry/zipkin.rs @@ -1,10 +1,10 @@ -#![cfg(all(target_os = "linux", target_arch = "x86_64", test))] extern crate core; use std::collections::HashSet; use std::time::Duration; use anyhow::anyhow; +use opentelemetry_api::trace::TraceId; use serde_json::json; use serde_json::Value; use tower::BoxError; @@ -48,7 +48,7 @@ async fn test_basic() -> Result<(), BoxError> { } async fn validate_trace( - id: String, + id: TraceId, query: &Value, operation_name: Option<&str>, services: 
&[&'static str], diff --git a/apollo-router/tests/integration/traffic_shaping.rs b/apollo-router/tests/integration/traffic_shaping.rs new file mode 100644 index 0000000000..feb9a7e725 --- /dev/null +++ b/apollo-router/tests/integration/traffic_shaping.rs @@ -0,0 +1,235 @@ +use std::time::Duration; + +use insta::assert_yaml_snapshot; +use serde_json::json; +use tower::BoxError; +use wiremock::ResponseTemplate; + +use crate::integration::common::graph_os_enabled; +use crate::integration::common::Telemetry; +use crate::integration::IntegrationTest; + +const PROMETHEUS_CONFIG: &str = r#" + telemetry: + exporters: + metrics: + prometheus: + listen: 127.0.0.1:4000 + enabled: true + path: /metrics +"#; + +#[tokio::test(flavor = "multi_thread")] +async fn test_router_timeout() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(format!( + r#" + {PROMETHEUS_CONFIG} + traffic_shaping: + router: + timeout: 1ns + "# + )) + .responder(ResponseTemplate::new(500).set_delay(Duration::from_millis(20))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router.execute_default_query().await; + assert_eq!(response.status(), 504); + let response = response.text().await?; + assert!(response.contains("REQUEST_TIMEOUT")); + assert_yaml_snapshot!(response); + + router.assert_metrics_contains(r#"apollo_router_graphql_error_total{code="REQUEST_TIMEOUT",otel_scope_name="apollo/router"} 1"#, None).await; + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_subgraph_timeout() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(format!( + r#" + {PROMETHEUS_CONFIG} + include_subgraph_errors: + all: true + traffic_shaping: + all: + timeout: 1ns + "# + )) + .responder(ResponseTemplate::new(500).set_delay(Duration::from_millis(20))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let 
(_trace_id, response) = router.execute_default_query().await; + assert_eq!(response.status(), 200); + let response = response.text().await?; + assert!(response.contains("REQUEST_TIMEOUT")); + assert_yaml_snapshot!(response); + + router.assert_metrics_contains(r#"apollo_router_graphql_error_total{code="REQUEST_TIMEOUT",otel_scope_name="apollo/router"} 1"#, None).await; + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_router_timeout_operation_name_in_tracing() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config( + r#" + traffic_shaping: + router: + timeout: 1ns + "#, + ) + .responder(ResponseTemplate::new(500).set_delay(Duration::from_millis(20))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router + .execute_query(&json!({ + "query": "query UniqueName { topProducts { name } }" + })) + .await; + assert_eq!(response.status(), 504); + let response = response.text().await?; + assert!(response.contains("REQUEST_TIMEOUT")); + + router + .assert_log_contains(r#""otel.name":"query UniqueName""#) + .await; + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_router_timeout_custom_metric() -> Result<(), BoxError> { + if !graph_os_enabled() { + return Ok(()); + } + + let mut router = IntegrationTest::builder() + .telemetry(Telemetry::Jaeger) + .config(format!( + r#" + {PROMETHEUS_CONFIG} + instrumentation: + instruments: + router: + http.server.request.duration: + attributes: + # Standard attributes + http.response.status_code: true + graphql.error: + on_graphql_error: true + traffic_shaping: + router: + timeout: 1ns + "# + )) + .responder(ResponseTemplate::new(500).set_delay(Duration::from_millis(20))) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_trace_id, response) = router.execute_default_query().await; + 
assert_eq!(response.status(), 504); + let response = response.text().await?; + assert!(response.contains("REQUEST_TIMEOUT")); + + router.assert_metrics_contains(r#"http_server_request_duration_seconds_count{error_type="Gateway Timeout",graphql_error="true",http_request_method="POST",http_response_status_code="504""#, None).await; + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_router_rate_limit() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(format!( + r#" + {PROMETHEUS_CONFIG} + traffic_shaping: + router: + global_rate_limit: + capacity: 1 + interval: 10min + "# + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_, response) = router.execute_default_query().await; + assert_eq!(response.status(), 200); + let response = response.text().await?; + assert!(!response.contains("REQUEST_RATE_LIMITED")); + assert_yaml_snapshot!(response); + + let (_, response) = router.execute_default_query().await; + assert_eq!(response.status(), 429); + let response = response.text().await?; + assert!(response.contains("REQUEST_RATE_LIMITED")); + assert_yaml_snapshot!(response); + + router.assert_metrics_contains(r#"apollo_router_graphql_error_total{code="REQUEST_RATE_LIMITED",otel_scope_name="apollo/router"} 1"#, None).await; + + router.graceful_shutdown().await; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_subgraph_rate_limit() -> Result<(), BoxError> { + let mut router = IntegrationTest::builder() + .config(format!( + r#" + {PROMETHEUS_CONFIG} + include_subgraph_errors: + all: true + traffic_shaping: + all: + global_rate_limit: + capacity: 1 + interval: 10min + "#, + )) + .build() + .await; + + router.start().await; + router.assert_started().await; + + let (_, response) = router.execute_default_query().await; + assert_eq!(response.status(), 200); + let response = response.text().await?; + 
assert!(!response.contains("REQUEST_RATE_LIMITED")); + assert_yaml_snapshot!(response); + + let (_, response) = router.execute_default_query().await; + assert_eq!(response.status(), 200); + let response = response.text().await?; + assert!(response.contains("REQUEST_RATE_LIMITED")); + assert_yaml_snapshot!(response); + + router.assert_metrics_contains(r#"apollo_router_graphql_error_total{code="REQUEST_RATE_LIMITED",otel_scope_name="apollo/router"} 1"#, None).await; + + router.graceful_shutdown().await; + Ok(()) +} diff --git a/apollo-router/tests/integration/validation.rs b/apollo-router/tests/integration/validation.rs index 962df9c49b..b5b0f6682d 100644 --- a/apollo-router/tests/integration/validation.rs +++ b/apollo-router/tests/integration/validation.rs @@ -128,7 +128,7 @@ async fn test_validation_error() { } }, { - "message": "Field \"topProducts\" of type \"Product\" must have a selection of subfields. Did you mean \"topProducts { ... }\"?", + "message": "Field \"topProducts\" of type \"Query\" must have a selection of subfields. Did you mean \"topProducts { ... 
}\"?", "locations": [ { "line": 1, diff --git a/apollo-router/tests/samples/basic/query1/plan.json b/apollo-router/tests/samples/basic/query1/plan.json index 7153b2dbf6..d8fe2c400d 100644 --- a/apollo-router/tests/samples/basic/query1/plan.json +++ b/apollo-router/tests/samples/basic/query1/plan.json @@ -8,12 +8,20 @@ "accounts": { "requests": [ { - "request": {"query":"{me{name}}"}, - "response": {"data": { "me": { "name": "test" } } } + "request": { + "body": {"query":"{me{name}}"} + }, + "response": { + "body": {"data": { "me": { "name": "test" } } } + } }, { - "request": {"query":"{me{nom:name}}"}, - "response": {"data": { "me": { "nom": "test" } } } + "request": { + "body": {"query":"{me{nom:name}}"} + }, + "response": { + "body": {"data": { "me": { "nom": "test" } } } + } } ] } diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/README.md b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/README.md new file mode 100644 index 0000000000..01ef2996bd --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/README.md @@ -0,0 +1,9 @@ +# Entity cache invalidation + +This tests subgraph response extension based cache invalidation. 
This is the expected process: +- a query is sent to the "accounts" subgraph and cached +- we reload the subgraph with a mock mutation where the response has an extension to invalidate all data from the "accounts" subgraph +- we do the same query, we should get the same result as the first time (getting data from the cache instead of the subgraph) +- we do the mutation +- we reload the subgraph with a mock of the same query as precedently, but returning a different result +- the query is sent again by the client, we should get the new result now \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/configuration.yaml b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/configuration.yaml new file mode 100644 index 0000000000..b297fee443 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/configuration.yaml @@ -0,0 +1,17 @@ +override_subgraph_url: + products: http://localhost:4005 +include_subgraph_errors: + all: true + +preview_entity_cache: + enabled: true + redis: + urls: + ["redis://localhost:6379",] + subgraph: + all: + enabled: true + subgraphs: + reviews: + ttl: 120s + enabled: true \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/plan.json b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/plan.json new file mode 100644 index 0000000000..f6996f21b8 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/plan.json @@ -0,0 +1,137 @@ +{ + "enterprise": true, + "redis": true, + "actions": [ + { + "type": "Start", + "schema_path": "./supergraph.graphql", + "configuration_path": "./configuration.yaml", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"query InvalidationSubgraphType__accounts__0{me{name 
id}}","operationName":"InvalidationSubgraphType__accounts__0"} + }, + "response": { + "headers": { + "Cache-Control": "public, max-age=10", + "Content-Type": "application/json" + }, + "body": {"data": { "me": { "name": "invalidation-subgraph-type", "id": "1" } } } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "query InvalidationSubgraphType { me { name id } }" + }, + "expected_response": { + "data":{ + "me":{ + "name":"invalidation-subgraph-type", + "id":"1" + } + } + } + }, + { + "type": "ReloadSubgraphs", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"mutation{updateMyAccount{name}}"} + }, + "response": { + "headers": { + "Content-Type": "application/json" + }, + "body": { + "data": { "updateMyAccount": { "name": "invalidation-subgraph-type2" } }, + "extensions": { + "invalidation": [{ + "kind": "type", + "subgraph": "accounts", + "type": "Query" + }] + } + } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "query InvalidationSubgraphType { me { name id } }" + }, + "expected_response": { + "data":{ + "me":{ + "name":"invalidation-subgraph-type", + "id":"1" + } + } + } + }, + { + "type": "Request", + "request": { + "query": "mutation { updateMyAccount { name } }" + }, + "expected_response": { + "data":{ + "updateMyAccount":{ + "name":"invalidation-subgraph-type2" + } + } + } + }, + { + "type": "ReloadSubgraphs", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"query InvalidationSubgraphType__accounts__0{me{name id}}", "operationName":"InvalidationSubgraphType__accounts__0"} + }, + "response": { + "headers": { + "Cache-Control": "public, max-age=10", + "Content-Type": "application/json" + }, + "body": {"data": { "me": { "name": "invalidation-subgraph-type2", "id" : "1" } } } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "query InvalidationSubgraphType { me { name id } }" + }, + "expected_response": { 
+ "data":{ + "me":{ + "name":"invalidation-subgraph-type2", + "id":"1" + } + } + } + }, + { + "type": "Stop" + } + ] +} diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/supergraph.graphql b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/supergraph.graphql new file mode 100644 index 0000000000..1196414b6f --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph-type/supergraph.graphql @@ -0,0 +1,91 @@ + +schema + @core(feature: "https://specs.apollo.dev/core/v0.2"), + @core(feature: "https://specs.apollo.dev/join/v0.1", for: EXECUTION) + @core(feature: "https://specs.apollo.dev/inaccessible/v0.1", for: SECURITY) +{ + query: Query + mutation: Mutation +} + +directive @core(as: String, feature: String!, for: core__Purpose) repeatable on SCHEMA + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet) on FIELD_DEFINITION + +directive @join__type(graph: join__Graph!, key: join__FieldSet) repeatable on OBJECT | INTERFACE + +directive @join__owner(graph: join__Graph!) on OBJECT | INTERFACE + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @tag(name: String!) repeatable on FIELD_DEFINITION | INTERFACE | OBJECT | UNION + +directive @inaccessible on OBJECT | FIELD_DEFINITION | INTERFACE | UNION + +enum core__Purpose { + """ + `EXECUTION` features provide metadata necessary to for operation execution. + """ + EXECUTION + + """ + `SECURITY` features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY +} + +scalar join__FieldSet + +enum join__Graph { + ACCOUNTS @join__graph(name: "accounts", url: "https://accounts.demo.starstuff.dev") + INVENTORY @join__graph(name: "inventory", url: "https://inventory.demo.starstuff.dev") + PRODUCTS @join__graph(name: "products", url: "https://products.demo.starstuff.dev") + REVIEWS @join__graph(name: "reviews", url: "https://reviews.demo.starstuff.dev") +} +type Mutation { + updateMyAccount: User @join__field(graph: ACCOUNTS) + createProduct(name: String, upc: ID!): Product @join__field(graph: PRODUCTS) + createReview(body: String, id: ID!, upc: ID!): Review @join__field(graph: REVIEWS) +} + +type Product + @join__owner(graph: PRODUCTS) + @join__type(graph: PRODUCTS, key: "upc") + @join__type(graph: INVENTORY, key: "upc") + @join__type(graph: REVIEWS, key: "upc") +{ + inStock: Boolean @join__field(graph: INVENTORY) @tag(name: "private") @inaccessible + name: String @join__field(graph: PRODUCTS) + price: Int @join__field(graph: PRODUCTS) + reviews: [Review] @join__field(graph: REVIEWS) + reviewsForAuthor(authorID: ID!): [Review] @join__field(graph: REVIEWS) + shippingEstimate: Int @join__field(graph: INVENTORY, requires: "price weight") + upc: String! @join__field(graph: PRODUCTS) + weight: Int @join__field(graph: PRODUCTS) +} + +type Query { + me: User @join__field(graph: ACCOUNTS) + topProducts(first: Int = 5): [Product] @join__field(graph: PRODUCTS) +} + +type Review + @join__owner(graph: REVIEWS) + @join__type(graph: REVIEWS, key: "id") +{ + author: User @join__field(graph: REVIEWS, provides: "username") + body: String @join__field(graph: REVIEWS) + id: ID! @join__field(graph: REVIEWS) + product: Product @join__field(graph: REVIEWS) +} + +type User + @join__owner(graph: ACCOUNTS) + @join__type(graph: ACCOUNTS, key: "id") + @join__type(graph: REVIEWS, key: "id") +{ + id: ID! 
@join__field(graph: ACCOUNTS) + name: String @join__field(graph: ACCOUNTS) + reviews: [Review] @join__field(graph: REVIEWS) + username: String @join__field(graph: ACCOUNTS) +} diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/README.md b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/README.md new file mode 100644 index 0000000000..01ef2996bd --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/README.md @@ -0,0 +1,9 @@ +# Entity cache invalidation + +This tests subgraph response extension based cache invalidation. This is the expected process: +- a query is sent to the "accounts" subgraph and cached +- we reload the subgraph with a mock mutation where the response has an extension to invalidate all data from the "accounts" subgraph +- we do the same query, we should get the same result as the first time (getting data from the cache instead of the subgraph) +- we do the mutation +- we reload the subgraph with a mock of the same query as precedently, but returning a different result +- the query is sent again by the client, we should get the new result now \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/configuration.yaml b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/configuration.yaml new file mode 100644 index 0000000000..b297fee443 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/configuration.yaml @@ -0,0 +1,17 @@ +override_subgraph_url: + products: http://localhost:4005 +include_subgraph_errors: + all: true + +preview_entity_cache: + enabled: true + redis: + urls: + ["redis://localhost:6379",] + subgraph: + all: + enabled: true + subgraphs: + reviews: + ttl: 120s + enabled: true \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/plan.json 
b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/plan.json new file mode 100644 index 0000000000..cadc7ac809 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/plan.json @@ -0,0 +1,133 @@ +{ + "enterprise": true, + "redis": true, + "actions": [ + { + "type": "Start", + "schema_path": "./supergraph.graphql", + "configuration_path": "./configuration.yaml", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"{me{name}}"} + }, + "response": { + "headers": { + "Cache-Control": "public, max-age=10", + "Content-Type": "application/json" + }, + "body": {"data": { "me": { "name": "invalidation-subgraph" } } } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "{ me { name } }" + }, + "expected_response": { + "data":{ + "me":{ + "name":"invalidation-subgraph" + } + } + } + }, + { + "type": "ReloadSubgraphs", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"mutation{updateMyAccount{name}}"} + }, + "response": { + "headers": { + "Content-Type": "application/json" + }, + "body": { + "data": { "updateMyAccount": { "name": "invalidation-subgraph2" } }, + "extensions": { + "invalidation": [{ + "kind": "subgraph", + "subgraph": "accounts" + }] + } + } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "{ me { name } }" + }, + "expected_response": { + "data":{ + "me":{ + "name":"invalidation-subgraph" + } + } + } + }, + { + "type": "Request", + "request": { + "query": "mutation { updateMyAccount { name } }" + }, + "expected_response": { + "data":{ + "updateMyAccount":{ + "name":"invalidation-subgraph2" + } + } + } + }, + { + "type": "ReloadSubgraphs", + "subgraphs": { + "accounts": { + "requests": [ + { + "request": { + "body": {"query":"{me{name}}"} + }, + "response": { + "headers": { + "Cache-Control": "public, max-age=10", + "Content-Type": "application/json" + }, + "body": {"data": { 
"me": { "name": "invalidation-subgraph2" } } } + } + } + ] + } + } + }, + { + "type": "Request", + "request": { + "query": "{ me { name } }" + }, + "expected_response": { + "data":{ + "me":{ + "name":"invalidation-subgraph2" + } + } + } + }, + { + "type": "Stop" + } + ] +} diff --git a/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/supergraph.graphql b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/supergraph.graphql new file mode 100644 index 0000000000..1196414b6f --- /dev/null +++ b/apollo-router/tests/samples/enterprise/entity-cache/invalidation-subgraph/supergraph.graphql @@ -0,0 +1,91 @@ + +schema + @core(feature: "https://specs.apollo.dev/core/v0.2"), + @core(feature: "https://specs.apollo.dev/join/v0.1", for: EXECUTION) + @core(feature: "https://specs.apollo.dev/inaccessible/v0.1", for: SECURITY) +{ + query: Query + mutation: Mutation +} + +directive @core(as: String, feature: String!, for: core__Purpose) repeatable on SCHEMA + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet) on FIELD_DEFINITION + +directive @join__type(graph: join__Graph!, key: join__FieldSet) repeatable on OBJECT | INTERFACE + +directive @join__owner(graph: join__Graph!) on OBJECT | INTERFACE + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @tag(name: String!) repeatable on FIELD_DEFINITION | INTERFACE | OBJECT | UNION + +directive @inaccessible on OBJECT | FIELD_DEFINITION | INTERFACE | UNION + +enum core__Purpose { + """ + `EXECUTION` features provide metadata necessary to for operation execution. + """ + EXECUTION + + """ + `SECURITY` features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY +} + +scalar join__FieldSet + +enum join__Graph { + ACCOUNTS @join__graph(name: "accounts", url: "https://accounts.demo.starstuff.dev") + INVENTORY @join__graph(name: "inventory", url: "https://inventory.demo.starstuff.dev") + PRODUCTS @join__graph(name: "products", url: "https://products.demo.starstuff.dev") + REVIEWS @join__graph(name: "reviews", url: "https://reviews.demo.starstuff.dev") +} +type Mutation { + updateMyAccount: User @join__field(graph: ACCOUNTS) + createProduct(name: String, upc: ID!): Product @join__field(graph: PRODUCTS) + createReview(body: String, id: ID!, upc: ID!): Review @join__field(graph: REVIEWS) +} + +type Product + @join__owner(graph: PRODUCTS) + @join__type(graph: PRODUCTS, key: "upc") + @join__type(graph: INVENTORY, key: "upc") + @join__type(graph: REVIEWS, key: "upc") +{ + inStock: Boolean @join__field(graph: INVENTORY) @tag(name: "private") @inaccessible + name: String @join__field(graph: PRODUCTS) + price: Int @join__field(graph: PRODUCTS) + reviews: [Review] @join__field(graph: REVIEWS) + reviewsForAuthor(authorID: ID!): [Review] @join__field(graph: REVIEWS) + shippingEstimate: Int @join__field(graph: INVENTORY, requires: "price weight") + upc: String! @join__field(graph: PRODUCTS) + weight: Int @join__field(graph: PRODUCTS) +} + +type Query { + me: User @join__field(graph: ACCOUNTS) + topProducts(first: Int = 5): [Product] @join__field(graph: PRODUCTS) +} + +type Review + @join__owner(graph: REVIEWS) + @join__type(graph: REVIEWS, key: "id") +{ + author: User @join__field(graph: REVIEWS, provides: "username") + body: String @join__field(graph: REVIEWS) + id: ID! @join__field(graph: REVIEWS) + product: Product @join__field(graph: REVIEWS) +} + +type User + @join__owner(graph: ACCOUNTS) + @join__type(graph: ACCOUNTS, key: "id") + @join__type(graph: REVIEWS, key: "id") +{ + id: ID! 
@join__field(graph: ACCOUNTS) + name: String @join__field(graph: ACCOUNTS) + reviews: [Review] @join__field(graph: REVIEWS) + username: String @join__field(graph: ACCOUNTS) +} diff --git a/apollo-router/tests/samples/enterprise/query-planning-redis/plan.json b/apollo-router/tests/samples/enterprise/query-planning-redis/plan.json index efb23b75cb..a864862620 100644 --- a/apollo-router/tests/samples/enterprise/query-planning-redis/plan.json +++ b/apollo-router/tests/samples/enterprise/query-planning-redis/plan.json @@ -1,5 +1,6 @@ { "enterprise": true, + "redis": true, "actions": [ { "type": "Start", @@ -9,12 +10,20 @@ "accounts": { "requests": [ { - "request": {"query":"{me{name}}"}, - "response": {"data": { "me": { "name": "test" } } } + "request": { + "body": {"query":"{me{name}}"} + }, + "response": { + "body": {"data": { "me": { "name": "test" } } } + } }, { - "request": {"query":"{me{nom:name}}"}, - "response": {"data": { "me": { "nom": "test" } } } + "request": { + "body": {"query":"{me{nom:name}}"} + }, + "response": { + "body": {"data": { "me": { "nom": "test" } } } + } } ] } diff --git a/apollo-router/tests/samples_tests.rs b/apollo-router/tests/samples_tests.rs index 05a849b237..605f632c88 100644 --- a/apollo-router/tests/samples_tests.rs +++ b/apollo-router/tests/samples_tests.rs @@ -18,6 +18,8 @@ use serde::Deserialize; use serde_json::Value; use tokio::runtime::Runtime; use wiremock::matchers::body_partial_json; +use wiremock::matchers::header; +use wiremock::matchers::method; use wiremock::Mock; use wiremock::MockServer; use wiremock::ResponseTemplate; @@ -69,16 +71,27 @@ fn lookup_dir( let plan: Plan = match serde_json::from_str(&s) { Ok(data) => data, Err(e) => { - return Err(format!("could not deserialize test plan: {e}").into()); + return Err(format!( + "could not deserialize test plan at {}: {e}", + path.display() + ) + .into()); } }; - if !plan.enterprise - || (std::env::var("TEST_APOLLO_KEY").is_ok() + if plan.enterprise + && 
!(std::env::var("TEST_APOLLO_KEY").is_ok() && std::env::var("TEST_APOLLO_GRAPH_REF").is_ok()) { - tests.push(Trial::test(name, move || test(&path, plan))); + continue; } + + #[cfg(all(feature = "ci", not(all(target_arch = "x86_64", target_os = "linux"))))] + if plan.redis { + continue; + } + + tests.push(Trial::test(name, move || test(&path, plan))); } else { lookup_dir(&path, &name, tests)?; } @@ -113,6 +126,7 @@ struct TestExecution { router: Option, subgraphs_server: Option, subgraphs: HashMap, + configuration_path: Option, } impl TestExecution { @@ -121,6 +135,7 @@ impl TestExecution { router: None, subgraphs_server: None, subgraphs: HashMap::new(), + configuration_path: None, } } @@ -146,6 +161,7 @@ impl TestExecution { Action::ReloadSchema { schema_path } => { self.reload_schema(schema_path, path, out).await } + Action::ReloadSubgraphs { subgraphs } => self.reload_subgraphs(subgraphs, out).await, Action::Request { request, query_path, @@ -186,9 +202,27 @@ impl TestExecution { let mut subgraph_overrides = HashMap::new(); for (name, subgraph) in subgraphs { - for SubgraphRequest { request, response } in &subgraph.requests { - Mock::given(body_partial_json(request)) - .respond_with(ResponseTemplate::new(200).set_body_json(response)) + for SubgraphRequestMock { request, response } in &subgraph.requests { + let mut builder = Mock::given(body_partial_json(&request.body)); + + if let Some(s) = request.method.as_deref() { + builder = builder.and(method(s)); + } + + if let Some(s) = request.path.as_deref() { + builder = builder.and(wiremock::matchers::path(s)); + } + + for (header_name, header_value) in &request.headers { + builder = builder.and(header(header_name.as_str(), header_value.as_str())); + } + + let mut res = ResponseTemplate::new(response.status.unwrap_or(200)); + for (header_name, header_value) in &response.headers { + res = res.append_header(header_name.as_str(), header_value.as_str()); + } + builder + .respond_with(res.set_body_json(&response.body)) 
.mount(&subgraphs_server) .await; } @@ -215,6 +249,7 @@ impl TestExecution { self.router = Some(router); self.subgraphs_server = Some(subgraphs_server); self.subgraphs = subgraphs.clone(); + self.configuration_path = Some(configuration_path.to_string()); Ok(()) } @@ -251,9 +286,27 @@ impl TestExecution { let mut subgraph_overrides = HashMap::new(); for (name, subgraph) in &self.subgraphs { - for SubgraphRequest { request, response } in &subgraph.requests { - Mock::given(body_partial_json(request)) - .respond_with(ResponseTemplate::new(200).set_body_json(response)) + for SubgraphRequestMock { request, response } in &subgraph.requests { + let mut builder = Mock::given(body_partial_json(&request.body)); + + if let Some(s) = request.method.as_deref() { + builder = builder.and(method(s)); + } + + if let Some(s) = request.path.as_deref() { + builder = builder.and(wiremock::matchers::path(s)); + } + + for (header_name, header_value) in &request.headers { + builder = builder.and(header(header_name.as_str(), header_value.as_str())); + } + + let mut res = ResponseTemplate::new(response.status.unwrap_or(200)); + for (header_name, header_value) in &response.headers { + res = res.append_header(header_name.as_str(), header_value.as_str()); + } + builder + .respond_with(res.set_body_json(&response.body)) .mount(&subgraphs_server) .await; } @@ -265,6 +318,8 @@ impl TestExecution { } let config = open_file(&path.join(configuration_path), out)?; + self.configuration_path = Some(configuration_path.to_string()); + self.subgraphs_server = Some(subgraphs_server); router.update_config(&config).await; router.assert_reloaded().await; @@ -272,6 +327,46 @@ impl TestExecution { Ok(()) } + async fn reload_subgraphs( + &mut self, + subgraphs: &HashMap, + out: &mut String, + ) -> Result<(), Failed> { + writeln!(out, "reloading subgraphs with: {subgraphs:?}").unwrap(); + + let subgraphs_server = self.subgraphs_server.as_mut().unwrap(); + subgraphs_server.reset().await; + + for subgraph in 
subgraphs.values() { + for SubgraphRequestMock { request, response } in &subgraph.requests { + let mut builder = Mock::given(body_partial_json(&request.body)); + + if let Some(s) = request.method.as_deref() { + builder = builder.and(method(s)); + } + + if let Some(s) = request.path.as_deref() { + builder = builder.and(wiremock::matchers::path(s)); + } + + for (header_name, header_value) in &request.headers { + builder = builder.and(header(header_name.as_str(), header_value.as_str())); + } + + let mut res = ResponseTemplate::new(response.status.unwrap_or(200)); + for (header_name, header_value) in &response.headers { + res = res.append_header(header_name.as_str(), header_value.as_str()); + } + builder + .respond_with(res.set_body_json(&response.body)) + .mount(subgraphs_server) + .await; + } + } + + Ok(()) + } + async fn reload_schema( &mut self, schema_path: &str, @@ -358,6 +453,8 @@ impl TestExecution { { writeln!(out, "subgraphs received requests:").unwrap(); for request in requests { + writeln!(out, "\tmethod: {}", request.method).unwrap(); + writeln!(out, "\tpath: {}", request.url).unwrap(); writeln!(out, "\t{}\n", std::str::from_utf8(&request.body).unwrap()).unwrap(); } } else { @@ -409,9 +506,12 @@ fn check_path(path: &Path, out: &mut String) -> Result<(), Failed> { } #[derive(Deserialize)] +#[allow(dead_code)] struct Plan { #[serde(default)] enterprise: bool, + #[serde(default)] + redis: bool, actions: Vec, } @@ -429,6 +529,9 @@ enum Action { ReloadSchema { schema_path: String, }, + ReloadSubgraphs { + subgraphs: HashMap, + }, Request { request: Value, query_path: Option, @@ -437,13 +540,30 @@ enum Action { Stop, } -#[derive(Clone, Deserialize)] +#[derive(Clone, Debug, Deserialize)] struct Subgraph { - requests: Vec, + requests: Vec, } -#[derive(Clone, Deserialize)] +#[derive(Clone, Debug, Deserialize)] +struct SubgraphRequestMock { + request: SubgraphRequest, + response: SubgraphResponse, +} + +#[derive(Clone, Debug, Deserialize)] struct SubgraphRequest { - 
request: Value, - response: Value, + method: Option, + path: Option, + #[serde(default)] + headers: HashMap, + body: Value, +} + +#[derive(Clone, Debug, Deserialize)] +struct SubgraphResponse { + status: Option, + #[serde(default)] + headers: HashMap, + body: Value, } diff --git a/docker-compose.yml b/docker-compose.yml index a795b3393d..f56b40eea1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,4 +10,12 @@ services: - 6831:6831/udp - 6832:6832/udp - 16686:16686 - - 14268:14268 \ No newline at end of file + - 14268:14268 + zipkin: + image: openzipkin/zipkin:latest + ports: + - 9411:9411 + datadog: + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest + ports: + - 8126:8126 \ No newline at end of file diff --git a/dockerfiles/tracing/datadog-subgraph/package-lock.json b/dockerfiles/tracing/datadog-subgraph/package-lock.json index 5feae96eb2..fbdc25d16a 100644 --- a/dockerfiles/tracing/datadog-subgraph/package-lock.json +++ b/dockerfiles/tracing/datadog-subgraph/package-lock.json @@ -16,7 +16,7 @@ "graphql": "^16.5.0" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } }, "node_modules/@apollo/cache-control-types": { @@ -69,9 +69,10 @@ } }, "node_modules/@apollo/server": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.0.tgz", - "integrity": "sha512-pLx//lZ/pvUfWL9G8Np8+y3ujc0pYc8U7dwD6ztt9FAw8NmCPzPaDzlXLBAjGU6WnkqVBOnz8b3dOwRNjLYSUA==", + "version": "4.10.4", + "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.4.tgz", + "integrity": "sha512-HS12CUa1wq8f5zKXOKJRwRdESFp4por9AINecpcsEUV9jsCP/NqPILgx0hCOOFJuKxmnaL7070xO6l5xmOq4Fw==", + "license": "MIT", "dependencies": { "@apollo/cache-control-types": "^1.0.3", "@apollo/server-gateway-interface": "^1.1.1", @@ -152,6 +153,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/@apollo/utils.keyvaluecache/-/utils.keyvaluecache-2.1.1.tgz", "integrity": 
"sha512-qVo5PvUUMD8oB9oYvq4ViCjYAMWnZ5zZwEjNF37L2m1u528x5mueMlU+Cr1UinupCgdB78g+egA1G98rbJ03Vw==", + "license": "MIT", "dependencies": { "@apollo/utils.logger": "^2.0.1", "lru-cache": "^7.14.1" @@ -164,6 +166,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@apollo/utils.logger/-/utils.logger-2.0.1.tgz", "integrity": "sha512-YuplwLHaHf1oviidB7MxnCXAdHp3IqYV8n0momZ3JfLniae92eYqMIx+j5qJFX6WKJPs6q7bczmV4lXIsTu5Pg==", + "license": "MIT", "engines": { "node": ">=14" } @@ -172,6 +175,7 @@ "version": "7.18.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "license": "ISC", "engines": { "node": ">=12" } @@ -335,21 +339,23 @@ } }, "node_modules/@datadog/native-appsec": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@datadog/native-appsec/-/native-appsec-7.0.0.tgz", - "integrity": "sha512-bywstWFW2hWxzPuS0+mFMVHHL0geulx5yQFtsjfszaH2LTAgk2D+Rt40MKbAoZ8q3tRw2dy6aYQ7svO3ca8jpA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@datadog/native-appsec/-/native-appsec-8.0.1.tgz", + "integrity": "sha512-SpWkoo7K4+pwxFze1ogRF1qBaKm8sZjWfZKnQ8Ex67f6L5odLjWOoiiIAs5rp01sLKGXjxU8IJf+X9j4PvI2zQ==", "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { "node-gyp-build": "^3.9.0" }, "engines": { - "node": ">=14" + "node": ">=16" } }, "node_modules/@datadog/native-iast-rewriter": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.3.tgz", - "integrity": "sha512-RCbflf8BJ++h99I7iA4NxTA1lx7YqB+sPQkJNSZKxXyEXtWl9J4XsDV9C/sB9iGbf1PVY77tFvoGm5/WpUV4IA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.3.1.tgz", + "integrity": "sha512-3pmt5G1Ai/+MPyxP7wBerIu/zH/BnAHxEu/EAMr+77IMpK5m7THPDUoWrPRCWcgFBfn0pK5DR7gRItG0wX3e0g==", + "license": 
"Apache-2.0", "dependencies": { "lru-cache": "^7.14.0", "node-gyp-build": "^4.5.0" @@ -377,10 +383,11 @@ } }, "node_modules/@datadog/native-iast-taint-tracking": { - "version": "1.6.4", - "resolved": "https://registry.npmjs.org/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.4.tgz", - "integrity": "sha512-Owxk7hQ4Dxwv4zJAoMjRga0IvE6lhvxnNc8pJCHsemCWBXchjr/9bqg05Zy5JnMbKUWn4XuZeJD6RFZpRa8bfw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-2.1.0.tgz", + "integrity": "sha512-DjZ6itJcjLrTdKk2vP96hak2xS0ABd0NIB8poZG3OBQU5efkzu8JOQoxbIKMklG/0P2zh7EquvGP88PdVXT9aA==", "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { "node-gyp-build": "^3.9.0" } @@ -399,15 +406,16 @@ } }, "node_modules/@datadog/pprof": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@datadog/pprof/-/pprof-5.0.0.tgz", - "integrity": "sha512-vhNan4SBuNWLpexunDJQ+hNbRAgWdk2qy5Iyh7Nn94uSSHXigAJMAvu4jwMKKQKFfchtobOkWT8GQUWW3tgpFg==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@datadog/pprof/-/pprof-5.3.0.tgz", + "integrity": "sha512-53z2Q3K92T6Pf4vz4Ezh8kfkVEvLzbnVqacZGgcbkP//q0joFzO8q00Etw1S6NdnCX0XmX08ULaF4rUI5r14mw==", "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { "delay": "^5.0.0", "node-gyp-build": "<4.0", "p-limit": "^3.1.0", - "pprof-format": "^2.0.7", + "pprof-format": "^2.1.0", "source-map": "^0.7.4" }, "engines": { @@ -654,9 +662,10 @@ } }, "node_modules/acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "license": "MIT", "bin": { "acorn": 
"bin/acorn" }, @@ -664,10 +673,11 @@ "node": ">=0.4.0" } }, - "node_modules/acorn-import-assertions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", - "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "license": "MIT", "peerDependencies": { "acorn": "^8" } @@ -876,38 +886,36 @@ } }, "node_modules/dd-trace": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/dd-trace/-/dd-trace-5.3.0.tgz", - "integrity": "sha512-YL7rVEgLvwDtrZLCI1VRmgUVNLi4dYDyVefca0Q0a2M4IVu0Bc2+8zCW2Fl4bU5RMztSvGPkf5LvE3RLs3j9gA==", + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/dd-trace/-/dd-trace-5.17.0.tgz", + "integrity": "sha512-XirOYj5pJFYnm9NHvN5RFcvDyN/XMDS72wqTTnJgTPMbE4Dc28oQIdM2XWNxDtAcxqLnZq0/4DtFebGYzBAIYw==", "hasInstallScript": true, + "license": "(Apache-2.0 OR BSD-3-Clause)", "dependencies": { - "@datadog/native-appsec": "7.0.0", - "@datadog/native-iast-rewriter": "2.2.3", - "@datadog/native-iast-taint-tracking": "1.6.4", + "@datadog/native-appsec": "8.0.1", + "@datadog/native-iast-rewriter": "2.3.1", + "@datadog/native-iast-taint-tracking": "2.1.0", "@datadog/native-metrics": "^2.0.0", - "@datadog/pprof": "5.0.0", + "@datadog/pprof": "5.3.0", "@datadog/sketches-js": "^2.1.0", - "@opentelemetry/api": "^1.0.0", + "@opentelemetry/api": ">=1.0.0 <1.9.0", "@opentelemetry/core": "^1.14.0", "crypto-randomuuid": "^1.0.0", "dc-polyfill": "^0.1.4", "ignore": "^5.2.4", - "import-in-the-middle": "^1.7.3", + "import-in-the-middle": "^1.7.4", "int64-buffer": "^0.1.9", - "ipaddr.js": "^2.1.0", "istanbul-lib-coverage": "3.2.0", 
"jest-docblock": "^29.7.0", "koalas": "^1.0.2", "limiter": "1.1.5", "lodash.sortby": "^4.7.0", "lru-cache": "^7.14.0", - "methods": "^1.1.2", "module-details-from-path": "^1.0.3", "msgpack-lite": "^0.1.26", - "node-abort-controller": "^3.1.1", "opentracing": ">=0.12.1", "path-to-regexp": "^0.1.2", - "pprof-format": "^2.0.7", + "pprof-format": "^2.1.0", "protobufjs": "^7.2.5", "retry": "^0.13.1", "semver": "^7.5.4", @@ -922,6 +930,7 @@ "version": "7.18.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "license": "ISC", "engines": { "node": ">=12" } @@ -1116,9 +1125,10 @@ } }, "node_modules/graphql": { - "version": "16.8.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", + "version": "16.9.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", + "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==", + "license": "MIT", "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" } @@ -1210,12 +1220,13 @@ } }, "node_modules/import-in-the-middle": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.7.3.tgz", - "integrity": "sha512-R2I11NRi0lI3jD2+qjqyVlVEahsejw7LDnYEbGb47QEFjczE3bZYsmWheCTQA+LFs2DzOQxR7Pms7naHW1V4bQ==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.8.1.tgz", + "integrity": "sha512-yhRwoHtiLGvmSozNOALgjRPFI6uYsds60EoMqqnXyyv+JOIW/BrrLejuTGBt+bq0T5tLzOHrN0T7xYTm4Qt/ng==", + "license": "Apache-2.0", "dependencies": { "acorn": "^8.8.2", - "acorn-import-assertions": "^1.9.0", + "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^1.2.2", "module-details-from-path": 
"^1.0.3" } @@ -1230,14 +1241,6 @@ "resolved": "https://registry.npmjs.org/int64-buffer/-/int64-buffer-0.1.10.tgz", "integrity": "sha512-v7cSY1J8ydZ0GyjUHqF+1bshJ6cnEVLo9EnjB8p+4HDRPZc9N5jjmvUV7NvEsqQOKyH0pmIBFWXVQbiS0+OBbA==" }, - "node_modules/ipaddr.js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.1.0.tgz", - "integrity": "sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ==", - "engines": { - "node": ">= 10" - } - }, "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", @@ -1498,9 +1501,10 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/pprof-format": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/pprof-format/-/pprof-format-2.0.7.tgz", - "integrity": "sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pprof-format/-/pprof-format-2.1.0.tgz", + "integrity": "sha512-0+G5bHH0RNr8E5hoZo/zJYsL92MhkZjwrHp3O2IxmY8RJL9ooKeuZ8Tm0ZNBw5sGZ9TiM71sthTjWoR2Vf5/xw==", + "license": "MIT" }, "node_modules/protobufjs": { "version": "7.2.5", @@ -1764,10 +1768,11 @@ } }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz", + "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==", "dev": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" diff --git a/dockerfiles/tracing/datadog-subgraph/package.json b/dockerfiles/tracing/datadog-subgraph/package.json 
index 15399a732d..0d38223e9d 100644 --- a/dockerfiles/tracing/datadog-subgraph/package.json +++ b/dockerfiles/tracing/datadog-subgraph/package.json @@ -18,6 +18,6 @@ "graphql": "^16.5.0" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } } diff --git a/dockerfiles/tracing/docker-compose.datadog.yml b/dockerfiles/tracing/docker-compose.datadog.yml index f0c6bce262..d353c22089 100644 --- a/dockerfiles/tracing/docker-compose.datadog.yml +++ b/dockerfiles/tracing/docker-compose.datadog.yml @@ -3,7 +3,7 @@ services: apollo-router: container_name: apollo-router - image: ghcr.io/apollographql/router:v1.50.0 + image: ghcr.io/apollographql/router:v1.51.0 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/datadog.router.yaml:/etc/config/configuration.yaml diff --git a/dockerfiles/tracing/docker-compose.jaeger.yml b/dockerfiles/tracing/docker-compose.jaeger.yml index 25fa9778c2..56d42a866f 100644 --- a/dockerfiles/tracing/docker-compose.jaeger.yml +++ b/dockerfiles/tracing/docker-compose.jaeger.yml @@ -4,7 +4,7 @@ services: apollo-router: container_name: apollo-router #build: ./router - image: ghcr.io/apollographql/router:v1.50.0 + image: ghcr.io/apollographql/router:v1.51.0 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/jaeger.router.yaml:/etc/config/configuration.yaml diff --git a/dockerfiles/tracing/docker-compose.zipkin.yml b/dockerfiles/tracing/docker-compose.zipkin.yml index 88b66c39f6..5b26c558d0 100644 --- a/dockerfiles/tracing/docker-compose.zipkin.yml +++ b/dockerfiles/tracing/docker-compose.zipkin.yml @@ -4,7 +4,7 @@ services: apollo-router: container_name: apollo-router build: ./router - image: ghcr.io/apollographql/router:v1.50.0 + image: ghcr.io/apollographql/router:v1.51.0 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/zipkin.router.yaml:/etc/config/configuration.yaml diff --git a/dockerfiles/tracing/jaeger-subgraph/package-lock.json 
b/dockerfiles/tracing/jaeger-subgraph/package-lock.json index 5ee551375d..3687cb6aa4 100644 --- a/dockerfiles/tracing/jaeger-subgraph/package-lock.json +++ b/dockerfiles/tracing/jaeger-subgraph/package-lock.json @@ -18,7 +18,7 @@ "opentracing": "^0.14.7" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } }, "node_modules/@apollo/cache-control-types": { @@ -71,9 +71,9 @@ } }, "node_modules/@apollo/server": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.0.tgz", - "integrity": "sha512-pLx//lZ/pvUfWL9G8Np8+y3ujc0pYc8U7dwD6ztt9FAw8NmCPzPaDzlXLBAjGU6WnkqVBOnz8b3dOwRNjLYSUA==", + "version": "4.10.4", + "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.4.tgz", + "integrity": "sha512-HS12CUa1wq8f5zKXOKJRwRdESFp4por9AINecpcsEUV9jsCP/NqPILgx0hCOOFJuKxmnaL7070xO6l5xmOq4Fw==", "dependencies": { "@apollo/cache-control-types": "^1.0.3", "@apollo/server-gateway-interface": "^1.1.1", @@ -896,9 +896,9 @@ } }, "node_modules/graphql": { - "version": "16.8.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", + "version": "16.9.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", + "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==", "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" } @@ -1430,9 +1430,9 @@ } }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz", + "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==", 
"dev": true, "bin": { "tsc": "bin/tsc", @@ -1556,9 +1556,9 @@ } }, "@apollo/server": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.0.tgz", - "integrity": "sha512-pLx//lZ/pvUfWL9G8Np8+y3ujc0pYc8U7dwD6ztt9FAw8NmCPzPaDzlXLBAjGU6WnkqVBOnz8b3dOwRNjLYSUA==", + "version": "4.10.4", + "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.4.tgz", + "integrity": "sha512-HS12CUa1wq8f5zKXOKJRwRdESFp4por9AINecpcsEUV9jsCP/NqPILgx0hCOOFJuKxmnaL7070xO6l5xmOq4Fw==", "requires": { "@apollo/cache-control-types": "^1.0.3", "@apollo/server-gateway-interface": "^1.1.1", @@ -2214,9 +2214,9 @@ } }, "graphql": { - "version": "16.8.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==" + "version": "16.9.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", + "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==" }, "graphql-tag": { "version": "2.12.6", @@ -2588,9 +2588,9 @@ } }, "typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz", + "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==", "dev": true }, "unpipe": { diff --git a/dockerfiles/tracing/jaeger-subgraph/package.json b/dockerfiles/tracing/jaeger-subgraph/package.json index 0183ae4a5c..2be04d726e 100644 --- a/dockerfiles/tracing/jaeger-subgraph/package.json +++ b/dockerfiles/tracing/jaeger-subgraph/package.json @@ -19,6 +19,6 @@ "opentracing": "^0.14.7" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } } diff 
--git a/dockerfiles/tracing/zipkin-subgraph/package-lock.json b/dockerfiles/tracing/zipkin-subgraph/package-lock.json index 5184890761..dbfad73245 100644 --- a/dockerfiles/tracing/zipkin-subgraph/package-lock.json +++ b/dockerfiles/tracing/zipkin-subgraph/package-lock.json @@ -19,7 +19,7 @@ "zipkin-javascript-opentracing": "^3.0.0" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } }, "node_modules/@apollo/cache-control-types": { @@ -72,9 +72,9 @@ } }, "node_modules/@apollo/server": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.0.tgz", - "integrity": "sha512-pLx//lZ/pvUfWL9G8Np8+y3ujc0pYc8U7dwD6ztt9FAw8NmCPzPaDzlXLBAjGU6WnkqVBOnz8b3dOwRNjLYSUA==", + "version": "4.10.4", + "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.4.tgz", + "integrity": "sha512-HS12CUa1wq8f5zKXOKJRwRdESFp4por9AINecpcsEUV9jsCP/NqPILgx0hCOOFJuKxmnaL7070xO6l5xmOq4Fw==", "dependencies": { "@apollo/cache-control-types": "^1.0.3", "@apollo/server-gateway-interface": "^1.1.1", @@ -917,9 +917,9 @@ } }, "node_modules/graphql": { - "version": "16.8.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", + "version": "16.9.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", + "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==", "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" } @@ -1457,9 +1457,9 @@ } }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz", + "integrity": 
"sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -1615,9 +1615,9 @@ } }, "@apollo/server": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.0.tgz", - "integrity": "sha512-pLx//lZ/pvUfWL9G8Np8+y3ujc0pYc8U7dwD6ztt9FAw8NmCPzPaDzlXLBAjGU6WnkqVBOnz8b3dOwRNjLYSUA==", + "version": "4.10.4", + "resolved": "https://registry.npmjs.org/@apollo/server/-/server-4.10.4.tgz", + "integrity": "sha512-HS12CUa1wq8f5zKXOKJRwRdESFp4por9AINecpcsEUV9jsCP/NqPILgx0hCOOFJuKxmnaL7070xO6l5xmOq4Fw==", "requires": { "@apollo/cache-control-types": "^1.0.3", "@apollo/server-gateway-interface": "^1.1.1", @@ -2279,9 +2279,9 @@ } }, "graphql": { - "version": "16.8.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==" + "version": "16.9.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.9.0.tgz", + "integrity": "sha512-GGTKBX4SD7Wdb8mqeDLni2oaRGYQWjWHGKPQ24ZMnUtKfcsVoiv4uX8+LJr1K6U5VW2Lu1BwJnj7uiori0YtRw==" }, "graphql-tag": { "version": "2.12.6", @@ -2659,9 +2659,9 @@ } }, "typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz", + "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==", "dev": true }, "unpipe": { diff --git a/dockerfiles/tracing/zipkin-subgraph/package.json b/dockerfiles/tracing/zipkin-subgraph/package.json index 9c867da961..de7de6f96b 100644 --- a/dockerfiles/tracing/zipkin-subgraph/package.json +++ b/dockerfiles/tracing/zipkin-subgraph/package.json @@ -20,6 +20,6 @@ 
"zipkin-javascript-opentracing": "^3.0.0" }, "devDependencies": { - "typescript": "5.3.3" + "typescript": "5.5.3" } } diff --git a/docs/source/configuration/authn-jwt.mdx b/docs/source/configuration/authn-jwt.mdx index 3affa08fa9..2d1bc6061c 100644 --- a/docs/source/configuration/authn-jwt.mdx +++ b/docs/source/configuration/authn-jwt.mdx @@ -1,7 +1,7 @@ --- title: JWT Authentication in the Apollo Router subtitle: Restrict access to credentialed users and systems -description: Protect sensitive data by enabling JWT authentication in the Apollo Router. Restrict access to credentialed users and systems. +description: Protect sensitive data by enabling JWT authentication in the Apollo Router. Restrict access to credentialed users and systems. --- @@ -23,57 +23,58 @@ These are the high-level steps of JWT-based authentication with the Apollo Route 1. Whenever a client authenticates with your system, your IdP issues that client a valid JSON Web Token (JWT). 2. In its subsequent requests to your router, the authenticated client provides its JWT in a designated HTTP header. 3. Whenever your router receives a client request, it extracts the JWT from the designated header (if present). - - **If no JWT is present, the request proceeds.** You can reject requests with no accompanying JWT at a later phase ([see below](#working-with-jwt-claims)). + - **If no JWT is present, the request proceeds.** You can reject requests with no accompanying JWT at a later phase ([see below](#working-with-jwt-claims)). 4. Your router validates the extracted JWT using a corresponding [JSON Web Key](https://www.rfc-editor.org/rfc/rfc7517) (**JWK**). - - Your router obtains all of its known JWKs from URLs that you specify in its configuration file. Each URL provides its keys within a single JSON object called a [JWK Set](https://www.rfc-editor.org/rfc/rfc7517#section-5) (or a **JWKS**). 
- - **If validation fails, the router rejects the request.** This can occur if the JWT is malformed, or if it's been expired for more than 60 seconds (this window accounts for synchronization issues). + + - Your router obtains all of its known JWKs from URLs that you specify in its configuration file. Each URL provides its keys within a single JSON object called a [JWK Set](https://www.rfc-editor.org/rfc/rfc7517#section-5) (or a **JWKS**). + - **If validation fails, the router rejects the request.** This can occur if the JWT is malformed, or if it's been expired for more than 60 seconds (this window accounts for synchronization issues). 5. The router extracts all **claims** from the validated JWT and includes them in the request's context, making them available to your [router customizations](../customizations/overview/), such as Rhai scripts. 6. Your customizations can handle the request differently depending on the details of the extracted claims, and/or you can propagate the claims to subgraphs to enable more granular access control. - - For examples, [see below](#working-with-jwt-claims). + - For examples, [see below](#working-with-jwt-claims). ## Turning it on If you use your own custom IdP, [advanced configuration is required](#creating-your-own-jwks-advanced). -Otherwise, if you issue JWTs via a popular third-party IdP (Auth0, Okta, PingOne, etc.), enabling JWT authentication in your router is a two step process described below. +Otherwise, if you issue JWTs via a popular third-party IdP (Auth0, Okta, PingOne, etc.), enabling JWT authentication in your router is a two step process described below. 1. Set configuration options for JWT authentication in your router's [YAML config file](./overview/#yaml-config-file), under the `authentication` key: - ```yaml title="router.yaml" - authentication: - router: - jwt: - jwks: # This key is required. 
- - url: https://dev-zzp5enui.us.auth0.com/.well-known/jwks.json - issuer: - poll_interval: - headers: # optional list of static headers added to the HTTP request to the JWKS URL - - name: User-Agent - value: router - # These keys are optional. Default values are shown. - header_name: Authorization - header_value_prefix: Bearer - # array of alternative token sources - sources: - - type: header - name: X-Authorization - value_prefix: Bearer - - type: cookie - name: authz - ``` - - These options are documented [below](#configuration-options). + ```yaml title="router.yaml" + authentication: + router: + jwt: + jwks: # This key is required. + - url: https://dev-zzp5enui.us.auth0.com/.well-known/jwks.json + issuer: + poll_interval: + headers: # optional list of static headers added to the HTTP request to the JWKS URL + - name: User-Agent + value: router + # These keys are optional. Default values are shown. + header_name: Authorization + header_value_prefix: Bearer + # array of alternative token sources + sources: + - type: header + name: X-Authorization + value_prefix: Bearer + - type: cookie + name: authz + ``` + + These options are documented [below](#configuration-options). 2. 
Pass all of the following to the `router` executable on startup: - - The path to the router's YAML configuration file (via the `--config` option) - - The graph ref for the [GraphOS variant](/graphos/graphs/#variants) your router should use (via the `APOLLO_GRAPH_REF` environment variable) - - A [graph API key](/graphos/api-keys/#graph-api-keys) that enables the router to authenticate with GraphOS to fetch its supergraph schema (via the `APOLLO_KEY` environment variable) + - The path to the router's YAML configuration file (via the `--config` option) + - The graph ref for the [GraphOS variant](/graphos/graphs/#variants) your router should use (via the `APOLLO_GRAPH_REF` environment variable) + - A [graph API key](/graphos/api-keys/#graph-api-keys) that enables the router to authenticate with GraphOS to fetch its supergraph schema (via the `APOLLO_KEY` environment variable) - ```bash - APOLLO_GRAPH_REF=docs-example-graph@main APOLLO_KEY="..." ./router --config router.yaml - ``` + ```bash + APOLLO_GRAPH_REF=docs-example-graph@main APOLLO_KEY="..." ./router --config router.yaml + ``` When the router starts up, it displays a log message that confirms which `jwks` are in use: @@ -156,17 +157,17 @@ The default value is `Bearer`. This is an array of possible token sources, as it could be provided in different headers depending on the client, or it could be stored in a cookie. If the default token source defined by the above `header_name` and `header_value_prefix` does not find the token, then each of the alternative sources is tried until one matches. 
```yaml title="router.yaml" - authentication: - router: - jwt: - jwks: - - url: https://dev-zzp5enui.us.auth0.com/.well-known/jwks.json - sources: - - type: header - name: X-Authorization - value_prefix: Bearer - - type: cookie - name: authz +authentication: + router: + jwt: + jwks: + - url: https://dev-zzp5enui.us.auth0.com/.well-known/jwks.json + sources: + - type: header + name: X-Authorization + value_prefix: Bearer + - type: cookie + name: authz ``` @@ -180,9 +181,15 @@ This is an array of possible token sources, as it could be provided in different -Whether to ignore other prefixes in the `Authorization` header. If set to `false`, or unspecified, the router will only accept tokens with the prefix specified in `header_value_prefix`. If set to `true`, the router will ignore any requests that don't start with the prefix specified in `header_value_prefix`. +This option lets you have a mix of `Authorization` header schemes, for example, both `Basic` and `Bearer`, without requiring you to use an another header. + +By default, the router responds with an error when it encounters an unknown prefix in the `Authorization` header. You must explicitly define prefixes in [`header_value_prefix`](#header_value_prefix) or [`sources`](#sources). -If a header prefix is set to an empty string, this option is ignored. +When `ignore_other_prefixes` is `false` (the default value), the router uses the default behavior and errors when it encounters an unknown prefix in the `Authorization` header. + +If you set `ignore_other_prefixes` to `true`, the router allows requests with unknown prefixes in the `Authorization` header through and does not respond with an error when encountering one. + +If you set `header_value_prefix` to an empty string, the router ignores the `ignore_other_prefixes` setting. The default value is `false`. @@ -342,6 +349,7 @@ You may require information beyond what your JSON web tokens provide. 
For exampl A [`RouterService` coprocessor](../customizations/coprocessor#how-it-works) is appropriate for augmenting claims since the router calls it directly after receiving a client request. The router calls it after the JWT authentication plugin, so you can use a `RouterService` coprocessor to: + - receive the list of claims extracted from the JWT - use information like the `sub` (subject) claim to look up the user in an external database or service - insert additional data in the claims list @@ -367,40 +375,39 @@ The router sends requests to the coprocessor with this format: ```json { - "version": 1, - "stage": "RouterRequest", - "control": "continue", - "id": "d0a8245df0efe8aa38a80dba1147fb2e", - "context": { - "entries": { - "apollo_authentication::JWT::claims": { - "exp": 10000000000, - "sub": "457f6bb6-789c-4e8b-8560-f3943a09e72a" - } - } - }, - "method": "POST" + "version": 1, + "stage": "RouterRequest", + "control": "continue", + "id": "d0a8245df0efe8aa38a80dba1147fb2e", + "context": { + "entries": { + "apollo_authentication::JWT::claims": { + "exp": 10000000000, + "sub": "457f6bb6-789c-4e8b-8560-f3943a09e72a" + } + } + }, + "method": "POST" } ``` The coprocessor can then look up the user with the identifier specified in the `sub` claim and return a response with more claims: - ```json { - "version": 1, - "stage": "RouterRequest", - "control": "continue", - "id": "d0a8245df0efe8aa38a80dba1147fb2e", - "context": { - "entries": { - "apollo_authentication::JWT::claims": { - "exp": 10000000000, - "sub": "457f6bb6-789c-4e8b-8560-f3943a09e72a", - "scope": "profile:read profile:write" - } - } + "version": 1, + "stage": "RouterRequest", + "control": "continue", + "id": "d0a8245df0efe8aa38a80dba1147fb2e", + "context": { + "entries": { + "apollo_authentication::JWT::claims": { + "exp": 10000000000, + "sub": "457f6bb6-789c-4e8b-8560-f3943a09e72a", + "scope": "profile:read profile:write" + } } + } } ``` @@ -425,7 +432,7 @@ To provide a JWKS to your router, configure 
your IdP service to do the following 1. Generate a [valid JWKS object](#jwks-format) that includes the details of every JWK that the router requires to perform token validation. 2. Write the JWKS object to a location that your router can reach via a `file://` or `https://` URL. - - ⚠ **If _any_ of your JWKs uses a symmetric signature algorithm (such as `HS256`), always use a `file://` URL.** Symmetric signature algorithms use a shared key that should _never_ be accessible over the network. + - ⚠ **If _any_ of your JWKs uses a symmetric signature algorithm (such as `HS256`), always use a `file://` URL.** Symmetric signature algorithms use a shared key that should _never_ be accessible over the network. diff --git a/docs/source/configuration/overview.mdx b/docs/source/configuration/overview.mdx index 400d766dcc..fe8b06d23e 100644 --- a/docs/source/configuration/overview.mdx +++ b/docs/source/configuration/overview.mdx @@ -578,6 +578,20 @@ The default value of `experimental_parallelism` is `1`. In practice, you should tune `experimental_parallelism` based on metrics and benchmarks gathered from your router. + + +### Introspection response caching + + + +Introspection responses are generated by the query planner for now, so they are expensive to execute and the router stores them in its query planner cache. Unfortunately, they can fill up the cache, so until we move out introspection execution, there is an option to deactivate response caching. 
+ +```yaml title="router.yaml" +supergraph: + query_planning: + legacy_introspection_caching: false +``` + ### Enhanced operation signature normalization diff --git a/docs/source/configuration/telemetry/exporters/tracing/datadog.mdx b/docs/source/configuration/telemetry/exporters/tracing/datadog.mdx index c8a491e760..b7f802a7aa 100644 --- a/docs/source/configuration/telemetry/exporters/tracing/datadog.mdx +++ b/docs/source/configuration/telemetry/exporters/tracing/datadog.mdx @@ -31,12 +31,13 @@ To configure the router, enable the [OTLP exporter](./otlp) and set `endpoint: < ```yaml title="router.yaml" telemetry: exporters: - tracing: - otlp: - enabled: true + tracing: + otlp: + enabled: true - # Optional endpoint, either 'default' or a URL (Defaults to http://127.0.0.1:4317) - endpoint: "${env.DATADOG_AGENT_HOST}:4317" + # Optional endpoint, either 'default' or a URL (Defaults to http://127.0.0.1:4317) + endpoint: "${env.DATADOG_AGENT_HOST}:4317" + ``` For more details about Datadog configuration, see [Datadog Agent configuration](https://docs.datadoghq.com/opentelemetry/otlp_ingest_in_the_agent/?tab=host). @@ -70,18 +71,26 @@ The Apollo Router can be configured to connect to either the native, default Dat ```yaml title="router.yaml" telemetry: exporters: - tracing: - datadog: - enabled: true - # Optional endpoint, either 'default' or a URL (Defaults to http://127.0.0.1:8126) - endpoint: "http://${env.DATADOG_AGENT_HOST}:8126" + tracing: + datadog: + enabled: true + # Optional endpoint, either 'default' or a URL (Defaults to http://127.0.0.1:8126) + endpoint: "http://${env.DATADOG_AGENT_HOST}:8126" + + # Enable graphql.operation.name attribute on supergraph spans. + instrumentation: + spans: + mode: spec_compliant + supergraph: + attributes: + graphql.operation.name: true ``` ### `enabled` Set to true to enable the Datadog exporter. Defaults to false. 
-### `enable_span_mapping` +### `enable_span_mapping` (default: `true`) [There are some incompatibilities](https://docs.rs/opentelemetry-datadog/latest/opentelemetry_datadog/#quirks) between Datadog and OpenTelemetry, the Datadog exporter might not provide meaningful contextual information in the exported spans. To fix this, you can configure the Apollo Router to perform a mapping for the span name and the span resource name. @@ -119,7 +128,7 @@ Instead, when `enable_span_mapping` is set to `true` the following trace will be ``` | request /graphql | - | router | + | router /graphql | | supergraph MyQuery | | query_planning MyQuery | execution | | fetch fetch | @@ -127,6 +136,56 @@ Instead, when `enable_span_mapping` is set to `true` the following trace will be | subgraph_request MyQuery__my-subgraph-name__0 | ``` + +### `fixed_span_names` (default: `true`) + +When `fixed_span_names: true`, the apollo router to use the original span names instead of the dynamic ones as described by OTel semantic conventions. + +```yaml title="router.yaml" +telemetry: + exporters: + tracing: + datadog: + enabled: true + fixed_span_names: true +``` + +This will allow you to have a finite list of operation names in Datadog on the APM view. + +### `resource_mapping` +When set, `resource_mapping` allows you to specify which attribute to use in the Datadog APM and Trace view. 
+The default resource mappings are: + +| OpenTelemetry Span Name | Datadog Span Operation Name | +|-------------------------|-----------------------------| +| `request` | `http.route` | +| `router` | `http.route` | +| `supergraph` | `graphql.operation.name` | +| `query_planning` | `graphql.operation.name` | +| `subgraph` | `subgraph.name` | +| `subgraph_request` | `graphql.operation.name` | +| `http_request` | `http.route` | + +You may override these mappings by specifying the `resource_mapping` configuration: + +```yaml title="router.yaml" +telemetry: + exporters: + tracing: + datadog: + enabled: true + resource_mapping: + # Use `my.span.attribute` as the resource name for the `router` span + router: "my.span.attribute" + instrumentation: + spans: + router: + attributes: + # Add a custom attribute to the `router` span + my.span.attribute: + request_header: x-custom-header +``` + ### `batch_processor` diff --git a/docs/source/configuration/telemetry/instrumentation/selectors.mdx b/docs/source/configuration/telemetry/instrumentation/selectors.mdx index 3512cada15..ef48c9b641 100644 --- a/docs/source/configuration/telemetry/instrumentation/selectors.mdx +++ b/docs/source/configuration/telemetry/instrumentation/selectors.mdx @@ -56,6 +56,7 @@ The supergraph service is executed after query parsing but before query executio | `query_variable` | Yes | | The name of a graphql query variable | | `request_header` | Yes | | The name of a request header | | `response_header` | Yes | | The name of a response header | +| `is_primary_response` | No | `true`\|`false` | Boolean returning true if it's the primary response and not events like subscription events or deferred responses | | `response_data` | Yes | | Json Path into the supergraph response body data (it might impact performances) | | `response_errors` | Yes | | Json Path into the supergraph response body errors (it might impact performances) | | `request_context` | Yes | | The name of a request context key | @@ -81,6 
+82,7 @@ The subgraph service executes multiple times during query execution, with each e | `subgraph_request_header` | Yes | | The name of a subgraph request header | | `subgraph_response_header` | Yes | | The name of a subgraph response header | | `subgraph_response_status` | Yes | `code`\|`reason` | The status of a subgraph response | +| `subgraph_on_graphql_error` | No | `true`\|`false` | Boolean set to true if the subgraph response payload contains a graphql error | | `supergraph_operation_name` | Yes | `string`\|`hash` | The operation name from the supergraph query | | `supergraph_operation_kind` | Yes | `string` | The operation kind from the supergraph query | | `supergraph_query` | Yes | `string` | The graphql query to the supergraph | diff --git a/docs/source/configuration/traffic-shaping.mdx b/docs/source/configuration/traffic-shaping.mdx index da93735410..2d60fde7fe 100644 --- a/docs/source/configuration/traffic-shaping.mdx +++ b/docs/source/configuration/traffic-shaping.mdx @@ -185,26 +185,7 @@ traffic_shaping: ### HTTP/2 -The router supports subgraph connections over: -- HTTP/1.1 -- HTTP/1.1 with TLS -- HTTP/2 with TLS -- HTTP/2 Cleartext protocol (h2c). This uses HTTP/2 over plaintext connections. 
- -Use the table below to look up the resulting protocol of a subgraph connection, based on the subgraph URL and the `experimental_http2` option: - -| | URL with `http://` | URL with `https://` | -| ------------------------- | ------------------- | --------------------------- | -| `experimental_http2: disable` | HTTP/1.1 | HTTP/1.1 with TLS | -| `experimental_http2: enable` | HTTP/1.1 | Either HTTP/1.1 or HTTP/2 with TLS, as determined by the TLS handshake with a subgraph | -| `experimental_http2: http2only` | h2c | HTTP/2 with TLS | -| `experimental_http2` not set | HTTP/1.1 | Either HTTP/1.1 or HTTP/2 with TLS, as determined by the TLS handshake with a subgraph | - - - -Configuring `experimental_http2: http2only` for a subgraph that doesn't support HTTP/2 results in a failed subgraph connection. - - + ### Ordering diff --git a/docs/source/customizations/coprocessor.mdx b/docs/source/customizations/coprocessor.mdx index 94af25cdad..96303f2486 100644 --- a/docs/source/customizations/coprocessor.mdx +++ b/docs/source/customizations/coprocessor.mdx @@ -92,6 +92,51 @@ coprocessor: In this case, the `RouterService` only sends a coprocessor request whenever it receives a client request. The coprocessor request body includes _no_ data related to the client request (only "control" data, which is [covered below](#coprocessor-request-format)). +### Conditions + +You can define [conditions](../configuration/telemetry/instrumentation/conditions) for a stage of the request lifecycle that you want to run the coprocessor. You can set coprocessor conditions with [selectors](../configuration//telemetry//instrumentation/selectors) based on headers or context entries. + + + +The `Execution` stage doesn't support coprocessor conditions. + + + + +Example configurations: + + - Run during the `SupergraphResponse` stage only for the first event of a supergraph response. 
Useful for handling only the first subscription event when a subscription is opened: + + +```yaml title="router.yaml" +coprocessor: + url: http://127.0.0.1:3000 + supergraph: + response: + condition: + eq: + - true + - is_primary_response: true # Will be true only for the first event received on a supergraph response (like classical queries and mutations for example) + body: true + headers: true +``` + +- Run during the `Request` stage only if the request contains a request header: + + +```yaml title="router.yaml" +coprocessor: + url: http://127.0.0.1:3000 + router: + request: + condition: + eq: + - request_header: should-execute-copro # Header name + - "enabled" # Header value + body: true + headers: true +``` + ### Client configuration diff --git a/docs/source/executing-operations/query-batching.mdx b/docs/source/executing-operations/query-batching.mdx index 7058c91249..7676c710cd 100644 --- a/docs/source/executing-operations/query-batching.mdx +++ b/docs/source/executing-operations/query-batching.mdx @@ -87,7 +87,7 @@ batching: - There are limitations on the ability of the router to preserve batches from the client request into the subgraph requests. In particular, certain forms of queries will require data to be present before they are processed. Consequently, the router will only be able to generate batches from queries which are processed which don't contain such constraints. This may result in the router issuing multiple batches or requests. -- If [query deduplication](../configuration/traffic-shaping/#query-deduplication) is enabled, it will not apply to batched queries. Batching will take precedence over query deduplication. Query deduplication will still be performed for non-batched queries. +- If [query deduplication](../configuration/traffic-shaping/#query-deduplication) or [entity caching](../configuration/entity-caching) are enabled, they will not apply to batched queries. Batching will take precedence over query deduplication and entity caching. 
Query deduplication and Entity caching will still be performed for non-batched queries. diff --git a/examples/supergraph-sdl/rust/Cargo.toml b/examples/supergraph-sdl/rust/Cargo.toml index 0ca8039c8d..5103ff756d 100644 --- a/examples/supergraph-sdl/rust/Cargo.toml +++ b/examples/supergraph-sdl/rust/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] anyhow = "1" -apollo-compiler = "=1.0.0-beta.17" +apollo-compiler = "=1.0.0-beta.18" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" tower = { version = "0.4", features = ["full"] } diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index b52f6cd949..a4e22b5d20 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -13,7 +13,7 @@ cargo-fuzz = true libfuzzer-sys = "0.4" apollo-compiler.workspace = true apollo-parser = "0.7.6" -apollo-smith = "0.7.0" +apollo-smith = "0.8.0" env_logger = "0.10.2" log = "0.4" reqwest = { workspace = true, features = ["json", "blocking"] } diff --git a/helm/chart/router/Chart.yaml b/helm/chart/router/Chart.yaml index 5d01e674ab..3e98c25b35 100644 --- a/helm/chart/router/Chart.yaml +++ b/helm/chart/router/Chart.yaml @@ -20,10 +20,10 @@ type: application # so it matches the shape of our release process and release automation. # By proxy of that decision, this version uses SemVer 2.0.0, though the prefix # of "v" is not included. -version: 1.50.0 +version: 1.51.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: "v1.50.0" +appVersion: "v1.51.0" diff --git a/helm/chart/router/README.md b/helm/chart/router/README.md index 0a74530d5a..9282d0e4f4 100644 --- a/helm/chart/router/README.md +++ b/helm/chart/router/README.md @@ -2,7 +2,7 @@ [router](https://github.com/apollographql/router) Rust Graph Routing runtime for Apollo Federation -![Version: 1.50.0](https://img.shields.io/badge/Version-1.50.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v1.50.0](https://img.shields.io/badge/AppVersion-v1.50.0-informational?style=flat-square) +![Version: 1.51.0](https://img.shields.io/badge/Version-1.51.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v1.51.0](https://img.shields.io/badge/AppVersion-v1.51.0-informational?style=flat-square) ## Prerequisites @@ -11,7 +11,7 @@ ## Get Repo Info ```console -helm pull oci://ghcr.io/apollographql/helm-charts/router --version 1.50.0 +helm pull oci://ghcr.io/apollographql/helm-charts/router --version 1.51.0 ``` ## Install Chart @@ -19,7 +19,7 @@ helm pull oci://ghcr.io/apollographql/helm-charts/router --version 1.50.0 **Important:** only helm3 is supported ```console -helm upgrade --install [RELEASE_NAME] oci://ghcr.io/apollographql/helm-charts/router --version 1.50.0 --values my-values.yaml +helm upgrade --install [RELEASE_NAME] oci://ghcr.io/apollographql/helm-charts/router --version 1.51.0 --values my-values.yaml ``` _See [configuration](#configuration) below._ diff --git a/licenses.html b/licenses.html index 904e7381cf..b80c2bae07 100644 --- a/licenses.html +++ b/licenses.html @@ -44,13 +44,13 @@

Third Party Licenses

Overview of licenses:

    -
  • Apache License 2.0 (444)
  • -
  • MIT License (150)
  • +
  • Apache License 2.0 (492)
  • +
  • MIT License (164)
  • +
  • BSD 3-Clause "New" or "Revised" License (11)
  • ISC License (11)
  • -
  • BSD 3-Clause "New" or "Revised" License (9)
  • -
  • BSD 2-Clause "Simplified" License (3)
  • -
  • Elastic License 2.0 (3)
  • -
  • Mozilla Public License 2.0 (3)
  • +
  • Elastic License 2.0 (6)
  • +
  • BSD 2-Clause "Simplified" License (5)
  • +
  • Mozilla Public License 2.0 (5)
  • Creative Commons Zero v1.0 Universal (2)
  • OpenSSL License (2)
  • Unicode License Agreement - Data Files and Software (2016) (1)
  • @@ -2347,7 +2347,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -2537,7 +2537,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Tomasz "Soveu" Marx + Copyright 2020 - 2022 Tatsuya Kawano Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2550,14 +2550,13 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -2747,7 +2746,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 The Fuchsia Authors + Copyright 2020 Tomasz "Soveu" Marx Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2767,18 +2766,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -2968,7 +2956,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 The OpenTelemetry Authors + Copyright 2023 The Fuchsia Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2981,16 +2969,25 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. +
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -3180,7 +3177,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2023 The OpenTelemetry Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3199,7 +3196,10 @@

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -3381,7 +3381,7 @@ 

    Used by:

    APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" + boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a @@ -3389,7 +3389,7 @@

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2017 Juniper Networks, Inc. + Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3408,7 +3408,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -3598,7 +3598,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 Michael P. Jung + Copyright 2017 Juniper Networks, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3611,14 +3611,28 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -3808,7 +3822,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 TiKV Project Authors. + Copyright 2017-NOW Actix Team Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3827,11 +3841,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -4021,7 +4031,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright {yyyy} {name of copyright owner} + Copyright 2019 Michael P. Jung Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4034,38 +4044,14 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. +
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -4255,7 +4241,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright {yyyy} {name of copyright owner} + Copyright 2019 TiKV Project Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4268,14 +4254,17 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -4454,8 +4443,18 @@ 

    Used by:

    END OF TERMS AND CONDITIONS - Copyright 2019 Yoshua Wuyts - Copyright 2016-2018 Michael Tilli (Pyfisch) & `httpdate` contributors + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4474,195 +4473,35 @@

    Used by:

    Apache License 2.0

    Used by:

    -
                                     Apache License
    -                           Version 2.0, January 2004
    -                        http://www.apache.org/licenses/
    -
    -   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    -
    -   1. Definitions.
    -
    -      "License" shall mean the terms and conditions for use, reproduction,
    -      and distribution as defined by Sections 1 through 9 of this document.
    -
    -      "Licensor" shall mean the copyright owner or entity authorized by
    -      the copyright owner that is granting the License.
    -
    -      "Legal Entity" shall mean the union of the acting entity and all
    -      other entities that control, are controlled by, or are under common
    -      control with that entity. For the purposes of this definition,
    -      "control" means (i) the power, direct or indirect, to cause the
    -      direction or management of such entity, whether by contract or
    -      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    -      outstanding shares, or (iii) beneficial ownership of such entity.
    -
    -      "You" (or "Your") shall mean an individual or Legal Entity
    -      exercising permissions granted by this License.
    -
    -      "Source" form shall mean the preferred form for making modifications,
    -      including but not limited to software source code, documentation
    -      source, and configuration files.
    -
    -      "Object" form shall mean any form resulting from mechanical
    -      transformation or translation of a Source form, including but
    -      not limited to compiled object code, generated documentation,
    -      and conversions to other media types.
    -
    -      "Work" shall mean the work of authorship, whether in Source or
    -      Object form, made available under the License, as indicated by a
    -      copyright notice that is included in or attached to the work
    -      (an example is provided in the Appendix below).
    -
    -      "Derivative Works" shall mean any work, whether in Source or Object
    -      form, that is based on (or derived from) the Work and for which the
    -      editorial revisions, annotations, elaborations, or other modifications
    -      represent, as a whole, an original work of authorship. For the purposes
    -      of this License, Derivative Works shall not include works that remain
    -      separable from, or merely link (or bind by name) to the interfaces of,
    -      the Work and Derivative Works thereof.
    -
    -      "Contribution" shall mean any work of authorship, including
    -      the original version of the Work and any modifications or additions
    -      to that Work or Derivative Works thereof, that is intentionally
    -      submitted to Licensor for inclusion in the Work by the copyright owner
    -      or by an individual or Legal Entity authorized to submit on behalf of
    -      the copyright owner. For the purposes of this definition, "submitted"
    -      means any form of electronic, verbal, or written communication sent
    -      to the Licensor or its representatives, including but not limited to
    -      communication on electronic mailing lists, source code control systems,
    -      and issue tracking systems that are managed by, or on behalf of, the
    -      Licensor for the purpose of discussing and improving the Work, but
    -      excluding communication that is conspicuously marked or otherwise
    -      designated in writing by the copyright owner as "Not a Contribution."
    -
    -      "Contributor" shall mean Licensor and any individual or Legal Entity
    -      on behalf of whom a Contribution has been received by Licensor and
    -      subsequently incorporated within the Work.
    -
    -   2. Grant of Copyright License. Subject to the terms and conditions of
    -      this License, each Contributor hereby grants to You a perpetual,
    -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    -      copyright license to reproduce, prepare Derivative Works of,
    -      publicly display, publicly perform, sublicense, and distribute the
    -      Work and such Derivative Works in Source or Object form.
    -
    -   3. Grant of Patent License. Subject to the terms and conditions of
    -      this License, each Contributor hereby grants to You a perpetual,
    -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    -      (except as stated in this section) patent license to make, have made,
    -      use, offer to sell, sell, import, and otherwise transfer the Work,
    -      where such license applies only to those patent claims licensable
    -      by such Contributor that are necessarily infringed by their
    -      Contribution(s) alone or by combination of their Contribution(s)
    -      with the Work to which such Contribution(s) was submitted. If You
    -      institute patent litigation against any entity (including a
    -      cross-claim or counterclaim in a lawsuit) alleging that the Work
    -      or a Contribution incorporated within the Work constitutes direct
    -      or contributory patent infringement, then any patent licenses
    -      granted to You under this License for that Work shall terminate
    -      as of the date such litigation is filed.
    -
    -   4. Redistribution. You may reproduce and distribute copies of the
    -      Work or Derivative Works thereof in any medium, with or without
    -      modifications, and in Source or Object form, provided that You
    -      meet the following conditions:
    -
    -      (a) You must give any other recipients of the Work or
    -          Derivative Works a copy of this License; and
    -
    -      (b) You must cause any modified files to carry prominent notices
    -          stating that You changed the files; and
    -
    -      (c) You must retain, in the Source form of any Derivative Works
    -          that You distribute, all copyright, patent, trademark, and
    -          attribution notices from the Source form of the Work,
    -          excluding those notices that do not pertain to any part of
    -          the Derivative Works; and
    -
    -      (d) If the Work includes a "NOTICE" text file as part of its
    -          distribution, then any Derivative Works that You distribute must
    -          include a readable copy of the attribution notices contained
    -          within such NOTICE file, excluding those notices that do not
    -          pertain to any part of the Derivative Works, in at least one
    -          of the following places: within a NOTICE text file distributed
    -          as part of the Derivative Works; within the Source form or
    -          documentation, if provided along with the Derivative Works; or,
    -          within a display generated by the Derivative Works, if and
    -          wherever such third-party notices normally appear. The contents
    -          of the NOTICE file are for informational purposes only and
    -          do not modify the License. You may add Your own attribution
    -          notices within Derivative Works that You distribute, alongside
    -          or as an addendum to the NOTICE text from the Work, provided
    -          that such additional attribution notices cannot be construed
    -          as modifying the License.
    -
    -      You may add Your own copyright statement to Your modifications and
    -      may provide additional or different license terms and conditions
    -      for use, reproduction, or distribution of Your modifications, or
    -      for any such Derivative Works as a whole, provided Your use,
    -      reproduction, and distribution of the Work otherwise complies with
    -      the conditions stated in this License.
    -
    -   5. Submission of Contributions. Unless You explicitly state otherwise,
    -      any Contribution intentionally submitted for inclusion in the Work
    -      by You to the Licensor shall be under the terms and conditions of
    -      this License, without any additional terms or conditions.
    -      Notwithstanding the above, nothing herein shall supersede or modify
    -      the terms of any separate license agreement you may have executed
    -      with Licensor regarding such Contributions.
    -
    -   6. Trademarks. This License does not grant permission to use the trade
    -      names, trademarks, service marks, or product names of the Licensor,
    -      except as required for reasonable and customary use in describing the
    -      origin of the Work and reproducing the content of the NOTICE file.
    -
    -   7. Disclaimer of Warranty. Unless required by applicable law or
    -      agreed to in writing, Licensor provides the Work (and each
    -      Contributor provides its Contributions) on an "AS IS" BASIS,
    -      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    -      implied, including, without limitation, any warranties or conditions
    -      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    -      PARTICULAR PURPOSE. You are solely responsible for determining the
    -      appropriateness of using or redistributing the Work and assume any
    -      risks associated with Your exercise of permissions under this License.
    -
    -   8. Limitation of Liability. In no event and under no legal theory,
    -      whether in tort (including negligence), contract, or otherwise,
    -      unless required by applicable law (such as deliberate and grossly
    -      negligent acts) or agreed to in writing, shall any Contributor be
    -      liable to You for damages, including any direct, indirect, special,
    -      incidental, or consequential damages of any character arising as a
    -      result of this License or out of the use or inability to use the
    -      Work (including but not limited to damages for loss of goodwill,
    -      work stoppage, computer failure or malfunction, or any and all
    -      other commercial damages or losses), even if such Contributor
    -      has been advised of the possibility of such damages.
    -
    -   9. Accepting Warranty or Additional Liability. While redistributing
    -      the Work or Derivative Works thereof, You may choose to offer,
    -      and charge a fee for, acceptance of support, warranty, indemnity,
    -      or other liability obligations and/or rights consistent with this
    -      License. However, in accepting such obligations, You may act only
    -      on Your own behalf and on Your sole responsibility, not on behalf
    -      of any other Contributor, and only if You agree to indemnify,
    -      defend, and hold each Contributor harmless for any liability
    -      incurred by, or claims asserted against, such Contributor by reason
    -      of your accepting any such warranty or additional liability.
    -
    -   END OF TERMS AND CONDITIONS
    -
    -
  • -
  • -

    Apache License 2.0

    -

    Used by:

    - -
                                    Apache License
    +                
                                     Apache License
                                Version 2.0, January 2004
                             http://www.apache.org/licenses/
     
    @@ -4850,7 +4689,7 @@ 

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4870,122 +4709,717 @@

    Used by:

    Apache License 2.0

    Used by:

    -
                                   Apache License
    -                         Version 2.0, January 2004
    -                      http://www.apache.org/licenses/
    +                
                                     Apache License
    +                           Version 2.0, January 2004
    +                        http://www.apache.org/licenses/
     
    -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
     
    -1. Definitions.
    +   1. Definitions.
     
    -  "License" shall mean the terms and conditions for use, reproduction,
    -  and distribution as defined by Sections 1 through 9 of this document.
    +      "License" shall mean the terms and conditions for use, reproduction,
    +      and distribution as defined by Sections 1 through 9 of this document.
     
    -  "Licensor" shall mean the copyright owner or entity authorized by
    -  the copyright owner that is granting the License.
    +      "Licensor" shall mean the copyright owner or entity authorized by
    +      the copyright owner that is granting the License.
     
    -  "Legal Entity" shall mean the union of the acting entity and all
    -  other entities that control, are controlled by, or are under common
    -  control with that entity. For the purposes of this definition,
    -  "control" means (i) the power, direct or indirect, to cause the
    -  direction or management of such entity, whether by contract or
    -  otherwise, or (ii) ownership of fifty percent (50%) or more of the
    -  outstanding shares, or (iii) beneficial ownership of such entity.
    +      "Legal Entity" shall mean the union of the acting entity and all
    +      other entities that control, are controlled by, or are under common
    +      control with that entity. For the purposes of this definition,
    +      "control" means (i) the power, direct or indirect, to cause the
    +      direction or management of such entity, whether by contract or
    +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +      outstanding shares, or (iii) beneficial ownership of such entity.
     
    -  "You" (or "Your") shall mean an individual or Legal Entity
    -  exercising permissions granted by this License.
    +      "You" (or "Your") shall mean an individual or Legal Entity
    +      exercising permissions granted by this License.
     
    -  "Source" form shall mean the preferred form for making modifications,
    -  including but not limited to software source code, documentation
    -  source, and configuration files.
    +      "Source" form shall mean the preferred form for making modifications,
    +      including but not limited to software source code, documentation
    +      source, and configuration files.
     
    -  "Object" form shall mean any form resulting from mechanical
    -  transformation or translation of a Source form, including but
    -  not limited to compiled object code, generated documentation,
    -  and conversions to other media types.
    +      "Object" form shall mean any form resulting from mechanical
    +      transformation or translation of a Source form, including but
    +      not limited to compiled object code, generated documentation,
    +      and conversions to other media types.
     
    -  "Work" shall mean the work of authorship, whether in Source or
    -  Object form, made available under the License, as indicated by a
    -  copyright notice that is included in or attached to the work
    -  (an example is provided in the Appendix below).
    +      "Work" shall mean the work of authorship, whether in Source or
    +      Object form, made available under the License, as indicated by a
    +      copyright notice that is included in or attached to the work
    +      (an example is provided in the Appendix below).
     
    -  "Derivative Works" shall mean any work, whether in Source or Object
    -  form, that is based on (or derived from) the Work and for which the
    -  editorial revisions, annotations, elaborations, or other modifications
    -  represent, as a whole, an original work of authorship. For the purposes
    -  of this License, Derivative Works shall not include works that remain
    -  separable from, or merely link (or bind by name) to the interfaces of,
    -  the Work and Derivative Works thereof.
    +      "Derivative Works" shall mean any work, whether in Source or Object
    +      form, that is based on (or derived from) the Work and for which the
    +      editorial revisions, annotations, elaborations, or other modifications
    +      represent, as a whole, an original work of authorship. For the purposes
    +      of this License, Derivative Works shall not include works that remain
    +      separable from, or merely link (or bind by name) to the interfaces of,
    +      the Work and Derivative Works thereof.
     
    -  "Contribution" shall mean any work of authorship, including
    -  the original version of the Work and any modifications or additions
    -  to that Work or Derivative Works thereof, that is intentionally
    -  submitted to Licensor for inclusion in the Work by the copyright owner
    -  or by an individual or Legal Entity authorized to submit on behalf of
    -  the copyright owner. For the purposes of this definition, "submitted"
    -  means any form of electronic, verbal, or written communication sent
    -  to the Licensor or its representatives, including but not limited to
    -  communication on electronic mailing lists, source code control systems,
    -  and issue tracking systems that are managed by, or on behalf of, the
    -  Licensor for the purpose of discussing and improving the Work, but
    -  excluding communication that is conspicuously marked or otherwise
    -  designated in writing by the copyright owner as "Not a Contribution."
    +      "Contribution" shall mean any work of authorship, including
    +      the original version of the Work and any modifications or additions
    +      to that Work or Derivative Works thereof, that is intentionally
    +      submitted to Licensor for inclusion in the Work by the copyright owner
    +      or by an individual or Legal Entity authorized to submit on behalf of
    +      the copyright owner. For the purposes of this definition, "submitted"
    +      means any form of electronic, verbal, or written communication sent
    +      to the Licensor or its representatives, including but not limited to
    +      communication on electronic mailing lists, source code control systems,
    +      and issue tracking systems that are managed by, or on behalf of, the
    +      Licensor for the purpose of discussing and improving the Work, but
    +      excluding communication that is conspicuously marked or otherwise
    +      designated in writing by the copyright owner as "Not a Contribution."
     
    -  "Contributor" shall mean Licensor and any individual or Legal Entity
    -  on behalf of whom a Contribution has been received by Licensor and
    -  subsequently incorporated within the Work.
    +      "Contributor" shall mean Licensor and any individual or Legal Entity
    +      on behalf of whom a Contribution has been received by Licensor and
    +      subsequently incorporated within the Work.
     
    -2. Grant of Copyright License. Subject to the terms and conditions of
    -  this License, each Contributor hereby grants to You a perpetual,
    -  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    -  copyright license to reproduce, prepare Derivative Works of,
    -  publicly display, publicly perform, sublicense, and distribute the
    -  Work and such Derivative Works in Source or Object form.
    +   2. Grant of Copyright License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      copyright license to reproduce, prepare Derivative Works of,
    +      publicly display, publicly perform, sublicense, and distribute the
    +      Work and such Derivative Works in Source or Object form.
     
    -3. Grant of Patent License. Subject to the terms and conditions of
    -  this License, each Contributor hereby grants to You a perpetual,
    -  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    -  (except as stated in this section) patent license to make, have made,
    -  use, offer to sell, sell, import, and otherwise transfer the Work,
    -  where such license applies only to those patent claims licensable
    -  by such Contributor that are necessarily infringed by their
    -  Contribution(s) alone or by combination of their Contribution(s)
    -  with the Work to which such Contribution(s) was submitted. If You
    -  institute patent litigation against any entity (including a
    -  cross-claim or counterclaim in a lawsuit) alleging that the Work
    -  or a Contribution incorporated within the Work constitutes direct
    -  or contributory patent infringement, then any patent licenses
    -  granted to You under this License for that Work shall terminate
    -  as of the date such litigation is filed.
    +   3. Grant of Patent License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      (except as stated in this section) patent license to make, have made,
    +      use, offer to sell, sell, import, and otherwise transfer the Work,
    +      where such license applies only to those patent claims licensable
    +      by such Contributor that are necessarily infringed by their
    +      Contribution(s) alone or by combination of their Contribution(s)
    +      with the Work to which such Contribution(s) was submitted. If You
    +      institute patent litigation against any entity (including a
    +      cross-claim or counterclaim in a lawsuit) alleging that the Work
    +      or a Contribution incorporated within the Work constitutes direct
    +      or contributory patent infringement, then any patent licenses
    +      granted to You under this License for that Work shall terminate
    +      as of the date such litigation is filed.
     
    -4. Redistribution. You may reproduce and distribute copies of the
    -  Work or Derivative Works thereof in any medium, with or without
    -  modifications, and in Source or Object form, provided that You
    -  meet the following conditions:
    +   4. Redistribution. You may reproduce and distribute copies of the
    +      Work or Derivative Works thereof in any medium, with or without
    +      modifications, and in Source or Object form, provided that You
    +      meet the following conditions:
     
    -  (a) You must give any other recipients of the Work or
    -      Derivative Works a copy of this License; and
    +      (a) You must give any other recipients of the Work or
    +          Derivative Works a copy of this License; and
     
    -  (b) You must cause any modified files to carry prominent notices
    -      stating that You changed the files; and
    +      (b) You must cause any modified files to carry prominent notices
    +          stating that You changed the files; and
     
    -  (c) You must retain, in the Source form of any Derivative Works
    -      that You distribute, all copyright, patent, trademark, and
    -      attribution notices from the Source form of the Work,
    -      excluding those notices that do not pertain to any part of
    -      the Derivative Works; and
    +      (c) You must retain, in the Source form of any Derivative Works
    +          that You distribute, all copyright, patent, trademark, and
    +          attribution notices from the Source form of the Work,
    +          excluding those notices that do not pertain to any part of
    +          the Derivative Works; and
     
    -  (d) If the Work includes a "NOTICE" text file as part of its
    -      distribution, then any Derivative Works that You distribute must
    -      include a readable copy of the attribution notices contained
    -      within such NOTICE file, excluding those notices that do not
    -      pertain to any part of the Derivative Works, in at least one
    -      of the following places: within a NOTICE text file distributed
    -      as part of the Derivative Works; within the Source form or
    -      documentation, if provided along with the Derivative Works; or,
    -      within a display generated by the Derivative Works, if and
    +      (d) If the Work includes a "NOTICE" text file as part of its
    +          distribution, then any Derivative Works that You distribute must
    +          include a readable copy of the attribution notices contained
    +          within such NOTICE file, excluding those notices that do not
    +          pertain to any part of the Derivative Works, in at least one
    +          of the following places: within a NOTICE text file distributed
    +          as part of the Derivative Works; within the Source form or
    +          documentation, if provided along with the Derivative Works; or,
    +          within a display generated by the Derivative Works, if and
    +          wherever such third-party notices normally appear. The contents
    +          of the NOTICE file are for informational purposes only and
    +          do not modify the License. You may add Your own attribution
    +          notices within Derivative Works that You distribute, alongside
    +          or as an addendum to the NOTICE text from the Work, provided
    +          that such additional attribution notices cannot be construed
    +          as modifying the License.
    +
    +      You may add Your own copyright statement to Your modifications and
    +      may provide additional or different license terms and conditions
    +      for use, reproduction, or distribution of Your modifications, or
    +      for any such Derivative Works as a whole, provided Your use,
    +      reproduction, and distribution of the Work otherwise complies with
    +      the conditions stated in this License.
    +
    +   5. Submission of Contributions. Unless You explicitly state otherwise,
    +      any Contribution intentionally submitted for inclusion in the Work
    +      by You to the Licensor shall be under the terms and conditions of
    +      this License, without any additional terms or conditions.
    +      Notwithstanding the above, nothing herein shall supersede or modify
    +      the terms of any separate license agreement you may have executed
    +      with Licensor regarding such Contributions.
    +
    +   6. Trademarks. This License does not grant permission to use the trade
    +      names, trademarks, service marks, or product names of the Licensor,
    +      except as required for reasonable and customary use in describing the
    +      origin of the Work and reproducing the content of the NOTICE file.
    +
    +   7. Disclaimer of Warranty. Unless required by applicable law or
    +      agreed to in writing, Licensor provides the Work (and each
    +      Contributor provides its Contributions) on an "AS IS" BASIS,
    +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +      implied, including, without limitation, any warranties or conditions
    +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +      PARTICULAR PURPOSE. You are solely responsible for determining the
    +      appropriateness of using or redistributing the Work and assume any
    +      risks associated with Your exercise of permissions under this License.
    +
    +   8. Limitation of Liability. In no event and under no legal theory,
    +      whether in tort (including negligence), contract, or otherwise,
    +      unless required by applicable law (such as deliberate and grossly
    +      negligent acts) or agreed to in writing, shall any Contributor be
    +      liable to You for damages, including any direct, indirect, special,
    +      incidental, or consequential damages of any character arising as a
    +      result of this License or out of the use or inability to use the
    +      Work (including but not limited to damages for loss of goodwill,
    +      work stoppage, computer failure or malfunction, or any and all
    +      other commercial damages or losses), even if such Contributor
    +      has been advised of the possibility of such damages.
    +
    +   9. Accepting Warranty or Additional Liability. While redistributing
    +      the Work or Derivative Works thereof, You may choose to offer,
    +      and charge a fee for, acceptance of support, warranty, indemnity,
    +      or other liability obligations and/or rights consistent with this
    +      License. However, in accepting such obligations, You may act only
    +      on Your own behalf and on Your sole responsibility, not on behalf
    +      of any other Contributor, and only if You agree to indemnify,
    +      defend, and hold each Contributor harmless for any liability
    +      incurred by, or claims asserted against, such Contributor by reason
    +      of your accepting any such warranty or additional liability.
    +
    +   END OF TERMS AND CONDITIONS
    +
    +   Copyright 2019 Yoshua Wuyts
    +   Copyright 2016-2018 Michael Tilli (Pyfisch) & `httpdate` contributors
    +
    +   Licensed under the Apache License, Version 2.0 (the "License");
    +   you may not use this file except in compliance with the License.
    +   You may obtain a copy of the License at
    +
    +       http://www.apache.org/licenses/LICENSE-2.0
    +
    +   Unless required by applicable law or agreed to in writing, software
    +   distributed under the License is distributed on an "AS IS" BASIS,
    +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +   See the License for the specific language governing permissions and
    +   limitations under the License.
    +
    +
  • +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                     Apache License
    +                           Version 2.0, January 2004
    +                        http://www.apache.org/licenses/
    +
    +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +   1. Definitions.
    +
    +      "License" shall mean the terms and conditions for use, reproduction,
    +      and distribution as defined by Sections 1 through 9 of this document.
    +
    +      "Licensor" shall mean the copyright owner or entity authorized by
    +      the copyright owner that is granting the License.
    +
    +      "Legal Entity" shall mean the union of the acting entity and all
    +      other entities that control, are controlled by, or are under common
    +      control with that entity. For the purposes of this definition,
    +      "control" means (i) the power, direct or indirect, to cause the
    +      direction or management of such entity, whether by contract or
    +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +      outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +      "You" (or "Your") shall mean an individual or Legal Entity
    +      exercising permissions granted by this License.
    +
    +      "Source" form shall mean the preferred form for making modifications,
    +      including but not limited to software source code, documentation
    +      source, and configuration files.
    +
    +      "Object" form shall mean any form resulting from mechanical
    +      transformation or translation of a Source form, including but
    +      not limited to compiled object code, generated documentation,
    +      and conversions to other media types.
    +
    +      "Work" shall mean the work of authorship, whether in Source or
    +      Object form, made available under the License, as indicated by a
    +      copyright notice that is included in or attached to the work
    +      (an example is provided in the Appendix below).
    +
    +      "Derivative Works" shall mean any work, whether in Source or Object
    +      form, that is based on (or derived from) the Work and for which the
    +      editorial revisions, annotations, elaborations, or other modifications
    +      represent, as a whole, an original work of authorship. For the purposes
    +      of this License, Derivative Works shall not include works that remain
    +      separable from, or merely link (or bind by name) to the interfaces of,
    +      the Work and Derivative Works thereof.
    +
    +      "Contribution" shall mean any work of authorship, including
    +      the original version of the Work and any modifications or additions
    +      to that Work or Derivative Works thereof, that is intentionally
    +      submitted to Licensor for inclusion in the Work by the copyright owner
    +      or by an individual or Legal Entity authorized to submit on behalf of
    +      the copyright owner. For the purposes of this definition, "submitted"
    +      means any form of electronic, verbal, or written communication sent
    +      to the Licensor or its representatives, including but not limited to
    +      communication on electronic mailing lists, source code control systems,
    +      and issue tracking systems that are managed by, or on behalf of, the
    +      Licensor for the purpose of discussing and improving the Work, but
    +      excluding communication that is conspicuously marked or otherwise
    +      designated in writing by the copyright owner as "Not a Contribution."
    +
    +      "Contributor" shall mean Licensor and any individual or Legal Entity
    +      on behalf of whom a Contribution has been received by Licensor and
    +      subsequently incorporated within the Work.
    +
    +   2. Grant of Copyright License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      copyright license to reproduce, prepare Derivative Works of,
    +      publicly display, publicly perform, sublicense, and distribute the
    +      Work and such Derivative Works in Source or Object form.
    +
    +   3. Grant of Patent License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      (except as stated in this section) patent license to make, have made,
    +      use, offer to sell, sell, import, and otherwise transfer the Work,
    +      where such license applies only to those patent claims licensable
    +      by such Contributor that are necessarily infringed by their
    +      Contribution(s) alone or by combination of their Contribution(s)
    +      with the Work to which such Contribution(s) was submitted. If You
    +      institute patent litigation against any entity (including a
    +      cross-claim or counterclaim in a lawsuit) alleging that the Work
    +      or a Contribution incorporated within the Work constitutes direct
    +      or contributory patent infringement, then any patent licenses
    +      granted to You under this License for that Work shall terminate
    +      as of the date such litigation is filed.
    +
    +   4. Redistribution. You may reproduce and distribute copies of the
    +      Work or Derivative Works thereof in any medium, with or without
    +      modifications, and in Source or Object form, provided that You
    +      meet the following conditions:
    +
    +      (a) You must give any other recipients of the Work or
    +          Derivative Works a copy of this License; and
    +
    +      (b) You must cause any modified files to carry prominent notices
    +          stating that You changed the files; and
    +
    +      (c) You must retain, in the Source form of any Derivative Works
    +          that You distribute, all copyright, patent, trademark, and
    +          attribution notices from the Source form of the Work,
    +          excluding those notices that do not pertain to any part of
    +          the Derivative Works; and
    +
    +      (d) If the Work includes a "NOTICE" text file as part of its
    +          distribution, then any Derivative Works that You distribute must
    +          include a readable copy of the attribution notices contained
    +          within such NOTICE file, excluding those notices that do not
    +          pertain to any part of the Derivative Works, in at least one
    +          of the following places: within a NOTICE text file distributed
    +          as part of the Derivative Works; within the Source form or
    +          documentation, if provided along with the Derivative Works; or,
    +          within a display generated by the Derivative Works, if and
    +          wherever such third-party notices normally appear. The contents
    +          of the NOTICE file are for informational purposes only and
    +          do not modify the License. You may add Your own attribution
    +          notices within Derivative Works that You distribute, alongside
    +          or as an addendum to the NOTICE text from the Work, provided
    +          that such additional attribution notices cannot be construed
    +          as modifying the License.
    +
    +      You may add Your own copyright statement to Your modifications and
    +      may provide additional or different license terms and conditions
    +      for use, reproduction, or distribution of Your modifications, or
    +      for any such Derivative Works as a whole, provided Your use,
    +      reproduction, and distribution of the Work otherwise complies with
    +      the conditions stated in this License.
    +
    +   5. Submission of Contributions. Unless You explicitly state otherwise,
    +      any Contribution intentionally submitted for inclusion in the Work
    +      by You to the Licensor shall be under the terms and conditions of
    +      this License, without any additional terms or conditions.
    +      Notwithstanding the above, nothing herein shall supersede or modify
    +      the terms of any separate license agreement you may have executed
    +      with Licensor regarding such Contributions.
    +
    +   6. Trademarks. This License does not grant permission to use the trade
    +      names, trademarks, service marks, or product names of the Licensor,
    +      except as required for reasonable and customary use in describing the
    +      origin of the Work and reproducing the content of the NOTICE file.
    +
    +   7. Disclaimer of Warranty. Unless required by applicable law or
    +      agreed to in writing, Licensor provides the Work (and each
    +      Contributor provides its Contributions) on an "AS IS" BASIS,
    +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +      implied, including, without limitation, any warranties or conditions
    +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +      PARTICULAR PURPOSE. You are solely responsible for determining the
    +      appropriateness of using or redistributing the Work and assume any
    +      risks associated with Your exercise of permissions under this License.
    +
    +   8. Limitation of Liability. In no event and under no legal theory,
    +      whether in tort (including negligence), contract, or otherwise,
    +      unless required by applicable law (such as deliberate and grossly
    +      negligent acts) or agreed to in writing, shall any Contributor be
    +      liable to You for damages, including any direct, indirect, special,
    +      incidental, or consequential damages of any character arising as a
    +      result of this License or out of the use or inability to use the
    +      Work (including but not limited to damages for loss of goodwill,
    +      work stoppage, computer failure or malfunction, or any and all
    +      other commercial damages or losses), even if such Contributor
    +      has been advised of the possibility of such damages.
    +
    +   9. Accepting Warranty or Additional Liability. While redistributing
    +      the Work or Derivative Works thereof, You may choose to offer,
    +      and charge a fee for, acceptance of support, warranty, indemnity,
    +      or other liability obligations and/or rights consistent with this
    +      License. However, in accepting such obligations, You may act only
    +      on Your own behalf and on Your sole responsibility, not on behalf
    +      of any other Contributor, and only if You agree to indemnify,
    +      defend, and hold each Contributor harmless for any liability
    +      incurred by, or claims asserted against, such Contributor by reason
    +      of your accepting any such warranty or additional liability.
    +
    +   END OF TERMS AND CONDITIONS
    +
    +
  • +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                    Apache License
    +                           Version 2.0, January 2004
    +                        http://www.apache.org/licenses/
    +
    +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +   1. Definitions.
    +
    +      "License" shall mean the terms and conditions for use, reproduction,
    +      and distribution as defined by Sections 1 through 9 of this document.
    +
    +      "Licensor" shall mean the copyright owner or entity authorized by
    +      the copyright owner that is granting the License.
    +
    +      "Legal Entity" shall mean the union of the acting entity and all
    +      other entities that control, are controlled by, or are under common
    +      control with that entity. For the purposes of this definition,
    +      "control" means (i) the power, direct or indirect, to cause the
    +      direction or management of such entity, whether by contract or
    +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +      outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +      "You" (or "Your") shall mean an individual or Legal Entity
    +      exercising permissions granted by this License.
    +
    +      "Source" form shall mean the preferred form for making modifications,
    +      including but not limited to software source code, documentation
    +      source, and configuration files.
    +
    +      "Object" form shall mean any form resulting from mechanical
    +      transformation or translation of a Source form, including but
    +      not limited to compiled object code, generated documentation,
    +      and conversions to other media types.
    +
    +      "Work" shall mean the work of authorship, whether in Source or
    +      Object form, made available under the License, as indicated by a
    +      copyright notice that is included in or attached to the work
    +      (an example is provided in the Appendix below).
    +
    +      "Derivative Works" shall mean any work, whether in Source or Object
    +      form, that is based on (or derived from) the Work and for which the
    +      editorial revisions, annotations, elaborations, or other modifications
    +      represent, as a whole, an original work of authorship. For the purposes
    +      of this License, Derivative Works shall not include works that remain
    +      separable from, or merely link (or bind by name) to the interfaces of,
    +      the Work and Derivative Works thereof.
    +
    +      "Contribution" shall mean any work of authorship, including
    +      the original version of the Work and any modifications or additions
    +      to that Work or Derivative Works thereof, that is intentionally
    +      submitted to Licensor for inclusion in the Work by the copyright owner
    +      or by an individual or Legal Entity authorized to submit on behalf of
    +      the copyright owner. For the purposes of this definition, "submitted"
    +      means any form of electronic, verbal, or written communication sent
    +      to the Licensor or its representatives, including but not limited to
    +      communication on electronic mailing lists, source code control systems,
    +      and issue tracking systems that are managed by, or on behalf of, the
    +      Licensor for the purpose of discussing and improving the Work, but
    +      excluding communication that is conspicuously marked or otherwise
    +      designated in writing by the copyright owner as "Not a Contribution."
    +
    +      "Contributor" shall mean Licensor and any individual or Legal Entity
    +      on behalf of whom a Contribution has been received by Licensor and
    +      subsequently incorporated within the Work.
    +
    +   2. Grant of Copyright License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      copyright license to reproduce, prepare Derivative Works of,
    +      publicly display, publicly perform, sublicense, and distribute the
    +      Work and such Derivative Works in Source or Object form.
    +
    +   3. Grant of Patent License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      (except as stated in this section) patent license to make, have made,
    +      use, offer to sell, sell, import, and otherwise transfer the Work,
    +      where such license applies only to those patent claims licensable
    +      by such Contributor that are necessarily infringed by their
    +      Contribution(s) alone or by combination of their Contribution(s)
    +      with the Work to which such Contribution(s) was submitted. If You
    +      institute patent litigation against any entity (including a
    +      cross-claim or counterclaim in a lawsuit) alleging that the Work
    +      or a Contribution incorporated within the Work constitutes direct
    +      or contributory patent infringement, then any patent licenses
    +      granted to You under this License for that Work shall terminate
    +      as of the date such litigation is filed.
    +
    +   4. Redistribution. You may reproduce and distribute copies of the
    +      Work or Derivative Works thereof in any medium, with or without
    +      modifications, and in Source or Object form, provided that You
    +      meet the following conditions:
    +
    +      (a) You must give any other recipients of the Work or
    +          Derivative Works a copy of this License; and
    +
    +      (b) You must cause any modified files to carry prominent notices
    +          stating that You changed the files; and
    +
    +      (c) You must retain, in the Source form of any Derivative Works
    +          that You distribute, all copyright, patent, trademark, and
    +          attribution notices from the Source form of the Work,
    +          excluding those notices that do not pertain to any part of
    +          the Derivative Works; and
    +
    +      (d) If the Work includes a "NOTICE" text file as part of its
    +          distribution, then any Derivative Works that You distribute must
    +          include a readable copy of the attribution notices contained
    +          within such NOTICE file, excluding those notices that do not
    +          pertain to any part of the Derivative Works, in at least one
    +          of the following places: within a NOTICE text file distributed
    +          as part of the Derivative Works; within the Source form or
    +          documentation, if provided along with the Derivative Works; or,
    +          within a display generated by the Derivative Works, if and
    +          wherever such third-party notices normally appear. The contents
    +          of the NOTICE file are for informational purposes only and
    +          do not modify the License. You may add Your own attribution
    +          notices within Derivative Works that You distribute, alongside
    +          or as an addendum to the NOTICE text from the Work, provided
    +          that such additional attribution notices cannot be construed
    +          as modifying the License.
    +
    +      You may add Your own copyright statement to Your modifications and
    +      may provide additional or different license terms and conditions
    +      for use, reproduction, or distribution of Your modifications, or
    +      for any such Derivative Works as a whole, provided Your use,
    +      reproduction, and distribution of the Work otherwise complies with
    +      the conditions stated in this License.
    +
    +   5. Submission of Contributions. Unless You explicitly state otherwise,
    +      any Contribution intentionally submitted for inclusion in the Work
    +      by You to the Licensor shall be under the terms and conditions of
    +      this License, without any additional terms or conditions.
    +      Notwithstanding the above, nothing herein shall supersede or modify
    +      the terms of any separate license agreement you may have executed
    +      with Licensor regarding such Contributions.
    +
    +   6. Trademarks. This License does not grant permission to use the trade
    +      names, trademarks, service marks, or product names of the Licensor,
    +      except as required for reasonable and customary use in describing the
    +      origin of the Work and reproducing the content of the NOTICE file.
    +
    +   7. Disclaimer of Warranty. Unless required by applicable law or
    +      agreed to in writing, Licensor provides the Work (and each
    +      Contributor provides its Contributions) on an "AS IS" BASIS,
    +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +      implied, including, without limitation, any warranties or conditions
    +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +      PARTICULAR PURPOSE. You are solely responsible for determining the
    +      appropriateness of using or redistributing the Work and assume any
    +      risks associated with Your exercise of permissions under this License.
    +
    +   8. Limitation of Liability. In no event and under no legal theory,
    +      whether in tort (including negligence), contract, or otherwise,
    +      unless required by applicable law (such as deliberate and grossly
    +      negligent acts) or agreed to in writing, shall any Contributor be
    +      liable to You for damages, including any direct, indirect, special,
    +      incidental, or consequential damages of any character arising as a
    +      result of this License or out of the use or inability to use the
    +      Work (including but not limited to damages for loss of goodwill,
    +      work stoppage, computer failure or malfunction, or any and all
    +      other commercial damages or losses), even if such Contributor
    +      has been advised of the possibility of such damages.
    +
    +   9. Accepting Warranty or Additional Liability. While redistributing
    +      the Work or Derivative Works thereof, You may choose to offer,
    +      and charge a fee for, acceptance of support, warranty, indemnity,
    +      or other liability obligations and/or rights consistent with this
    +      License. However, in accepting such obligations, You may act only
    +      on Your own behalf and on Your sole responsibility, not on behalf
    +      of any other Contributor, and only if You agree to indemnify,
    +      defend, and hold each Contributor harmless for any liability
    +      incurred by, or claims asserted against, such Contributor by reason
    +      of your accepting any such warranty or additional liability.
    +
    +   END OF TERMS AND CONDITIONS
    +
    +   APPENDIX: How to apply the Apache License to your work.
    +
    +      To apply the Apache License to your work, attach the following
    +      boilerplate notice, with the fields enclosed by brackets "{}"
    +      replaced with your own identifying information. (Don't include
    +      the brackets!)  The text should be enclosed in the appropriate
    +      comment syntax for the file format. We also recommend that a
    +      file or class name and description of purpose be included on the
    +      same "printed page" as the copyright notice for easier
    +      identification within third-party archives.
    +
    +   Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
    +
    +   Licensed under the Apache License, Version 2.0 (the "License");
    +   you may not use this file except in compliance with the License.
    +   You may obtain a copy of the License at
    +
    +       http://www.apache.org/licenses/LICENSE-2.0
    +
    +   Unless required by applicable law or agreed to in writing, software
    +   distributed under the License is distributed on an "AS IS" BASIS,
    +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +   See the License for the specific language governing permissions and
    +   limitations under the License.
    +
    +
    +
  • +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                   Apache License
    +                         Version 2.0, January 2004
    +                      http://www.apache.org/licenses/
    +
    +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +1. Definitions.
    +
    +  "License" shall mean the terms and conditions for use, reproduction,
    +  and distribution as defined by Sections 1 through 9 of this document.
    +
    +  "Licensor" shall mean the copyright owner or entity authorized by
    +  the copyright owner that is granting the License.
    +
    +  "Legal Entity" shall mean the union of the acting entity and all
    +  other entities that control, are controlled by, or are under common
    +  control with that entity. For the purposes of this definition,
    +  "control" means (i) the power, direct or indirect, to cause the
    +  direction or management of such entity, whether by contract or
    +  otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +  outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +  "You" (or "Your") shall mean an individual or Legal Entity
    +  exercising permissions granted by this License.
    +
    +  "Source" form shall mean the preferred form for making modifications,
    +  including but not limited to software source code, documentation
    +  source, and configuration files.
    +
    +  "Object" form shall mean any form resulting from mechanical
    +  transformation or translation of a Source form, including but
    +  not limited to compiled object code, generated documentation,
    +  and conversions to other media types.
    +
    +  "Work" shall mean the work of authorship, whether in Source or
    +  Object form, made available under the License, as indicated by a
    +  copyright notice that is included in or attached to the work
    +  (an example is provided in the Appendix below).
    +
    +  "Derivative Works" shall mean any work, whether in Source or Object
    +  form, that is based on (or derived from) the Work and for which the
    +  editorial revisions, annotations, elaborations, or other modifications
    +  represent, as a whole, an original work of authorship. For the purposes
    +  of this License, Derivative Works shall not include works that remain
    +  separable from, or merely link (or bind by name) to the interfaces of,
    +  the Work and Derivative Works thereof.
    +
    +  "Contribution" shall mean any work of authorship, including
    +  the original version of the Work and any modifications or additions
    +  to that Work or Derivative Works thereof, that is intentionally
    +  submitted to Licensor for inclusion in the Work by the copyright owner
    +  or by an individual or Legal Entity authorized to submit on behalf of
    +  the copyright owner. For the purposes of this definition, "submitted"
    +  means any form of electronic, verbal, or written communication sent
    +  to the Licensor or its representatives, including but not limited to
    +  communication on electronic mailing lists, source code control systems,
    +  and issue tracking systems that are managed by, or on behalf of, the
    +  Licensor for the purpose of discussing and improving the Work, but
    +  excluding communication that is conspicuously marked or otherwise
    +  designated in writing by the copyright owner as "Not a Contribution."
    +
    +  "Contributor" shall mean Licensor and any individual or Legal Entity
    +  on behalf of whom a Contribution has been received by Licensor and
    +  subsequently incorporated within the Work.
    +
    +2. Grant of Copyright License. Subject to the terms and conditions of
    +  this License, each Contributor hereby grants to You a perpetual,
    +  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +  copyright license to reproduce, prepare Derivative Works of,
    +  publicly display, publicly perform, sublicense, and distribute the
    +  Work and such Derivative Works in Source or Object form.
    +
    +3. Grant of Patent License. Subject to the terms and conditions of
    +  this License, each Contributor hereby grants to You a perpetual,
    +  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +  (except as stated in this section) patent license to make, have made,
    +  use, offer to sell, sell, import, and otherwise transfer the Work,
    +  where such license applies only to those patent claims licensable
    +  by such Contributor that are necessarily infringed by their
    +  Contribution(s) alone or by combination of their Contribution(s)
    +  with the Work to which such Contribution(s) was submitted. If You
    +  institute patent litigation against any entity (including a
    +  cross-claim or counterclaim in a lawsuit) alleging that the Work
    +  or a Contribution incorporated within the Work constitutes direct
    +  or contributory patent infringement, then any patent licenses
    +  granted to You under this License for that Work shall terminate
    +  as of the date such litigation is filed.
    +
    +4. Redistribution. You may reproduce and distribute copies of the
    +  Work or Derivative Works thereof in any medium, with or without
    +  modifications, and in Source or Object form, provided that You
    +  meet the following conditions:
    +
    +  (a) You must give any other recipients of the Work or
    +      Derivative Works a copy of this License; and
    +
    +  (b) You must cause any modified files to carry prominent notices
    +      stating that You changed the files; and
    +
    +  (c) You must retain, in the Source form of any Derivative Works
    +      that You distribute, all copyright, patent, trademark, and
    +      attribution notices from the Source form of the Work,
    +      excluding those notices that do not pertain to any part of
    +      the Derivative Works; and
    +
    +  (d) If the Work includes a "NOTICE" text file as part of its
    +      distribution, then any Derivative Works that You distribute must
    +      include a readable copy of the attribution notices contained
    +      within such NOTICE file, excluding those notices that do not
    +      pertain to any part of the Derivative Works, in at least one
    +      of the following places: within a NOTICE text file distributed
    +      as part of the Derivative Works; within the Source form or
    +      documentation, if provided along with the Derivative Works; or,
    +      within a display generated by the Derivative Works, if and
           wherever such third-party notices normally appear. The contents
           of the NOTICE file are for informational purposes only and
           do not modify the License. You may add Your own attribution
    @@ -6323,6 +6757,7 @@ 

    Apache License 2.0

    Used by:

                                  Apache License
                             Version 2.0, January 2004
    @@ -8213,17 +8648,22 @@ 

    Used by:

  • arc-swap
  • async-channel
  • async-compression
  • +
  • async-io
  • +
  • async-lock
  • autocfg
  • backtrace
  • base64
  • base64
  • +
  • base64
  • bitflags
  • bitflags
  • bstr
  • bumpalo
  • bytes-utils
  • +
  • camino
  • cc
  • cfg-if
  • +
  • cfg-if
  • ci_info
  • cmake
  • concurrent-queue
  • @@ -8233,13 +8673,17 @@

    Used by:

  • core-foundation-sys
  • countme
  • crossbeam-channel
  • +
  • crossbeam-epoch
  • +
  • crossbeam-utils
  • crossbeam-utils
  • debugid
  • derivative
  • derive_arbitrary
  • either
  • +
  • env_logger
  • envmnt
  • equivalent
  • +
  • error-chain
  • event-listener
  • fastrand
  • fastrand
  • @@ -8253,10 +8697,13 @@

    Used by:

  • futures-lite
  • gimli
  • git2
  • +
  • glob
  • hashbrown
  • hashbrown
  • hdrhistogram
  • heck
  • +
  • heck
  • +
  • hermit-abi
  • hermit-abi
  • httparse
  • humantime-serde
  • @@ -8268,11 +8715,11 @@

    Used by:

  • indexmap
  • indexmap
  • inventory
  • +
  • io-lifetimes
  • ipconfig
  • itertools
  • itertools
  • itertools
  • -
  • itertools
  • jobserver
  • js-sys
  • lazy_static
  • @@ -8282,10 +8729,12 @@

    Used by:

  • libz-ng-sys
  • libz-sys
  • linux-raw-sys
  • +
  • linux-raw-sys
  • lock_api
  • log
  • maplit
  • match_cfg
  • +
  • maybe-uninit
  • mime
  • mockall
  • mockall_derive
  • @@ -8316,7 +8765,6 @@

    Used by:

  • pkg-config
  • proc-macro2
  • prost
  • -
  • prost
  • prost-build
  • prost-derive
  • prost-derive
  • @@ -8335,6 +8783,7 @@

    Used by:

  • rustc_version
  • rustc_version
  • rustix
  • +
  • rustix
  • rustls
  • rustls-native-certs
  • rustls-pemfile
  • @@ -8349,8 +8798,10 @@

    Used by:

  • shellexpand
  • signal-hook-registry
  • similar
  • +
  • skeptic
  • smallvec
  • socket2
  • +
  • socket2
  • stable_deref_trait
  • syn
  • syn
  • @@ -8359,7 +8810,6 @@

    Used by:

  • threadpool
  • tikv-jemalloc-sys
  • tikv-jemallocator
  • -
  • triomphe
  • try_match
  • tungstenite
  • typed-builder
  • @@ -8378,6 +8828,7 @@

    Used by:

  • waker-fn
  • wasi
  • wasi
  • +
  • wasi
  • wasm-bindgen
  • wasm-bindgen-backend
  • wasm-bindgen-futures
  • @@ -8583,7 +9034,221 @@

    Used by:

    you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +
    +
  • +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                  Apache License
    +                        Version 2.0, January 2004
    +                     http://www.apache.org/licenses/
    +
    +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +1. Definitions.
    +
    +   "License" shall mean the terms and conditions for use, reproduction,
    +   and distribution as defined by Sections 1 through 9 of this document.
    +
    +   "Licensor" shall mean the copyright owner or entity authorized by
    +   the copyright owner that is granting the License.
    +
    +   "Legal Entity" shall mean the union of the acting entity and all
    +   other entities that control, are controlled by, or are under common
    +   control with that entity. For the purposes of this definition,
    +   "control" means (i) the power, direct or indirect, to cause the
    +   direction or management of such entity, whether by contract or
    +   otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +   outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +   "You" (or "Your") shall mean an individual or Legal Entity
    +   exercising permissions granted by this License.
    +
    +   "Source" form shall mean the preferred form for making modifications,
    +   including but not limited to software source code, documentation
    +   source, and configuration files.
    +
    +   "Object" form shall mean any form resulting from mechanical
    +   transformation or translation of a Source form, including but
    +   not limited to compiled object code, generated documentation,
    +   and conversions to other media types.
    +
    +   "Work" shall mean the work of authorship, whether in Source or
    +   Object form, made available under the License, as indicated by a
    +   copyright notice that is included in or attached to the work
    +   (an example is provided in the Appendix below).
    +
    +   "Derivative Works" shall mean any work, whether in Source or Object
    +   form, that is based on (or derived from) the Work and for which the
    +   editorial revisions, annotations, elaborations, or other modifications
    +   represent, as a whole, an original work of authorship. For the purposes
    +   of this License, Derivative Works shall not include works that remain
    +   separable from, or merely link (or bind by name) to the interfaces of,
    +   the Work and Derivative Works thereof.
    +
    +   "Contribution" shall mean any work of authorship, including
    +   the original version of the Work and any modifications or additions
    +   to that Work or Derivative Works thereof, that is intentionally
    +   submitted to Licensor for inclusion in the Work by the copyright owner
    +   or by an individual or Legal Entity authorized to submit on behalf of
    +   the copyright owner. For the purposes of this definition, "submitted"
    +   means any form of electronic, verbal, or written communication sent
    +   to the Licensor or its representatives, including but not limited to
    +   communication on electronic mailing lists, source code control systems,
    +   and issue tracking systems that are managed by, or on behalf of, the
    +   Licensor for the purpose of discussing and improving the Work, but
    +   excluding communication that is conspicuously marked or otherwise
    +   designated in writing by the copyright owner as "Not a Contribution."
    +
    +   "Contributor" shall mean Licensor and any individual or Legal Entity
    +   on behalf of whom a Contribution has been received by Licensor and
    +   subsequently incorporated within the Work.
    +
    +2. Grant of Copyright License. Subject to the terms and conditions of
    +   this License, each Contributor hereby grants to You a perpetual,
    +   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +   copyright license to reproduce, prepare Derivative Works of,
    +   publicly display, publicly perform, sublicense, and distribute the
    +   Work and such Derivative Works in Source or Object form.
    +
    +3. Grant of Patent License. Subject to the terms and conditions of
    +   this License, each Contributor hereby grants to You a perpetual,
    +   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +   (except as stated in this section) patent license to make, have made,
    +   use, offer to sell, sell, import, and otherwise transfer the Work,
    +   where such license applies only to those patent claims licensable
    +   by such Contributor that are necessarily infringed by their
    +   Contribution(s) alone or by combination of their Contribution(s)
    +   with the Work to which such Contribution(s) was submitted. If You
    +   institute patent litigation against any entity (including a
    +   cross-claim or counterclaim in a lawsuit) alleging that the Work
    +   or a Contribution incorporated within the Work constitutes direct
    +   or contributory patent infringement, then any patent licenses
    +   granted to You under this License for that Work shall terminate
    +   as of the date such litigation is filed.
    +
    +4. Redistribution. You may reproduce and distribute copies of the
    +   Work or Derivative Works thereof in any medium, with or without
    +   modifications, and in Source or Object form, provided that You
    +   meet the following conditions:
    +
    +   (a) You must give any other recipients of the Work or
    +       Derivative Works a copy of this License; and
    +
    +   (b) You must cause any modified files to carry prominent notices
    +       stating that You changed the files; and
    +
    +   (c) You must retain, in the Source form of any Derivative Works
    +       that You distribute, all copyright, patent, trademark, and
    +       attribution notices from the Source form of the Work,
    +       excluding those notices that do not pertain to any part of
    +       the Derivative Works; and
    +
    +   (d) If the Work includes a "NOTICE" text file as part of its
    +       distribution, then any Derivative Works that You distribute must
    +       include a readable copy of the attribution notices contained
    +       within such NOTICE file, excluding those notices that do not
    +       pertain to any part of the Derivative Works, in at least one
    +       of the following places: within a NOTICE text file distributed
    +       as part of the Derivative Works; within the Source form or
    +       documentation, if provided along with the Derivative Works; or,
    +       within a display generated by the Derivative Works, if and
    +       wherever such third-party notices normally appear. The contents
    +       of the NOTICE file are for informational purposes only and
    +       do not modify the License. You may add Your own attribution
    +       notices within Derivative Works that You distribute, alongside
    +       or as an addendum to the NOTICE text from the Work, provided
    +       that such additional attribution notices cannot be construed
    +       as modifying the License.
    +
    +   You may add Your own copyright statement to Your modifications and
    +   may provide additional or different license terms and conditions
    +   for use, reproduction, or distribution of Your modifications, or
    +   for any such Derivative Works as a whole, provided Your use,
    +   reproduction, and distribution of the Work otherwise complies with
    +   the conditions stated in this License.
    +
    +5. Submission of Contributions. Unless You explicitly state otherwise,
    +   any Contribution intentionally submitted for inclusion in the Work
    +   by You to the Licensor shall be under the terms and conditions of
    +   this License, without any additional terms or conditions.
    +   Notwithstanding the above, nothing herein shall supersede or modify
    +   the terms of any separate license agreement you may have executed
    +   with Licensor regarding such Contributions.
    +
    +6. Trademarks. This License does not grant permission to use the trade
    +   names, trademarks, service marks, or product names of the Licensor,
    +   except as required for reasonable and customary use in describing the
    +   origin of the Work and reproducing the content of the NOTICE file.
    +
    +7. Disclaimer of Warranty. Unless required by applicable law or
    +   agreed to in writing, Licensor provides the Work (and each
    +   Contributor provides its Contributions) on an "AS IS" BASIS,
    +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +   implied, including, without limitation, any warranties or conditions
    +   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +   PARTICULAR PURPOSE. You are solely responsible for determining the
    +   appropriateness of using or redistributing the Work and assume any
    +   risks associated with Your exercise of permissions under this License.
    +
    +8. Limitation of Liability. In no event and under no legal theory,
    +   whether in tort (including negligence), contract, or otherwise,
    +   unless required by applicable law (such as deliberate and grossly
    +   negligent acts) or agreed to in writing, shall any Contributor be
    +   liable to You for damages, including any direct, indirect, special,
    +   incidental, or consequential damages of any character arising as a
    +   result of this License or out of the use or inability to use the
    +   Work (including but not limited to damages for loss of goodwill,
    +   work stoppage, computer failure or malfunction, or any and all
    +   other commercial damages or losses), even if such Contributor
    +   has been advised of the possibility of such damages.
    +
    +9. Accepting Warranty or Additional Liability. While redistributing
    +   the Work or Derivative Works thereof, You may choose to offer,
    +   and charge a fee for, acceptance of support, warranty, indemnity,
    +   or other liability obligations and/or rights consistent with this
    +   License. However, in accepting such obligations, You may act only
    +   on Your own behalf and on Your sole responsibility, not on behalf
    +   of any other Contributor, and only if You agree to indemnify,
    +   defend, and hold each Contributor harmless for any liability
    +   incurred by, or claims asserted against, such Contributor by reason
    +   of your accepting any such warranty or additional liability.
    +
    +END OF TERMS AND CONDITIONS
    +
    +APPENDIX: How to apply the Apache License to your work.
    +
    +   To apply the Apache License to your work, attach the following
    +   boilerplate notice, with the fields enclosed by brackets "[]"
    +   replaced with your own identifying information. (Don't include
    +   the brackets!)  The text should be enclosed in the appropriate
    +   comment syntax for the file format. We also recommend that a
    +   file or class name and description of purpose be included on the
    +   same "printed page" as the copyright notice for easier
    +   identification within third-party archives.
    +
    +Copyright [yyyy] [name of copyright owner]
    +
    +Licensed under the Apache License, Version 2.0 (the "License");
    +you may not use this file except in compliance with the License.
    +You may obtain a copy of the License at
    +
    +    http://www.apache.org/licenses/LICENSE-2.0
     
     Unless required by applicable law or agreed to in writing, software
     distributed under the License is distributed on an "AS IS" BASIS,
    @@ -8596,11 +9261,13 @@ 

    Used by:

    Apache License 2.0

    Used by:

                                  Apache License
                             Version 2.0, January 2004
    @@ -8796,7 +9463,7 @@ 

    Used by:

    you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -8809,13 +9476,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                  Apache License
                             Version 2.0, January 2004
    @@ -8999,7 +9660,7 @@ 

    Used by:

    To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate + the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier @@ -9011,7 +9672,7 @@

    Used by:

    you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -9619,6 +10280,7 @@

    Apache License 2.0

    Used by:

    @@ -10013,6 +10675,7 @@

    Used by:

    Apache License 2.0

    Used by:

                                  Apache License
    @@ -10223,6 +10886,7 @@ 

    Used by:

                                  Apache License
    @@ -10879,6 +11543,7 @@ 

    Used by:

  • apollo-encoder
  • apollo-parser
  • apollo-smith
  • +
  • apollo-smith
../../LICENSE-APACHE
@@ -11529,6 +12194,10 @@

Used by:

Apache License 2.0

Used by:

    +
  • async-graphql-actix-web
  • +
  • async-graphql-derive
  • +
  • async-graphql-parser
  • +
  • async-graphql-value
  • deadpool-runtime
  • deno-proc-macro-rules
  • deno-proc-macro-rules-macros
  • @@ -11538,13 +12207,15 @@

    Used by:

  • graphql_client_codegen
  • graphql_query_derive
  • http-serde
  • +
  • ident_case
  • +
  • language-tags
  • libssh2-sys
  • linkme-impl
  • md5
  • num-cmp
  • +
  • prost
  • rhai_codegen
  • siphasher
  • -
  • sptr
  • system-configuration
  • system-configuration-sys
  • thrift
  • @@ -11626,6 +12297,27 @@

    Used by:

    http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +
  • +

    Apache License 2.0

    +

    Used by:

    + +
    Copyright 2021 Oliver Giersch
    +
    +Licensed under the Apache License, Version 2.0 (the "License");
    +you may not use this file except in compliance with the License.
    +You may obtain a copy of the License at
    +
    +    http://www.apache.org/licenses/LICENSE-2.0
    +
     Unless required by applicable law or agreed to in writing, software
     distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    @@ -11777,6 +12469,38 @@ 

    Used by:

    BSD 2-Clause "Simplified" License

    Used by:

    +
    Copyright (c) 2015, Nick Fitzgerald
    +All rights reserved.
    +
    +Redistribution and use in source and binary forms, with or without modification,
    +are permitted provided that the following conditions are met:
    +
    +1. Redistributions of source code must retain the above copyright notice, this
    +   list of conditions and the following disclaimer.
    +
    +2. Redistributions in binary form must reproduce the above copyright notice,
    +   this list of conditions and the following disclaimer in the documentation
    +   and/or other materials provided with the distribution.
    +
    +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
    +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
    +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
    +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
    +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    +
    +
  • +
  • +

    BSD 2-Clause "Simplified" License

    +

    Used by:

    +
    Copyright (c) <year> <owner> 
    @@ -11833,6 +12557,8 @@ 

    Used by:

    Copyright (c) 2016 Dropbox, Inc.
    @@ -12296,9 +13022,119 @@ 

    Elastic License 2.0

    Used by:

    +
    Copyright 2021 Apollo Graph, Inc.
    +
    +Elastic License 2.0
    +
    +## Acceptance
    +
    +By using the software, you agree to all of the terms and conditions below.
    +
    +## Copyright License
    +
    +The licensor grants you a non-exclusive, royalty-free, worldwide,
    +non-sublicensable, non-transferable license to use, copy, distribute, make
    +available, and prepare derivative works of the software, in each case subject to
    +the limitations and conditions below.
    +
    +## Limitations
    +
    +You may not provide the software to third parties as a hosted or managed
    +service, where the service provides users with access to any substantial set of
    +the features or functionality of the software.
    +
    +You may not move, change, disable, or circumvent the license key functionality
    +in the software, and you may not remove or obscure any functionality in the
    +software that is protected by the license key.
    +
    +You may not alter, remove, or obscure any licensing, copyright, or other notices
    +of the licensor in the software. Any use of the licensor’s trademarks is subject
    +to applicable law.
    +
    +## Patents
    +
    +The licensor grants you a license, under any patent claims the licensor can
    +license, or becomes able to license, to make, have made, use, sell, offer for
    +sale, import and have imported the software, in each case subject to the
    +limitations and conditions in this license. This license does not cover any
    +patent claims that you cause to be infringed by modifications or additions to
    +the software. If you or your company make any written claim that the software
    +infringes or contributes to infringement of any patent, your patent license for
    +the software granted under these terms ends immediately. If your company makes
    +such a claim, your patent license ends immediately for work on behalf of your
    +company.
    +
    +## Notices
    +
    +You must ensure that anyone who gets a copy of any part of the software from you
    +also gets a copy of these terms.
    +
    +If you modify the software, you must include in any modified copies of the
    +software prominent notices stating that you have modified the software.
    +
    +## No Other Rights
    +
    +These terms do not imply any licenses other than those expressly granted in
    +these terms.
    +
    +## Termination
    +
    +If you use the software in violation of these terms, such use is not licensed,
    +and your licenses will automatically terminate. If the licensor provides you
    +with a notice of your violation, and you cease all violation of this license no
    +later than 30 days after you receive that notice, your licenses will be
    +reinstated retroactively. However, if you violate these terms after such
    +reinstatement, any additional violation of these terms will cause your licenses
    +to terminate automatically and permanently.
    +
    +## No Liability
    +
    +*As far as the law allows, the software comes as is, without any warranty or
    +condition, and the licensor will not be liable to you for any damages arising
    +out of these terms or the use or nature of the software, under any kind of
    +legal claim.*
    +
    +## Definitions
    +
    +The **licensor** is the entity offering these terms, and the **software** is the
    +software the licensor makes available under these terms, including any portion
    +of it.
    +
    +**you** refers to the individual or entity agreeing to these terms.
    +
    +**your company** is any legal entity, sole proprietorship, or other kind of
    +organization that you work for, plus all organizations that have control over,
    +are under the control of, or are under common control with that
    +organization. **control** means ownership of substantially all the assets of an
    +entity, or the power to direct its management and policies by vote, contract, or
    +otherwise. Control can be direct or indirect.
    +
    +**your licenses** are all the licenses granted to you for the software under
    +these terms.
    +
    +**use** means anything you do with the software requiring one of your licenses.
    +
    +**trademark** means trademarks, service marks, and similar rights.
    +
    +--------------------------------------------------------------------------------
    +
    +
  • +
  • +

    Elastic License 2.0

    +

    Used by:

    +
    Copyright 2021 Apollo Graph, Inc.
     
    +Source code in this repository is covered by (i) the Elastic License 2.0 or (ii) an MIT compatible license, in each case, as designated by a licensing file in a subdirectory or file header. The default throughout the repository is a license under the Elastic License 2.0, unless a file header or a licensing file in a subdirectory specifies another license.
    +
    +--------------------------------------------------------------------------------
    +
     Elastic License 2.0
     
     ## Acceptance
    @@ -12389,268 +13225,574 @@ 

    Used by:

    **use** means anything you do with the software requiring one of your licenses. -**trademark** means trademarks, service marks, and similar rights. +**trademark** means trademarks, service marks, and similar rights. + +--------------------------------------------------------------------------------
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
       Copyright 2015-2016 Brian Smith.
    +
    +   Permission to use, copy, modify, and/or distribute this software for any
    +   purpose with or without fee is hereby granted, provided that the above
    +   copyright notice and this permission notice appear in all copies.
    +
    +   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
    +   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    +   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
    +   SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    +   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
    +   OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
    +   CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
    /* Copyright (c) 2015, Google Inc.
    + *
    + * Permission to use, copy, modify, and/or distribute this software for any
    + * purpose with or without fee is hereby granted, provided that the above
    + * copyright notice and this permission notice appear in all copies.
    + *
    + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
    + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
    + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
    + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
    +
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
    // Copyright 2015-2016 Brian Smith.
    +//
    +// Permission to use, copy, modify, and/or distribute this software for any
    +// purpose with or without fee is hereby granted, provided that the above
    +// copyright notice and this permission notice appear in all copies.
    +//
    +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
    +// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    +// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR
    +// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    +// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    +// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    +// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    +
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
    Copyright (c) 2017 Adam Wick
    +
    +Permission to use, copy, modify, and/or distribute this software for any purpose
    +with or without fee is hereby granted, provided that the above copyright notice
    +and this permission notice appear in all copies.
     
    ---------------------------------------------------------------------------------
    +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    +THIS SOFTWARE.
     
  • -

    Elastic License 2.0

    +

    ISC License

    Used by:

    -
    Copyright 2021 Apollo Graph, Inc.
    -
    -Source code in this repository is covered by (i) the Elastic License 2.0 or (ii) an MIT compatible license, in each case, as designated by a licensing file in a subdirectory or file header. The default throughout the repository is a license under the Elastic License 2.0, unless a file header or a licensing file in a subdirectory specifies another license.
    +                
    Copyright (c) Hanno Braun and contributors
     
    ---------------------------------------------------------------------------------
    +Permission to use, copy, modify, and/or distribute this software for any purpose
    +with or without fee is hereby granted, provided that the above copyright notice
    +and this permission notice appear in all copies.
     
    -Elastic License 2.0
    +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    +THIS SOFTWARE.
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
    Copyright (c) Hanno Braun and contributors
     
    -## Acceptance
    +Permission to use, copy, modify, and/or distribute this software for any purpose
    +with or without fee is hereby granted, provided that the above copyright notice
    +and this permission notice appear in all copies.
     
    -By using the software, you agree to all of the terms and conditions below.
    +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    +THIS SOFTWARE.
    +
    +
  • +
  • +

    ISC License

    +

    Used by:

    + +
    ISC License:
     
    -## Copyright License
    +Copyright (c) 2004-2010 by Internet Systems Consortium, Inc. ("ISC")
    +Copyright (c) 1995-2003 by Internet Software Consortium
     
    -The licensor grants you a non-exclusive, royalty-free, worldwide,
    -non-sublicensable, non-transferable license to use, copy, distribute, make
    -available, and prepare derivative works of the software, in each case subject to
    -the limitations and conditions below.
    +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
     
    -## Limitations
    +THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    // Copyright (c) 2019 Nuclear Furnace
    +//
    +// Permission is hereby granted, free of charge, to any person obtaining a copy
    +// of this software and associated documentation files (the "Software"), to deal
    +// in the Software without restriction, including without limitation the rights
    +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +// copies of the Software, and to permit persons to whom the Software is
    +// furnished to do so, subject to the following conditions:
    +//
    +// The above copyright notice and this permission notice shall be included in all
    +// copies or substantial portions of the Software.
    +//
    +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +// SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014 Alex Crichton
     
    -You may not provide the software to third parties as a hosted or managed
    -service, where the service provides users with access to any substantial set of
    -the features or functionality of the software.
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -You may not move, change, disable, or circumvent the license key functionality
    -in the software, and you may not remove or obscure any functionality in the
    -software that is protected by the license key.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -You may not alter, remove, or obscure any licensing, copyright, or other notices
    -of the licensor in the software. Any use of the licensor’s trademarks is subject
    -to applicable law.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014 Carl Lerche and other MIO contributors
     
    -## Patents
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -The licensor grants you a license, under any patent claims the licensor can
    -license, or becomes able to license, to make, have made, use, sell, offer for
    -sale, import and have imported the software, in each case subject to the
    -limitations and conditions in this license. This license does not cover any
    -patent claims that you cause to be infringed by modifications or additions to
    -the software. If you or your company make any written claim that the software
    -infringes or contributes to infringement of any patent, your patent license for
    -the software granted under these terms ends immediately. If your company makes
    -such a claim, your patent license ends immediately for work on behalf of your
    -company.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -## Notices
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014-2019 Geoffroy Couprie
     
    -You must ensure that anyone who gets a copy of any part of the software from you
    -also gets a copy of these terms.
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -If you modify the software, you must include in any modified copies of the
    -software prominent notices stating that you have modified the software.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
     
    -## No Other Rights
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014-2019 Sean McArthur
     
    -These terms do not imply any licenses other than those expressly granted in
    -these terms.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -## Termination
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -If you use the software in violation of these terms, such use is not licensed,
    -and your licenses will automatically terminate. If the licensor provides you
    -with a notice of your violation, and you cease all violation of this license no
    -later than 30 days after you receive that notice, your licenses will be
    -reinstated retroactively. However, if you violate these terms after such
    -reinstatement, any additional violation of these terms will cause your licenses
    -to terminate automatically and permanently.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
     
    -## No Liability
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014-2020 Optimal Computing (NZ) Ltd
     
    -*As far as the law allows, the software comes as is, without any warranty or
    -condition, and the licensor will not be liable to you for any damages arising
    -out of these terms or the use or nature of the software, under any kind of
    -legal claim.*
    +Permission is hereby granted, free of charge, to any person obtaining a copy of
    +this software and associated documentation files (the "Software"), to deal in
    +the Software without restriction, including without limitation the rights to
    +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
    +of the Software, and to permit persons to whom the Software is furnished to do
    +so, subject to the following conditions:
     
    -## Definitions
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
     
    -The **licensor** is the entity offering these terms, and the **software** is the
    -software the licensor makes available under these terms, including any portion
    -of it.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    +IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014-2021 Sean McArthur
     
    -**you** refers to the individual or entity agreeing to these terms.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -**your company** is any legal entity, sole proprietorship, or other kind of
    -organization that you work for, plus all organizations that have control over,
    -are under the control of, or are under common control with that
    -organization. **control** means ownership of substantially all the assets of an
    -entity, or the power to direct its management and policies by vote, contract, or
    -otherwise. Control can be direct or indirect.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -**your licenses** are all the licenses granted to you for the software under
    -these terms.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2015 Igor Shaula
     
    -**use** means anything you do with the software requiring one of your licenses.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -**trademark** means trademarks, service marks, and similar rights.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    ---------------------------------------------------------------------------------
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
  • -

    ISC License

    +

    MIT License

    Used by:

    -
       Copyright 2015-2016 Brian Smith.
    +                
    Copyright (c) 2015 Jan-Erik Rediger, Hendrik Sollich
     
    -   Permission to use, copy, modify, and/or distribute this software for any
    -   purpose with or without fee is hereby granted, provided that the above
    -   copyright notice and this permission notice appear in all copies.
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
    -   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    -   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
    -   SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    -   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
    -   OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
    -   CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    /* Copyright (c) 2015, Google Inc.
    - *
    - * Permission to use, copy, modify, and/or distribute this software for any
    - * purpose with or without fee is hereby granted, provided that the above
    - * copyright notice and this permission notice appear in all copies.
    - *
    - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
    - * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
    - * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
    - * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
    +                
    Copyright (c) 2015 Jonathan Reem
    +
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
    +
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    // Copyright 2015-2016 Brian Smith.
    -//
    -// Permission to use, copy, modify, and/or distribute this software for any
    -// purpose with or without fee is hereby granted, provided that the above
    -// copyright notice and this permission notice appear in all copies.
    -//
    -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
    -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    -// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR
    -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    +                
    Copyright (c) 2015 fangyuanziti
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
     
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    Copyright (c) 2017 Adam Wick
    +                
    Copyright (c) 2015 steffengy
     
    -Permission to use, copy, modify, and/or distribute this software for any purpose
    -with or without fee is hereby granted, provided that the above copyright notice
    -and this permission notice appear in all copies.
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    -THIS SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    Copyright (c) Hanno Braun and contributors
    +                
    Copyright (c) 2015-2016 the fiat-crypto authors (see
    +https://github.com/mit-plv/fiat-crypto/blob/master/AUTHORS).
     
    -Permission to use, copy, modify, and/or distribute this software for any purpose
    -with or without fee is hereby granted, provided that the above copyright notice
    -and this permission notice appear in all copies.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    -THIS SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    Copyright (c) Hanno Braun and contributors
    +                
    Copyright (c) 2015-2019 Doug Tangren
     
    -Permission to use, copy, modify, and/or distribute this software for any purpose
    -with or without fee is hereby granted, provided that the above copyright notice
    -and this permission notice appear in all copies.
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
    -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
    -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
    -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
    -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    -THIS SOFTWARE.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • -

    ISC License

    +

    MIT License

    Used by:

    -
    ISC License:
    +                
    Copyright (c) 2015-2020 Doug Tangren
     
    -Copyright (c) 2004-2010 by Internet Systems Consortium, Inc. ("ISC")
    -Copyright (c) 1995-2003 by Internet Software Consortium
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
    -
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2014 Alex Crichton
    +                
    Copyright (c) 2016 Anatoly Ikorsky
     
     Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -12675,15 +13817,45 @@ 

    Used by:

    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    -
    Copyright (c) 2014 Carl Lerche and other MIO contributors
    +                
    Copyright (c) 2016 William Orr <will@worrbase.com>
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2017 Daniel Abramov
    +Copyright (c) 2017 Alexey Galakhov
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -12708,9 +13880,9 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2014-2019 Geoffroy Couprie
    +                
    Copyright (c) 2017 Doug Tangren
     
     Permission is hereby granted, free of charge, to any person obtaining
     a copy of this software and associated documentation files (the
    @@ -12736,10 +13908,10 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2014-2019 Sean McArthur
    +                
    Copyright (c) 2017 Gilad Naaman
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -12748,34 +13920,6 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    -
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    Copyright (c) 2014-2020 Optimal Computing (NZ) Ltd
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy of
    -this software and associated documentation files (the "Software"), to deal in
    -the Software without restriction, including without limitation the rights to
    -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
    -of the Software, and to permit persons to whom the Software is furnished to do
    -so, subject to the following conditions:
    -
     The above copyright notice and this permission notice shall be included in all
     copies or substantial portions of the Software.
     
    @@ -12783,72 +13927,81 @@ 

    Used by:

    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    -IN THE SOFTWARE.
    -
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2014-2021 Sean McArthur
    +                
    Copyright (c) 2017 Redox OS Developers
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +MIT License
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2015 Igor Shaula
    +                
    Copyright (c) 2017 h2 authors
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2015 Jan-Erik Rediger, Hendrik Sollich
    +                
    Copyright (c) 2017-2019 Geoffroy Couprie
     
     Permission is hereby granted, free of charge, to any person obtaining
     a copy of this software and associated documentation files (the
    @@ -12874,10 +14027,9 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2015 Jonathan Reem
    +                
    Copyright (c) 2018 Carl Lerche
     
     Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -12908,9 +14060,38 @@ 

    Used by:

    MIT License

    Used by:

    +
    Copyright (c) 2018 Sean McArthur
    +Copyright (c) 2016 Alex Crichton
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + -
    Copyright (c) 2015 fangyuanziti
    +                
    Copyright (c) 2018-2019 Sean McArthur
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -12929,86 +14110,109 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    -
    Copyright (c) 2015 steffengy
    +                
    Copyright (c) 2019 Axum Contributors
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2015-2016 the fiat-crypto authors (see
    -https://github.com/mit-plv/fiat-crypto/blob/master/AUTHORS).
    +                
    Copyright (c) 2019 Carl Lerche
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2015-2020 Doug Tangren
    +                
    Copyright (c) 2019 Carl Lerche
     
    -Permission is hereby granted, free of charge, to any person obtaining
    -a copy of this software and associated documentation files (the
    -"Software"), to deal in the Software without restriction, including
    -without limitation the rights to use, copy, modify, merge, publish,
    -distribute, sublicense, and/or sell copies of the Software, and to
    -permit persons to whom the Software is furnished to do so, subject to
    -the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be
    -included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    Copyright (c) 2016 Anatoly Ikorsky
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
    +
    +Copyright (c) 2018 David Tolnay
     
     Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -13033,17 +14237,30 @@ 

    Used by:

    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2019 David Pedersen
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
     
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2016 William Orr <will@worrbase.com>
    +                
    Copyright (c) 2019 Eliza Weisman
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13052,26 +14269,25 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2017 Daniel Abramov
    -Copyright (c) 2017 Alexey Galakhov
    +                
    Copyright (c) 2019 Eliza Weisman
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13080,53 +14296,59 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2017 Doug Tangren
    +                
    Copyright (c) 2019 Hyper Contributors
     
    -Permission is hereby granted, free of charge, to any person obtaining
    -a copy of this software and associated documentation files (the
    -"Software"), to deal in the Software without restriction, including
    -without limitation the rights to use, copy, modify, merge, publish,
    -distribute, sublicense, and/or sell copies of the Software, and to
    -permit persons to whom the Software is furnished to do so, subject to
    -the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be
    -included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2017 Gilad Naaman
    +                
    Copyright (c) 2019 Stepan Koltsov
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13138,52 +14360,95 @@ 

    Used by:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2019 Tokio Contributors
    +
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
    +
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
    +
  • MIT License

    Used by:

    -
    Copyright (c) 2017 Redox OS Developers
    -
    -MIT License
    +                
    Copyright (c) 2019 Tower Contributors
     
    -Permission is hereby granted, free of charge, to any person obtaining
    -a copy of this software and associated documentation files (the
    -"Software"), to deal in the Software without restriction, including
    -without limitation the rights to use, copy, modify, merge, publish,
    -distribute, sublicense, and/or sell copies of the Software, and to
    -permit persons to whom the Software is furnished to do so, subject to
    -the following conditions:
    +Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -The above copyright notice and this permission notice shall be
    -included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2017 h2 authors
    +                
    Copyright (c) 2019-2021 Tower Contributors
     
     Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -13214,37 +14479,38 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2017-2019 Geoffroy Couprie
    +                
    Copyright (c) 2020 Lucio Franco
     
    -Permission is hereby granted, free of charge, to any person obtaining
    -a copy of this software and associated documentation files (the
    -"Software"), to deal in the Software without restriction, including
    -without limitation the rights to use, copy, modify, merge, publish,
    -distribute, sublicense, and/or sell copies of the Software, and to
    -permit persons to whom the Software is furnished to do so, subject to
    -the following conditions:
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -The above copyright notice and this permission notice shall be
    -included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2018 Carl Lerche
    +                
    Copyright (c) 2023 Tokio Contributors
     
     Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -13275,10 +14541,57 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2018 Sean McArthur
    -Copyright (c) 2016 Alex Crichton
    +                
    Copyright 2021 Alec Embke
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright 2021 Axum Contributors
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright 2023 Alec Embke
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    MIT License
    +
    +Copyright (c) 2016 Jerome Froelich
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13287,26 +14600,27 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - -
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2018-2019 Sean McArthur
    +                
    MIT License
    +
    +Copyright (c) 2016 fengcen
    +Copyright (c) 2019 svartalf
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13315,167 +14629,175 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Axum Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2017 Evgeny Safronov
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Carl Lerche
    -
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +                
    MIT License
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Copyright (c) 2017 Ted Driggs
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Carl Lerche
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2018 Canop
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
     
    -Copyright (c) 2018 David Tolnay
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2019 Acrimon
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 David Pedersen
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +Copyright (c) 2019 Bojan
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Eliza Weisman
    +                
    MIT License
    +
    +Copyright (c) 2019 Graham Esau
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13484,25 +14806,27 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Eliza Weisman
    +                
    MIT License
    +
    +Copyright (c) 2019 Hannes Karppila
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13520,49 +14844,50 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -
    +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Hyper Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2019 Peter Glotfelty
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Stepan Koltsov
    +                
    MIT License
    +
    +Copyright (c) 2019 Yoshua Wuyts
    +Copyright (c) Tokio Contributors
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13574,129 +14899,111 @@ 

    Used by:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE -OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Tokio Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2019 brunoczim
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019 Tower Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2020 Rousan Ali
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2019-2021 Tower Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2021 MarcusGrass
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2020 Lucio Franco
    +                
    MIT License
    +
    +Copyright (c) 2021 the Deno authors
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13705,88 +15012,56 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright (c) 2023 Tokio Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Copyright (c) 2021-2022 Joshua Barretto 
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright (c) 2023 Tokio Contributors
    -
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    -
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    -
    -The MIT License (MIT)
    +                
    MIT License
     
    -Copyright (c) 2019 Yoshua Wuyts
    +Copyright (c) 2022 Ibraheem Ahmed
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13811,40 +15086,86 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright 2021 Alec Embke
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +Copyright (c) 2022 Nugine
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
     
    -
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    Used by:

    -
    Copyright 2021 Axum Contributors
    +                
    MIT License
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +Copyright (c) 2022 picoHz
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    Copyright 2023 Alec Embke
    +                
    MIT License
    +
    +Copyright (c) <year> <copyright holders>
     
     Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
     
    @@ -13857,11 +15178,11 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2016 Jerome Froelich
    +Copyright (c) Tokio Contributors
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13879,18 +15200,18 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. +
  • MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2016 fengcen
    -Copyright (c) 2019 svartalf
    +Copyright (c) [2019] [Changseok Han]
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13908,18 +15229,17 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -
    +SOFTWARE.
  • MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2017 Evgeny Safronov
    +Copyright (c) [2021] [Boris Zhguchev]
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13937,47 +15257,121 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    MIT License
    +
    +Copyright (c) 2019 Daniel Augusto Rizzi Salvadori
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    MIT License (MIT)
    +
    +Copyright (c) 2017 Felix Köpge
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    MIT License
    +                
    Permission is hereby granted, free of charge, to any
    +person obtaining a copy of this software and associated
    +documentation files (the "Software"), to deal in the
    +Software without restriction, including without
    +limitation the rights to use, copy, modify, merge,
    +publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software
    +is furnished to do so, subject to the following
    +conditions:
     
    -Copyright (c) 2018 Canop
    +The above copyright notice and this permission notice
    +shall be included in all copies or substantial portions
    +of the Software.
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    +DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License
     
    -Copyright (c) 2019 Acrimon
    +Copyright 2015 Google Inc. All rights reserved.
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -13986,27 +15380,27 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License
     
    -Copyright (c) 2019 Bojan
    +Copyright 2015 The Fancy Regex Authors.
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14015,28 +15409,29 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2019 Graham Esau
    +Copyright (c) 2014 Benjamin Sago
    +Copyright (c) 2021-2022 The Nushell Project Developers
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14061,11 +15456,12 @@ 

    Used by:

    MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2019 Hannes Karppila
    +Copyright (c) 2014 Mathijs van de Nes
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14089,14 +15485,11 @@ 

    Used by:

    MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2019 Peter Glotfelty
    +Copyright (c) 2014 Mathijs van de Nes
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14121,11 +15514,14 @@ 

    Used by:

    MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2019 brunoczim
    +Copyright (c) 2014 Ning Sun
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14144,17 +15540,24 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2020 Rousan Ali
    +Copyright (c) 2015 Andrew Gallant
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14163,27 +15566,27 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2021 MarcusGrass
    +Copyright (c) 2015 Austin Bonander
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14202,17 +15605,21 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2021 the Deno authors
    +Copyright (c) 2015 Danny Guo
    +Copyright (c) 2016 Titus Wormer <tituswormer@gmail.com>
    +Copyright (c) 2018 Akash Kurdekar
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14237,11 +15644,11 @@ 

    Used by:

    MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2021-2022 Joshua Barretto 
    +Copyright (c) 2015 Gerd Zellweger
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14250,27 +15657,26 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2022 Ibraheem Ahmed
    +Copyright (c) 2015 Siyu Wang
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14289,17 +15695,18 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2022 Nugine
    +Copyright (c) 2015 Vincent Prouillet
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14317,17 +15724,19 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. +
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) 2022 picoHz
    +Copyright (c) 2015-2020 Julien Cretin
    +Copyright (c) 2017-2020 Google Inc.
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14352,45 +15761,42 @@ 

    Used by:

    MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) <year> <copyright holders>
    +Copyright (c) 2016 Google Inc. (lewinb@google.com) -- though not an official
    +Google product or in any way related!
    +Copyright (c) 2018-2020 Lewin Bormann (lbo@spheniscida.de)
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to
    +deal in the Software without restriction, including without limitation the
    +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
    +sell copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    +IN THE SOFTWARE.
     
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) [2019] [Changseok Han]
    +Copyright (c) 2016 Jelte Fennema
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14408,17 +15814,18 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. +
  • MIT License

    Used by:

    -
    MIT License
    +                
    The MIT License (MIT)
     
    -Copyright (c) [2021] [Boris Zhguchev]
    +Copyright (c) 2016 Jonathan Creekmore
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14436,120 +15843,19 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    MIT License
    -
    -Copyright (c) 2019 Daniel Augusto Rizzi Salvadori
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    -
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    MIT License (MIT)
    -
    -Copyright (c) 2017 Felix Köpge
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    -
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    -
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    Permission is hereby granted, free of charge, to any person obtaining
    -a copy of this software and associated documentation files (the
    -"Software"), to deal in the Software without restriction, including
    -without limitation the rights to use, copy, modify, merge, publish,
    -distribute, sublicense, and/or sell copies of the Software, and to
    -permit persons to whom the Software is furnished to do so, subject to
    -the following conditions:
    -
    -The above copyright notice and this permission notice shall be
    -included in all copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +SOFTWARE.
     
  • MIT License

    Used by:

    -
    The MIT License
    +                
    The MIT License (MIT)
     
    -Copyright 2015 The Fancy Regex Authors.
    +Copyright (c) 2017 Andrew Gallant
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14574,13 +15880,13 @@ 

    Used by:

    MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2014 Benjamin Sago
    -Copyright (c) 2021-2022 The Nushell Project Developers
    +Copyright (c) 2017 Armin Ronacher <armin.ronacher@active-4.com>
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14599,18 +15905,18 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2014 Mathijs van de Nes
    +Copyright (c) 2017 Jose Narvaez
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14628,17 +15934,21 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. + +
  • MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2014 Mathijs van de Nes
    +Copyright (c) 2018 pyros2097
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14657,18 +15967,18 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
  • MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2014 Ning Sun
    +Copyright (c) 2020 Benjamin Coenen
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14677,17 +15987,89 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    The MIT License (MIT)
    +Copyright (c) 2016 Alexandre Bury
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
     
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    The MIT License (MIT)
    +
    +Copyright (c) 2015 BartƂomiej KamiƄski
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    The MIT License (MIT)
    +
    +Copyright (c) 2015 Markus Westerlind
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
     
  • @@ -14699,12 +16081,24 @@

    Used by:

  • globset
  • memchr
  • regex-automata
  • +
  • same-file
  • termcolor
  • walkdir
  • +
  • winapi-util
-
The MIT License (MIT)
+                
This project is dual-licensed under the Unlicense and MIT licenses.
 
-Copyright (c) 2015 Andrew Gallant
+You may use this code under the terms of either license.
+
+ +
  • +

    MIT License

    +

    Used by:

    + +
    © 2016 Bertram Truong
    +© 2021 Kornel LesiƄski
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -14726,579 +16120,433 @@ 

    Used by:

  • -

    MIT License

    +

    Mozilla Public License 2.0

    Used by:

    -
    The MIT License (MIT)
    +                
    Mozilla Public License Version 2.0
    +==================================
     
    -Copyright (c) 2015 Austin Bonander
    +### 1. Definitions
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +**1.1. “Contributor”**
    +    means each individual or legal entity that creates, contributes to
    +    the creation of, or owns Covered Software.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +**1.2. “Contributor Version”**
    +    means the combination of the Contributions of others (if any) used
    +    by a Contributor and that particular Contributor's Contribution.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +**1.3. “Contribution”**
    +    means Covered Software of a particular Contributor.
     
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +**1.4. “Covered Software”**
    +    means Source Code Form to which the initial Contributor has attached
    +    the notice in Exhibit A, the Executable Form of such Source Code
    +    Form, and Modifications of such Source Code Form, in each case
    +    including portions thereof.
     
    -Copyright (c) 2015 Danny Guo
    -Copyright (c) 2016 Titus Wormer <tituswormer@gmail.com>
    -Copyright (c) 2018 Akash Kurdekar
    +**1.5. “Incompatible With Secondary Licenses”**
    +    means
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +* **(a)** that the initial Contributor has attached the notice described
    +    in Exhibit B to the Covered Software; or
    +* **(b)** that the Covered Software was made available under the terms of
    +    version 1.1 or earlier of the License, but not also under the
    +    terms of a Secondary License.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +**1.6. “Executable Form”**
    +    means any form of the work other than Source Code Form.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +**1.7. “Larger Work”**
    +    means a work that combines Covered Software with other material, in
    +    a separate file or files, that is not Covered Software.
     
    -Copyright (c) 2015 Siyu Wang
    +**1.8. “License”**
    +    means this document.
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +**1.9. “Licensable”**
    +    means having the right to grant, to the maximum extent possible,
    +    whether at the time of the initial grant or subsequently, any and
    +    all of the rights conveyed by this License.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +**1.10. “Modifications”**
    +    means any of the following:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +* **(a)** any file in Source Code Form that results from an addition to,
    +    deletion from, or modification of the contents of Covered
    +    Software; or
    +* **(b)** any new file in Source Code Form that contains any Covered
    +    Software.
     
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +**1.11. “Patent Claims” of a Contributor**
    +    means any patent claim(s), including without limitation, method,
    +    process, and apparatus claims, in any patent Licensable by such
    +    Contributor that would be infringed, but for the grant of the
    +    License, by the making, using, selling, offering for sale, having
    +    made, import, or transfer of either its Contributions or its
    +    Contributor Version.
    +
    +**1.12. “Secondary License”**
    +    means either the GNU General Public License, Version 2.0, the GNU
    +    Lesser General Public License, Version 2.1, the GNU Affero General
    +    Public License, Version 3.0, or any later versions of those
    +    licenses.
    +
    +**1.13. “Source Code Form”**
    +    means the form of the work preferred for making modifications.
    +
    +**1.14. “You” (or “Your”)**
    +    means an individual or a legal entity exercising rights under this
    +    License. For legal entities, “You” includes any entity that
    +    controls, is controlled by, or is under common control with You. For
    +    purposes of this definition, “control” means **(a)** the power, direct
    +    or indirect, to cause the direction or management of such entity,
    +    whether by contract or otherwise, or **(b)** ownership of more than
    +    fifty percent (50%) of the outstanding shares or beneficial
    +    ownership of such entity.
    +
    +
    +### 2. License Grants and Conditions
    +
    +#### 2.1. Grants
    +
    +Each Contributor hereby grants You a world-wide, royalty-free,
    +non-exclusive license:
    +
    +* **(a)** under intellectual property rights (other than patent or trademark)
    +    Licensable by such Contributor to use, reproduce, make available,
    +    modify, display, perform, distribute, and otherwise exploit its
    +    Contributions, either on an unmodified basis, with Modifications, or
    +    as part of a Larger Work; and
    +* **(b)** under Patent Claims of such Contributor to make, use, sell, offer
    +    for sale, have made, import, and otherwise transfer either its
    +    Contributions or its Contributor Version.
    +
    +#### 2.2. Effective Date
    +
    +The licenses granted in Section 2.1 with respect to any Contribution
    +become effective for each Contribution on the date the Contributor first
    +distributes such Contribution.
    +
    +#### 2.3. Limitations on Grant Scope
    +
    +The licenses granted in this Section 2 are the only rights granted under
    +this License. No additional rights or licenses will be implied from the
    +distribution or licensing of Covered Software under this License.
    +Notwithstanding Section 2.1(b) above, no patent license is granted by a
    +Contributor:
    +
    +* **(a)** for any code that a Contributor has removed from Covered Software;
    +    or
    +* **(b)** for infringements caused by: **(i)** Your and any other third party's
    +    modifications of Covered Software, or **(ii)** the combination of its
    +    Contributions with other software (except as part of its Contributor
    +    Version); or
    +* **(c)** under Patent Claims infringed by Covered Software in the absence of
    +    its Contributions.
    +
    +This License does not grant any rights in the trademarks, service marks,
    +or logos of any Contributor (except as may be necessary to comply with
    +the notice requirements in Section 3.4).
    +
    +#### 2.4. Subsequent Licenses
    +
    +No Contributor makes additional grants as a result of Your choice to
    +distribute the Covered Software under a subsequent version of this
    +License (see Section 10.2) or under the terms of a Secondary License (if
    +permitted under the terms of Section 3.3).
    +
    +#### 2.5. Representation
    +
    +Each Contributor represents that the Contributor believes its
    +Contributions are its original creation(s) or it has sufficient rights
    +to grant the rights to its Contributions conveyed by this License.
    +
    +#### 2.6. Fair Use
    +
    +This License is not intended to limit any rights You have under
    +applicable copyright doctrines of fair use, fair dealing, or other
    +equivalents.
     
    -Copyright (c) 2015 Vincent Prouillet
    +#### 2.7. Conditions
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
    +in Section 2.1.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +### 3. Responsibilities
     
    -Copyright (c) 2015-2020 Julien Cretin
    -Copyright (c) 2017-2020 Google Inc.
    +#### 3.1. Distribution of Source Form
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +All distribution of Covered Software in Source Code Form, including any
    +Modifications that You create or to which You contribute, must be under
    +the terms of this License. You must inform recipients that the Source
    +Code Form of the Covered Software is governed by the terms of this
    +License, and how they can obtain a copy of this License. You may not
    +attempt to alter or restrict the recipients' rights in the Source Code
    +Form.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +#### 3.2. Distribution of Executable Form
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +If You distribute Covered Software in Executable Form then:
     
    -Copyright (c) 2016 Google Inc. (lewinb@google.com) -- though not an official
    -Google product or in any way related!
    -Copyright (c) 2018-2020 Lewin Bormann (lbo@spheniscida.de)
    +* **(a)** such Covered Software must also be made available in Source Code
    +    Form, as described in Section 3.1, and You must inform recipients of
    +    the Executable Form how they can obtain a copy of such Source Code
    +    Form by reasonable means in a timely manner, at a charge no more
    +    than the cost of distribution to the recipient; and
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to
    -deal in the Software without restriction, including without limitation the
    -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
    -sell copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +* **(b)** You may distribute such Executable Form under the terms of this
    +    License, or sublicense it under different terms, provided that the
    +    license for the Executable Form does not attempt to limit or alter
    +    the recipients' rights in the Source Code Form under this License.
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +#### 3.3. Distribution of a Larger Work
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
    -IN THE SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +You may create and distribute a Larger Work under terms of Your choice,
    +provided that You also comply with the requirements of this License for
    +the Covered Software. If the Larger Work is a combination of Covered
    +Software with a work governed by one or more Secondary Licenses, and the
    +Covered Software is not Incompatible With Secondary Licenses, this
    +License permits You to additionally distribute such Covered Software
    +under the terms of such Secondary License(s), so that the recipient of
    +the Larger Work may, at their option, further distribute the Covered
    +Software under the terms of either this License or such Secondary
    +License(s).
     
    -Copyright (c) 2016 Jelte Fennema
    +#### 3.4. Notices
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +You may not remove or alter the substance of any license notices
    +(including copyright notices, patent notices, disclaimers of warranty,
    +or limitations of liability) contained within the Source Code Form of
    +the Covered Software, except that You may alter any license notices to
    +the extent required to remedy known factual inaccuracies.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +#### 3.5. Application of Additional Terms
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +You may choose to offer, and to charge a fee for, warranty, support,
    +indemnity or liability obligations to one or more recipients of Covered
    +Software. However, You may do so only on Your own behalf, and not on
    +behalf of any Contributor. You must make it absolutely clear that any
    +such warranty, support, indemnity, or liability obligation is offered by
    +You alone, and You hereby agree to indemnify every Contributor for any
    +liability incurred by such Contributor as a result of warranty, support,
    +indemnity or liability terms You offer. You may include additional
    +disclaimers of warranty and limitations of liability specific to any
    +jurisdiction.
     
    -Copyright (c) 2016 Jonathan Creekmore
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +### 4. Inability to Comply Due to Statute or Regulation
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +If it is impossible for You to comply with any of the terms of this
    +License with respect to some or all of the Covered Software due to
    +statute, judicial order, or regulation then You must: **(a)** comply with
    +the terms of this License to the maximum extent possible; and **(b)**
    +describe the limitations and the code they affect. Such description must
    +be placed in a text file included with all distributions of the Covered
    +Software under this License. Except to the extent prohibited by statute
    +or regulation, such description must be sufficiently detailed for a
    +recipient of ordinary skill to be able to understand it.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
     
    -Copyright (c) 2017 Andrew Gallant
    +### 5. Termination
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +**5.1.** The rights granted under this License will terminate automatically
    +if You fail to comply with any of its terms. However, if You become
    +compliant, then the rights granted under this License from a particular
    +Contributor are reinstated **(a)** provisionally, unless and until such
    +Contributor explicitly and finally terminates Your grants, and **(b)** on an
    +ongoing basis, if such Contributor fails to notify You of the
    +non-compliance by some reasonable means prior to 60 days after You have
    +come back into compliance. Moreover, Your grants from a particular
    +Contributor are reinstated on an ongoing basis if such Contributor
    +notifies You of the non-compliance by some reasonable means, this is the
    +first time You have received notice of non-compliance with this License
    +from such Contributor, and You become compliant prior to 30 days after
    +Your receipt of the notice.
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +**5.2.** If You initiate litigation against any entity by asserting a patent
    +infringement claim (excluding declaratory judgment actions,
    +counter-claims, and cross-claims) alleging that a Contributor Version
    +directly or indirectly infringes any patent, then the rights granted to
    +You by any and all Contributors for the Covered Software under Section
    +2.1 of this License shall terminate.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +**5.3.** In the event of termination under Sections 5.1 or 5.2 above, all
    +end user license agreements (excluding distributors and resellers) which
    +have been validly granted by You or Your distributors under this License
    +prior to termination shall survive termination.
     
    -Copyright (c) 2017 Armin Ronacher <armin.ronacher@active-4.com>
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +### 6. Disclaimer of Warranty
    +
    +> Covered Software is provided under this License on an “as is”
    +> basis, without warranty of any kind, either expressed, implied, or
    +> statutory, including, without limitation, warranties that the
    +> Covered Software is free of defects, merchantable, fit for a
    +> particular purpose or non-infringing. The entire risk as to the
    +> quality and performance of the Covered Software is with You.
    +> Should any Covered Software prove defective in any respect, You
    +> (not any Contributor) assume the cost of any necessary servicing,
    +> repair, or correction. This disclaimer of warranty constitutes an
    +> essential part of this License. No use of any Covered Software is
    +> authorized under this License except under this disclaimer.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +### 7. Limitation of Liability
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +> Under no circumstances and under no legal theory, whether tort
    +> (including negligence), contract, or otherwise, shall any
    +> Contributor, or anyone who distributes Covered Software as
    +> permitted above, be liable to You for any direct, indirect,
    +> special, incidental, or consequential damages of any character
    +> including, without limitation, damages for lost profits, loss of
    +> goodwill, work stoppage, computer failure or malfunction, or any
    +> and all other commercial damages or losses, even if such party
    +> shall have been informed of the possibility of such damages. This
    +> limitation of liability shall not apply to liability for death or
    +> personal injury resulting from such party's negligence to the
    +> extent applicable law prohibits such limitation. Some
    +> jurisdictions do not allow the exclusion or limitation of
    +> incidental or consequential damages, so this exclusion and
    +> limitation may not apply to You.
     
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
     
    -Copyright (c) 2017 Jose Narvaez
    +### 8. Litigation
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +Any litigation relating to this License may be brought only in the
    +courts of a jurisdiction where the defendant maintains its principal
    +place of business and such litigation shall be governed by laws of that
    +jurisdiction, without reference to its conflict-of-law provisions.
    +Nothing in this Section shall prevent a party's ability to bring
    +cross-claims or counter-claims.
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +### 9. Miscellaneous
     
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +This License represents the complete agreement concerning the subject
    +matter hereof. If any provision of this License is held to be
    +unenforceable, such provision shall be reformed only to the extent
    +necessary to make it enforceable. Any law or regulation which provides
    +that the language of a contract shall be construed against the drafter
    +shall not be used to construe this License against a Contributor.
     
    -Copyright (c) 2018 pyros2097
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +### 10. Versions of the License
     
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    +#### 10.1. New Versions
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +Mozilla Foundation is the license steward. Except as provided in Section
    +10.3, no one other than the license steward has the right to modify or
    +publish new versions of this License. Each version will be given a
    +distinguishing version number.
     
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    +#### 10.2. Effect of New Versions
     
    -Copyright (c) 2020 Benjamin Coenen
    +You may distribute the Covered Software under the terms of the version
    +of the License under which You originally received the Covered Software,
    +or under the terms of any subsequent version published by the license
    +steward.
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +#### 10.3. Modified Versions
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +If you create software not governed by this License, and you want to
    +create a new license for such software, you may create and use a
    +modified version of this License if you rename the license and remove
    +any references to the name of the license steward (except to note that
    +such modified license differs from this License).
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    -Copyright (c) 2016 Alexandre Bury
    +#### 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +If You choose to distribute Source Code Form that is Incompatible With
    +Secondary Licenses under the terms of this version of the License, the
    +notice described in Exhibit B of this License must be attached.
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +## Exhibit A - Source Code Form License Notice
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    -
    -Copyright (c) 2015 Bartłomiej Kamiński
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    -
    -The above copyright notice and this permission notice shall be included in all
    -copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    -
    -Copyright (c) 2015 Markus Westerlind
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    -
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    -
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    This project is dual-licensed under the Unlicense and MIT licenses.
    +    This Source Code Form is subject to the terms of the Mozilla Public
    +    License, v. 2.0. If a copy of the MPL was not distributed with this
    +    file, You can obtain one at http://mozilla.org/MPL/2.0/.
     
    -You may use this code under the terms of either license.
    -
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    © 2016 Bertram Truong
    -© 2021 Kornel Lesiński
    +If it is not possible or desirable to put the notice in a particular
    +file, then You may include the notice in a location (such as a LICENSE
    +file in a relevant directory) where a recipient would be likely to look
    +for such a notice.
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +You may add additional accurate notices of copyright ownership.
     
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    +## Exhibit B - “Incompatible With Secondary Licenses” Notice
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
    +    This Source Code Form is "Incompatible With Secondary Licenses", as
    +    defined by the Mozilla Public License, v. 2.0.
     
  • Mozilla Public License 2.0

    Used by:

    Mozilla Public License Version 2.0
     ==================================
     
    -### 1. Definitions
    +1. Definitions
    +--------------
     
    -**1.1. “Contributor”**
    +1.1. "Contributor"
         means each individual or legal entity that creates, contributes to
         the creation of, or owns Covered Software.
     
    -**1.2. “Contributor Version”**
    +1.2. "Contributor Version"
         means the combination of the Contributions of others (if any) used
         by a Contributor and that particular Contributor's Contribution.
     
    -**1.3. “Contribution”**
    +1.3. "Contribution"
         means Covered Software of a particular Contributor.
     
    -**1.4. “Covered Software”**
    +1.4. "Covered Software"
         means Source Code Form to which the initial Contributor has attached
         the notice in Exhibit A, the Executable Form of such Source Code
         Form, and Modifications of such Source Code Form, in each case
         including portions thereof.
     
    -**1.5. “Incompatible With Secondary Licenses”**
    +1.5. "Incompatible With Secondary Licenses"
         means
     
    -* **(a)** that the initial Contributor has attached the notice described
    -    in Exhibit B to the Covered Software; or
    -* **(b)** that the Covered Software was made available under the terms of
    -    version 1.1 or earlier of the License, but not also under the
    -    terms of a Secondary License.
    +    (a) that the initial Contributor has attached the notice described
    +        in Exhibit B to the Covered Software; or
     
    -**1.6. “Executable Form”**
    +    (b) that the Covered Software was made available under the terms of
    +        version 1.1 or earlier of the License, but not also under the
    +        terms of a Secondary License.
    +
    +1.6. "Executable Form"
         means any form of the work other than Source Code Form.
     
    -**1.7. “Larger Work”**
    -    means a work that combines Covered Software with other material, in
    +1.7. "Larger Work"
    +    means a work that combines Covered Software with other material, in 
         a separate file or files, that is not Covered Software.
     
    -**1.8. “License”**
    +1.8. "License"
         means this document.
     
    -**1.9. “Licensable”**
    +1.9. "Licensable"
         means having the right to grant, to the maximum extent possible,
         whether at the time of the initial grant or subsequently, any and
         all of the rights conveyed by this License.
     
    -**1.10. “Modifications”**
    +1.10. "Modifications"
         means any of the following:
     
    -* **(a)** any file in Source Code Form that results from an addition to,
    -    deletion from, or modification of the contents of Covered
    -    Software; or
    -* **(b)** any new file in Source Code Form that contains any Covered
    -    Software.
    +    (a) any file in Source Code Form that results from an addition to,
    +        deletion from, or modification of the contents of Covered
    +        Software; or
     
    -**1.11. “Patent Claims” of a Contributor**
    +    (b) any new file in Source Code Form that contains any Covered
    +        Software.
    +
    +1.11. "Patent Claims" of a Contributor
         means any patent claim(s), including without limitation, method,
         process, and apparatus claims, in any patent Licensable by such
         Contributor that would be infringed, but for the grant of the
    @@ -15306,49 +16554,50 @@ 

    Used by:

    made, import, or transfer of either its Contributions or its Contributor Version. -**1.12. “Secondary License”** +1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. -**1.13. “Source Code Form”** +1.13. "Source Code Form" means the form of the work preferred for making modifications. -**1.14. “You” (or “Your”)** +1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this - License. For legal entities, “You” includes any entity that + License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For - purposes of this definition, “control” means **(a)** the power, direct + purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or **(b)** ownership of more than + whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. +2. License Grants and Conditions +-------------------------------- -### 2. License Grants and Conditions - -#### 2.1. Grants +2.1. 
Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: -* **(a)** under intellectual property rights (other than patent or trademark) +(a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and -* **(b)** under Patent Claims of such Contributor to make, use, sell, offer + +(b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. -#### 2.2. Effective Date +2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. -#### 2.3. Limitations on Grant Scope +2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the @@ -15356,47 +16605,49 @@

    Used by:

    Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: -* **(a)** for any code that a Contributor has removed from Covered Software; +(a) for any code that a Contributor has removed from Covered Software; or -* **(b)** for infringements caused by: **(i)** Your and any other third party's - modifications of Covered Software, or **(ii)** the combination of its + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or -* **(c)** under Patent Claims infringed by Covered Software in the absence of + +(c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). -#### 2.4. Subsequent Licenses +2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). -#### 2.5. Representation +2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. -#### 2.6. Fair Use +2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. -#### 2.7. Conditions +2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. +3. Responsibilities +------------------- -### 3. Responsibilities - -#### 3.1. Distribution of Source Form +3.1. 
Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under @@ -15406,22 +16657,22 @@

    Used by:

    attempt to alter or restrict the recipients' rights in the Source Code Form. -#### 3.2. Distribution of Executable Form +3.2. Distribution of Executable Form If You distribute Covered Software in Executable Form then: -* **(a)** such Covered Software must also be made available in Source Code +(a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and -* **(b)** You may distribute such Executable Form under the terms of this +(b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. -#### 3.3. Distribution of a Larger Work +3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for @@ -15434,7 +16685,7 @@

    Used by:

    Software under the terms of either this License or such Secondary License(s). -#### 3.4. Notices +3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, @@ -15442,7 +16693,7 @@

    Used by:

    the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. -#### 3.5. Application of Additional Terms +3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered @@ -15455,27 +16706,27 @@

    Used by:

    disclaimers of warranty and limitations of liability specific to any jurisdiction. - -### 4. Inability to Comply Due to Statute or Regulation +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: **(a)** comply with -the terms of this License to the maximum extent possible; and **(b)** +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. +5. Termination +-------------- -### 5. Termination - -**5.1.** The rights granted under this License will terminate automatically +5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular -Contributor are reinstated **(a)** provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and **(b)** on an +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular @@ -15485,53 +16736,62 @@

    Used by:

    from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. -**5.2.** If You initiate litigation against any entity by asserting a patent +5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. -**5.3.** In the event of termination under Sections 5.1 or 5.2 above, all +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ -### 6. 
Disclaimer of Warranty - -> Covered Software is provided under this License on an “as is” -> basis, without warranty of any kind, either expressed, implied, or -> statutory, including, without limitation, warranties that the -> Covered Software is free of defects, merchantable, fit for a -> particular purpose or non-infringing. The entire risk as to the -> quality and performance of the Covered Software is with You. -> Should any Covered Software prove defective in any respect, You -> (not any Contributor) assume the cost of any necessary servicing, -> repair, or correction. This disclaimer of warranty constitutes an -> essential part of this License. No use of any Covered Software is -> authorized under this License except under this disclaimer. - -### 7. Limitation of Liability - -> Under no circumstances and under no legal theory, whether tort -> (including negligence), contract, or otherwise, shall any -> Contributor, or anyone who distributes Covered Software as -> permitted above, be liable to You for any direct, indirect, -> special, incidental, or consequential damages of any character -> including, without limitation, damages for lost profits, loss of -> goodwill, work stoppage, computer failure or malfunction, or any -> and all other commercial damages or losses, even if such party -> shall have been informed of the possibility of such damages. This -> limitation of liability shall not apply to liability for death or -> personal injury resulting from such party's negligence to the -> extent applicable law prohibits such limitation. Some -> jurisdictions do not allow the exclusion or limitation of -> incidental or consequential damages, so this exclusion and -> limitation may not apply to You. - +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ -### 8. Litigation +8. Litigation +------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal @@ -15540,8 +16800,8 @@

    Used by:

    Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. - -### 9. Miscellaneous +9. Miscellaneous +---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be @@ -15550,24 +16810,24 @@

    Used by:

    that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. +10. Versions of the License +--------------------------- -### 10. Versions of the License - -#### 10.1. New Versions +10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. -#### 10.2. Effect of New Versions +10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. -#### 10.3. Modified Versions +10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a @@ -15575,17 +16835,19 @@

    Used by:

    any references to the name of the license steward (except to note that such modified license differs from this License). -#### 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. -## Exhibit A - Source Code Form License Notice +Exhibit A - Source Code Form License Notice +------------------------------------------- - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE @@ -15594,16 +16856,19 @@

    Used by:

    You may add additional accurate notices of copyright ownership. -## Exhibit B - “Incompatible With Secondary Licenses” Notice +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0.
  • Mozilla Public License 2.0

    Used by:

    Mozilla Public License Version 2.0
    diff --git a/scripts/install.sh b/scripts/install.sh
    index 49b3de7c84..f2e7e7a0f4 100755
    --- a/scripts/install.sh
    +++ b/scripts/install.sh
    @@ -11,7 +11,7 @@ BINARY_DOWNLOAD_PREFIX="https://github.com/apollographql/router/releases/downloa
     
     # Router version defined in apollo-router's Cargo.toml
     # Note: Change this line manually during the release steps.
    -PACKAGE_VERSION="v1.50.0"
    +PACKAGE_VERSION="v1.51.0"
     
     download_binary() {
         downloader --check