From b248daa62a1b685e4489d8e4d87481d190682098 Mon Sep 17 00:00:00 2001
From: Trey Ivy
Date: Fri, 11 Oct 2024 02:29:09 +0000
Subject: [PATCH 1/2] dev branch

---
 bazel-demo/.bazelrc | 4 +-
 ent/gen/ent/actioncachestatistics.go | 52 +-
 .../actioncachestatistics.go | 47 +-
 ent/gen/ent/actioncachestatistics/where.go | 4 +-
 ent/gen/ent/actioncachestatistics_create.go | 31 +-
 ent/gen/ent/actioncachestatistics_query.go | 190 +-
 ent/gen/ent/actioncachestatistics_update.go | 150 +-
 ent/gen/ent/actiondata.go | 51 +-
 ent/gen/ent/actiondata/actiondata.go | 36 +-
 ent/gen/ent/actiondata/where.go | 2 +-
 ent/gen/ent/actiondata_create.go | 27 +-
 ent/gen/ent/actiondata_query.go | 117 +-
 ent/gen/ent/actiondata_update.go | 126 +-
 ent/gen/ent/actionsummary.go | 40 +-
 ent/gen/ent/actionsummary/actionsummary.go | 51 +-
 ent/gen/ent/actionsummary/where.go | 8 +-
 ent/gen/ent/actionsummary_create.go | 36 +-
 ent/gen/ent/actionsummary_query.go | 266 +-
 ent/gen/ent/actionsummary_update.go | 182 +-
 ent/gen/ent/artifactmetrics.go | 222 +-
 .../ent/artifactmetrics/artifactmetrics.go | 114 +-
 ent/gen/ent/artifactmetrics/where.go | 10 +-
 ent/gen/ent/artifactmetrics_create.go | 128 +-
 ent/gen/ent/artifactmetrics_query.go | 420 +-
 ent/gen/ent/artifactmetrics_update.go | 606 +-
 .../ent/bazelinvocation/bazelinvocation.go | 25 +-
 ent/gen/ent/bazelinvocation/where.go | 4 +-
 ent/gen/ent/bazelinvocation_create.go | 8 +-
 ent/gen/ent/bazelinvocation_query.go | 136 +-
 ent/gen/ent/bazelinvocation_update.go | 48 +-
 ent/gen/ent/buildgraphmetrics.go | 267 +-
 .../buildgraphmetrics/buildgraphmetrics.go | 132 +-
 ent/gen/ent/buildgraphmetrics/where.go | 12 +-
 ent/gen/ent/buildgraphmetrics_create.go | 153 +-
 ent/gen/ent/buildgraphmetrics_query.go | 486 +-
 ent/gen/ent/buildgraphmetrics_update.go | 724 +-
 ent/gen/ent/client.go | 142 +-
 ent/gen/ent/cumulativemetrics.go | 51 +-
 .../cumulativemetrics/cumulativemetrics.go | 36 +-
 ent/gen/ent/cumulativemetrics/where.go | 2 +-
 ent/gen/ent/cumulativemetrics_create.go | 27 +-
 ent/gen/ent/cumulativemetrics_query.go | 117 +-
 ent/gen/ent/cumulativemetrics_update.go | 126 +-
 ent/gen/ent/dynamicexecutionmetrics.go | 50 +-
 .../dynamicexecutionmetrics.go | 47 +-
 ent/gen/ent/dynamicexecutionmetrics/where.go | 4 +-
 ent/gen/ent/dynamicexecutionmetrics_create.go | 31 +-
 ent/gen/ent/dynamicexecutionmetrics_query.go | 170 +-
 ent/gen/ent/dynamicexecutionmetrics_update.go | 150 +-
 ent/gen/ent/evaluationstat.go | 81 +-
 ent/gen/ent/evaluationstat/evaluationstat.go | 32 +-
 ent/gen/ent/evaluationstat/where.go | 2 +-
 ent/gen/ent/evaluationstat_create.go | 27 +-
 ent/gen/ent/evaluationstat_query.go | 116 +-
 ent/gen/ent/evaluationstat_update.go | 126 +-
 ent/gen/ent/exectioninfo.go | 46 +-
 ent/gen/ent/exectioninfo/exectioninfo.go | 37 +-
 ent/gen/ent/exectioninfo/where.go | 6 +-
 ent/gen/ent/exectioninfo_create.go | 32 +-
 ent/gen/ent/exectioninfo_query.go | 168 +-
 ent/gen/ent/exectioninfo_update.go | 150 +-
 ent/gen/ent/filesmetric.go | 71 +-
 ent/gen/ent/filesmetric/filesmetric.go | 31 +-
 ent/gen/ent/filesmetric/where.go | 2 +-
 ent/gen/ent/filesmetric_create.go | 27 +-
 ent/gen/ent/filesmetric_query.go | 114 +-
 ent/gen/ent/filesmetric_update.go | 126 +-
 ent/gen/ent/garbagemetrics.go | 51 +-
 ent/gen/ent/garbagemetrics/garbagemetrics.go | 36 +-
 ent/gen/ent/garbagemetrics/where.go | 2 +-
 ent/gen/ent/garbagemetrics_create.go | 27 +-
 ent/gen/ent/garbagemetrics_query.go | 117 +-
 ent/gen/ent/garbagemetrics_update.go | 126 +-
 ent/gen/ent/gql_collection.go | 356 +-
 ent/gen/ent/gql_edge.go | 612 +-
 ent/gen/ent/gql_pagination.go | 133 +
 ent/gen/ent/gql_where_input.go | 42 +
 ent/gen/ent/memorymetrics.go | 50 +-
 ent/gen/ent/memorymetrics/memorymetrics.go | 47 +-
 ent/gen/ent/memorymetrics/where.go | 4 +-
 ent/gen/ent/memorymetrics_create.go | 31 +-
 ent/gen/ent/memorymetrics_query.go | 170 +-
 ent/gen/ent/memorymetrics_update.go | 150 +-
 ent/gen/ent/metrics.go | 361 +-
 ent/gen/ent/metrics/metrics.go | 234 +-
 ent/gen/ent/metrics/where.go | 20 +-
 ent/gen/ent/metrics_create.go | 258 +-
 ent/gen/ent/metrics_query.go | 938 +-
 ent/gen/ent/metrics_update.go | 1252 +--
 ent/gen/ent/migrate/schema.go | 1033 +-
 ent/gen/ent/missdetail.go | 51 +-
 ent/gen/ent/missdetail/missdetail.go | 36 +-
 ent/gen/ent/missdetail/where.go | 2 +-
 ent/gen/ent/missdetail_create.go | 27 +-
 ent/gen/ent/missdetail_query.go | 119 +-
 ent/gen/ent/missdetail_update.go | 126 +-
 ent/gen/ent/mutation.go | 3085 ++---
 ent/gen/ent/namedsetoffiles.go | 48 +-
 .../ent/namedsetoffiles/namedsetoffiles.go | 18 +-
 ent/gen/ent/namedsetoffiles/where.go | 2 +-
 ent/gen/ent/namedsetoffiles_create.go | 25 +-
 ent/gen/ent/namedsetoffiles_query.go | 88 +-
 ent/gen/ent/namedsetoffiles_update.go | 118 +-
 ent/gen/ent/networkmetrics.go | 87 +-
 ent/gen/ent/networkmetrics/networkmetrics.go | 51 +-
 ent/gen/ent/networkmetrics/where.go | 4 +-
 ent/gen/ent/networkmetrics_create.go | 51 +-
 ent/gen/ent/networkmetrics_query.go | 152 +-
 ent/gen/ent/networkmetrics_update.go | 244 +-
 ent/gen/ent/outputgroup.go | 52 +-
 ent/gen/ent/outputgroup/outputgroup.go | 23 +-
 ent/gen/ent/outputgroup/where.go | 4 +-
 ent/gen/ent/outputgroup_create.go | 28 +-
 ent/gen/ent/outputgroup_query.go | 124 +-
 ent/gen/ent/outputgroup_update.go | 126 +-
 ent/gen/ent/packageloadmetrics.go | 51 +-
 .../packageloadmetrics/packageloadmetrics.go | 36 +-
 ent/gen/ent/packageloadmetrics/where.go | 2 +-
 ent/gen/ent/packageloadmetrics_create.go | 27 +-
 ent/gen/ent/packageloadmetrics_query.go | 119 +-
 ent/gen/ent/packageloadmetrics_update.go | 126 +-
 ent/gen/ent/packagemetrics.go | 50 +-
 ent/gen/ent/packagemetrics/packagemetrics.go | 47 +-
 ent/gen/ent/packagemetrics/where.go | 4 +-
 ent/gen/ent/packagemetrics_create.go | 31 +-
 ent/gen/ent/packagemetrics_query.go | 170 +-
 ent/gen/ent/packagemetrics_update.go | 150 +-
 ent/gen/ent/racestatistics.go | 51 +-
 ent/gen/ent/racestatistics/racestatistics.go | 36 +-
 ent/gen/ent/racestatistics/where.go | 2 +-
 ent/gen/ent/racestatistics_create.go | 27 +-
 ent/gen/ent/racestatistics_query.go | 119 +-
 ent/gen/ent/racestatistics_update.go | 126 +-
 ent/gen/ent/resourceusage.go | 51 +-
 ent/gen/ent/resourceusage/resourceusage.go | 36 +-
 ent/gen/ent/resourceusage/where.go | 2 +-
 ent/gen/ent/resourceusage_create.go | 27 +-
 ent/gen/ent/resourceusage_query.go | 117 +-
 ent/gen/ent/resourceusage_update.go | 126 +-
 ent/gen/ent/runnercount.go | 51 +-
 ent/gen/ent/runnercount/runnercount.go | 36 +-
 ent/gen/ent/runnercount/where.go | 2 +-
 ent/gen/ent/runnercount_create.go | 27 +-
 ent/gen/ent/runnercount_query.go | 117 +-
 ent/gen/ent/runnercount_update.go | 126 +-
 ent/gen/ent/schema-viz.html | 2 +-
 .../systemnetworkstats/systemnetworkstats.go | 2 +-
 ent/gen/ent/systemnetworkstats/where.go | 2 +-
 ent/gen/ent/systemnetworkstats_create.go | 2 +-
 ent/gen/ent/systemnetworkstats_query.go | 2 +-
 ent/gen/ent/systemnetworkstats_update.go | 8 +-
 ent/gen/ent/targetcomplete.go | 50 +-
 ent/gen/ent/targetcomplete/targetcomplete.go | 23 +-
 ent/gen/ent/targetcomplete/where.go | 4 +-
 ent/gen/ent/targetcomplete_create.go | 28 +-
 ent/gen/ent/targetcomplete_query.go | 102 +-
 ent/gen/ent/targetcomplete_update.go | 126 +-
 ent/gen/ent/targetconfigured.go | 51 +-
 .../ent/targetconfigured/targetconfigured.go | 28 +-
 ent/gen/ent/targetconfigured/where.go | 2 +-
 ent/gen/ent/targetconfigured_create.go | 25 +-
 ent/gen/ent/targetconfigured_query.go | 87 +-
 ent/gen/ent/targetconfigured_update.go | 118 +-
 ent/gen/ent/targetmetrics.go | 51 +-
 ent/gen/ent/targetmetrics/targetmetrics.go | 36 +-
 ent/gen/ent/targetmetrics/where.go | 2 +-
 ent/gen/ent/targetmetrics_create.go | 27 +-
 ent/gen/ent/targetmetrics_query.go | 117 +-
 ent/gen/ent/targetmetrics_update.go | 126 +-
 ent/gen/ent/targetpair.go | 61 +-
 ent/gen/ent/targetpair/targetpair.go | 38 +-
 ent/gen/ent/targetpair/where.go | 6 +-
 ent/gen/ent/targetpair_create.go | 33 +-
 ent/gen/ent/targetpair_query.go | 182 +-
 ent/gen/ent/targetpair_update.go | 142 +-
 ent/gen/ent/testcollection.go | 69 +-
 ent/gen/ent/testcollection/testcollection.go | 41 +-
 ent/gen/ent/testcollection/where.go | 61 +-
 ent/gen/ent/testcollection_create.go | 49 +-
 ent/gen/ent/testcollection_query.go | 154 +-
 ent/gen/ent/testcollection_update.go | 187 +-
 ent/gen/ent/testfile.go | 69 +-
 ent/gen/ent/testfile/testfile.go | 28 +-
 ent/gen/ent/testfile/where.go | 2 +-
 ent/gen/ent/testfile_create.go | 27 +-
 ent/gen/ent/testfile_query.go | 114 +-
 ent/gen/ent/testfile_update.go | 126 +-
 ent/gen/ent/testresultbes.go | 16 +-
 ent/gen/ent/testresultbes/testresultbes.go | 19 +-
 ent/gen/ent/testresultbes/where.go | 4 +-
 ent/gen/ent/testresultbes_create.go | 7 +-
 ent/gen/ent/testresultbes_query.go | 106 +-
 ent/gen/ent/testresultbes_update.go | 32 +-
 ent/gen/ent/testsummary.go | 54 +-
 ent/gen/ent/testsummary/testsummary.go | 28 +-
 ent/gen/ent/testsummary/where.go | 2 +-
 ent/gen/ent/testsummary_create.go | 25 +-
 ent/gen/ent/testsummary_query.go | 94 +-
 ent/gen/ent/testsummary_update.go | 118 +-
 ent/gen/ent/timingbreakdown.go | 52 +-
 .../ent/timingbreakdown/timingbreakdown.go | 40 +-
 ent/gen/ent/timingbreakdown/where.go | 4 +-
 ent/gen/ent/timingbreakdown_create.go | 29 +-
 ent/gen/ent/timingbreakdown_query.go | 158 +-
 ent/gen/ent/timingbreakdown_update.go | 142 +-
 ent/gen/ent/timingchild.go | 51 +-
 ent/gen/ent/timingchild/timingchild.go | 36 +-
 ent/gen/ent/timingchild/where.go | 2 +-
 ent/gen/ent/timingchild_create.go | 27 +-
 ent/gen/ent/timingchild_query.go | 117 +-
 ent/gen/ent/timingchild_update.go | 126 +-
 ent/gen/ent/timingmetrics.go | 51 +-
 ent/gen/ent/timingmetrics/timingmetrics.go | 36 +-
 ent/gen/ent/timingmetrics/where.go | 2 +-
 ent/gen/ent/timingmetrics_create.go | 27 +-
 ent/gen/ent/timingmetrics_query.go | 117 +-
 ent/gen/ent/timingmetrics_update.go | 126 +-
 ent/schema/actioncachestatistics.go | 3 +-
 ent/schema/actiondata.go | 3 +-
 ent/schema/actionsummary.go | 3 +-
 ent/schema/artifactmetrics.go | 16 +-
 ent/schema/bazelinvocation.go | 4 +-
 ent/schema/buildgraphmetrics.go | 18 +-
 ent/schema/cumulativemetrics.go | 4 +-
 ent/schema/dynamicexecutionmetrics.go | 4 +-
 ent/schema/evaluationstat.go | 3 +-
 ent/schema/executioninfo.go | 4 +-
 ent/schema/filesmetric.go | 3 +-
 ent/schema/garbagemetrics.go | 3 +-
 ent/schema/memorymetrics.go | 3 +-
 ent/schema/metrics.go | 30 +-
 ent/schema/missdetail.go | 4 +-
 ent/schema/namedsetoffiles.go | 3 +-
 ent/schema/networkmetrics.go | 7 +-
 ent/schema/outputgroup.go | 3 +-
 ent/schema/packageloadmetrics.go | 4 +-
 ent/schema/packagemetrics.go | 4 +-
 ent/schema/racestatistics.go | 4 +-
 ent/schema/resourceusage.go | 3 +-
 ent/schema/runnercount.go | 3 +-
 ent/schema/systemnetworkstats.go | 4 +-
 ent/schema/targetcomplete.go | 3 +-
 ent/schema/targetconfigured.go | 3 +-
 ent/schema/targetmetrics.go | 4 +-
 ent/schema/targetpair.go | 9 +-
 ent/schema/testcollection.go | 35 +-
 ent/schema/testfile.go | 4 +-
 ent/schema/testresult.go | 7 +-
 ent/schema/testsummary.go | 3 +-
 ent/schema/timingbreakdown.go | 4 +-
 ent/schema/timingchild.go | 4 +-
 ent/schema/timingmetrics.go | 4 +-
 frontend/.env | 3 +
 frontend/package-lock.json | 9937 +++++++++++++++++
 frontend/package.json | 5 +-
 frontend/src/app/layout.tsx | 38 +-
 frontend/src/app/page.tsx | 14 +-
 frontend/src/app/tests/[slug]/graphql.ts | 0
 frontend/src/app/tests/[slug]/page.tsx | 34 +
 frontend/src/app/tests/index.graphql.ts | 92 +
 frontend/src/app/tests/page.tsx | 26 +
 frontend/src/app/trends/index.graphql.ts | 28 +
 frontend/src/app/trends/page.tsx | 121 +
 .../components/ActionCacheMetrics/index.tsx | 30 +-
 .../components/ActionDataMetrics/index.tsx | 68 +-
 .../src/components/ApolloWrapper/index.tsx | 12 +-
 frontend/src/components/AppBar/AppBarMenu.tsx | 2 +-
 .../src/components/AppBar/AppBarTitle.tsx | 2 +-
 .../src/components/AppBar/index.module.css | 7 +-
 frontend/src/components/AppBar/index.tsx | 2 +
 frontend/src/components/Artifacts/index.tsx | 29 +-
 .../src/components/BazelInvocation/index.tsx | 57 +-
 .../BazelInvocationsTable/Columns.tsx | 2 +
 frontend/src/components/Breadcrumbs/index.tsx | 2 +-
 frontend/src/components/FooterBar/index.tsx | 27 +-
 .../src/components/MemoryMetrics/index.tsx | 9 +-
 .../src/components/NetworkMetrics/index.tsx | 2 +-
 .../src/components/RunnerMetrics/index.tsx | 9 +-
 .../src/components/TargetMetrics/index.tsx | 29 +-
 .../src/components/TestDetails/graphql.ts | 34 +
 frontend/src/components/TestDetails/index.tsx | 111 +
 frontend/src/components/TestGrid/index.tsx | 169 +
 frontend/src/components/TestGridBtn/index.tsx | 78 +
 frontend/src/components/TestGridRow/index.tsx | 52 +
 .../src/components/TestStatusTag/index.tsx | 45 +-
 .../src/components/TestsMetrics/index.tsx | 35 +-
 .../src/components/TimingMetrics/index.tsx | 6 +-
 frontend/src/components/Utilities/time.ts | 15 +
 frontend/src/graphql/__generated__/gql.ts | 35 +
 frontend/src/graphql/__generated__/graphql.ts | 330 +-
 .../__generated__/persisted-documents.json | 9 +-
 frontend/src/theme/theme.module.css | 6 +
 internal/api/grpc/bes/bes.go | 11 +-
 internal/graphql/BUILD.bazel | 1 -
 internal/graphql/custom.resolvers.go | 188 +
 internal/graphql/ent.resolvers.go | 9 +-
 internal/graphql/helpers/resolver.helpers.go | 19 +
 internal/graphql/model/BUILD.bazel | 2 +
 internal/graphql/model/models_gen.go | 33 +
 internal/graphql/schema/custom.graphql | 39 +
 internal/graphql/schema/ent.graphql | 222 +-
 internal/graphql/server_gen.go | 5690 +++++++---
 internal/graphql/testdata/snapshot.db | Bin 507904 -> 393216 bytes
 .../found-(by-URL).golden.json | 442 +-
 .../found-(by-UUID).golden.json | 442 +-
 ...ocation-analysis-failed-target.golden.json | 442 +-
 ...-progress-if-action-has-output.golden.json | 482 +-
 ...single-failed-bazel-invocation.golden.json | 790 +-
 .../get-successful-bazel-build.golden.json | 778 +-
 .../get-successful-bazel-test.golden.json | 786 +-
 pkg/processing/save.go | 45 +-
 pkg/summary/summarizer.go | 21 +-
 pkg/summary/summary.go | 1 +
 .../nextjs_build.bep.ndjson.golden.json | 33 +-
 .../nextjs_build_fail.bep.ndjson.golden.json | 24 +-
 .../nextjs_test.bep.ndjson.golden.json | 33 +-
 .../nextjs_test_fail.bep.ndjson.golden.json | 9 +-
 317 files changed, 25957 insertions(+), 20070 deletions(-)
 create mode 100644 frontend/package-lock.json
 create mode 100644 frontend/src/app/tests/[slug]/graphql.ts
 create mode 100644 frontend/src/app/tests/[slug]/page.tsx
 create mode
100644 frontend/src/app/tests/index.graphql.ts create mode 100644 frontend/src/app/tests/page.tsx create mode 100644 frontend/src/app/trends/index.graphql.ts create mode 100644 frontend/src/app/trends/page.tsx create mode 100644 frontend/src/components/TestDetails/graphql.ts create mode 100644 frontend/src/components/TestDetails/index.tsx create mode 100644 frontend/src/components/TestGrid/index.tsx create mode 100644 frontend/src/components/TestGridBtn/index.tsx create mode 100644 frontend/src/components/TestGridRow/index.tsx diff --git a/bazel-demo/.bazelrc b/bazel-demo/.bazelrc index 9455409..e1320f1 100644 --- a/bazel-demo/.bazelrc +++ b/bazel-demo/.bazelrc @@ -1,4 +1,4 @@ build --bes_backend=grpc://localhost:8082 -build --bes_results_url= +build --bes_results_url=http://localhost:8081/bazel-invocations/ build --build_metadata=user_ldap=demo-user -build --build_metadata=user_email= +build --build_metadata=user_email=demo-user@example.com diff --git a/ent/gen/ent/actioncachestatistics.go b/ent/gen/ent/actioncachestatistics.go index 0469743..8b69663 100644 --- a/ent/gen/ent/actioncachestatistics.go +++ b/ent/gen/ent/actioncachestatistics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/actioncachestatistics" + "github.com/buildbarn/bb-portal/ent/gen/ent/actionsummary" ) // ActionCacheStatistics is the model entity for the ActionCacheStatistics schema. @@ -28,14 +29,15 @@ type ActionCacheStatistics struct { Misses int32 `json:"misses,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the ActionCacheStatisticsQuery when eager-loading is set. - Edges ActionCacheStatisticsEdges `json:"edges"` - selectValues sql.SelectValues + Edges ActionCacheStatisticsEdges `json:"edges"` + action_summary_action_cache_statistics *int + selectValues sql.SelectValues } // ActionCacheStatisticsEdges holds the relations/edges for other nodes in the graph. type ActionCacheStatisticsEdges struct { // ActionSummary holds the value of the action_summary edge. - ActionSummary []*ActionSummary `json:"action_summary,omitempty"` + ActionSummary *ActionSummary `json:"action_summary,omitempty"` // MissDetails holds the value of the miss_details edge. MissDetails []*MissDetail `json:"miss_details,omitempty"` // loadedTypes holds the information for reporting if a @@ -44,15 +46,16 @@ type ActionCacheStatisticsEdges struct { // totalCount holds the count of the edges above. totalCount [2]map[string]int - namedActionSummary map[string][]*ActionSummary - namedMissDetails map[string][]*MissDetail + namedMissDetails map[string][]*MissDetail } // ActionSummaryOrErr returns the ActionSummary value or an error if the edge -// was not loaded in eager-loading. -func (e ActionCacheStatisticsEdges) ActionSummaryOrErr() ([]*ActionSummary, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e ActionCacheStatisticsEdges) ActionSummaryOrErr() (*ActionSummary, error) { + if e.ActionSummary != nil { return e.ActionSummary, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: actionsummary.Label} } return nil, &NotLoadedError{edge: "action_summary"} } @@ -73,6 +76,8 @@ func (*ActionCacheStatistics) scanValues(columns []string) ([]any, error) { switch columns[i] { case actioncachestatistics.FieldID, actioncachestatistics.FieldSizeInBytes, actioncachestatistics.FieldSaveTimeInMs, actioncachestatistics.FieldLoadTimeInMs, actioncachestatistics.FieldHits, actioncachestatistics.FieldMisses: values[i] = new(sql.NullInt64) + case actioncachestatistics.ForeignKeys[0]: // action_summary_action_cache_statistics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -124,6 +129,13 @@ func (acs *ActionCacheStatistics) assignValues(columns []string, values []any) e } else if value.Valid { acs.Misses = int32(value.Int64) } + case actioncachestatistics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field action_summary_action_cache_statistics", value) + } else if value.Valid { + acs.action_summary_action_cache_statistics = new(int) + *acs.action_summary_action_cache_statistics = int(value.Int64) + } default: acs.selectValues.Set(columns[i], values[i]) } @@ -188,30 +200,6 @@ func (acs *ActionCacheStatistics) String() string { return builder.String() } -// NamedActionSummary returns the ActionSummary named value or an error if the edge was not -// loaded in eager-loading with this name. -func (acs *ActionCacheStatistics) NamedActionSummary(name string) ([]*ActionSummary, error) { - if acs.Edges.namedActionSummary == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := acs.Edges.namedActionSummary[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (acs *ActionCacheStatistics) appendNamedActionSummary(name string, edges ...*ActionSummary) { - if acs.Edges.namedActionSummary == nil { - acs.Edges.namedActionSummary = make(map[string][]*ActionSummary) - } - if len(edges) == 0 { - acs.Edges.namedActionSummary[name] = []*ActionSummary{} - } else { - acs.Edges.namedActionSummary[name] = append(acs.Edges.namedActionSummary[name], edges...) - } -} - // NamedMissDetails returns the MissDetails named value or an error if the edge was not // loaded in eager-loading with this name. func (acs *ActionCacheStatistics) NamedMissDetails(name string) ([]*MissDetail, error) { diff --git a/ent/gen/ent/actioncachestatistics/actioncachestatistics.go b/ent/gen/ent/actioncachestatistics/actioncachestatistics.go index 7db2c5f..277b01a 100644 --- a/ent/gen/ent/actioncachestatistics/actioncachestatistics.go +++ b/ent/gen/ent/actioncachestatistics/actioncachestatistics.go @@ -28,16 +28,20 @@ const ( EdgeMissDetails = "miss_details" // Table holds the table name of the actioncachestatistics in the database. Table = "action_cache_statistics" - // ActionSummaryTable is the table that holds the action_summary relation/edge. The primary key declared below. - ActionSummaryTable = "action_summary_action_cache_statistics" + // ActionSummaryTable is the table that holds the action_summary relation/edge. + ActionSummaryTable = "action_cache_statistics" // ActionSummaryInverseTable is the table name for the ActionSummary entity. // It exists in this package in order to avoid circular dependency with the "actionsummary" package. 
ActionSummaryInverseTable = "action_summaries" - // MissDetailsTable is the table that holds the miss_details relation/edge. The primary key declared below. - MissDetailsTable = "action_cache_statistics_miss_details" + // ActionSummaryColumn is the table column denoting the action_summary relation/edge. + ActionSummaryColumn = "action_summary_action_cache_statistics" + // MissDetailsTable is the table that holds the miss_details relation/edge. + MissDetailsTable = "miss_details" // MissDetailsInverseTable is the table name for the MissDetail entity. // It exists in this package in order to avoid circular dependency with the "missdetail" package. MissDetailsInverseTable = "miss_details" + // MissDetailsColumn is the table column denoting the miss_details relation/edge. + MissDetailsColumn = "action_cache_statistics_miss_details" ) // Columns holds all SQL columns for actioncachestatistics fields. @@ -50,14 +54,11 @@ var Columns = []string{ FieldMisses, } -var ( - // ActionSummaryPrimaryKey and ActionSummaryColumn2 are the table columns denoting the - // primary key for the action_summary relation (M2M). - ActionSummaryPrimaryKey = []string{"action_summary_id", "action_cache_statistics_id"} - // MissDetailsPrimaryKey and MissDetailsColumn2 are the table columns denoting the - // primary key for the miss_details relation (M2M). - MissDetailsPrimaryKey = []string{"action_cache_statistics_id", "miss_detail_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "action_cache_statistics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "action_summary_action_cache_statistics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -66,6 +67,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -102,17 +108,10 @@ func ByMisses(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldMisses, opts...).ToFunc() } -// ByActionSummaryCount orders the results by action_summary count. -func ByActionSummaryCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionSummaryStep(), opts...) - } -} - -// ByActionSummary orders the results by action_summary terms. -func ByActionSummary(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByActionSummaryField orders the results by action_summary field. +func ByActionSummaryField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), sql.OrderByField(field, opts...)) } } @@ -133,13 +132,13 @@ func newActionSummaryStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionSummaryInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, ActionSummaryTable, ActionSummaryColumn), ) } func newMissDetailsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MissDetailsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, MissDetailsTable, MissDetailsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, MissDetailsTable, MissDetailsColumn), ) } diff --git a/ent/gen/ent/actioncachestatistics/where.go b/ent/gen/ent/actioncachestatistics/where.go index 14b938b..4227cec 100644 --- a/ent/gen/ent/actioncachestatistics/where.go +++ b/ent/gen/ent/actioncachestatistics/where.go @@ -333,7 +333,7 @@ func HasActionSummary() predicate.ActionCacheStatistics { return predicate.ActionCacheStatistics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, ActionSummaryTable, ActionSummaryColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -356,7 +356,7 @@ func HasMissDetails() predicate.ActionCacheStatistics { return predicate.ActionCacheStatistics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, MissDetailsTable, MissDetailsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, MissDetailsTable, MissDetailsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/actioncachestatistics_create.go b/ent/gen/ent/actioncachestatistics_create.go index a102f13..e21ba11 100644 --- a/ent/gen/ent/actioncachestatistics_create.go +++ b/ent/gen/ent/actioncachestatistics_create.go @@ -90,19 +90,23 @@ func (acsc *ActionCacheStatisticsCreate) SetNillableMisses(i *int32) *ActionCach return acsc } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (acsc *ActionCacheStatisticsCreate) AddActionSummaryIDs(ids ...int) *ActionCacheStatisticsCreate { - acsc.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (acsc *ActionCacheStatisticsCreate) SetActionSummaryID(id int) *ActionCacheStatisticsCreate { + acsc.mutation.SetActionSummaryID(id) return acsc } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (acsc *ActionCacheStatisticsCreate) AddActionSummary(a ...*ActionSummary) *ActionCacheStatisticsCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (acsc *ActionCacheStatisticsCreate) SetNillableActionSummaryID(id *int) *ActionCacheStatisticsCreate { + if id != nil { + acsc = acsc.SetActionSummaryID(*id) } - return acsc.AddActionSummaryIDs(ids...) + return acsc +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (acsc *ActionCacheStatisticsCreate) SetActionSummary(a *ActionSummary) *ActionCacheStatisticsCreate { + return acsc.SetActionSummaryID(a.ID) } // AddMissDetailIDs adds the "miss_details" edge to the MissDetail entity by IDs. 
@@ -202,10 +206,10 @@ func (acsc *ActionCacheStatisticsCreate) createSpec() (*ActionCacheStatistics, * } if nodes := acsc.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, + Columns: []string{actioncachestatistics.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -214,14 +218,15 @@ func (acsc *ActionCacheStatisticsCreate) createSpec() (*ActionCacheStatistics, * for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.action_summary_action_cache_statistics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := acsc.mutation.MissDetailsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), diff --git a/ent/gen/ent/actioncachestatistics_query.go b/ent/gen/ent/actioncachestatistics_query.go index 6f97ca5..52114ed 100644 --- a/ent/gen/ent/actioncachestatistics_query.go +++ b/ent/gen/ent/actioncachestatistics_query.go @@ -20,16 +20,16 @@ import ( // ActionCacheStatisticsQuery is the builder for querying ActionCacheStatistics entities. type ActionCacheStatisticsQuery struct { config - ctx *QueryContext - order []actioncachestatistics.OrderOption - inters []Interceptor - predicates []predicate.ActionCacheStatistics - withActionSummary *ActionSummaryQuery - withMissDetails *MissDetailQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*ActionCacheStatistics) error - withNamedActionSummary map[string]*ActionSummaryQuery - withNamedMissDetails map[string]*MissDetailQuery + ctx *QueryContext + order []actioncachestatistics.OrderOption + inters []Interceptor + predicates []predicate.ActionCacheStatistics + withActionSummary *ActionSummaryQuery + withMissDetails *MissDetailQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*ActionCacheStatistics) error + withNamedMissDetails map[string]*MissDetailQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -80,7 +80,7 @@ func (acsq *ActionCacheStatisticsQuery) QueryActionSummary() *ActionSummaryQuery step := sqlgraph.NewStep( sqlgraph.From(actioncachestatistics.Table, actioncachestatistics.FieldID, selector), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, actioncachestatistics.ActionSummaryTable, actioncachestatistics.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, actioncachestatistics.ActionSummaryTable, actioncachestatistics.ActionSummaryColumn), ) fromU = sqlgraph.SetNeighbors(acsq.driver.Dialect(), step) return fromU, nil @@ -102,7 +102,7 @@ func (acsq *ActionCacheStatisticsQuery) QueryMissDetails() *MissDetailQuery { step := sqlgraph.NewStep( sqlgraph.From(actioncachestatistics.Table, actioncachestatistics.FieldID, selector), sqlgraph.To(missdetail.Table, missdetail.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actioncachestatistics.MissDetailsTable, actioncachestatistics.MissDetailsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actioncachestatistics.MissDetailsTable, actioncachestatistics.MissDetailsColumn), ) fromU = sqlgraph.SetNeighbors(acsq.driver.Dialect(), step) return fromU, nil @@ -409,12 +409,19 @@ func (acsq *ActionCacheStatisticsQuery) prepareQuery(ctx context.Context) error func (acsq *ActionCacheStatisticsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ActionCacheStatistics, error) { var ( nodes = []*ActionCacheStatistics{} + withFKs = acsq.withFKs _spec = acsq.querySpec() loadedTypes = [2]bool{ acsq.withActionSummary != nil, acsq.withMissDetails != nil, } ) + if acsq.withActionSummary != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, actioncachestatistics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*ActionCacheStatistics).scanValues(nil, columns) } @@ -437,11 +444,8 @@ func (acsq *ActionCacheStatisticsQuery) sqlAll(ctx context.Context, hooks ...que return nodes, nil } if query := acsq.withActionSummary; query != nil { - if err := acsq.loadActionSummary(ctx, query, nodes, - func(n *ActionCacheStatistics) { n.Edges.ActionSummary = []*ActionSummary{} }, - func(n *ActionCacheStatistics, e *ActionSummary) { - n.Edges.ActionSummary = append(n.Edges.ActionSummary, e) - }); err != nil { + if err := acsq.loadActionSummary(ctx, query, nodes, nil, + func(n *ActionCacheStatistics, e *ActionSummary) { n.Edges.ActionSummary = e }); err != nil { return nil, err } } @@ -452,13 +456,6 @@ func (acsq *ActionCacheStatisticsQuery) sqlAll(ctx context.Context, hooks ...que return nil, err } } - for name, query := range acsq.withNamedActionSummary { - if err := acsq.loadActionSummary(ctx, query, nodes, - func(n *ActionCacheStatistics) { n.appendNamedActionSummary(name) }, - func(n *ActionCacheStatistics, e *ActionSummary) { n.appendNamedActionSummary(name, e) }); err != nil { - return nil, err - } - } for name, query := range acsq.withNamedMissDetails { if err := acsq.loadMissDetails(ctx, query, nodes, func(n *ActionCacheStatistics) { n.appendNamedMissDetails(name) }, @@ -475,124 +472,65 @@ func (acsq *ActionCacheStatisticsQuery) sqlAll(ctx context.Context, hooks ...que } func (acsq *ActionCacheStatisticsQuery) loadActionSummary(ctx context.Context, query *ActionSummaryQuery, nodes []*ActionCacheStatistics, init func(*ActionCacheStatistics), assign func(*ActionCacheStatistics, *ActionSummary)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionCacheStatistics) - nids := make(map[int]map[*ActionCacheStatistics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ActionCacheStatistics) + for i := range nodes { + if nodes[i].action_summary_action_cache_statistics == nil { + continue + } + fk := *nodes[i].action_summary_action_cache_statistics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actioncachestatistics.ActionSummaryTable) - s.Join(joinT).On(s.C(actionsummary.FieldID), joinT.C(actioncachestatistics.ActionSummaryPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(actioncachestatistics.ActionSummaryPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actioncachestatistics.ActionSummaryPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionCacheStatistics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionSummary](ctx, query, qr, query.inters) + query.Where(actionsummary.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "action_summary" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "action_summary_action_cache_statistics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (acsq *ActionCacheStatisticsQuery) loadMissDetails(ctx context.Context, query *MissDetailQuery, nodes []*ActionCacheStatistics, init func(*ActionCacheStatistics), assign func(*ActionCacheStatistics, *MissDetail)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionCacheStatistics) - nids := make(map[int]map[*ActionCacheStatistics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ActionCacheStatistics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actioncachestatistics.MissDetailsTable) - s.Join(joinT).On(s.C(missdetail.FieldID), joinT.C(actioncachestatistics.MissDetailsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(actioncachestatistics.MissDetailsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actioncachestatistics.MissDetailsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionCacheStatistics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*MissDetail](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.MissDetail(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(actioncachestatistics.MissDetailsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "miss_details" node returned %v`, n.ID) + fk := n.action_cache_statistics_miss_details + if fk == nil { + return fmt.Errorf(`foreign-key "action_cache_statistics_miss_details" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "action_cache_statistics_miss_details" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -681,20 +619,6 @@ func (acsq *ActionCacheStatisticsQuery) sqlQuery(ctx context.Context) *sql.Selec return selector } -// WithNamedActionSummary tells the query-builder to eager-load the nodes that are connected to the "action_summary" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (acsq *ActionCacheStatisticsQuery) WithNamedActionSummary(name string, opts ...func(*ActionSummaryQuery)) *ActionCacheStatisticsQuery { - query := (&ActionSummaryClient{config: acsq.config}).Query() - for _, opt := range opts { - opt(query) - } - if acsq.withNamedActionSummary == nil { - acsq.withNamedActionSummary = make(map[string]*ActionSummaryQuery) - } - acsq.withNamedActionSummary[name] = query - return acsq -} - // WithNamedMissDetails tells the query-builder to eager-load the nodes that are connected to the "miss_details" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (acsq *ActionCacheStatisticsQuery) WithNamedMissDetails(name string, opts ...func(*MissDetailQuery)) *ActionCacheStatisticsQuery { diff --git a/ent/gen/ent/actioncachestatistics_update.go b/ent/gen/ent/actioncachestatistics_update.go index 7dea383..5073b27 100644 --- a/ent/gen/ent/actioncachestatistics_update.go +++ b/ent/gen/ent/actioncachestatistics_update.go @@ -164,19 +164,23 @@ func (acsu *ActionCacheStatisticsUpdate) ClearMisses() *ActionCacheStatisticsUpd return acsu } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (acsu *ActionCacheStatisticsUpdate) AddActionSummaryIDs(ids ...int) *ActionCacheStatisticsUpdate { - acsu.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. 
+func (acsu *ActionCacheStatisticsUpdate) SetActionSummaryID(id int) *ActionCacheStatisticsUpdate { + acsu.mutation.SetActionSummaryID(id) return acsu } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (acsu *ActionCacheStatisticsUpdate) AddActionSummary(a ...*ActionSummary) *ActionCacheStatisticsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (acsu *ActionCacheStatisticsUpdate) SetNillableActionSummaryID(id *int) *ActionCacheStatisticsUpdate { + if id != nil { + acsu = acsu.SetActionSummaryID(*id) } - return acsu.AddActionSummaryIDs(ids...) + return acsu +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (acsu *ActionCacheStatisticsUpdate) SetActionSummary(a *ActionSummary) *ActionCacheStatisticsUpdate { + return acsu.SetActionSummaryID(a.ID) } // AddMissDetailIDs adds the "miss_details" edge to the MissDetail entity by IDs. @@ -199,27 +203,12 @@ func (acsu *ActionCacheStatisticsUpdate) Mutation() *ActionCacheStatisticsMutati return acsu.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (acsu *ActionCacheStatisticsUpdate) ClearActionSummary() *ActionCacheStatisticsUpdate { acsu.mutation.ClearActionSummary() return acsu } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (acsu *ActionCacheStatisticsUpdate) RemoveActionSummaryIDs(ids ...int) *ActionCacheStatisticsUpdate { - acsu.mutation.RemoveActionSummaryIDs(ids...) - return acsu -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (acsu *ActionCacheStatisticsUpdate) RemoveActionSummary(a ...*ActionSummary) *ActionCacheStatisticsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return acsu.RemoveActionSummaryIDs(ids...) -} - // ClearMissDetails clears all "miss_details" edges to the MissDetail entity. 
func (acsu *ActionCacheStatisticsUpdate) ClearMissDetails() *ActionCacheStatisticsUpdate { acsu.mutation.ClearMissDetails() @@ -324,39 +313,23 @@ func (acsu *ActionCacheStatisticsUpdate) sqlSave(ctx context.Context) (n int, er } if acsu.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := acsu.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !acsu.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, + Columns: []string{actioncachestatistics.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := acsu.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, + Columns: []string{actioncachestatistics.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -369,10 +342,10 @@ func (acsu *ActionCacheStatisticsUpdate) sqlSave(ctx context.Context) (n int, er } if acsu.mutation.MissDetailsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), @@ -382,10 +355,10 @@ func (acsu *ActionCacheStatisticsUpdate) sqlSave(ctx context.Context) (n int, er } if nodes := acsu.mutation.RemovedMissDetailsIDs(); len(nodes) > 0 && !acsu.mutation.MissDetailsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), @@ -398,10 +371,10 @@ func (acsu *ActionCacheStatisticsUpdate) sqlSave(ctx context.Context) (n int, er } if nodes := acsu.mutation.MissDetailsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), @@ -567,19 +540,23 @@ func (acsuo *ActionCacheStatisticsUpdateOne) ClearMisses() *ActionCacheStatistic return acsuo } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. 
-func (acsuo *ActionCacheStatisticsUpdateOne) AddActionSummaryIDs(ids ...int) *ActionCacheStatisticsUpdateOne { - acsuo.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (acsuo *ActionCacheStatisticsUpdateOne) SetActionSummaryID(id int) *ActionCacheStatisticsUpdateOne { + acsuo.mutation.SetActionSummaryID(id) return acsuo } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (acsuo *ActionCacheStatisticsUpdateOne) AddActionSummary(a ...*ActionSummary) *ActionCacheStatisticsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (acsuo *ActionCacheStatisticsUpdateOne) SetNillableActionSummaryID(id *int) *ActionCacheStatisticsUpdateOne { + if id != nil { + acsuo = acsuo.SetActionSummaryID(*id) } - return acsuo.AddActionSummaryIDs(ids...) + return acsuo +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (acsuo *ActionCacheStatisticsUpdateOne) SetActionSummary(a *ActionSummary) *ActionCacheStatisticsUpdateOne { + return acsuo.SetActionSummaryID(a.ID) } // AddMissDetailIDs adds the "miss_details" edge to the MissDetail entity by IDs. @@ -602,27 +579,12 @@ func (acsuo *ActionCacheStatisticsUpdateOne) Mutation() *ActionCacheStatisticsMu return acsuo.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (acsuo *ActionCacheStatisticsUpdateOne) ClearActionSummary() *ActionCacheStatisticsUpdateOne { acsuo.mutation.ClearActionSummary() return acsuo } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (acsuo *ActionCacheStatisticsUpdateOne) RemoveActionSummaryIDs(ids ...int) *ActionCacheStatisticsUpdateOne { - acsuo.mutation.RemoveActionSummaryIDs(ids...) - return acsuo -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (acsuo *ActionCacheStatisticsUpdateOne) RemoveActionSummary(a ...*ActionSummary) *ActionCacheStatisticsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return acsuo.RemoveActionSummaryIDs(ids...) -} - // ClearMissDetails clears all "miss_details" edges to the MissDetail entity. 
func (acsuo *ActionCacheStatisticsUpdateOne) ClearMissDetails() *ActionCacheStatisticsUpdateOne { acsuo.mutation.ClearMissDetails() @@ -757,39 +719,23 @@ func (acsuo *ActionCacheStatisticsUpdateOne) sqlSave(ctx context.Context) (_node } if acsuo.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := acsuo.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !acsuo.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, + Columns: []string{actioncachestatistics.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := acsuo.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: actioncachestatistics.ActionSummaryTable, - Columns: actioncachestatistics.ActionSummaryPrimaryKey, + Columns: []string{actioncachestatistics.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -802,10 +748,10 @@ func (acsuo *ActionCacheStatisticsUpdateOne) sqlSave(ctx context.Context) (_node } if acsuo.mutation.MissDetailsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), @@ -815,10 +761,10 @@ func (acsuo *ActionCacheStatisticsUpdateOne) sqlSave(ctx context.Context) (_node } if nodes := acsuo.mutation.RemovedMissDetailsIDs(); len(nodes) > 0 && !acsuo.mutation.MissDetailsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), @@ -831,10 +777,10 @@ func (acsuo *ActionCacheStatisticsUpdateOne) sqlSave(ctx context.Context) (_node } if nodes := acsuo.mutation.MissDetailsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actioncachestatistics.MissDetailsTable, - Columns: actioncachestatistics.MissDetailsPrimaryKey, + Columns: []string{actioncachestatistics.MissDetailsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(missdetail.FieldID, field.TypeInt), diff --git a/ent/gen/ent/actiondata.go b/ent/gen/ent/actiondata.go index 477d41c..b2643d8 100644 --- a/ent/gen/ent/actiondata.go +++ b/ent/gen/ent/actiondata.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" 
"github.com/buildbarn/bb-portal/ent/gen/ent/actiondata" + "github.com/buildbarn/bb-portal/ent/gen/ent/actionsummary" ) // ActionData is the model entity for the ActionData schema. @@ -32,28 +33,29 @@ type ActionData struct { UserTime int64 `json:"user_time,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the ActionDataQuery when eager-loading is set. - Edges ActionDataEdges `json:"edges"` - selectValues sql.SelectValues + Edges ActionDataEdges `json:"edges"` + action_summary_action_data *int + selectValues sql.SelectValues } // ActionDataEdges holds the relations/edges for other nodes in the graph. type ActionDataEdges struct { // ActionSummary holds the value of the action_summary edge. - ActionSummary []*ActionSummary `json:"action_summary,omitempty"` + ActionSummary *ActionSummary `json:"action_summary,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedActionSummary map[string][]*ActionSummary } // ActionSummaryOrErr returns the ActionSummary value or an error if the edge -// was not loaded in eager-loading. -func (e ActionDataEdges) ActionSummaryOrErr() ([]*ActionSummary, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ActionDataEdges) ActionSummaryOrErr() (*ActionSummary, error) { + if e.ActionSummary != nil { return e.ActionSummary, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: actionsummary.Label} } return nil, &NotLoadedError{edge: "action_summary"} } @@ -67,6 +69,8 @@ func (*ActionData) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case actiondata.FieldMnemonic: values[i] = new(sql.NullString) + case actiondata.ForeignKeys[0]: // action_summary_action_data + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -130,6 +134,13 @@ func (ad *ActionData) assignValues(columns []string, values []any) error { } else if value.Valid { ad.UserTime = value.Int64 } + case actiondata.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field action_summary_action_data", value) + } else if value.Valid { + ad.action_summary_action_data = new(int) + *ad.action_summary_action_data = int(value.Int64) + } default: ad.selectValues.Set(columns[i], values[i]) } @@ -195,29 +206,5 @@ func (ad *ActionData) String() string { return builder.String() } -// NamedActionSummary returns the ActionSummary named value or an error if the edge was not -// loaded in eager-loading with this name. -func (ad *ActionData) NamedActionSummary(name string) ([]*ActionSummary, error) { - if ad.Edges.namedActionSummary == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := ad.Edges.namedActionSummary[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (ad *ActionData) appendNamedActionSummary(name string, edges ...*ActionSummary) { - if ad.Edges.namedActionSummary == nil { - ad.Edges.namedActionSummary = make(map[string][]*ActionSummary) - } - if len(edges) == 0 { - ad.Edges.namedActionSummary[name] = []*ActionSummary{} - } else { - ad.Edges.namedActionSummary[name] = append(ad.Edges.namedActionSummary[name], edges...) - } -} - // ActionDataSlice is a parsable slice of ActionData. 
type ActionDataSlice []*ActionData diff --git a/ent/gen/ent/actiondata/actiondata.go b/ent/gen/ent/actiondata/actiondata.go index 8715f05..5ff223c 100644 --- a/ent/gen/ent/actiondata/actiondata.go +++ b/ent/gen/ent/actiondata/actiondata.go @@ -30,11 +30,13 @@ const ( EdgeActionSummary = "action_summary" // Table holds the table name of the actiondata in the database. Table = "action_data" - // ActionSummaryTable is the table that holds the action_summary relation/edge. The primary key declared below. - ActionSummaryTable = "action_summary_action_data" + // ActionSummaryTable is the table that holds the action_summary relation/edge. + ActionSummaryTable = "action_data" // ActionSummaryInverseTable is the table name for the ActionSummary entity. // It exists in this package in order to avoid circular dependency with the "actionsummary" package. ActionSummaryInverseTable = "action_summaries" + // ActionSummaryColumn is the table column denoting the action_summary relation/edge. + ActionSummaryColumn = "action_summary_action_data" ) // Columns holds all SQL columns for actiondata fields. @@ -49,11 +51,11 @@ var Columns = []string{ FieldUserTime, } -var ( - // ActionSummaryPrimaryKey and ActionSummaryColumn2 are the table columns denoting the - // primary key for the action_summary relation (M2M). - ActionSummaryPrimaryKey = []string{"action_summary_id", "action_data_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "action_data" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "action_summary_action_data", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -62,6 +64,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -108,23 +115,16 @@ func ByUserTime(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldUserTime, opts...).ToFunc() } -// ByActionSummaryCount orders the results by action_summary count. -func ByActionSummaryCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionSummaryStep(), opts...) - } -} - -// ByActionSummary orders the results by action_summary terms. -func ByActionSummary(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByActionSummaryField orders the results by action_summary field. +func ByActionSummaryField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), sql.OrderByField(field, opts...)) } } func newActionSummaryStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionSummaryInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionSummaryTable, ActionSummaryColumn), ) } diff --git a/ent/gen/ent/actiondata/where.go b/ent/gen/ent/actiondata/where.go index 1da2ded..41b504b 100644 --- a/ent/gen/ent/actiondata/where.go +++ b/ent/gen/ent/actiondata/where.go @@ -468,7 +468,7 @@ func HasActionSummary() predicate.ActionData { return predicate.ActionData(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionSummaryTable, ActionSummaryColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/actiondata_create.go b/ent/gen/ent/actiondata_create.go index 2d3e6c3..4f2fb98 100644 --- a/ent/gen/ent/actiondata_create.go +++ b/ent/gen/ent/actiondata_create.go @@ -117,19 +117,23 @@ func (adc *ActionDataCreate) SetNillableUserTime(i *int64) *ActionDataCreate { return adc } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (adc *ActionDataCreate) AddActionSummaryIDs(ids ...int) *ActionDataCreate { - adc.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (adc *ActionDataCreate) SetActionSummaryID(id int) *ActionDataCreate { + adc.mutation.SetActionSummaryID(id) return adc } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (adc *ActionDataCreate) AddActionSummary(a ...*ActionSummary) *ActionDataCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (adc *ActionDataCreate) SetNillableActionSummaryID(id *int) *ActionDataCreate { + if id != nil { + adc = adc.SetActionSummaryID(*id) } - return adc.AddActionSummaryIDs(ids...) + return adc +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (adc *ActionDataCreate) SetActionSummary(a *ActionSummary) *ActionDataCreate { + return adc.SetActionSummaryID(a.ID) } // Mutation returns the ActionDataMutation object of the builder. 
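Taken together, the hunks above replace the variadic AddActionSummary helpers on ActionDataCreate with singular SetActionSummary/SetActionSummaryID setters backed by the new action_summary_action_data foreign key. A minimal usage sketch (not part of the generated code in this patch; it assumes an already-constructed *ent.Client from the generated package and uses only builder methods visible in this diff):

package main

import (
	"context"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
)

// createActionData attaches a new ActionData row to exactly one ActionSummary.
// With the edge now modelled as M2O, the create builder exposes
// SetActionSummary/SetActionSummaryID rather than the old AddActionSummary.
func createActionData(ctx context.Context, client *ent.Client, summary *ent.ActionSummary) (*ent.ActionData, error) {
	return client.ActionData.
		Create().
		SetActionSummary(summary). // writes the action_summary_action_data foreign key
		Save(ctx)
}
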
@@ -222,10 +226,10 @@ func (adc *ActionDataCreate) createSpec() (*ActionData, *sqlgraph.CreateSpec) { } if nodes := adc.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, + Columns: []string{actiondata.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -234,6 +238,7 @@ func (adc *ActionDataCreate) createSpec() (*ActionData, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.action_summary_action_data = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/actiondata_query.go b/ent/gen/ent/actiondata_query.go index 0a4810e..0f58fc0 100644 --- a/ent/gen/ent/actiondata_query.go +++ b/ent/gen/ent/actiondata_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // ActionDataQuery is the builder for querying ActionData entities. type ActionDataQuery struct { config - ctx *QueryContext - order []actiondata.OrderOption - inters []Interceptor - predicates []predicate.ActionData - withActionSummary *ActionSummaryQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*ActionData) error - withNamedActionSummary map[string]*ActionSummaryQuery + ctx *QueryContext + order []actiondata.OrderOption + inters []Interceptor + predicates []predicate.ActionData + withActionSummary *ActionSummaryQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*ActionData) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (adq *ActionDataQuery) QueryActionSummary() *ActionSummaryQuery { step := sqlgraph.NewStep( sqlgraph.From(actiondata.Table, actiondata.FieldID, selector), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, actiondata.ActionSummaryTable, actiondata.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, actiondata.ActionSummaryTable, actiondata.ActionSummaryColumn), ) fromU = sqlgraph.SetNeighbors(adq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (adq *ActionDataQuery) prepareQuery(ctx context.Context) error { func (adq *ActionDataQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ActionData, error) { var ( nodes = []*ActionData{} + withFKs = adq.withFKs _spec = adq.querySpec() loadedTypes = [1]bool{ adq.withActionSummary != nil, } ) + if adq.withActionSummary != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, actiondata.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*ActionData).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (adq *ActionDataQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* return nodes, nil } if query := adq.withActionSummary; query != nil { - if err := adq.loadActionSummary(ctx, query, nodes, - func(n *ActionData) { n.Edges.ActionSummary = []*ActionSummary{} }, - func(n *ActionData, e *ActionSummary) { n.Edges.ActionSummary = append(n.Edges.ActionSummary, e) }); err != nil { - return nil, err - } - } - for name, query := range adq.withNamedActionSummary { - if err := adq.loadActionSummary(ctx, query, nodes, - func(n *ActionData) { n.appendNamedActionSummary(name) }, - func(n *ActionData, e *ActionSummary) { n.appendNamedActionSummary(name, e) }); err != nil { + if err := adq.loadActionSummary(ctx, query, nodes, nil, + func(n *ActionData, e *ActionSummary) { n.Edges.ActionSummary = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (adq *ActionDataQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* } func (adq *ActionDataQuery) loadActionSummary(ctx context.Context, query *ActionSummaryQuery, nodes []*ActionData, init func(*ActionData), assign func(*ActionData, *ActionSummary)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionData) - nids := make(map[int]map[*ActionData]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ActionData) + for i := range nodes { + if nodes[i].action_summary_action_data == nil { + continue } + fk := *nodes[i].action_summary_action_data + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actiondata.ActionSummaryTable) - s.Join(joinT).On(s.C(actionsummary.FieldID), joinT.C(actiondata.ActionSummaryPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(actiondata.ActionSummaryPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actiondata.ActionSummaryPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionData]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionSummary](ctx, query, qr, query.inters) + query.Where(actionsummary.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "action_summary" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "action_summary_action_data" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (adq *ActionDataQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedActionSummary tells the query-builder to eager-load the nodes that are connected to the "action_summary" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (adq *ActionDataQuery) WithNamedActionSummary(name string, opts ...func(*ActionSummaryQuery)) *ActionDataQuery { - query := (&ActionSummaryClient{config: adq.config}).Query() - for _, opt := range opts { - opt(query) - } - if adq.withNamedActionSummary == nil { - adq.withNamedActionSummary = make(map[string]*ActionSummaryQuery) - } - adq.withNamedActionSummary[name] = query - return adq -} - // ActionDataGroupBy is the group-by builder for ActionData entities. type ActionDataGroupBy struct { selector diff --git a/ent/gen/ent/actiondata_update.go b/ent/gen/ent/actiondata_update.go index a8f4cac..838d1e8 100644 --- a/ent/gen/ent/actiondata_update.go +++ b/ent/gen/ent/actiondata_update.go @@ -210,19 +210,23 @@ func (adu *ActionDataUpdate) ClearUserTime() *ActionDataUpdate { return adu } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (adu *ActionDataUpdate) AddActionSummaryIDs(ids ...int) *ActionDataUpdate { - adu.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (adu *ActionDataUpdate) SetActionSummaryID(id int) *ActionDataUpdate { + adu.mutation.SetActionSummaryID(id) return adu } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (adu *ActionDataUpdate) AddActionSummary(a ...*ActionSummary) *ActionDataUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. 
+func (adu *ActionDataUpdate) SetNillableActionSummaryID(id *int) *ActionDataUpdate { + if id != nil { + adu = adu.SetActionSummaryID(*id) } - return adu.AddActionSummaryIDs(ids...) + return adu +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (adu *ActionDataUpdate) SetActionSummary(a *ActionSummary) *ActionDataUpdate { + return adu.SetActionSummaryID(a.ID) } // Mutation returns the ActionDataMutation object of the builder. @@ -230,27 +234,12 @@ func (adu *ActionDataUpdate) Mutation() *ActionDataMutation { return adu.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (adu *ActionDataUpdate) ClearActionSummary() *ActionDataUpdate { adu.mutation.ClearActionSummary() return adu } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (adu *ActionDataUpdate) RemoveActionSummaryIDs(ids ...int) *ActionDataUpdate { - adu.mutation.RemoveActionSummaryIDs(ids...) - return adu -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (adu *ActionDataUpdate) RemoveActionSummary(a ...*ActionSummary) *ActionDataUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return adu.RemoveActionSummaryIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (adu *ActionDataUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, adu.sqlSave, adu.mutation, adu.hooks) @@ -349,39 +338,23 @@ func (adu *ActionDataUpdate) sqlSave(ctx context.Context) (n int, err error) { } if adu.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := adu.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !adu.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, + Columns: []string{actiondata.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := adu.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, + Columns: []string{actiondata.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -594,19 +567,23 @@ func (aduo *ActionDataUpdateOne) ClearUserTime() *ActionDataUpdateOne { return aduo } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (aduo *ActionDataUpdateOne) AddActionSummaryIDs(ids ...int) *ActionDataUpdateOne { - aduo.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. 
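The update builders follow the same pattern: the Remove* helpers disappear, the edge is reassigned or cleared through singular setters, and the pointer-valued Edges.ActionSummary accessor replaces the old slice. A sketch of the round trip (again not part of the generated code; it assumes the generated actiondata predicate package and the standard WithActionSummary eager-loading helper, which this excerpt shows only indirectly through the withActionSummary query field):

package main

import (
	"context"
	"fmt"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
	"github.com/buildbarn/bb-portal/ent/gen/ent/actiondata"
)

// reassignActionData points an existing ActionData row at a different
// ActionSummary, then reloads it with the edge eager-loaded and reads the
// single neighbor through ActionSummaryOrErr.
func reassignActionData(ctx context.Context, client *ent.Client, actionDataID, summaryID int) error {
	// Clear the previous owner (if any) and set the new foreign key in one update.
	if _, err := client.ActionData.
		UpdateOneID(actionDataID).
		ClearActionSummary().
		SetActionSummaryID(summaryID).
		Save(ctx); err != nil {
		return err
	}
	// Reload with the edge; ActionSummaryOrErr now yields a single node or a
	// NotFoundError, no longer a []*ActionSummary.
	ad, err := client.ActionData.Query().
		Where(actiondata.ID(actionDataID)).
		WithActionSummary().
		Only(ctx)
	if err != nil {
		return err
	}
	summary, err := ad.Edges.ActionSummaryOrErr()
	if err != nil {
		return err
	}
	fmt.Printf("ActionData %d now belongs to ActionSummary %d\n", ad.ID, summary.ID)
	return nil
}
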
+func (aduo *ActionDataUpdateOne) SetActionSummaryID(id int) *ActionDataUpdateOne { + aduo.mutation.SetActionSummaryID(id) return aduo } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (aduo *ActionDataUpdateOne) AddActionSummary(a ...*ActionSummary) *ActionDataUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (aduo *ActionDataUpdateOne) SetNillableActionSummaryID(id *int) *ActionDataUpdateOne { + if id != nil { + aduo = aduo.SetActionSummaryID(*id) } - return aduo.AddActionSummaryIDs(ids...) + return aduo +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (aduo *ActionDataUpdateOne) SetActionSummary(a *ActionSummary) *ActionDataUpdateOne { + return aduo.SetActionSummaryID(a.ID) } // Mutation returns the ActionDataMutation object of the builder. @@ -614,27 +591,12 @@ func (aduo *ActionDataUpdateOne) Mutation() *ActionDataMutation { return aduo.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (aduo *ActionDataUpdateOne) ClearActionSummary() *ActionDataUpdateOne { aduo.mutation.ClearActionSummary() return aduo } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (aduo *ActionDataUpdateOne) RemoveActionSummaryIDs(ids ...int) *ActionDataUpdateOne { - aduo.mutation.RemoveActionSummaryIDs(ids...) - return aduo -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (aduo *ActionDataUpdateOne) RemoveActionSummary(a ...*ActionSummary) *ActionDataUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return aduo.RemoveActionSummaryIDs(ids...) -} - // Where appends a list predicates to the ActionDataUpdate builder. func (aduo *ActionDataUpdateOne) Where(ps ...predicate.ActionData) *ActionDataUpdateOne { aduo.mutation.Where(ps...) 
@@ -763,39 +725,23 @@ func (aduo *ActionDataUpdateOne) sqlSave(ctx context.Context) (_node *ActionData } if aduo.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := aduo.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !aduo.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, + Columns: []string{actiondata.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := aduo.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: actiondata.ActionSummaryTable, - Columns: actiondata.ActionSummaryPrimaryKey, + Columns: []string{actiondata.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), diff --git a/ent/gen/ent/actionsummary.go b/ent/gen/ent/actionsummary.go index 69f57bb..25c98be 100644 --- a/ent/gen/ent/actionsummary.go +++ b/ent/gen/ent/actionsummary.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/actioncachestatistics" "github.com/buildbarn/bb-portal/ent/gen/ent/actionsummary" "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) @@ -41,16 +42,15 @@ type ActionSummaryEdges struct { // RunnerCount holds the value of the runner_count edge. RunnerCount []*RunnerCount `json:"runner_count,omitempty"` // ActionCacheStatistics holds the value of the action_cache_statistics edge. - ActionCacheStatistics []*ActionCacheStatistics `json:"action_cache_statistics,omitempty"` + ActionCacheStatistics *ActionCacheStatistics `json:"action_cache_statistics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [4]bool // totalCount holds the count of the edges above. totalCount [4]map[string]int - namedActionData map[string][]*ActionData - namedRunnerCount map[string][]*RunnerCount - namedActionCacheStatistics map[string][]*ActionCacheStatistics + namedActionData map[string][]*ActionData + namedRunnerCount map[string][]*RunnerCount } // MetricsOrErr returns the Metrics value or an error if the edge @@ -83,10 +83,12 @@ func (e ActionSummaryEdges) RunnerCountOrErr() ([]*RunnerCount, error) { } // ActionCacheStatisticsOrErr returns the ActionCacheStatistics value or an error if the edge -// was not loaded in eager-loading. -func (e ActionSummaryEdges) ActionCacheStatisticsOrErr() ([]*ActionCacheStatistics, error) { - if e.loadedTypes[3] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e ActionSummaryEdges) ActionCacheStatisticsOrErr() (*ActionCacheStatistics, error) { + if e.ActionCacheStatistics != nil { return e.ActionCacheStatistics, nil + } else if e.loadedTypes[3] { + return nil, &NotFoundError{label: actioncachestatistics.Label} } return nil, &NotLoadedError{edge: "action_cache_statistics"} } @@ -271,29 +273,5 @@ func (as *ActionSummary) appendNamedRunnerCount(name string, edges ...*RunnerCou } } -// NamedActionCacheStatistics returns the ActionCacheStatistics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (as *ActionSummary) NamedActionCacheStatistics(name string) ([]*ActionCacheStatistics, error) { - if as.Edges.namedActionCacheStatistics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := as.Edges.namedActionCacheStatistics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (as *ActionSummary) appendNamedActionCacheStatistics(name string, edges ...*ActionCacheStatistics) { - if as.Edges.namedActionCacheStatistics == nil { - as.Edges.namedActionCacheStatistics = make(map[string][]*ActionCacheStatistics) - } - if len(edges) == 0 { - as.Edges.namedActionCacheStatistics[name] = []*ActionCacheStatistics{} - } else { - as.Edges.namedActionCacheStatistics[name] = append(as.Edges.namedActionCacheStatistics[name], edges...) - } -} - // ActionSummaries is a parsable slice of ActionSummary. type ActionSummaries []*ActionSummary diff --git a/ent/gen/ent/actionsummary/actionsummary.go b/ent/gen/ent/actionsummary/actionsummary.go index d194eda..8d3692f 100644 --- a/ent/gen/ent/actionsummary/actionsummary.go +++ b/ent/gen/ent/actionsummary/actionsummary.go @@ -37,21 +37,27 @@ const ( MetricsInverseTable = "metrics" // MetricsColumn is the table column denoting the metrics relation/edge. MetricsColumn = "metrics_action_summary" - // ActionDataTable is the table that holds the action_data relation/edge. The primary key declared below. - ActionDataTable = "action_summary_action_data" + // ActionDataTable is the table that holds the action_data relation/edge. + ActionDataTable = "action_data" // ActionDataInverseTable is the table name for the ActionData entity. // It exists in this package in order to avoid circular dependency with the "actiondata" package. ActionDataInverseTable = "action_data" - // RunnerCountTable is the table that holds the runner_count relation/edge. The primary key declared below. - RunnerCountTable = "action_summary_runner_count" + // ActionDataColumn is the table column denoting the action_data relation/edge. + ActionDataColumn = "action_summary_action_data" + // RunnerCountTable is the table that holds the runner_count relation/edge. + RunnerCountTable = "runner_counts" // RunnerCountInverseTable is the table name for the RunnerCount entity. // It exists in this package in order to avoid circular dependency with the "runnercount" package. RunnerCountInverseTable = "runner_counts" - // ActionCacheStatisticsTable is the table that holds the action_cache_statistics relation/edge. The primary key declared below. - ActionCacheStatisticsTable = "action_summary_action_cache_statistics" + // RunnerCountColumn is the table column denoting the runner_count relation/edge. + RunnerCountColumn = "action_summary_runner_count" + // ActionCacheStatisticsTable is the table that holds the action_cache_statistics relation/edge. 
+ ActionCacheStatisticsTable = "action_cache_statistics" // ActionCacheStatisticsInverseTable is the table name for the ActionCacheStatistics entity. // It exists in this package in order to avoid circular dependency with the "actioncachestatistics" package. ActionCacheStatisticsInverseTable = "action_cache_statistics" + // ActionCacheStatisticsColumn is the table column denoting the action_cache_statistics relation/edge. + ActionCacheStatisticsColumn = "action_summary_action_cache_statistics" ) // Columns holds all SQL columns for actionsummary fields. @@ -69,18 +75,6 @@ var ForeignKeys = []string{ "metrics_action_summary", } -var ( - // ActionDataPrimaryKey and ActionDataColumn2 are the table columns denoting the - // primary key for the action_data relation (M2M). - ActionDataPrimaryKey = []string{"action_summary_id", "action_data_id"} - // RunnerCountPrimaryKey and RunnerCountColumn2 are the table columns denoting the - // primary key for the runner_count relation (M2M). - RunnerCountPrimaryKey = []string{"action_summary_id", "runner_count_id"} - // ActionCacheStatisticsPrimaryKey and ActionCacheStatisticsColumn2 are the table columns denoting the - // primary key for the action_cache_statistics relation (M2M). - ActionCacheStatisticsPrimaryKey = []string{"action_summary_id", "action_cache_statistics_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -159,44 +153,37 @@ func ByRunnerCount(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { } } -// ByActionCacheStatisticsCount orders the results by action_cache_statistics count. -func ByActionCacheStatisticsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionCacheStatisticsStep(), opts...) - } -} - -// ByActionCacheStatistics orders the results by action_cache_statistics terms. -func ByActionCacheStatistics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByActionCacheStatisticsField orders the results by action_cache_statistics field. +func ByActionCacheStatisticsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionCacheStatisticsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newActionCacheStatisticsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, MetricsTable, MetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newActionDataStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionDataInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ActionDataTable, ActionDataPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ActionDataTable, ActionDataColumn), ) } func newRunnerCountStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(RunnerCountInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, RunnerCountTable, RunnerCountPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, RunnerCountTable, RunnerCountColumn), ) } func newActionCacheStatisticsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionCacheStatisticsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ActionCacheStatisticsTable, ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, ActionCacheStatisticsTable, ActionCacheStatisticsColumn), ) } diff --git a/ent/gen/ent/actionsummary/where.go b/ent/gen/ent/actionsummary/where.go index a4db2e1..f692d6d 100644 --- a/ent/gen/ent/actionsummary/where.go +++ b/ent/gen/ent/actionsummary/where.go @@ -278,7 +278,7 @@ func HasMetrics() predicate.ActionSummary { return predicate.ActionSummary(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, MetricsTable, MetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -301,7 +301,7 @@ func HasActionData() predicate.ActionSummary { return predicate.ActionSummary(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ActionDataTable, ActionDataPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ActionDataTable, ActionDataColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -324,7 +324,7 @@ func HasRunnerCount() predicate.ActionSummary { return predicate.ActionSummary(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, RunnerCountTable, RunnerCountPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, RunnerCountTable, RunnerCountColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -347,7 +347,7 @@ func HasActionCacheStatistics() predicate.ActionSummary { return predicate.ActionSummary(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ActionCacheStatisticsTable, ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, ActionCacheStatisticsTable, ActionCacheStatisticsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/actionsummary_create.go b/ent/gen/ent/actionsummary_create.go index afd5cdd..866690a 100644 --- a/ent/gen/ent/actionsummary_create.go +++ b/ent/gen/ent/actionsummary_create.go @@ -127,19 +127,23 @@ func (asc *ActionSummaryCreate) AddRunnerCount(r ...*RunnerCount) *ActionSummary return asc.AddRunnerCountIDs(ids...) } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. 
-func (asc *ActionSummaryCreate) AddActionCacheStatisticIDs(ids ...int) *ActionSummaryCreate { - asc.mutation.AddActionCacheStatisticIDs(ids...) +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (asc *ActionSummaryCreate) SetActionCacheStatisticsID(id int) *ActionSummaryCreate { + asc.mutation.SetActionCacheStatisticsID(id) return asc } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. -func (asc *ActionSummaryCreate) AddActionCacheStatistics(a ...*ActionCacheStatistics) *ActionSummaryCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (asc *ActionSummaryCreate) SetNillableActionCacheStatisticsID(id *int) *ActionSummaryCreate { + if id != nil { + asc = asc.SetActionCacheStatisticsID(*id) } - return asc.AddActionCacheStatisticIDs(ids...) + return asc +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. +func (asc *ActionSummaryCreate) SetActionCacheStatistics(a *ActionCacheStatistics) *ActionSummaryCreate { + return asc.SetActionCacheStatisticsID(a.ID) } // Mutation returns the ActionSummaryMutation object of the builder. @@ -220,7 +224,7 @@ func (asc *ActionSummaryCreate) createSpec() (*ActionSummary, *sqlgraph.CreateSp } if nodes := asc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: actionsummary.MetricsTable, Columns: []string{actionsummary.MetricsColumn}, @@ -237,10 +241,10 @@ func (asc *ActionSummaryCreate) createSpec() (*ActionSummary, *sqlgraph.CreateSp } if nodes := asc.mutation.ActionDataIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -253,10 +257,10 @@ func (asc *ActionSummaryCreate) createSpec() (*ActionSummary, *sqlgraph.CreateSp } if nodes := asc.mutation.RunnerCountIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -269,10 +273,10 @@ func (asc *ActionSummaryCreate) createSpec() (*ActionSummary, *sqlgraph.CreateSp } if nodes := asc.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, + Columns: []string{actionsummary.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/actionsummary_query.go b/ent/gen/ent/actionsummary_query.go index 1b9bce8..5045e64 100644 --- a/ent/gen/ent/actionsummary_query.go +++ b/ent/gen/ent/actionsummary_query.go @@ -22,20 +22,19 @@ import ( // ActionSummaryQuery is the builder for 
querying ActionSummary entities. type ActionSummaryQuery struct { config - ctx *QueryContext - order []actionsummary.OrderOption - inters []Interceptor - predicates []predicate.ActionSummary - withMetrics *MetricsQuery - withActionData *ActionDataQuery - withRunnerCount *RunnerCountQuery - withActionCacheStatistics *ActionCacheStatisticsQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*ActionSummary) error - withNamedActionData map[string]*ActionDataQuery - withNamedRunnerCount map[string]*RunnerCountQuery - withNamedActionCacheStatistics map[string]*ActionCacheStatisticsQuery + ctx *QueryContext + order []actionsummary.OrderOption + inters []Interceptor + predicates []predicate.ActionSummary + withMetrics *MetricsQuery + withActionData *ActionDataQuery + withRunnerCount *RunnerCountQuery + withActionCacheStatistics *ActionCacheStatisticsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*ActionSummary) error + withNamedActionData map[string]*ActionDataQuery + withNamedRunnerCount map[string]*RunnerCountQuery // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -86,7 +85,7 @@ func (asq *ActionSummaryQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, actionsummary.MetricsTable, actionsummary.MetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, actionsummary.MetricsTable, actionsummary.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(asq.driver.Dialect(), step) return fromU, nil @@ -108,7 +107,7 @@ func (asq *ActionSummaryQuery) QueryActionData() *ActionDataQuery { step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, selector), sqlgraph.To(actiondata.Table, actiondata.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.ActionDataTable, actionsummary.ActionDataPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actionsummary.ActionDataTable, actionsummary.ActionDataColumn), ) fromU = sqlgraph.SetNeighbors(asq.driver.Dialect(), step) return fromU, nil @@ -130,7 +129,7 @@ func (asq *ActionSummaryQuery) QueryRunnerCount() *RunnerCountQuery { step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, selector), sqlgraph.To(runnercount.Table, runnercount.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.RunnerCountTable, actionsummary.RunnerCountPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actionsummary.RunnerCountTable, actionsummary.RunnerCountColumn), ) fromU = sqlgraph.SetNeighbors(asq.driver.Dialect(), step) return fromU, nil @@ -152,7 +151,7 @@ func (asq *ActionSummaryQuery) QueryActionCacheStatistics() *ActionCacheStatisti step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, selector), sqlgraph.To(actioncachestatistics.Table, actioncachestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.ActionCacheStatisticsTable, actionsummary.ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, actionsummary.ActionCacheStatisticsTable, actionsummary.ActionCacheStatisticsColumn), ) fromU = sqlgraph.SetNeighbors(asq.driver.Dialect(), step) return fromU, nil @@ -540,11 +539,8 @@ func (asq *ActionSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( } } if query := asq.withActionCacheStatistics; query != nil { - if err := 
asq.loadActionCacheStatistics(ctx, query, nodes, - func(n *ActionSummary) { n.Edges.ActionCacheStatistics = []*ActionCacheStatistics{} }, - func(n *ActionSummary, e *ActionCacheStatistics) { - n.Edges.ActionCacheStatistics = append(n.Edges.ActionCacheStatistics, e) - }); err != nil { + if err := asq.loadActionCacheStatistics(ctx, query, nodes, nil, + func(n *ActionSummary, e *ActionCacheStatistics) { n.Edges.ActionCacheStatistics = e }); err != nil { return nil, err } } @@ -562,13 +558,6 @@ func (asq *ActionSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nil, err } } - for name, query := range asq.withNamedActionCacheStatistics { - if err := asq.loadActionCacheStatistics(ctx, query, nodes, - func(n *ActionSummary) { n.appendNamedActionCacheStatistics(name) }, - func(n *ActionSummary, e *ActionCacheStatistics) { n.appendNamedActionCacheStatistics(name, e) }); err != nil { - return nil, err - } - } for i := range asq.loadTotal { if err := asq.loadTotal[i](ctx, nodes); err != nil { return nil, err @@ -610,185 +599,92 @@ func (asq *ActionSummaryQuery) loadMetrics(ctx context.Context, query *MetricsQu return nil } func (asq *ActionSummaryQuery) loadActionData(ctx context.Context, query *ActionDataQuery, nodes []*ActionSummary, init func(*ActionSummary), assign func(*ActionSummary, *ActionData)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionSummary) - nids := make(map[int]map[*ActionSummary]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ActionSummary) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actionsummary.ActionDataTable) - s.Join(joinT).On(s.C(actiondata.FieldID), joinT.C(actionsummary.ActionDataPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(actionsummary.ActionDataPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actionsummary.ActionDataPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + init(nodes[i]) + } } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionSummary]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionData](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.ActionData(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(actionsummary.ActionDataColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "action_data" node returned %v`, n.ID) + fk := n.action_summary_action_data + if fk == nil { + return fmt.Errorf(`foreign-key "action_summary_action_data" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "action_summary_action_data" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (asq *ActionSummaryQuery) loadRunnerCount(ctx context.Context, query *RunnerCountQuery, nodes []*ActionSummary, init func(*ActionSummary), assign func(*ActionSummary, *RunnerCount)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionSummary) - nids := make(map[int]map[*ActionSummary]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ActionSummary) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actionsummary.RunnerCountTable) - s.Join(joinT).On(s.C(runnercount.FieldID), joinT.C(actionsummary.RunnerCountPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(actionsummary.RunnerCountPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actionsummary.RunnerCountPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + init(nodes[i]) + } } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionSummary]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*RunnerCount](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.RunnerCount(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(actionsummary.RunnerCountColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "runner_count" node returned %v`, n.ID) + fk := n.action_summary_runner_count + if fk == nil { + return fmt.Errorf(`foreign-key "action_summary_runner_count" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "action_summary_runner_count" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (asq *ActionSummaryQuery) loadActionCacheStatistics(ctx context.Context, query *ActionCacheStatisticsQuery, nodes []*ActionSummary, init func(*ActionSummary), assign func(*ActionSummary, *ActionCacheStatistics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ActionSummary) - nids := make(map[int]map[*ActionSummary]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(actionsummary.ActionCacheStatisticsTable) - s.Join(joinT).On(s.C(actioncachestatistics.FieldID), joinT.C(actionsummary.ActionCacheStatisticsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(actionsummary.ActionCacheStatisticsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(actionsummary.ActionCacheStatisticsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ActionSummary) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ActionSummary]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionCacheStatistics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.ActionCacheStatistics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(actionsummary.ActionCacheStatisticsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "action_cache_statistics" node returned %v`, n.ID) + fk := n.action_summary_action_cache_statistics + if fk == nil { + return fmt.Errorf(`foreign-key "action_summary_action_cache_statistics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "action_summary_action_cache_statistics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -905,20 +801,6 @@ func (asq *ActionSummaryQuery) WithNamedRunnerCount(name string, opts ...func(*R return asq } -// WithNamedActionCacheStatistics tells the query-builder to eager-load the nodes that are connected to the "action_cache_statistics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (asq *ActionSummaryQuery) WithNamedActionCacheStatistics(name string, opts ...func(*ActionCacheStatisticsQuery)) *ActionSummaryQuery { - query := (&ActionCacheStatisticsClient{config: asq.config}).Query() - for _, opt := range opts { - opt(query) - } - if asq.withNamedActionCacheStatistics == nil { - asq.withNamedActionCacheStatistics = make(map[string]*ActionCacheStatisticsQuery) - } - asq.withNamedActionCacheStatistics[name] = query - return asq -} - // ActionSummaryGroupBy is the group-by builder for ActionSummary entities. type ActionSummaryGroupBy struct { selector diff --git a/ent/gen/ent/actionsummary_update.go b/ent/gen/ent/actionsummary_update.go index e714cd0..0e6adf4 100644 --- a/ent/gen/ent/actionsummary_update.go +++ b/ent/gen/ent/actionsummary_update.go @@ -188,19 +188,23 @@ func (asu *ActionSummaryUpdate) AddRunnerCount(r ...*RunnerCount) *ActionSummary return asu.AddRunnerCountIDs(ids...) } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (asu *ActionSummaryUpdate) AddActionCacheStatisticIDs(ids ...int) *ActionSummaryUpdate { - asu.mutation.AddActionCacheStatisticIDs(ids...) 
+// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (asu *ActionSummaryUpdate) SetActionCacheStatisticsID(id int) *ActionSummaryUpdate { + asu.mutation.SetActionCacheStatisticsID(id) return asu } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. -func (asu *ActionSummaryUpdate) AddActionCacheStatistics(a ...*ActionCacheStatistics) *ActionSummaryUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (asu *ActionSummaryUpdate) SetNillableActionCacheStatisticsID(id *int) *ActionSummaryUpdate { + if id != nil { + asu = asu.SetActionCacheStatisticsID(*id) } - return asu.AddActionCacheStatisticIDs(ids...) + return asu +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. +func (asu *ActionSummaryUpdate) SetActionCacheStatistics(a *ActionCacheStatistics) *ActionSummaryUpdate { + return asu.SetActionCacheStatisticsID(a.ID) } // Mutation returns the ActionSummaryMutation object of the builder. @@ -256,27 +260,12 @@ func (asu *ActionSummaryUpdate) RemoveRunnerCount(r ...*RunnerCount) *ActionSumm return asu.RemoveRunnerCountIDs(ids...) } -// ClearActionCacheStatistics clears all "action_cache_statistics" edges to the ActionCacheStatistics entity. +// ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. func (asu *ActionSummaryUpdate) ClearActionCacheStatistics() *ActionSummaryUpdate { asu.mutation.ClearActionCacheStatistics() return asu } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to ActionCacheStatistics entities by IDs. -func (asu *ActionSummaryUpdate) RemoveActionCacheStatisticIDs(ids ...int) *ActionSummaryUpdate { - asu.mutation.RemoveActionCacheStatisticIDs(ids...) - return asu -} - -// RemoveActionCacheStatistics removes "action_cache_statistics" edges to ActionCacheStatistics entities. -func (asu *ActionSummaryUpdate) RemoveActionCacheStatistics(a ...*ActionCacheStatistics) *ActionSummaryUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return asu.RemoveActionCacheStatisticIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (asu *ActionSummaryUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, asu.sqlSave, asu.mutation, asu.hooks) @@ -351,7 +340,7 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if asu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: actionsummary.MetricsTable, Columns: []string{actionsummary.MetricsColumn}, @@ -364,7 +353,7 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := asu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: actionsummary.MetricsTable, Columns: []string{actionsummary.MetricsColumn}, @@ -380,10 +369,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if asu.mutation.ActionDataCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -393,10 +382,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := asu.mutation.RemovedActionDataIDs(); len(nodes) > 0 && !asu.mutation.ActionDataCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -409,10 +398,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := asu.mutation.ActionDataIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -425,10 +414,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if asu.mutation.RunnerCountCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -438,10 +427,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := asu.mutation.RemovedRunnerCountIDs(); len(nodes) > 0 && !asu.mutation.RunnerCountCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -454,10 +443,10 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := asu.mutation.RunnerCountIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, 
Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -470,39 +459,23 @@ func (asu *ActionSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) } if asu.mutation.ActionCacheStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := asu.mutation.RemovedActionCacheStatisticsIDs(); len(nodes) > 0 && !asu.mutation.ActionCacheStatisticsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, + Columns: []string{actionsummary.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := asu.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, + Columns: []string{actionsummary.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), @@ -690,19 +663,23 @@ func (asuo *ActionSummaryUpdateOne) AddRunnerCount(r ...*RunnerCount) *ActionSum return asuo.AddRunnerCountIDs(ids...) } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (asuo *ActionSummaryUpdateOne) AddActionCacheStatisticIDs(ids ...int) *ActionSummaryUpdateOne { - asuo.mutation.AddActionCacheStatisticIDs(ids...) +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (asuo *ActionSummaryUpdateOne) SetActionCacheStatisticsID(id int) *ActionSummaryUpdateOne { + asuo.mutation.SetActionCacheStatisticsID(id) return asuo } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. -func (asuo *ActionSummaryUpdateOne) AddActionCacheStatistics(a ...*ActionCacheStatistics) *ActionSummaryUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (asuo *ActionSummaryUpdateOne) SetNillableActionCacheStatisticsID(id *int) *ActionSummaryUpdateOne { + if id != nil { + asuo = asuo.SetActionCacheStatisticsID(*id) } - return asuo.AddActionCacheStatisticIDs(ids...) + return asuo +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. 
+func (asuo *ActionSummaryUpdateOne) SetActionCacheStatistics(a *ActionCacheStatistics) *ActionSummaryUpdateOne { + return asuo.SetActionCacheStatisticsID(a.ID) } // Mutation returns the ActionSummaryMutation object of the builder. @@ -758,27 +735,12 @@ func (asuo *ActionSummaryUpdateOne) RemoveRunnerCount(r ...*RunnerCount) *Action return asuo.RemoveRunnerCountIDs(ids...) } -// ClearActionCacheStatistics clears all "action_cache_statistics" edges to the ActionCacheStatistics entity. +// ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. func (asuo *ActionSummaryUpdateOne) ClearActionCacheStatistics() *ActionSummaryUpdateOne { asuo.mutation.ClearActionCacheStatistics() return asuo } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to ActionCacheStatistics entities by IDs. -func (asuo *ActionSummaryUpdateOne) RemoveActionCacheStatisticIDs(ids ...int) *ActionSummaryUpdateOne { - asuo.mutation.RemoveActionCacheStatisticIDs(ids...) - return asuo -} - -// RemoveActionCacheStatistics removes "action_cache_statistics" edges to ActionCacheStatistics entities. -func (asuo *ActionSummaryUpdateOne) RemoveActionCacheStatistics(a ...*ActionCacheStatistics) *ActionSummaryUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return asuo.RemoveActionCacheStatisticIDs(ids...) -} - // Where appends a list predicates to the ActionSummaryUpdate builder. func (asuo *ActionSummaryUpdateOne) Where(ps ...predicate.ActionSummary) *ActionSummaryUpdateOne { asuo.mutation.Where(ps...) @@ -883,7 +845,7 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if asuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: actionsummary.MetricsTable, Columns: []string{actionsummary.MetricsColumn}, @@ -896,7 +858,7 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if nodes := asuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: actionsummary.MetricsTable, Columns: []string{actionsummary.MetricsColumn}, @@ -912,10 +874,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if asuo.mutation.ActionDataCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -925,10 +887,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if nodes := asuo.mutation.RemovedActionDataIDs(); len(nodes) > 0 && !asuo.mutation.ActionDataCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -941,10 +903,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if nodes := asuo.mutation.ActionDataIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.ActionDataTable, - Columns: 
actionsummary.ActionDataPrimaryKey, + Columns: []string{actionsummary.ActionDataColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actiondata.FieldID, field.TypeInt), @@ -957,10 +919,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if asuo.mutation.RunnerCountCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -970,10 +932,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if nodes := asuo.mutation.RemovedRunnerCountIDs(); len(nodes) > 0 && !asuo.mutation.RunnerCountCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -986,10 +948,10 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if nodes := asuo.mutation.RunnerCountIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: actionsummary.RunnerCountTable, - Columns: actionsummary.RunnerCountPrimaryKey, + Columns: []string{actionsummary.RunnerCountColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(runnercount.FieldID, field.TypeInt), @@ -1002,39 +964,23 @@ func (asuo *ActionSummaryUpdateOne) sqlSave(ctx context.Context) (_node *ActionS } if asuo.mutation.ActionCacheStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := asuo.mutation.RemovedActionCacheStatisticsIDs(); len(nodes) > 0 && !asuo.mutation.ActionCacheStatisticsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, + Columns: []string{actionsummary.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := asuo.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: actionsummary.ActionCacheStatisticsTable, - Columns: actionsummary.ActionCacheStatisticsPrimaryKey, + Columns: []string{actionsummary.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/artifactmetrics.go b/ent/gen/ent/artifactmetrics.go index af20f7e..84c078b 100644 --- a/ent/gen/ent/artifactmetrics.go +++ b/ent/gen/ent/artifactmetrics.go @@ -9,6 +9,8 @@ 
import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/artifactmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/filesmetric" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) // ArtifactMetrics is the model entity for the ArtifactMetrics schema. @@ -18,76 +20,84 @@ type ArtifactMetrics struct { ID int `json:"id,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the ArtifactMetricsQuery when eager-loading is set. - Edges ArtifactMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges ArtifactMetricsEdges `json:"edges"` + artifact_metrics_source_artifacts_read *int + artifact_metrics_output_artifacts_seen *int + artifact_metrics_output_artifacts_from_action_cache *int + metrics_artifact_metrics *int + selectValues sql.SelectValues } // ArtifactMetricsEdges holds the relations/edges for other nodes in the graph. type ArtifactMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // SourceArtifactsRead holds the value of the source_artifacts_read edge. - SourceArtifactsRead []*FilesMetric `json:"source_artifacts_read,omitempty"` + SourceArtifactsRead *FilesMetric `json:"source_artifacts_read,omitempty"` // OutputArtifactsSeen holds the value of the output_artifacts_seen edge. - OutputArtifactsSeen []*FilesMetric `json:"output_artifacts_seen,omitempty"` + OutputArtifactsSeen *FilesMetric `json:"output_artifacts_seen,omitempty"` // OutputArtifactsFromActionCache holds the value of the output_artifacts_from_action_cache edge. - OutputArtifactsFromActionCache []*FilesMetric `json:"output_artifacts_from_action_cache,omitempty"` + OutputArtifactsFromActionCache *FilesMetric `json:"output_artifacts_from_action_cache,omitempty"` // TopLevelArtifacts holds the value of the top_level_artifacts edge. - TopLevelArtifacts []*FilesMetric `json:"top_level_artifacts,omitempty"` + TopLevelArtifacts *FilesMetric `json:"top_level_artifacts,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [5]bool // totalCount holds the count of the edges above. totalCount [5]map[string]int - - namedMetrics map[string][]*Metrics - namedSourceArtifactsRead map[string][]*FilesMetric - namedOutputArtifactsSeen map[string][]*FilesMetric - namedOutputArtifactsFromActionCache map[string][]*FilesMetric - namedTopLevelArtifacts map[string][]*FilesMetric } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e ArtifactMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ArtifactMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } // SourceArtifactsReadOrErr returns the SourceArtifactsRead value or an error if the edge -// was not loaded in eager-loading. -func (e ArtifactMetricsEdges) SourceArtifactsReadOrErr() ([]*FilesMetric, error) { - if e.loadedTypes[1] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e ArtifactMetricsEdges) SourceArtifactsReadOrErr() (*FilesMetric, error) { + if e.SourceArtifactsRead != nil { return e.SourceArtifactsRead, nil + } else if e.loadedTypes[1] { + return nil, &NotFoundError{label: filesmetric.Label} } return nil, &NotLoadedError{edge: "source_artifacts_read"} } // OutputArtifactsSeenOrErr returns the OutputArtifactsSeen value or an error if the edge -// was not loaded in eager-loading. -func (e ArtifactMetricsEdges) OutputArtifactsSeenOrErr() ([]*FilesMetric, error) { - if e.loadedTypes[2] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ArtifactMetricsEdges) OutputArtifactsSeenOrErr() (*FilesMetric, error) { + if e.OutputArtifactsSeen != nil { return e.OutputArtifactsSeen, nil + } else if e.loadedTypes[2] { + return nil, &NotFoundError{label: filesmetric.Label} } return nil, &NotLoadedError{edge: "output_artifacts_seen"} } // OutputArtifactsFromActionCacheOrErr returns the OutputArtifactsFromActionCache value or an error if the edge -// was not loaded in eager-loading. -func (e ArtifactMetricsEdges) OutputArtifactsFromActionCacheOrErr() ([]*FilesMetric, error) { - if e.loadedTypes[3] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ArtifactMetricsEdges) OutputArtifactsFromActionCacheOrErr() (*FilesMetric, error) { + if e.OutputArtifactsFromActionCache != nil { return e.OutputArtifactsFromActionCache, nil + } else if e.loadedTypes[3] { + return nil, &NotFoundError{label: filesmetric.Label} } return nil, &NotLoadedError{edge: "output_artifacts_from_action_cache"} } // TopLevelArtifactsOrErr returns the TopLevelArtifacts value or an error if the edge -// was not loaded in eager-loading. -func (e ArtifactMetricsEdges) TopLevelArtifactsOrErr() ([]*FilesMetric, error) { - if e.loadedTypes[4] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e ArtifactMetricsEdges) TopLevelArtifactsOrErr() (*FilesMetric, error) { + if e.TopLevelArtifacts != nil { return e.TopLevelArtifacts, nil + } else if e.loadedTypes[4] { + return nil, &NotFoundError{label: filesmetric.Label} } return nil, &NotLoadedError{edge: "top_level_artifacts"} } @@ -99,6 +109,14 @@ func (*ArtifactMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case artifactmetrics.FieldID: values[i] = new(sql.NullInt64) + case artifactmetrics.ForeignKeys[0]: // artifact_metrics_source_artifacts_read + values[i] = new(sql.NullInt64) + case artifactmetrics.ForeignKeys[1]: // artifact_metrics_output_artifacts_seen + values[i] = new(sql.NullInt64) + case artifactmetrics.ForeignKeys[2]: // artifact_metrics_output_artifacts_from_action_cache + values[i] = new(sql.NullInt64) + case artifactmetrics.ForeignKeys[3]: // metrics_artifact_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -120,6 +138,34 @@ func (am *ArtifactMetrics) assignValues(columns []string, values []any) error { return fmt.Errorf("unexpected type %T for field id", value) } am.ID = int(value.Int64) + case artifactmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_source_artifacts_read", value) + } else if value.Valid { + am.artifact_metrics_source_artifacts_read = new(int) + *am.artifact_metrics_source_artifacts_read = int(value.Int64) + } + case artifactmetrics.ForeignKeys[1]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_output_artifacts_seen", value) + } else if value.Valid { + am.artifact_metrics_output_artifacts_seen = new(int) + *am.artifact_metrics_output_artifacts_seen = int(value.Int64) + } + case artifactmetrics.ForeignKeys[2]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_output_artifacts_from_action_cache", value) + } else if value.Valid { + am.artifact_metrics_output_artifacts_from_action_cache = new(int) + *am.artifact_metrics_output_artifacts_from_action_cache = int(value.Int64) + } + case artifactmetrics.ForeignKeys[3]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_artifact_metrics", value) + } else if value.Valid { + am.metrics_artifact_metrics = new(int) + *am.metrics_artifact_metrics = int(value.Int64) + } default: am.selectValues.Set(columns[i], values[i]) } @@ -185,125 +231,5 @@ func (am *ArtifactMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (am *ArtifactMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if am.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := am.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (am *ArtifactMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if am.Edges.namedMetrics == nil { - am.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - am.Edges.namedMetrics[name] = []*Metrics{} - } else { - am.Edges.namedMetrics[name] = append(am.Edges.namedMetrics[name], edges...) 
- } -} - -// NamedSourceArtifactsRead returns the SourceArtifactsRead named value or an error if the edge was not -// loaded in eager-loading with this name. -func (am *ArtifactMetrics) NamedSourceArtifactsRead(name string) ([]*FilesMetric, error) { - if am.Edges.namedSourceArtifactsRead == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := am.Edges.namedSourceArtifactsRead[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (am *ArtifactMetrics) appendNamedSourceArtifactsRead(name string, edges ...*FilesMetric) { - if am.Edges.namedSourceArtifactsRead == nil { - am.Edges.namedSourceArtifactsRead = make(map[string][]*FilesMetric) - } - if len(edges) == 0 { - am.Edges.namedSourceArtifactsRead[name] = []*FilesMetric{} - } else { - am.Edges.namedSourceArtifactsRead[name] = append(am.Edges.namedSourceArtifactsRead[name], edges...) - } -} - -// NamedOutputArtifactsSeen returns the OutputArtifactsSeen named value or an error if the edge was not -// loaded in eager-loading with this name. -func (am *ArtifactMetrics) NamedOutputArtifactsSeen(name string) ([]*FilesMetric, error) { - if am.Edges.namedOutputArtifactsSeen == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := am.Edges.namedOutputArtifactsSeen[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (am *ArtifactMetrics) appendNamedOutputArtifactsSeen(name string, edges ...*FilesMetric) { - if am.Edges.namedOutputArtifactsSeen == nil { - am.Edges.namedOutputArtifactsSeen = make(map[string][]*FilesMetric) - } - if len(edges) == 0 { - am.Edges.namedOutputArtifactsSeen[name] = []*FilesMetric{} - } else { - am.Edges.namedOutputArtifactsSeen[name] = append(am.Edges.namedOutputArtifactsSeen[name], edges...) - } -} - -// NamedOutputArtifactsFromActionCache returns the OutputArtifactsFromActionCache named value or an error if the edge was not -// loaded in eager-loading with this name. -func (am *ArtifactMetrics) NamedOutputArtifactsFromActionCache(name string) ([]*FilesMetric, error) { - if am.Edges.namedOutputArtifactsFromActionCache == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := am.Edges.namedOutputArtifactsFromActionCache[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (am *ArtifactMetrics) appendNamedOutputArtifactsFromActionCache(name string, edges ...*FilesMetric) { - if am.Edges.namedOutputArtifactsFromActionCache == nil { - am.Edges.namedOutputArtifactsFromActionCache = make(map[string][]*FilesMetric) - } - if len(edges) == 0 { - am.Edges.namedOutputArtifactsFromActionCache[name] = []*FilesMetric{} - } else { - am.Edges.namedOutputArtifactsFromActionCache[name] = append(am.Edges.namedOutputArtifactsFromActionCache[name], edges...) - } -} - -// NamedTopLevelArtifacts returns the TopLevelArtifacts named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (am *ArtifactMetrics) NamedTopLevelArtifacts(name string) ([]*FilesMetric, error) { - if am.Edges.namedTopLevelArtifacts == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := am.Edges.namedTopLevelArtifacts[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (am *ArtifactMetrics) appendNamedTopLevelArtifacts(name string, edges ...*FilesMetric) { - if am.Edges.namedTopLevelArtifacts == nil { - am.Edges.namedTopLevelArtifacts = make(map[string][]*FilesMetric) - } - if len(edges) == 0 { - am.Edges.namedTopLevelArtifacts[name] = []*FilesMetric{} - } else { - am.Edges.namedTopLevelArtifacts[name] = append(am.Edges.namedTopLevelArtifacts[name], edges...) - } -} - // ArtifactMetricsSlice is a parsable slice of ArtifactMetrics. type ArtifactMetricsSlice []*ArtifactMetrics diff --git a/ent/gen/ent/artifactmetrics/artifactmetrics.go b/ent/gen/ent/artifactmetrics/artifactmetrics.go index f18e708..c7e08fd 100644 --- a/ent/gen/ent/artifactmetrics/artifactmetrics.go +++ b/ent/gen/ent/artifactmetrics/artifactmetrics.go @@ -24,37 +24,41 @@ const ( EdgeTopLevelArtifacts = "top_level_artifacts" // Table holds the table name of the artifactmetrics in the database. Table = "artifact_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_artifact_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "artifact_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_artifact_metrics" // SourceArtifactsReadTable is the table that holds the source_artifacts_read relation/edge. - SourceArtifactsReadTable = "files_metrics" + SourceArtifactsReadTable = "artifact_metrics" // SourceArtifactsReadInverseTable is the table name for the FilesMetric entity. // It exists in this package in order to avoid circular dependency with the "filesmetric" package. SourceArtifactsReadInverseTable = "files_metrics" // SourceArtifactsReadColumn is the table column denoting the source_artifacts_read relation/edge. SourceArtifactsReadColumn = "artifact_metrics_source_artifacts_read" // OutputArtifactsSeenTable is the table that holds the output_artifacts_seen relation/edge. - OutputArtifactsSeenTable = "files_metrics" + OutputArtifactsSeenTable = "artifact_metrics" // OutputArtifactsSeenInverseTable is the table name for the FilesMetric entity. // It exists in this package in order to avoid circular dependency with the "filesmetric" package. OutputArtifactsSeenInverseTable = "files_metrics" // OutputArtifactsSeenColumn is the table column denoting the output_artifacts_seen relation/edge. OutputArtifactsSeenColumn = "artifact_metrics_output_artifacts_seen" // OutputArtifactsFromActionCacheTable is the table that holds the output_artifacts_from_action_cache relation/edge. - OutputArtifactsFromActionCacheTable = "files_metrics" + OutputArtifactsFromActionCacheTable = "artifact_metrics" // OutputArtifactsFromActionCacheInverseTable is the table name for the FilesMetric entity. // It exists in this package in order to avoid circular dependency with the "filesmetric" package. 
OutputArtifactsFromActionCacheInverseTable = "files_metrics" // OutputArtifactsFromActionCacheColumn is the table column denoting the output_artifacts_from_action_cache relation/edge. OutputArtifactsFromActionCacheColumn = "artifact_metrics_output_artifacts_from_action_cache" - // TopLevelArtifactsTable is the table that holds the top_level_artifacts relation/edge. The primary key declared below. - TopLevelArtifactsTable = "artifact_metrics_top_level_artifacts" + // TopLevelArtifactsTable is the table that holds the top_level_artifacts relation/edge. + TopLevelArtifactsTable = "files_metrics" // TopLevelArtifactsInverseTable is the table name for the FilesMetric entity. // It exists in this package in order to avoid circular dependency with the "filesmetric" package. TopLevelArtifactsInverseTable = "files_metrics" + // TopLevelArtifactsColumn is the table column denoting the top_level_artifacts relation/edge. + TopLevelArtifactsColumn = "artifact_metrics_top_level_artifacts" ) // Columns holds all SQL columns for artifactmetrics fields. @@ -62,14 +66,14 @@ var Columns = []string{ FieldID, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "artifact_metrics_id"} - // TopLevelArtifactsPrimaryKey and TopLevelArtifactsColumn2 are the table columns denoting the - // primary key for the top_level_artifacts relation (M2M). - TopLevelArtifactsPrimaryKey = []string{"artifact_metrics_id", "files_metric_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "artifact_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "artifact_metrics_source_artifacts_read", + "artifact_metrics_output_artifacts_seen", + "artifact_metrics_output_artifacts_from_action_cache", + "metrics_artifact_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -78,6 +82,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -89,107 +98,72 @@ func ByID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldID, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// BySourceArtifactsReadCount orders the results by source_artifacts_read count. -func BySourceArtifactsReadCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newSourceArtifactsReadStep(), opts...) - } -} - -// BySourceArtifactsRead orders the results by source_artifacts_read terms. -func BySourceArtifactsRead(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newSourceArtifactsReadStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByOutputArtifactsSeenCount orders the results by output_artifacts_seen count. 
-func ByOutputArtifactsSeenCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newOutputArtifactsSeenStep(), opts...) - } -} - -// ByOutputArtifactsSeen orders the results by output_artifacts_seen terms. -func ByOutputArtifactsSeen(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newOutputArtifactsSeenStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByOutputArtifactsFromActionCacheCount orders the results by output_artifacts_from_action_cache count. -func ByOutputArtifactsFromActionCacheCount(opts ...sql.OrderTermOption) OrderOption { +// BySourceArtifactsReadField orders the results by source_artifacts_read field. +func BySourceArtifactsReadField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newOutputArtifactsFromActionCacheStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newSourceArtifactsReadStep(), sql.OrderByField(field, opts...)) } } -// ByOutputArtifactsFromActionCache orders the results by output_artifacts_from_action_cache terms. -func ByOutputArtifactsFromActionCache(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByOutputArtifactsSeenField orders the results by output_artifacts_seen field. +func ByOutputArtifactsSeenField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newOutputArtifactsFromActionCacheStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newOutputArtifactsSeenStep(), sql.OrderByField(field, opts...)) } } -// ByTopLevelArtifactsCount orders the results by top_level_artifacts count. -func ByTopLevelArtifactsCount(opts ...sql.OrderTermOption) OrderOption { +// ByOutputArtifactsFromActionCacheField orders the results by output_artifacts_from_action_cache field. +func ByOutputArtifactsFromActionCacheField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTopLevelArtifactsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newOutputArtifactsFromActionCacheStep(), sql.OrderByField(field, opts...)) } } -// ByTopLevelArtifacts orders the results by top_level_artifacts terms. -func ByTopLevelArtifacts(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTopLevelArtifactsField orders the results by top_level_artifacts field. +func ByTopLevelArtifactsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTopLevelArtifactsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newTopLevelArtifactsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newSourceArtifactsReadStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(SourceArtifactsReadInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, SourceArtifactsReadTable, SourceArtifactsReadColumn), + sqlgraph.Edge(sqlgraph.M2O, false, SourceArtifactsReadTable, SourceArtifactsReadColumn), ) } func newOutputArtifactsSeenStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(OutputArtifactsSeenInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, OutputArtifactsSeenTable, OutputArtifactsSeenColumn), + sqlgraph.Edge(sqlgraph.M2O, false, OutputArtifactsSeenTable, OutputArtifactsSeenColumn), ) } func newOutputArtifactsFromActionCacheStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(OutputArtifactsFromActionCacheInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, OutputArtifactsFromActionCacheTable, OutputArtifactsFromActionCacheColumn), + sqlgraph.Edge(sqlgraph.M2O, false, OutputArtifactsFromActionCacheTable, OutputArtifactsFromActionCacheColumn), ) } func newTopLevelArtifactsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TopLevelArtifactsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TopLevelArtifactsTable, TopLevelArtifactsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TopLevelArtifactsTable, TopLevelArtifactsColumn), ) } diff --git a/ent/gen/ent/artifactmetrics/where.go b/ent/gen/ent/artifactmetrics/where.go index 6f4776c..617539a 100644 --- a/ent/gen/ent/artifactmetrics/where.go +++ b/ent/gen/ent/artifactmetrics/where.go @@ -58,7 +58,7 @@ func HasMetrics() predicate.ArtifactMetrics { return predicate.ArtifactMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -81,7 +81,7 @@ func HasSourceArtifactsRead() predicate.ArtifactMetrics { return predicate.ArtifactMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, SourceArtifactsReadTable, SourceArtifactsReadColumn), + sqlgraph.Edge(sqlgraph.M2O, false, SourceArtifactsReadTable, SourceArtifactsReadColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -104,7 +104,7 @@ func HasOutputArtifactsSeen() predicate.ArtifactMetrics { return predicate.ArtifactMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, OutputArtifactsSeenTable, OutputArtifactsSeenColumn), + sqlgraph.Edge(sqlgraph.M2O, false, OutputArtifactsSeenTable, OutputArtifactsSeenColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -127,7 +127,7 @@ func HasOutputArtifactsFromActionCache() predicate.ArtifactMetrics { return predicate.ArtifactMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, OutputArtifactsFromActionCacheTable, OutputArtifactsFromActionCacheColumn), + 
sqlgraph.Edge(sqlgraph.M2O, false, OutputArtifactsFromActionCacheTable, OutputArtifactsFromActionCacheColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -150,7 +150,7 @@ func HasTopLevelArtifacts() predicate.ArtifactMetrics { return predicate.ArtifactMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TopLevelArtifactsTable, TopLevelArtifactsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TopLevelArtifactsTable, TopLevelArtifactsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/artifactmetrics_create.go b/ent/gen/ent/artifactmetrics_create.go index ba9a142..0c6896d 100644 --- a/ent/gen/ent/artifactmetrics_create.go +++ b/ent/gen/ent/artifactmetrics_create.go @@ -20,79 +20,99 @@ type ArtifactMetricsCreate struct { hooks []Hook } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (amc *ArtifactMetricsCreate) AddMetricIDs(ids ...int) *ArtifactMetricsCreate { - amc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (amc *ArtifactMetricsCreate) SetMetricsID(id int) *ArtifactMetricsCreate { + amc.mutation.SetMetricsID(id) return amc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (amc *ArtifactMetricsCreate) AddMetrics(m ...*Metrics) *ArtifactMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (amc *ArtifactMetricsCreate) SetNillableMetricsID(id *int) *ArtifactMetricsCreate { + if id != nil { + amc = amc.SetMetricsID(*id) } - return amc.AddMetricIDs(ids...) + return amc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (amc *ArtifactMetricsCreate) SetMetrics(m *Metrics) *ArtifactMetricsCreate { + return amc.SetMetricsID(m.ID) } -// AddSourceArtifactsReadIDs adds the "source_artifacts_read" edge to the FilesMetric entity by IDs. -func (amc *ArtifactMetricsCreate) AddSourceArtifactsReadIDs(ids ...int) *ArtifactMetricsCreate { - amc.mutation.AddSourceArtifactsReadIDs(ids...) +// SetSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID. +func (amc *ArtifactMetricsCreate) SetSourceArtifactsReadID(id int) *ArtifactMetricsCreate { + amc.mutation.SetSourceArtifactsReadID(id) return amc } -// AddSourceArtifactsRead adds the "source_artifacts_read" edges to the FilesMetric entity. -func (amc *ArtifactMetricsCreate) AddSourceArtifactsRead(f ...*FilesMetric) *ArtifactMetricsCreate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID if the given value is not nil. +func (amc *ArtifactMetricsCreate) SetNillableSourceArtifactsReadID(id *int) *ArtifactMetricsCreate { + if id != nil { + amc = amc.SetSourceArtifactsReadID(*id) } - return amc.AddSourceArtifactsReadIDs(ids...) + return amc } -// AddOutputArtifactsSeenIDs adds the "output_artifacts_seen" edge to the FilesMetric entity by IDs. -func (amc *ArtifactMetricsCreate) AddOutputArtifactsSeenIDs(ids ...int) *ArtifactMetricsCreate { - amc.mutation.AddOutputArtifactsSeenIDs(ids...) +// SetSourceArtifactsRead sets the "source_artifacts_read" edge to the FilesMetric entity. 
+func (amc *ArtifactMetricsCreate) SetSourceArtifactsRead(f *FilesMetric) *ArtifactMetricsCreate { + return amc.SetSourceArtifactsReadID(f.ID) +} + +// SetOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID. +func (amc *ArtifactMetricsCreate) SetOutputArtifactsSeenID(id int) *ArtifactMetricsCreate { + amc.mutation.SetOutputArtifactsSeenID(id) return amc } -// AddOutputArtifactsSeen adds the "output_artifacts_seen" edges to the FilesMetric entity. -func (amc *ArtifactMetricsCreate) AddOutputArtifactsSeen(f ...*FilesMetric) *ArtifactMetricsCreate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID if the given value is not nil. +func (amc *ArtifactMetricsCreate) SetNillableOutputArtifactsSeenID(id *int) *ArtifactMetricsCreate { + if id != nil { + amc = amc.SetOutputArtifactsSeenID(*id) } - return amc.AddOutputArtifactsSeenIDs(ids...) + return amc } -// AddOutputArtifactsFromActionCacheIDs adds the "output_artifacts_from_action_cache" edge to the FilesMetric entity by IDs. -func (amc *ArtifactMetricsCreate) AddOutputArtifactsFromActionCacheIDs(ids ...int) *ArtifactMetricsCreate { - amc.mutation.AddOutputArtifactsFromActionCacheIDs(ids...) +// SetOutputArtifactsSeen sets the "output_artifacts_seen" edge to the FilesMetric entity. +func (amc *ArtifactMetricsCreate) SetOutputArtifactsSeen(f *FilesMetric) *ArtifactMetricsCreate { + return amc.SetOutputArtifactsSeenID(f.ID) +} + +// SetOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID. +func (amc *ArtifactMetricsCreate) SetOutputArtifactsFromActionCacheID(id int) *ArtifactMetricsCreate { + amc.mutation.SetOutputArtifactsFromActionCacheID(id) return amc } -// AddOutputArtifactsFromActionCache adds the "output_artifacts_from_action_cache" edges to the FilesMetric entity. -func (amc *ArtifactMetricsCreate) AddOutputArtifactsFromActionCache(f ...*FilesMetric) *ArtifactMetricsCreate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID if the given value is not nil. +func (amc *ArtifactMetricsCreate) SetNillableOutputArtifactsFromActionCacheID(id *int) *ArtifactMetricsCreate { + if id != nil { + amc = amc.SetOutputArtifactsFromActionCacheID(*id) } - return amc.AddOutputArtifactsFromActionCacheIDs(ids...) + return amc +} + +// SetOutputArtifactsFromActionCache sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity. +func (amc *ArtifactMetricsCreate) SetOutputArtifactsFromActionCache(f *FilesMetric) *ArtifactMetricsCreate { + return amc.SetOutputArtifactsFromActionCacheID(f.ID) } -// AddTopLevelArtifactIDs adds the "top_level_artifacts" edge to the FilesMetric entity by IDs. -func (amc *ArtifactMetricsCreate) AddTopLevelArtifactIDs(ids ...int) *ArtifactMetricsCreate { - amc.mutation.AddTopLevelArtifactIDs(ids...) +// SetTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID. +func (amc *ArtifactMetricsCreate) SetTopLevelArtifactsID(id int) *ArtifactMetricsCreate { + amc.mutation.SetTopLevelArtifactsID(id) return amc } -// AddTopLevelArtifacts adds the "top_level_artifacts" edges to the FilesMetric entity. 
-func (amc *ArtifactMetricsCreate) AddTopLevelArtifacts(f ...*FilesMetric) *ArtifactMetricsCreate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID if the given value is not nil. +func (amc *ArtifactMetricsCreate) SetNillableTopLevelArtifactsID(id *int) *ArtifactMetricsCreate { + if id != nil { + amc = amc.SetTopLevelArtifactsID(*id) } - return amc.AddTopLevelArtifactIDs(ids...) + return amc +} + +// SetTopLevelArtifacts sets the "top_level_artifacts" edge to the FilesMetric entity. +func (amc *ArtifactMetricsCreate) SetTopLevelArtifacts(f *FilesMetric) *ArtifactMetricsCreate { + return amc.SetTopLevelArtifactsID(f.ID) } // Mutation returns the ArtifactMetricsMutation object of the builder. @@ -157,10 +177,10 @@ func (amc *ArtifactMetricsCreate) createSpec() (*ArtifactMetrics, *sqlgraph.Crea ) if nodes := amc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, + Columns: []string{artifactmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -169,11 +189,12 @@ func (amc *ArtifactMetricsCreate) createSpec() (*ArtifactMetrics, *sqlgraph.Crea for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_artifact_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := amc.mutation.SourceArtifactsReadIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.SourceArtifactsReadTable, Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, @@ -185,11 +206,12 @@ func (amc *ArtifactMetricsCreate) createSpec() (*ArtifactMetrics, *sqlgraph.Crea for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.artifact_metrics_source_artifacts_read = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := amc.mutation.OutputArtifactsSeenIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsSeenTable, Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, @@ -201,11 +223,12 @@ func (amc *ArtifactMetricsCreate) createSpec() (*ArtifactMetrics, *sqlgraph.Crea for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.artifact_metrics_output_artifacts_seen = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := amc.mutation.OutputArtifactsFromActionCacheIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsFromActionCacheTable, Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, @@ -217,14 +240,15 @@ func (amc *ArtifactMetricsCreate) createSpec() (*ArtifactMetrics, *sqlgraph.Crea for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.artifact_metrics_output_artifacts_from_action_cache = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := amc.mutation.TopLevelArtifactsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, + Columns: 
[]string{artifactmetrics.TopLevelArtifactsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), diff --git a/ent/gen/ent/artifactmetrics_query.go b/ent/gen/ent/artifactmetrics_query.go index 17bca6d..2568dd1 100644 --- a/ent/gen/ent/artifactmetrics_query.go +++ b/ent/gen/ent/artifactmetrics_query.go @@ -20,22 +20,18 @@ import ( // ArtifactMetricsQuery is the builder for querying ArtifactMetrics entities. type ArtifactMetricsQuery struct { config - ctx *QueryContext - order []artifactmetrics.OrderOption - inters []Interceptor - predicates []predicate.ArtifactMetrics - withMetrics *MetricsQuery - withSourceArtifactsRead *FilesMetricQuery - withOutputArtifactsSeen *FilesMetricQuery - withOutputArtifactsFromActionCache *FilesMetricQuery - withTopLevelArtifacts *FilesMetricQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*ArtifactMetrics) error - withNamedMetrics map[string]*MetricsQuery - withNamedSourceArtifactsRead map[string]*FilesMetricQuery - withNamedOutputArtifactsSeen map[string]*FilesMetricQuery - withNamedOutputArtifactsFromActionCache map[string]*FilesMetricQuery - withNamedTopLevelArtifacts map[string]*FilesMetricQuery + ctx *QueryContext + order []artifactmetrics.OrderOption + inters []Interceptor + predicates []predicate.ArtifactMetrics + withMetrics *MetricsQuery + withSourceArtifactsRead *FilesMetricQuery + withOutputArtifactsSeen *FilesMetricQuery + withOutputArtifactsFromActionCache *FilesMetricQuery + withTopLevelArtifacts *FilesMetricQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*ArtifactMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -86,7 +82,7 @@ func (amq *ArtifactMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, artifactmetrics.MetricsTable, artifactmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, artifactmetrics.MetricsTable, artifactmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(amq.driver.Dialect(), step) return fromU, nil @@ -108,7 +104,7 @@ func (amq *ArtifactMetricsQuery) QuerySourceArtifactsRead() *FilesMetricQuery { step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, selector), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.SourceArtifactsReadTable, artifactmetrics.SourceArtifactsReadColumn), + sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.SourceArtifactsReadTable, artifactmetrics.SourceArtifactsReadColumn), ) fromU = sqlgraph.SetNeighbors(amq.driver.Dialect(), step) return fromU, nil @@ -130,7 +126,7 @@ func (amq *ArtifactMetricsQuery) QueryOutputArtifactsSeen() *FilesMetricQuery { step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, selector), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.OutputArtifactsSeenTable, artifactmetrics.OutputArtifactsSeenColumn), + sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.OutputArtifactsSeenTable, artifactmetrics.OutputArtifactsSeenColumn), ) fromU = sqlgraph.SetNeighbors(amq.driver.Dialect(), step) return fromU, nil @@ -152,7 +148,7 @@ func (amq *ArtifactMetricsQuery) QueryOutputArtifactsFromActionCache() 
*FilesMet step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, selector), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.OutputArtifactsFromActionCacheTable, artifactmetrics.OutputArtifactsFromActionCacheColumn), + sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.OutputArtifactsFromActionCacheTable, artifactmetrics.OutputArtifactsFromActionCacheColumn), ) fromU = sqlgraph.SetNeighbors(amq.driver.Dialect(), step) return fromU, nil @@ -174,7 +170,7 @@ func (amq *ArtifactMetricsQuery) QueryTopLevelArtifacts() *FilesMetricQuery { step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, selector), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, artifactmetrics.TopLevelArtifactsTable, artifactmetrics.TopLevelArtifactsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, artifactmetrics.TopLevelArtifactsTable, artifactmetrics.TopLevelArtifactsColumn), ) fromU = sqlgraph.SetNeighbors(amq.driver.Dialect(), step) return fromU, nil @@ -495,6 +491,7 @@ func (amq *ArtifactMetricsQuery) prepareQuery(ctx context.Context) error { func (amq *ArtifactMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ArtifactMetrics, error) { var ( nodes = []*ArtifactMetrics{} + withFKs = amq.withFKs _spec = amq.querySpec() loadedTypes = [5]bool{ amq.withMetrics != nil, @@ -504,6 +501,12 @@ func (amq *ArtifactMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) amq.withTopLevelArtifacts != nil, } ) + if amq.withMetrics != nil || amq.withSourceArtifactsRead != nil || amq.withOutputArtifactsSeen != nil || amq.withOutputArtifactsFromActionCache != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, artifactmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*ArtifactMetrics).scanValues(nil, columns) } @@ -526,80 +529,32 @@ func (amq *ArtifactMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := amq.withMetrics; query != nil { - if err := amq.loadMetrics(ctx, query, nodes, - func(n *ArtifactMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *ArtifactMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := amq.loadMetrics(ctx, query, nodes, nil, + func(n *ArtifactMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } if query := amq.withSourceArtifactsRead; query != nil { - if err := amq.loadSourceArtifactsRead(ctx, query, nodes, - func(n *ArtifactMetrics) { n.Edges.SourceArtifactsRead = []*FilesMetric{} }, - func(n *ArtifactMetrics, e *FilesMetric) { - n.Edges.SourceArtifactsRead = append(n.Edges.SourceArtifactsRead, e) - }); err != nil { + if err := amq.loadSourceArtifactsRead(ctx, query, nodes, nil, + func(n *ArtifactMetrics, e *FilesMetric) { n.Edges.SourceArtifactsRead = e }); err != nil { return nil, err } } if query := amq.withOutputArtifactsSeen; query != nil { - if err := amq.loadOutputArtifactsSeen(ctx, query, nodes, - func(n *ArtifactMetrics) { n.Edges.OutputArtifactsSeen = []*FilesMetric{} }, - func(n *ArtifactMetrics, e *FilesMetric) { - n.Edges.OutputArtifactsSeen = append(n.Edges.OutputArtifactsSeen, e) - }); err != nil { + if err := amq.loadOutputArtifactsSeen(ctx, query, nodes, nil, + func(n *ArtifactMetrics, e *FilesMetric) { n.Edges.OutputArtifactsSeen = e }); err != nil { return nil, err } } if query := amq.withOutputArtifactsFromActionCache; query != nil { - if err := amq.loadOutputArtifactsFromActionCache(ctx, query, nodes, - func(n *ArtifactMetrics) { n.Edges.OutputArtifactsFromActionCache = []*FilesMetric{} }, - func(n *ArtifactMetrics, e *FilesMetric) { - n.Edges.OutputArtifactsFromActionCache = append(n.Edges.OutputArtifactsFromActionCache, e) - }); err != nil { + if err := amq.loadOutputArtifactsFromActionCache(ctx, query, nodes, nil, + func(n *ArtifactMetrics, e *FilesMetric) { n.Edges.OutputArtifactsFromActionCache = e }); err != nil { return nil, err } } if query := amq.withTopLevelArtifacts; query != nil { - if err := amq.loadTopLevelArtifacts(ctx, query, nodes, - func(n *ArtifactMetrics) { n.Edges.TopLevelArtifacts = []*FilesMetric{} }, - func(n *ArtifactMetrics, e *FilesMetric) { - n.Edges.TopLevelArtifacts = append(n.Edges.TopLevelArtifacts, e) - }); err != nil { - return nil, err - } - } - for name, query := range amq.withNamedMetrics { - if err := amq.loadMetrics(ctx, query, nodes, - func(n *ArtifactMetrics) { n.appendNamedMetrics(name) }, - func(n *ArtifactMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range amq.withNamedSourceArtifactsRead { - if err := amq.loadSourceArtifactsRead(ctx, query, nodes, - func(n *ArtifactMetrics) { n.appendNamedSourceArtifactsRead(name) }, - func(n *ArtifactMetrics, e *FilesMetric) { n.appendNamedSourceArtifactsRead(name, e) }); err != nil { - return nil, err - } - } - for name, query := range amq.withNamedOutputArtifactsSeen { - if err := amq.loadOutputArtifactsSeen(ctx, query, nodes, - func(n *ArtifactMetrics) { n.appendNamedOutputArtifactsSeen(name) }, - func(n *ArtifactMetrics, e *FilesMetric) { n.appendNamedOutputArtifactsSeen(name, e) }); err != nil { - return nil, err - } - } - for name, query := range 
amq.withNamedOutputArtifactsFromActionCache { - if err := amq.loadOutputArtifactsFromActionCache(ctx, query, nodes, - func(n *ArtifactMetrics) { n.appendNamedOutputArtifactsFromActionCache(name) }, - func(n *ArtifactMetrics, e *FilesMetric) { n.appendNamedOutputArtifactsFromActionCache(name, e) }); err != nil { - return nil, err - } - } - for name, query := range amq.withNamedTopLevelArtifacts { - if err := amq.loadTopLevelArtifacts(ctx, query, nodes, - func(n *ArtifactMetrics) { n.appendNamedTopLevelArtifacts(name) }, - func(n *ArtifactMetrics, e *FilesMetric) { n.appendNamedTopLevelArtifacts(name, e) }); err != nil { + if err := amq.loadTopLevelArtifacts(ctx, query, nodes, nil, + func(n *ArtifactMetrics, e *FilesMetric) { n.Edges.TopLevelArtifacts = e }); err != nil { return nil, err } } @@ -612,217 +567,158 @@ func (amq *ArtifactMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (amq *ArtifactMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*ArtifactMetrics, init func(*ArtifactMetrics), assign func(*ArtifactMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ArtifactMetrics) - nids := make(map[int]map[*ArtifactMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ArtifactMetrics) + for i := range nodes { + if nodes[i].metrics_artifact_metrics == nil { + continue + } + fk := *nodes[i].metrics_artifact_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(artifactmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(artifactmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(artifactmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(artifactmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ArtifactMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_artifact_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (amq *ArtifactMetricsQuery) loadSourceArtifactsRead(ctx context.Context, query *FilesMetricQuery, nodes []*ArtifactMetrics, init func(*ArtifactMetrics), assign func(*ArtifactMetrics, *FilesMetric)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*ArtifactMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ArtifactMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].artifact_metrics_source_artifacts_read == nil { + continue } + fk := *nodes[i].artifact_metrics_source_artifacts_read + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.FilesMetric(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(artifactmetrics.SourceArtifactsReadColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(filesmetric.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.artifact_metrics_source_artifacts_read - if fk == nil { - return fmt.Errorf(`foreign-key "artifact_metrics_source_artifacts_read" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "artifact_metrics_source_artifacts_read" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "artifact_metrics_source_artifacts_read" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (amq *ArtifactMetricsQuery) loadOutputArtifactsSeen(ctx context.Context, query *FilesMetricQuery, nodes []*ArtifactMetrics, init func(*ArtifactMetrics), assign func(*ArtifactMetrics, *FilesMetric)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*ArtifactMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ArtifactMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] 
= nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].artifact_metrics_output_artifacts_seen == nil { + continue + } + fk := *nodes[i].artifact_metrics_output_artifacts_seen + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.FilesMetric(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(artifactmetrics.OutputArtifactsSeenColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(filesmetric.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.artifact_metrics_output_artifacts_seen - if fk == nil { - return fmt.Errorf(`foreign-key "artifact_metrics_output_artifacts_seen" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "artifact_metrics_output_artifacts_seen" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "artifact_metrics_output_artifacts_seen" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (amq *ArtifactMetricsQuery) loadOutputArtifactsFromActionCache(ctx context.Context, query *FilesMetricQuery, nodes []*ArtifactMetrics, init func(*ArtifactMetrics), assign func(*ArtifactMetrics, *FilesMetric)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*ArtifactMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ArtifactMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].artifact_metrics_output_artifacts_from_action_cache == nil { + continue + } + fk := *nodes[i].artifact_metrics_output_artifacts_from_action_cache + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.FilesMetric(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(artifactmetrics.OutputArtifactsFromActionCacheColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(filesmetric.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.artifact_metrics_output_artifacts_from_action_cache - if fk == nil { - return fmt.Errorf(`foreign-key "artifact_metrics_output_artifacts_from_action_cache" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "artifact_metrics_output_artifacts_from_action_cache" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "artifact_metrics_output_artifacts_from_action_cache" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (amq *ArtifactMetricsQuery) loadTopLevelArtifacts(ctx context.Context, query *FilesMetricQuery, nodes []*ArtifactMetrics, init func(*ArtifactMetrics), assign func(*ArtifactMetrics, *FilesMetric)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ArtifactMetrics) - nids := make(map[int]map[*ArtifactMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(artifactmetrics.TopLevelArtifactsTable) - s.Join(joinT).On(s.C(filesmetric.FieldID), 
joinT.C(artifactmetrics.TopLevelArtifactsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(artifactmetrics.TopLevelArtifactsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(artifactmetrics.TopLevelArtifactsPrimaryKey[0])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ArtifactMetrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ArtifactMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*FilesMetric](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.FilesMetric(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(artifactmetrics.TopLevelArtifactsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "top_level_artifacts" node returned %v`, n.ID) + fk := n.artifact_metrics_top_level_artifacts + if fk == nil { + return fmt.Errorf(`foreign-key "artifact_metrics_top_level_artifacts" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "artifact_metrics_top_level_artifacts" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -911,76 +807,6 @@ func (amq *ArtifactMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (amq *ArtifactMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *ArtifactMetricsQuery { - query := (&MetricsClient{config: amq.config}).Query() - for _, opt := range opts { - opt(query) - } - if amq.withNamedMetrics == nil { - amq.withNamedMetrics = make(map[string]*MetricsQuery) - } - amq.withNamedMetrics[name] = query - return amq -} - -// WithNamedSourceArtifactsRead tells the query-builder to eager-load the nodes that are connected to the "source_artifacts_read" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
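The load helpers rewritten above now resolve each ArtifactMetrics edge through a foreign-key column instead of a join table, so a single With* call per edge is enough; the WithNamed* helpers removed in this hunk only existed for the old many-edge form. A minimal read-side sketch, assuming a generated *ent.Client named client, the usual context plumbing, and that the ArtifactMetricsEdges fields were regenerated as single pointers (mirroring the BuildGraphMetricsEdges change later in this patch) — illustrative only, not part of the patch:

func exampleLoadArtifactMetrics(ctx context.Context, client *ent.Client) error {
	am, err := client.ArtifactMetrics.Query().
		WithMetrics().             // O2O back-reference to Metrics
		WithSourceArtifactsRead(). // loaded via the artifact_metrics_source_artifacts_read FK
		WithTopLevelArtifacts().
		First(ctx)
	if err != nil {
		return err
	}
	// With unique edges, each Edges field holds at most one neighbor.
	if fm := am.Edges.SourceArtifactsRead; fm != nil {
		_ = fm.ID
	}
	return nil
}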
-func (amq *ArtifactMetricsQuery) WithNamedSourceArtifactsRead(name string, opts ...func(*FilesMetricQuery)) *ArtifactMetricsQuery { - query := (&FilesMetricClient{config: amq.config}).Query() - for _, opt := range opts { - opt(query) - } - if amq.withNamedSourceArtifactsRead == nil { - amq.withNamedSourceArtifactsRead = make(map[string]*FilesMetricQuery) - } - amq.withNamedSourceArtifactsRead[name] = query - return amq -} - -// WithNamedOutputArtifactsSeen tells the query-builder to eager-load the nodes that are connected to the "output_artifacts_seen" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (amq *ArtifactMetricsQuery) WithNamedOutputArtifactsSeen(name string, opts ...func(*FilesMetricQuery)) *ArtifactMetricsQuery { - query := (&FilesMetricClient{config: amq.config}).Query() - for _, opt := range opts { - opt(query) - } - if amq.withNamedOutputArtifactsSeen == nil { - amq.withNamedOutputArtifactsSeen = make(map[string]*FilesMetricQuery) - } - amq.withNamedOutputArtifactsSeen[name] = query - return amq -} - -// WithNamedOutputArtifactsFromActionCache tells the query-builder to eager-load the nodes that are connected to the "output_artifacts_from_action_cache" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (amq *ArtifactMetricsQuery) WithNamedOutputArtifactsFromActionCache(name string, opts ...func(*FilesMetricQuery)) *ArtifactMetricsQuery { - query := (&FilesMetricClient{config: amq.config}).Query() - for _, opt := range opts { - opt(query) - } - if amq.withNamedOutputArtifactsFromActionCache == nil { - amq.withNamedOutputArtifactsFromActionCache = make(map[string]*FilesMetricQuery) - } - amq.withNamedOutputArtifactsFromActionCache[name] = query - return amq -} - -// WithNamedTopLevelArtifacts tells the query-builder to eager-load the nodes that are connected to the "top_level_artifacts" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (amq *ArtifactMetricsQuery) WithNamedTopLevelArtifacts(name string, opts ...func(*FilesMetricQuery)) *ArtifactMetricsQuery { - query := (&FilesMetricClient{config: amq.config}).Query() - for _, opt := range opts { - opt(query) - } - if amq.withNamedTopLevelArtifacts == nil { - amq.withNamedTopLevelArtifacts = make(map[string]*FilesMetricQuery) - } - amq.withNamedTopLevelArtifacts[name] = query - return amq -} - // ArtifactMetricsGroupBy is the group-by builder for ArtifactMetrics entities. type ArtifactMetricsGroupBy struct { selector diff --git a/ent/gen/ent/artifactmetrics_update.go b/ent/gen/ent/artifactmetrics_update.go index a76f69b..8895ac3 100644 --- a/ent/gen/ent/artifactmetrics_update.go +++ b/ent/gen/ent/artifactmetrics_update.go @@ -29,191 +29,136 @@ func (amu *ArtifactMetricsUpdate) Where(ps ...predicate.ArtifactMetrics) *Artifa return amu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (amu *ArtifactMetricsUpdate) AddMetricIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (amu *ArtifactMetricsUpdate) SetMetricsID(id int) *ArtifactMetricsUpdate { + amu.mutation.SetMetricsID(id) return amu } -// AddMetrics adds the "metrics" edges to the Metrics entity. 
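On the write side, the hunk below swaps the many-edge Add*/Remove* builders for single-edge Set*/SetNillable*/Clear* builders. A usage sketch under the new API; client, ctx, and the IDs are assumed to exist and are not part of the patch:

func exampleUpdateArtifactMetrics(ctx context.Context, client *ent.Client, amID, metricsID int, readFM *ent.FilesMetric) error {
	return client.ArtifactMetrics.
		UpdateOneID(amID).
		SetMetricsID(metricsID).        // was AddMetricIDs(...) on the old M2M edge
		SetSourceArtifactsRead(readFM). // was AddSourceArtifactsRead(...)
		ClearTopLevelArtifacts().       // Clear* now detaches the single edge rather than "all" edges
		Exec(ctx)
}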
-func (amu *ArtifactMetricsUpdate) AddMetrics(m ...*Metrics) *ArtifactMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (amu *ArtifactMetricsUpdate) SetNillableMetricsID(id *int) *ArtifactMetricsUpdate { + if id != nil { + amu = amu.SetMetricsID(*id) } - return amu.AddMetricIDs(ids...) -} - -// AddSourceArtifactsReadIDs adds the "source_artifacts_read" edge to the FilesMetric entity by IDs. -func (amu *ArtifactMetricsUpdate) AddSourceArtifactsReadIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.AddSourceArtifactsReadIDs(ids...) return amu } -// AddSourceArtifactsRead adds the "source_artifacts_read" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) AddSourceArtifactsRead(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.AddSourceArtifactsReadIDs(ids...) +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (amu *ArtifactMetricsUpdate) SetMetrics(m *Metrics) *ArtifactMetricsUpdate { + return amu.SetMetricsID(m.ID) } -// AddOutputArtifactsSeenIDs adds the "output_artifacts_seen" edge to the FilesMetric entity by IDs. -func (amu *ArtifactMetricsUpdate) AddOutputArtifactsSeenIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.AddOutputArtifactsSeenIDs(ids...) +// SetSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID. +func (amu *ArtifactMetricsUpdate) SetSourceArtifactsReadID(id int) *ArtifactMetricsUpdate { + amu.mutation.SetSourceArtifactsReadID(id) return amu } -// AddOutputArtifactsSeen adds the "output_artifacts_seen" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) AddOutputArtifactsSeen(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID if the given value is not nil. +func (amu *ArtifactMetricsUpdate) SetNillableSourceArtifactsReadID(id *int) *ArtifactMetricsUpdate { + if id != nil { + amu = amu.SetSourceArtifactsReadID(*id) } - return amu.AddOutputArtifactsSeenIDs(ids...) -} - -// AddOutputArtifactsFromActionCacheIDs adds the "output_artifacts_from_action_cache" edge to the FilesMetric entity by IDs. -func (amu *ArtifactMetricsUpdate) AddOutputArtifactsFromActionCacheIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.AddOutputArtifactsFromActionCacheIDs(ids...) return amu } -// AddOutputArtifactsFromActionCache adds the "output_artifacts_from_action_cache" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) AddOutputArtifactsFromActionCache(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.AddOutputArtifactsFromActionCacheIDs(ids...) +// SetSourceArtifactsRead sets the "source_artifacts_read" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) SetSourceArtifactsRead(f *FilesMetric) *ArtifactMetricsUpdate { + return amu.SetSourceArtifactsReadID(f.ID) } -// AddTopLevelArtifactIDs adds the "top_level_artifacts" edge to the FilesMetric entity by IDs. -func (amu *ArtifactMetricsUpdate) AddTopLevelArtifactIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.AddTopLevelArtifactIDs(ids...) 
+// SetOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID. +func (amu *ArtifactMetricsUpdate) SetOutputArtifactsSeenID(id int) *ArtifactMetricsUpdate { + amu.mutation.SetOutputArtifactsSeenID(id) return amu } -// AddTopLevelArtifacts adds the "top_level_artifacts" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) AddTopLevelArtifacts(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID if the given value is not nil. +func (amu *ArtifactMetricsUpdate) SetNillableOutputArtifactsSeenID(id *int) *ArtifactMetricsUpdate { + if id != nil { + amu = amu.SetOutputArtifactsSeenID(*id) } - return amu.AddTopLevelArtifactIDs(ids...) + return amu } -// Mutation returns the ArtifactMetricsMutation object of the builder. -func (amu *ArtifactMetricsUpdate) Mutation() *ArtifactMetricsMutation { - return amu.mutation +// SetOutputArtifactsSeen sets the "output_artifacts_seen" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) SetOutputArtifactsSeen(f *FilesMetric) *ArtifactMetricsUpdate { + return amu.SetOutputArtifactsSeenID(f.ID) } -// ClearMetrics clears all "metrics" edges to the Metrics entity. -func (amu *ArtifactMetricsUpdate) ClearMetrics() *ArtifactMetricsUpdate { - amu.mutation.ClearMetrics() +// SetOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID. +func (amu *ArtifactMetricsUpdate) SetOutputArtifactsFromActionCacheID(id int) *ArtifactMetricsUpdate { + amu.mutation.SetOutputArtifactsFromActionCacheID(id) return amu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (amu *ArtifactMetricsUpdate) RemoveMetricIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.RemoveMetricIDs(ids...) +// SetNillableOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID if the given value is not nil. +func (amu *ArtifactMetricsUpdate) SetNillableOutputArtifactsFromActionCacheID(id *int) *ArtifactMetricsUpdate { + if id != nil { + amu = amu.SetOutputArtifactsFromActionCacheID(*id) + } return amu } -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (amu *ArtifactMetricsUpdate) RemoveMetrics(m ...*Metrics) *ArtifactMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return amu.RemoveMetricIDs(ids...) +// SetOutputArtifactsFromActionCache sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) SetOutputArtifactsFromActionCache(f *FilesMetric) *ArtifactMetricsUpdate { + return amu.SetOutputArtifactsFromActionCacheID(f.ID) } -// ClearSourceArtifactsRead clears all "source_artifacts_read" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) ClearSourceArtifactsRead() *ArtifactMetricsUpdate { - amu.mutation.ClearSourceArtifactsRead() +// SetTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID. +func (amu *ArtifactMetricsUpdate) SetTopLevelArtifactsID(id int) *ArtifactMetricsUpdate { + amu.mutation.SetTopLevelArtifactsID(id) return amu } -// RemoveSourceArtifactsReadIDs removes the "source_artifacts_read" edge to FilesMetric entities by IDs. 
-func (amu *ArtifactMetricsUpdate) RemoveSourceArtifactsReadIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.RemoveSourceArtifactsReadIDs(ids...) +// SetNillableTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID if the given value is not nil. +func (amu *ArtifactMetricsUpdate) SetNillableTopLevelArtifactsID(id *int) *ArtifactMetricsUpdate { + if id != nil { + amu = amu.SetTopLevelArtifactsID(*id) + } return amu } -// RemoveSourceArtifactsRead removes "source_artifacts_read" edges to FilesMetric entities. -func (amu *ArtifactMetricsUpdate) RemoveSourceArtifactsRead(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.RemoveSourceArtifactsReadIDs(ids...) +// SetTopLevelArtifacts sets the "top_level_artifacts" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) SetTopLevelArtifacts(f *FilesMetric) *ArtifactMetricsUpdate { + return amu.SetTopLevelArtifactsID(f.ID) } -// ClearOutputArtifactsSeen clears all "output_artifacts_seen" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) ClearOutputArtifactsSeen() *ArtifactMetricsUpdate { - amu.mutation.ClearOutputArtifactsSeen() - return amu +// Mutation returns the ArtifactMetricsMutation object of the builder. +func (amu *ArtifactMetricsUpdate) Mutation() *ArtifactMetricsMutation { + return amu.mutation } -// RemoveOutputArtifactsSeenIDs removes the "output_artifacts_seen" edge to FilesMetric entities by IDs. -func (amu *ArtifactMetricsUpdate) RemoveOutputArtifactsSeenIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.RemoveOutputArtifactsSeenIDs(ids...) +// ClearMetrics clears the "metrics" edge to the Metrics entity. +func (amu *ArtifactMetricsUpdate) ClearMetrics() *ArtifactMetricsUpdate { + amu.mutation.ClearMetrics() return amu } -// RemoveOutputArtifactsSeen removes "output_artifacts_seen" edges to FilesMetric entities. -func (amu *ArtifactMetricsUpdate) RemoveOutputArtifactsSeen(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.RemoveOutputArtifactsSeenIDs(ids...) -} - -// ClearOutputArtifactsFromActionCache clears all "output_artifacts_from_action_cache" edges to the FilesMetric entity. -func (amu *ArtifactMetricsUpdate) ClearOutputArtifactsFromActionCache() *ArtifactMetricsUpdate { - amu.mutation.ClearOutputArtifactsFromActionCache() +// ClearSourceArtifactsRead clears the "source_artifacts_read" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) ClearSourceArtifactsRead() *ArtifactMetricsUpdate { + amu.mutation.ClearSourceArtifactsRead() return amu } -// RemoveOutputArtifactsFromActionCacheIDs removes the "output_artifacts_from_action_cache" edge to FilesMetric entities by IDs. -func (amu *ArtifactMetricsUpdate) RemoveOutputArtifactsFromActionCacheIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.RemoveOutputArtifactsFromActionCacheIDs(ids...) +// ClearOutputArtifactsSeen clears the "output_artifacts_seen" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) ClearOutputArtifactsSeen() *ArtifactMetricsUpdate { + amu.mutation.ClearOutputArtifactsSeen() return amu } -// RemoveOutputArtifactsFromActionCache removes "output_artifacts_from_action_cache" edges to FilesMetric entities. 
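These builder and EdgeSpec changes (M2M and O2M specs becoming O2O and M2O with a single FK column) are what ent emits once an edge is declared unique in the schema. A schema-side sketch of the kind of definition that would produce this generated output; the edge names match the generated code, but the schema file, Ref names, and the matching unique back-reference on the FilesMetric side are assumptions, not part of this patch:

// ent/schema/artifactmetrics.go (illustrative; uses entgo.io/ent and entgo.io/ent/schema/edge)
func (ArtifactMetrics) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("metrics", Metrics.Type).
			Ref("artifact_metrics").
			Unique(), // inverse O2O edge -> a single Metrics neighbor
		edge.To("source_artifacts_read", FilesMetric.Type).
			Unique(), // unique forward edge -> M2O with an FK column, no join table
		edge.To("top_level_artifacts", FilesMetric.Type).
			Unique(), // O2O when FilesMetric declares a unique Ref back; replaces the old M2M primary-key table
	}
}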
-func (amu *ArtifactMetricsUpdate) RemoveOutputArtifactsFromActionCache(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.RemoveOutputArtifactsFromActionCacheIDs(ids...) +// ClearOutputArtifactsFromActionCache clears the "output_artifacts_from_action_cache" edge to the FilesMetric entity. +func (amu *ArtifactMetricsUpdate) ClearOutputArtifactsFromActionCache() *ArtifactMetricsUpdate { + amu.mutation.ClearOutputArtifactsFromActionCache() + return amu } -// ClearTopLevelArtifacts clears all "top_level_artifacts" edges to the FilesMetric entity. +// ClearTopLevelArtifacts clears the "top_level_artifacts" edge to the FilesMetric entity. func (amu *ArtifactMetricsUpdate) ClearTopLevelArtifacts() *ArtifactMetricsUpdate { amu.mutation.ClearTopLevelArtifacts() return amu } -// RemoveTopLevelArtifactIDs removes the "top_level_artifacts" edge to FilesMetric entities by IDs. -func (amu *ArtifactMetricsUpdate) RemoveTopLevelArtifactIDs(ids ...int) *ArtifactMetricsUpdate { - amu.mutation.RemoveTopLevelArtifactIDs(ids...) - return amu -} - -// RemoveTopLevelArtifacts removes "top_level_artifacts" edges to FilesMetric entities. -func (amu *ArtifactMetricsUpdate) RemoveTopLevelArtifacts(f ...*FilesMetric) *ArtifactMetricsUpdate { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amu.RemoveTopLevelArtifactIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (amu *ArtifactMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, amu.sqlSave, amu.mutation, amu.hooks) @@ -252,10 +197,10 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } if amu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, + Columns: []string{artifactmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -263,28 +208,12 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := amu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !amu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := amu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, + Columns: []string{artifactmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -297,20 +226,7 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } if amu.mutation.SourceArtifactsReadCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.SourceArtifactsReadTable, - Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, - Bidi: false, - Target: 
&sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amu.mutation.RemovedSourceArtifactsReadIDs(); len(nodes) > 0 && !amu.mutation.SourceArtifactsReadCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.SourceArtifactsReadTable, Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, @@ -319,14 +235,11 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amu.mutation.SourceArtifactsReadIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.SourceArtifactsReadTable, Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, @@ -342,20 +255,7 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } if amu.mutation.OutputArtifactsSeenCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.OutputArtifactsSeenTable, - Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amu.mutation.RemovedOutputArtifactsSeenIDs(); len(nodes) > 0 && !amu.mutation.OutputArtifactsSeenCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsSeenTable, Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, @@ -364,14 +264,11 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amu.mutation.OutputArtifactsSeenIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsSeenTable, Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, @@ -387,20 +284,7 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } if amu.mutation.OutputArtifactsFromActionCacheCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.OutputArtifactsFromActionCacheTable, - Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amu.mutation.RemovedOutputArtifactsFromActionCacheIDs(); len(nodes) > 0 && !amu.mutation.OutputArtifactsFromActionCacheCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsFromActionCacheTable, Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, @@ -409,14 +293,11 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, 
field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amu.mutation.OutputArtifactsFromActionCacheIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsFromActionCacheTable, Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, @@ -432,39 +313,23 @@ func (amu *ArtifactMetricsUpdate) sqlSave(ctx context.Context) (n int, err error } if amu.mutation.TopLevelArtifactsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amu.mutation.RemovedTopLevelArtifactsIDs(); len(nodes) > 0 && !amu.mutation.TopLevelArtifactsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, + Columns: []string{artifactmetrics.TopLevelArtifactsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amu.mutation.TopLevelArtifactsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, + Columns: []string{artifactmetrics.TopLevelArtifactsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), @@ -495,191 +360,136 @@ type ArtifactMetricsUpdateOne struct { mutation *ArtifactMetricsMutation } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (amuo *ArtifactMetricsUpdateOne) AddMetricIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (amuo *ArtifactMetricsUpdateOne) SetMetricsID(id int) *ArtifactMetricsUpdateOne { + amuo.mutation.SetMetricsID(id) return amuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (amuo *ArtifactMetricsUpdateOne) AddMetrics(m ...*Metrics) *ArtifactMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (amuo *ArtifactMetricsUpdateOne) SetNillableMetricsID(id *int) *ArtifactMetricsUpdateOne { + if id != nil { + amuo = amuo.SetMetricsID(*id) } - return amuo.AddMetricIDs(ids...) -} - -// AddSourceArtifactsReadIDs adds the "source_artifacts_read" edge to the FilesMetric entity by IDs. -func (amuo *ArtifactMetricsUpdateOne) AddSourceArtifactsReadIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.AddSourceArtifactsReadIDs(ids...) return amuo } -// AddSourceArtifactsRead adds the "source_artifacts_read" edges to the FilesMetric entity. 
-func (amuo *ArtifactMetricsUpdateOne) AddSourceArtifactsRead(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.AddSourceArtifactsReadIDs(ids...) +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (amuo *ArtifactMetricsUpdateOne) SetMetrics(m *Metrics) *ArtifactMetricsUpdateOne { + return amuo.SetMetricsID(m.ID) } -// AddOutputArtifactsSeenIDs adds the "output_artifacts_seen" edge to the FilesMetric entity by IDs. -func (amuo *ArtifactMetricsUpdateOne) AddOutputArtifactsSeenIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.AddOutputArtifactsSeenIDs(ids...) +// SetSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID. +func (amuo *ArtifactMetricsUpdateOne) SetSourceArtifactsReadID(id int) *ArtifactMetricsUpdateOne { + amuo.mutation.SetSourceArtifactsReadID(id) return amuo } -// AddOutputArtifactsSeen adds the "output_artifacts_seen" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) AddOutputArtifactsSeen(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by ID if the given value is not nil. +func (amuo *ArtifactMetricsUpdateOne) SetNillableSourceArtifactsReadID(id *int) *ArtifactMetricsUpdateOne { + if id != nil { + amuo = amuo.SetSourceArtifactsReadID(*id) } - return amuo.AddOutputArtifactsSeenIDs(ids...) -} - -// AddOutputArtifactsFromActionCacheIDs adds the "output_artifacts_from_action_cache" edge to the FilesMetric entity by IDs. -func (amuo *ArtifactMetricsUpdateOne) AddOutputArtifactsFromActionCacheIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.AddOutputArtifactsFromActionCacheIDs(ids...) return amuo } -// AddOutputArtifactsFromActionCache adds the "output_artifacts_from_action_cache" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) AddOutputArtifactsFromActionCache(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.AddOutputArtifactsFromActionCacheIDs(ids...) +// SetSourceArtifactsRead sets the "source_artifacts_read" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) SetSourceArtifactsRead(f *FilesMetric) *ArtifactMetricsUpdateOne { + return amuo.SetSourceArtifactsReadID(f.ID) } -// AddTopLevelArtifactIDs adds the "top_level_artifacts" edge to the FilesMetric entity by IDs. -func (amuo *ArtifactMetricsUpdateOne) AddTopLevelArtifactIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.AddTopLevelArtifactIDs(ids...) +// SetOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID. +func (amuo *ArtifactMetricsUpdateOne) SetOutputArtifactsSeenID(id int) *ArtifactMetricsUpdateOne { + amuo.mutation.SetOutputArtifactsSeenID(id) return amuo } -// AddTopLevelArtifacts adds the "top_level_artifacts" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) AddTopLevelArtifacts(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID +// SetNillableOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by ID if the given value is not nil. 
+func (amuo *ArtifactMetricsUpdateOne) SetNillableOutputArtifactsSeenID(id *int) *ArtifactMetricsUpdateOne { + if id != nil { + amuo = amuo.SetOutputArtifactsSeenID(*id) } - return amuo.AddTopLevelArtifactIDs(ids...) + return amuo } -// Mutation returns the ArtifactMetricsMutation object of the builder. -func (amuo *ArtifactMetricsUpdateOne) Mutation() *ArtifactMetricsMutation { - return amuo.mutation +// SetOutputArtifactsSeen sets the "output_artifacts_seen" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) SetOutputArtifactsSeen(f *FilesMetric) *ArtifactMetricsUpdateOne { + return amuo.SetOutputArtifactsSeenID(f.ID) } -// ClearMetrics clears all "metrics" edges to the Metrics entity. -func (amuo *ArtifactMetricsUpdateOne) ClearMetrics() *ArtifactMetricsUpdateOne { - amuo.mutation.ClearMetrics() +// SetOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID. +func (amuo *ArtifactMetricsUpdateOne) SetOutputArtifactsFromActionCacheID(id int) *ArtifactMetricsUpdateOne { + amuo.mutation.SetOutputArtifactsFromActionCacheID(id) return amuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (amuo *ArtifactMetricsUpdateOne) RemoveMetricIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.RemoveMetricIDs(ids...) +// SetNillableOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ID if the given value is not nil. +func (amuo *ArtifactMetricsUpdateOne) SetNillableOutputArtifactsFromActionCacheID(id *int) *ArtifactMetricsUpdateOne { + if id != nil { + amuo = amuo.SetOutputArtifactsFromActionCacheID(*id) + } return amuo } -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (amuo *ArtifactMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *ArtifactMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return amuo.RemoveMetricIDs(ids...) +// SetOutputArtifactsFromActionCache sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) SetOutputArtifactsFromActionCache(f *FilesMetric) *ArtifactMetricsUpdateOne { + return amuo.SetOutputArtifactsFromActionCacheID(f.ID) } -// ClearSourceArtifactsRead clears all "source_artifacts_read" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) ClearSourceArtifactsRead() *ArtifactMetricsUpdateOne { - amuo.mutation.ClearSourceArtifactsRead() +// SetTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID. +func (amuo *ArtifactMetricsUpdateOne) SetTopLevelArtifactsID(id int) *ArtifactMetricsUpdateOne { + amuo.mutation.SetTopLevelArtifactsID(id) return amuo } -// RemoveSourceArtifactsReadIDs removes the "source_artifacts_read" edge to FilesMetric entities by IDs. -func (amuo *ArtifactMetricsUpdateOne) RemoveSourceArtifactsReadIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.RemoveSourceArtifactsReadIDs(ids...) +// SetNillableTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by ID if the given value is not nil. +func (amuo *ArtifactMetricsUpdateOne) SetNillableTopLevelArtifactsID(id *int) *ArtifactMetricsUpdateOne { + if id != nil { + amuo = amuo.SetTopLevelArtifactsID(*id) + } return amuo } -// RemoveSourceArtifactsRead removes "source_artifacts_read" edges to FilesMetric entities. 
-func (amuo *ArtifactMetricsUpdateOne) RemoveSourceArtifactsRead(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.RemoveSourceArtifactsReadIDs(ids...) +// SetTopLevelArtifacts sets the "top_level_artifacts" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) SetTopLevelArtifacts(f *FilesMetric) *ArtifactMetricsUpdateOne { + return amuo.SetTopLevelArtifactsID(f.ID) } -// ClearOutputArtifactsSeen clears all "output_artifacts_seen" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) ClearOutputArtifactsSeen() *ArtifactMetricsUpdateOne { - amuo.mutation.ClearOutputArtifactsSeen() - return amuo +// Mutation returns the ArtifactMetricsMutation object of the builder. +func (amuo *ArtifactMetricsUpdateOne) Mutation() *ArtifactMetricsMutation { + return amuo.mutation } -// RemoveOutputArtifactsSeenIDs removes the "output_artifacts_seen" edge to FilesMetric entities by IDs. -func (amuo *ArtifactMetricsUpdateOne) RemoveOutputArtifactsSeenIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.RemoveOutputArtifactsSeenIDs(ids...) +// ClearMetrics clears the "metrics" edge to the Metrics entity. +func (amuo *ArtifactMetricsUpdateOne) ClearMetrics() *ArtifactMetricsUpdateOne { + amuo.mutation.ClearMetrics() return amuo } -// RemoveOutputArtifactsSeen removes "output_artifacts_seen" edges to FilesMetric entities. -func (amuo *ArtifactMetricsUpdateOne) RemoveOutputArtifactsSeen(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.RemoveOutputArtifactsSeenIDs(ids...) -} - -// ClearOutputArtifactsFromActionCache clears all "output_artifacts_from_action_cache" edges to the FilesMetric entity. -func (amuo *ArtifactMetricsUpdateOne) ClearOutputArtifactsFromActionCache() *ArtifactMetricsUpdateOne { - amuo.mutation.ClearOutputArtifactsFromActionCache() +// ClearSourceArtifactsRead clears the "source_artifacts_read" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) ClearSourceArtifactsRead() *ArtifactMetricsUpdateOne { + amuo.mutation.ClearSourceArtifactsRead() return amuo } -// RemoveOutputArtifactsFromActionCacheIDs removes the "output_artifacts_from_action_cache" edge to FilesMetric entities by IDs. -func (amuo *ArtifactMetricsUpdateOne) RemoveOutputArtifactsFromActionCacheIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.RemoveOutputArtifactsFromActionCacheIDs(ids...) +// ClearOutputArtifactsSeen clears the "output_artifacts_seen" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) ClearOutputArtifactsSeen() *ArtifactMetricsUpdateOne { + amuo.mutation.ClearOutputArtifactsSeen() return amuo } -// RemoveOutputArtifactsFromActionCache removes "output_artifacts_from_action_cache" edges to FilesMetric entities. -func (amuo *ArtifactMetricsUpdateOne) RemoveOutputArtifactsFromActionCache(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.RemoveOutputArtifactsFromActionCacheIDs(ids...) +// ClearOutputArtifactsFromActionCache clears the "output_artifacts_from_action_cache" edge to the FilesMetric entity. +func (amuo *ArtifactMetricsUpdateOne) ClearOutputArtifactsFromActionCache() *ArtifactMetricsUpdateOne { + amuo.mutation.ClearOutputArtifactsFromActionCache() + return amuo } -// ClearTopLevelArtifacts clears all "top_level_artifacts" edges to the FilesMetric entity. 
+// ClearTopLevelArtifacts clears the "top_level_artifacts" edge to the FilesMetric entity. func (amuo *ArtifactMetricsUpdateOne) ClearTopLevelArtifacts() *ArtifactMetricsUpdateOne { amuo.mutation.ClearTopLevelArtifacts() return amuo } -// RemoveTopLevelArtifactIDs removes the "top_level_artifacts" edge to FilesMetric entities by IDs. -func (amuo *ArtifactMetricsUpdateOne) RemoveTopLevelArtifactIDs(ids ...int) *ArtifactMetricsUpdateOne { - amuo.mutation.RemoveTopLevelArtifactIDs(ids...) - return amuo -} - -// RemoveTopLevelArtifacts removes "top_level_artifacts" edges to FilesMetric entities. -func (amuo *ArtifactMetricsUpdateOne) RemoveTopLevelArtifacts(f ...*FilesMetric) *ArtifactMetricsUpdateOne { - ids := make([]int, len(f)) - for i := range f { - ids[i] = f[i].ID - } - return amuo.RemoveTopLevelArtifactIDs(ids...) -} - // Where appends a list predicates to the ArtifactMetricsUpdate builder. func (amuo *ArtifactMetricsUpdateOne) Where(ps ...predicate.ArtifactMetrics) *ArtifactMetricsUpdateOne { amuo.mutation.Where(ps...) @@ -748,10 +558,10 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } if amuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, + Columns: []string{artifactmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -759,28 +569,12 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := amuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !amuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := amuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: artifactmetrics.MetricsTable, - Columns: artifactmetrics.MetricsPrimaryKey, + Columns: []string{artifactmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -793,20 +587,7 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } if amuo.mutation.SourceArtifactsReadCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.SourceArtifactsReadTable, - Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amuo.mutation.RemovedSourceArtifactsReadIDs(); len(nodes) > 0 && !amuo.mutation.SourceArtifactsReadCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.SourceArtifactsReadTable, Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, @@ -815,14 +596,11 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif IDSpec: 
sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amuo.mutation.SourceArtifactsReadIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.SourceArtifactsReadTable, Columns: []string{artifactmetrics.SourceArtifactsReadColumn}, @@ -838,20 +616,7 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } if amuo.mutation.OutputArtifactsSeenCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.OutputArtifactsSeenTable, - Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amuo.mutation.RemovedOutputArtifactsSeenIDs(); len(nodes) > 0 && !amuo.mutation.OutputArtifactsSeenCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsSeenTable, Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, @@ -860,14 +625,11 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amuo.mutation.OutputArtifactsSeenIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsSeenTable, Columns: []string{artifactmetrics.OutputArtifactsSeenColumn}, @@ -883,20 +645,7 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } if amuo.mutation.OutputArtifactsFromActionCacheCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: artifactmetrics.OutputArtifactsFromActionCacheTable, - Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amuo.mutation.RemovedOutputArtifactsFromActionCacheIDs(); len(nodes) > 0 && !amuo.mutation.OutputArtifactsFromActionCacheCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsFromActionCacheTable, Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, @@ -905,14 +654,11 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amuo.mutation.OutputArtifactsFromActionCacheIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: artifactmetrics.OutputArtifactsFromActionCacheTable, Columns: []string{artifactmetrics.OutputArtifactsFromActionCacheColumn}, @@ -928,39 +674,23 @@ func (amuo *ArtifactMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Artif } if 
amuo.mutation.TopLevelArtifactsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := amuo.mutation.RemovedTopLevelArtifactsIDs(); len(nodes) > 0 && !amuo.mutation.TopLevelArtifactsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, + Columns: []string{artifactmetrics.TopLevelArtifactsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := amuo.mutation.TopLevelArtifactsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: artifactmetrics.TopLevelArtifactsTable, - Columns: artifactmetrics.TopLevelArtifactsPrimaryKey, + Columns: []string{artifactmetrics.TopLevelArtifactsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(filesmetric.FieldID, field.TypeInt), diff --git a/ent/gen/ent/bazelinvocation/bazelinvocation.go b/ent/gen/ent/bazelinvocation/bazelinvocation.go index 9d3151e..ff76537 100644 --- a/ent/gen/ent/bazelinvocation/bazelinvocation.go +++ b/ent/gen/ent/bazelinvocation/bazelinvocation.go @@ -88,16 +88,20 @@ const ( MetricsInverseTable = "metrics" // MetricsColumn is the table column denoting the metrics relation/edge. MetricsColumn = "bazel_invocation_metrics" - // TestCollectionTable is the table that holds the test_collection relation/edge. The primary key declared below. - TestCollectionTable = "bazel_invocation_test_collection" + // TestCollectionTable is the table that holds the test_collection relation/edge. + TestCollectionTable = "test_collections" // TestCollectionInverseTable is the table name for the TestCollection entity. // It exists in this package in order to avoid circular dependency with the "testcollection" package. TestCollectionInverseTable = "test_collections" - // TargetsTable is the table that holds the targets relation/edge. The primary key declared below. - TargetsTable = "bazel_invocation_targets" + // TestCollectionColumn is the table column denoting the test_collection relation/edge. + TestCollectionColumn = "bazel_invocation_test_collection" + // TargetsTable is the table that holds the targets relation/edge. + TargetsTable = "target_pairs" // TargetsInverseTable is the table name for the TargetPair entity. // It exists in this package in order to avoid circular dependency with the "targetpair" package. TargetsInverseTable = "target_pairs" + // TargetsColumn is the table column denoting the targets relation/edge. + TargetsColumn = "bazel_invocation_targets" ) // Columns holds all SQL columns for bazelinvocation fields. @@ -129,15 +133,6 @@ var ForeignKeys = []string{ "event_file_bazel_invocation", } -var ( - // TestCollectionPrimaryKey and TestCollectionColumn2 are the table columns denoting the - // primary key for the test_collection relation (M2M). 
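The bazel_invocation edges below change in the same direction: test_collection and targets drop their M2M join tables and hang off a bazel_invocation_* FK column on the child rows, so both stay one-to-many from the invocation's point of view. A read-side sketch, again assuming a generated *ent.Client named client; names are illustrative, not part of the patch:

func exampleInvocationTargets(ctx context.Context, client *ent.Client, invocationID int) ([]*ent.TargetPair, error) {
	inv, err := client.BazelInvocation.Query().
		Where(bazelinvocation.ID(invocationID)).
		WithTargets().        // resolved via the bazel_invocation_targets FK column
		WithTestCollection(). // likewise, no join table involved
		Only(ctx)
	if err != nil {
		return nil, err
	}
	// O2M edges still load as slices; only the storage layout changed.
	return inv.Edges.Targets, nil
}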
- TestCollectionPrimaryKey = []string{"bazel_invocation_id", "test_collection_id"} - // TargetsPrimaryKey and TargetsColumn2 are the table columns denoting the - // primary key for the targets relation (M2M). - TargetsPrimaryKey = []string{"bazel_invocation_id", "target_pair_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -330,13 +325,13 @@ func newTestCollectionStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestCollectionInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TestCollectionTable, TestCollectionPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TestCollectionTable, TestCollectionColumn), ) } func newTargetsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TargetsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TargetsTable, TargetsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TargetsTable, TargetsColumn), ) } diff --git a/ent/gen/ent/bazelinvocation/where.go b/ent/gen/ent/bazelinvocation/where.go index 9692610..5c89c6c 100644 --- a/ent/gen/ent/bazelinvocation/where.go +++ b/ent/gen/ent/bazelinvocation/where.go @@ -1108,7 +1108,7 @@ func HasTestCollection() predicate.BazelInvocation { return predicate.BazelInvocation(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TestCollectionTable, TestCollectionPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TestCollectionTable, TestCollectionColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -1131,7 +1131,7 @@ func HasTargets() predicate.BazelInvocation { return predicate.BazelInvocation(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TargetsTable, TargetsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TargetsTable, TargetsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/bazelinvocation_create.go b/ent/gen/ent/bazelinvocation_create.go index 6c07b8b..2fbaf76 100644 --- a/ent/gen/ent/bazelinvocation_create.go +++ b/ent/gen/ent/bazelinvocation_create.go @@ -529,10 +529,10 @@ func (bic *BazelInvocationCreate) createSpec() (*BazelInvocation, *sqlgraph.Crea } if nodes := bic.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -545,10 +545,10 @@ func (bic *BazelInvocationCreate) createSpec() (*BazelInvocation, *sqlgraph.Crea } if nodes := bic.mutation.TargetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), diff --git a/ent/gen/ent/bazelinvocation_query.go b/ent/gen/ent/bazelinvocation_query.go index 4037e5e..cbb3601 100644 --- a/ent/gen/ent/bazelinvocation_query.go +++ b/ent/gen/ent/bazelinvocation_query.go @@ -178,7 +178,7 @@ func (biq *BazelInvocationQuery) QueryTestCollection() *TestCollectionQuery { step := 
sqlgraph.NewStep( sqlgraph.From(bazelinvocation.Table, bazelinvocation.FieldID, selector), sqlgraph.To(testcollection.Table, testcollection.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, bazelinvocation.TestCollectionTable, bazelinvocation.TestCollectionPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, bazelinvocation.TestCollectionTable, bazelinvocation.TestCollectionColumn), ) fromU = sqlgraph.SetNeighbors(biq.driver.Dialect(), step) return fromU, nil @@ -200,7 +200,7 @@ func (biq *BazelInvocationQuery) QueryTargets() *TargetPairQuery { step := sqlgraph.NewStep( sqlgraph.From(bazelinvocation.Table, bazelinvocation.FieldID, selector), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, bazelinvocation.TargetsTable, bazelinvocation.TargetsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, bazelinvocation.TargetsTable, bazelinvocation.TargetsColumn), ) fromU = sqlgraph.SetNeighbors(biq.driver.Dialect(), step) return fromU, nil @@ -787,124 +787,64 @@ func (biq *BazelInvocationQuery) loadMetrics(ctx context.Context, query *Metrics return nil } func (biq *BazelInvocationQuery) loadTestCollection(ctx context.Context, query *TestCollectionQuery, nodes []*BazelInvocation, init func(*BazelInvocation), assign func(*BazelInvocation, *TestCollection)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*BazelInvocation) - nids := make(map[int]map[*BazelInvocation]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*BazelInvocation) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(bazelinvocation.TestCollectionTable) - s.Join(joinT).On(s.C(testcollection.FieldID), joinT.C(bazelinvocation.TestCollectionPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(bazelinvocation.TestCollectionPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(bazelinvocation.TestCollectionPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*BazelInvocation]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TestCollection](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TestCollection(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(bazelinvocation.TestCollectionColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "test_collection" node returned %v`, n.ID) + fk := n.bazel_invocation_test_collection + if fk == nil { + return fmt.Errorf(`foreign-key "bazel_invocation_test_collection" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "bazel_invocation_test_collection" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (biq *BazelInvocationQuery) loadTargets(ctx context.Context, query *TargetPairQuery, nodes []*BazelInvocation, init func(*BazelInvocation), assign func(*BazelInvocation, *TargetPair)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*BazelInvocation) - nids := make(map[int]map[*BazelInvocation]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*BazelInvocation) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(bazelinvocation.TargetsTable) - s.Join(joinT).On(s.C(targetpair.FieldID), joinT.C(bazelinvocation.TargetsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(bazelinvocation.TargetsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(bazelinvocation.TargetsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*BazelInvocation]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TargetPair](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TargetPair(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(bazelinvocation.TargetsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "targets" node returned %v`, n.ID) + fk := n.bazel_invocation_targets + if fk == nil { + return fmt.Errorf(`foreign-key "bazel_invocation_targets" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "bazel_invocation_targets" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } diff --git a/ent/gen/ent/bazelinvocation_update.go b/ent/gen/ent/bazelinvocation_update.go index f4d2307..5a2f910 100644 --- a/ent/gen/ent/bazelinvocation_update.go +++ b/ent/gen/ent/bazelinvocation_update.go @@ -789,10 +789,10 @@ func (biu *BazelInvocationUpdate) sqlSave(ctx context.Context) (n int, err error } if biu.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -802,10 +802,10 @@ func (biu *BazelInvocationUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := biu.mutation.RemovedTestCollectionIDs(); len(nodes) > 0 && !biu.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -818,10 +818,10 @@ func (biu *BazelInvocationUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := biu.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -834,10 +834,10 @@ func (biu *BazelInvocationUpdate) 
sqlSave(ctx context.Context) (n int, err error } if biu.mutation.TargetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), @@ -847,10 +847,10 @@ func (biu *BazelInvocationUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := biu.mutation.RemovedTargetsIDs(); len(nodes) > 0 && !biu.mutation.TargetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), @@ -863,10 +863,10 @@ func (biu *BazelInvocationUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := biu.mutation.TargetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), @@ -1681,10 +1681,10 @@ func (biuo *BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if biuo.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -1694,10 +1694,10 @@ func (biuo *BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if nodes := biuo.mutation.RemovedTestCollectionIDs(); len(nodes) > 0 && !biuo.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -1710,10 +1710,10 @@ func (biuo *BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if nodes := biuo.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TestCollectionTable, - Columns: bazelinvocation.TestCollectionPrimaryKey, + Columns: []string{bazelinvocation.TestCollectionColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), @@ -1726,10 +1726,10 @@ func (biuo *BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if biuo.mutation.TargetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), @@ -1739,10 +1739,10 @@ func (biuo 
*BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if nodes := biuo.mutation.RemovedTargetsIDs(); len(nodes) > 0 && !biuo.mutation.TargetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), @@ -1755,10 +1755,10 @@ func (biuo *BazelInvocationUpdateOne) sqlSave(ctx context.Context) (_node *Bazel } if nodes := biuo.mutation.TargetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: bazelinvocation.TargetsTable, - Columns: bazelinvocation.TargetsPrimaryKey, + Columns: []string{bazelinvocation.TargetsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), diff --git a/ent/gen/ent/buildgraphmetrics.go b/ent/gen/ent/buildgraphmetrics.go index f691e8a..a759c27 100644 --- a/ent/gen/ent/buildgraphmetrics.go +++ b/ent/gen/ent/buildgraphmetrics.go @@ -9,6 +9,8 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/buildgraphmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/evaluationstat" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) // BuildGraphMetrics is the model entity for the BuildGraphMetrics schema. @@ -36,88 +38,98 @@ type BuildGraphMetrics struct { PostInvocationSkyframeNodeCount int32 `json:"post_invocation_skyframe_node_count,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the BuildGraphMetricsQuery when eager-loading is set. - Edges BuildGraphMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges BuildGraphMetricsEdges `json:"edges"` + build_graph_metrics_dirtied_values *int + build_graph_metrics_changed_values *int + build_graph_metrics_built_values *int + build_graph_metrics_cleaned_values *int + metrics_build_graph_metrics *int + selectValues sql.SelectValues } // BuildGraphMetricsEdges holds the relations/edges for other nodes in the graph. type BuildGraphMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // DirtiedValues holds the value of the dirtied_values edge. - DirtiedValues []*EvaluationStat `json:"dirtied_values,omitempty"` + DirtiedValues *EvaluationStat `json:"dirtied_values,omitempty"` // ChangedValues holds the value of the changed_values edge. - ChangedValues []*EvaluationStat `json:"changed_values,omitempty"` + ChangedValues *EvaluationStat `json:"changed_values,omitempty"` // BuiltValues holds the value of the built_values edge. - BuiltValues []*EvaluationStat `json:"built_values,omitempty"` + BuiltValues *EvaluationStat `json:"built_values,omitempty"` // CleanedValues holds the value of the cleaned_values edge. - CleanedValues []*EvaluationStat `json:"cleaned_values,omitempty"` + CleanedValues *EvaluationStat `json:"cleaned_values,omitempty"` // EvaluatedValues holds the value of the evaluated_values edge. - EvaluatedValues []*EvaluationStat `json:"evaluated_values,omitempty"` + EvaluatedValues *EvaluationStat `json:"evaluated_values,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. 
loadedTypes [6]bool // totalCount holds the count of the edges above. totalCount [6]map[string]int - - namedMetrics map[string][]*Metrics - namedDirtiedValues map[string][]*EvaluationStat - namedChangedValues map[string][]*EvaluationStat - namedBuiltValues map[string][]*EvaluationStat - namedCleanedValues map[string][]*EvaluationStat - namedEvaluatedValues map[string][]*EvaluationStat } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e BuildGraphMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } // DirtiedValuesOrErr returns the DirtiedValues value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) DirtiedValuesOrErr() ([]*EvaluationStat, error) { - if e.loadedTypes[1] { +// was not loaded in eager-loading, or loaded but was not found. +func (e BuildGraphMetricsEdges) DirtiedValuesOrErr() (*EvaluationStat, error) { + if e.DirtiedValues != nil { return e.DirtiedValues, nil + } else if e.loadedTypes[1] { + return nil, &NotFoundError{label: evaluationstat.Label} } return nil, &NotLoadedError{edge: "dirtied_values"} } // ChangedValuesOrErr returns the ChangedValues value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) ChangedValuesOrErr() ([]*EvaluationStat, error) { - if e.loadedTypes[2] { +// was not loaded in eager-loading, or loaded but was not found. +func (e BuildGraphMetricsEdges) ChangedValuesOrErr() (*EvaluationStat, error) { + if e.ChangedValues != nil { return e.ChangedValues, nil + } else if e.loadedTypes[2] { + return nil, &NotFoundError{label: evaluationstat.Label} } return nil, &NotLoadedError{edge: "changed_values"} } // BuiltValuesOrErr returns the BuiltValues value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) BuiltValuesOrErr() ([]*EvaluationStat, error) { - if e.loadedTypes[3] { +// was not loaded in eager-loading, or loaded but was not found. +func (e BuildGraphMetricsEdges) BuiltValuesOrErr() (*EvaluationStat, error) { + if e.BuiltValues != nil { return e.BuiltValues, nil + } else if e.loadedTypes[3] { + return nil, &NotFoundError{label: evaluationstat.Label} } return nil, &NotLoadedError{edge: "built_values"} } // CleanedValuesOrErr returns the CleanedValues value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) CleanedValuesOrErr() ([]*EvaluationStat, error) { - if e.loadedTypes[4] { +// was not loaded in eager-loading, or loaded but was not found. +func (e BuildGraphMetricsEdges) CleanedValuesOrErr() (*EvaluationStat, error) { + if e.CleanedValues != nil { return e.CleanedValues, nil + } else if e.loadedTypes[4] { + return nil, &NotFoundError{label: evaluationstat.Label} } return nil, &NotLoadedError{edge: "cleaned_values"} } // EvaluatedValuesOrErr returns the EvaluatedValues value or an error if the edge -// was not loaded in eager-loading. -func (e BuildGraphMetricsEdges) EvaluatedValuesOrErr() ([]*EvaluationStat, error) { - if e.loadedTypes[5] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e BuildGraphMetricsEdges) EvaluatedValuesOrErr() (*EvaluationStat, error) { + if e.EvaluatedValues != nil { return e.EvaluatedValues, nil + } else if e.loadedTypes[5] { + return nil, &NotFoundError{label: evaluationstat.Label} } return nil, &NotLoadedError{edge: "evaluated_values"} } @@ -129,6 +141,16 @@ func (*BuildGraphMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case buildgraphmetrics.FieldID, buildgraphmetrics.FieldActionLookupValueCount, buildgraphmetrics.FieldActionLookupValueCountNotIncludingAspects, buildgraphmetrics.FieldActionCount, buildgraphmetrics.FieldActionCountNotIncludingAspects, buildgraphmetrics.FieldInputFileConfiguredTargetCount, buildgraphmetrics.FieldOutputFileConfiguredTargetCount, buildgraphmetrics.FieldOtherConfiguredTargetCount, buildgraphmetrics.FieldOutputArtifactCount, buildgraphmetrics.FieldPostInvocationSkyframeNodeCount: values[i] = new(sql.NullInt64) + case buildgraphmetrics.ForeignKeys[0]: // build_graph_metrics_dirtied_values + values[i] = new(sql.NullInt64) + case buildgraphmetrics.ForeignKeys[1]: // build_graph_metrics_changed_values + values[i] = new(sql.NullInt64) + case buildgraphmetrics.ForeignKeys[2]: // build_graph_metrics_built_values + values[i] = new(sql.NullInt64) + case buildgraphmetrics.ForeignKeys[3]: // build_graph_metrics_cleaned_values + values[i] = new(sql.NullInt64) + case buildgraphmetrics.ForeignKeys[4]: // metrics_build_graph_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -204,6 +226,41 @@ func (bgm *BuildGraphMetrics) assignValues(columns []string, values []any) error } else if value.Valid { bgm.PostInvocationSkyframeNodeCount = int32(value.Int64) } + case buildgraphmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_dirtied_values", value) + } else if value.Valid { + bgm.build_graph_metrics_dirtied_values = new(int) + *bgm.build_graph_metrics_dirtied_values = int(value.Int64) + } + case buildgraphmetrics.ForeignKeys[1]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_changed_values", value) + } else if value.Valid { + bgm.build_graph_metrics_changed_values = new(int) + *bgm.build_graph_metrics_changed_values = int(value.Int64) + } + case buildgraphmetrics.ForeignKeys[2]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_built_values", value) + } else if value.Valid { + bgm.build_graph_metrics_built_values = new(int) + *bgm.build_graph_metrics_built_values = int(value.Int64) + } + case buildgraphmetrics.ForeignKeys[3]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_cleaned_values", value) + } else if value.Valid { + bgm.build_graph_metrics_cleaned_values = new(int) + *bgm.build_graph_metrics_cleaned_values = int(value.Int64) + } + case buildgraphmetrics.ForeignKeys[4]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_build_graph_metrics", value) + } else if value.Valid { + bgm.metrics_build_graph_metrics = new(int) + *bgm.metrics_build_graph_metrics = int(value.Int64) + } default: bgm.selectValues.Set(columns[i], values[i]) } @@ -300,149 +357,5 @@ func (bgm *BuildGraphMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics 
named value or an error if the edge was not -// loaded in eager-loading with this name. -func (bgm *BuildGraphMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if bgm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if bgm.Edges.namedMetrics == nil { - bgm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - bgm.Edges.namedMetrics[name] = []*Metrics{} - } else { - bgm.Edges.namedMetrics[name] = append(bgm.Edges.namedMetrics[name], edges...) - } -} - -// NamedDirtiedValues returns the DirtiedValues named value or an error if the edge was not -// loaded in eager-loading with this name. -func (bgm *BuildGraphMetrics) NamedDirtiedValues(name string) ([]*EvaluationStat, error) { - if bgm.Edges.namedDirtiedValues == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedDirtiedValues[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedDirtiedValues(name string, edges ...*EvaluationStat) { - if bgm.Edges.namedDirtiedValues == nil { - bgm.Edges.namedDirtiedValues = make(map[string][]*EvaluationStat) - } - if len(edges) == 0 { - bgm.Edges.namedDirtiedValues[name] = []*EvaluationStat{} - } else { - bgm.Edges.namedDirtiedValues[name] = append(bgm.Edges.namedDirtiedValues[name], edges...) - } -} - -// NamedChangedValues returns the ChangedValues named value or an error if the edge was not -// loaded in eager-loading with this name. -func (bgm *BuildGraphMetrics) NamedChangedValues(name string) ([]*EvaluationStat, error) { - if bgm.Edges.namedChangedValues == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedChangedValues[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedChangedValues(name string, edges ...*EvaluationStat) { - if bgm.Edges.namedChangedValues == nil { - bgm.Edges.namedChangedValues = make(map[string][]*EvaluationStat) - } - if len(edges) == 0 { - bgm.Edges.namedChangedValues[name] = []*EvaluationStat{} - } else { - bgm.Edges.namedChangedValues[name] = append(bgm.Edges.namedChangedValues[name], edges...) - } -} - -// NamedBuiltValues returns the BuiltValues named value or an error if the edge was not -// loaded in eager-loading with this name. -func (bgm *BuildGraphMetrics) NamedBuiltValues(name string) ([]*EvaluationStat, error) { - if bgm.Edges.namedBuiltValues == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedBuiltValues[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedBuiltValues(name string, edges ...*EvaluationStat) { - if bgm.Edges.namedBuiltValues == nil { - bgm.Edges.namedBuiltValues = make(map[string][]*EvaluationStat) - } - if len(edges) == 0 { - bgm.Edges.namedBuiltValues[name] = []*EvaluationStat{} - } else { - bgm.Edges.namedBuiltValues[name] = append(bgm.Edges.namedBuiltValues[name], edges...) - } -} - -// NamedCleanedValues returns the CleanedValues named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (bgm *BuildGraphMetrics) NamedCleanedValues(name string) ([]*EvaluationStat, error) { - if bgm.Edges.namedCleanedValues == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedCleanedValues[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedCleanedValues(name string, edges ...*EvaluationStat) { - if bgm.Edges.namedCleanedValues == nil { - bgm.Edges.namedCleanedValues = make(map[string][]*EvaluationStat) - } - if len(edges) == 0 { - bgm.Edges.namedCleanedValues[name] = []*EvaluationStat{} - } else { - bgm.Edges.namedCleanedValues[name] = append(bgm.Edges.namedCleanedValues[name], edges...) - } -} - -// NamedEvaluatedValues returns the EvaluatedValues named value or an error if the edge was not -// loaded in eager-loading with this name. -func (bgm *BuildGraphMetrics) NamedEvaluatedValues(name string) ([]*EvaluationStat, error) { - if bgm.Edges.namedEvaluatedValues == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := bgm.Edges.namedEvaluatedValues[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (bgm *BuildGraphMetrics) appendNamedEvaluatedValues(name string, edges ...*EvaluationStat) { - if bgm.Edges.namedEvaluatedValues == nil { - bgm.Edges.namedEvaluatedValues = make(map[string][]*EvaluationStat) - } - if len(edges) == 0 { - bgm.Edges.namedEvaluatedValues[name] = []*EvaluationStat{} - } else { - bgm.Edges.namedEvaluatedValues[name] = append(bgm.Edges.namedEvaluatedValues[name], edges...) - } -} - // BuildGraphMetricsSlice is a parsable slice of BuildGraphMetrics. type BuildGraphMetricsSlice []*BuildGraphMetrics diff --git a/ent/gen/ent/buildgraphmetrics/buildgraphmetrics.go b/ent/gen/ent/buildgraphmetrics/buildgraphmetrics.go index eadf6de..302f458 100644 --- a/ent/gen/ent/buildgraphmetrics/buildgraphmetrics.go +++ b/ent/gen/ent/buildgraphmetrics/buildgraphmetrics.go @@ -44,44 +44,48 @@ const ( EdgeEvaluatedValues = "evaluated_values" // Table holds the table name of the buildgraphmetrics in the database. Table = "build_graph_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_build_graph_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "build_graph_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_build_graph_metrics" // DirtiedValuesTable is the table that holds the dirtied_values relation/edge. - DirtiedValuesTable = "evaluation_stats" + DirtiedValuesTable = "build_graph_metrics" // DirtiedValuesInverseTable is the table name for the EvaluationStat entity. // It exists in this package in order to avoid circular dependency with the "evaluationstat" package. DirtiedValuesInverseTable = "evaluation_stats" // DirtiedValuesColumn is the table column denoting the dirtied_values relation/edge. DirtiedValuesColumn = "build_graph_metrics_dirtied_values" // ChangedValuesTable is the table that holds the changed_values relation/edge. - ChangedValuesTable = "evaluation_stats" + ChangedValuesTable = "build_graph_metrics" // ChangedValuesInverseTable is the table name for the EvaluationStat entity. 
// It exists in this package in order to avoid circular dependency with the "evaluationstat" package. ChangedValuesInverseTable = "evaluation_stats" // ChangedValuesColumn is the table column denoting the changed_values relation/edge. ChangedValuesColumn = "build_graph_metrics_changed_values" // BuiltValuesTable is the table that holds the built_values relation/edge. - BuiltValuesTable = "evaluation_stats" + BuiltValuesTable = "build_graph_metrics" // BuiltValuesInverseTable is the table name for the EvaluationStat entity. // It exists in this package in order to avoid circular dependency with the "evaluationstat" package. BuiltValuesInverseTable = "evaluation_stats" // BuiltValuesColumn is the table column denoting the built_values relation/edge. BuiltValuesColumn = "build_graph_metrics_built_values" // CleanedValuesTable is the table that holds the cleaned_values relation/edge. - CleanedValuesTable = "evaluation_stats" + CleanedValuesTable = "build_graph_metrics" // CleanedValuesInverseTable is the table name for the EvaluationStat entity. // It exists in this package in order to avoid circular dependency with the "evaluationstat" package. CleanedValuesInverseTable = "evaluation_stats" // CleanedValuesColumn is the table column denoting the cleaned_values relation/edge. CleanedValuesColumn = "build_graph_metrics_cleaned_values" - // EvaluatedValuesTable is the table that holds the evaluated_values relation/edge. The primary key declared below. - EvaluatedValuesTable = "build_graph_metrics_evaluated_values" + // EvaluatedValuesTable is the table that holds the evaluated_values relation/edge. + EvaluatedValuesTable = "evaluation_stats" // EvaluatedValuesInverseTable is the table name for the EvaluationStat entity. // It exists in this package in order to avoid circular dependency with the "evaluationstat" package. EvaluatedValuesInverseTable = "evaluation_stats" + // EvaluatedValuesColumn is the table column denoting the evaluated_values relation/edge. + EvaluatedValuesColumn = "build_graph_metrics_evaluated_values" ) // Columns holds all SQL columns for buildgraphmetrics fields. @@ -98,14 +102,15 @@ var Columns = []string{ FieldPostInvocationSkyframeNodeCount, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "build_graph_metrics_id"} - // EvaluatedValuesPrimaryKey and EvaluatedValuesColumn2 are the table columns denoting the - // primary key for the evaluated_values relation (M2M). - EvaluatedValuesPrimaryKey = []string{"build_graph_metrics_id", "evaluation_stat_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "build_graph_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "build_graph_metrics_dirtied_values", + "build_graph_metrics_changed_values", + "build_graph_metrics_built_values", + "build_graph_metrics_cleaned_values", + "metrics_build_graph_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). 
func ValidColumn(column string) bool { @@ -114,6 +119,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -170,128 +180,86 @@ func ByPostInvocationSkyframeNodeCount(opts ...sql.OrderTermOption) OrderOption return sql.OrderByField(FieldPostInvocationSkyframeNodeCount, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByDirtiedValuesCount orders the results by dirtied_values count. -func ByDirtiedValuesCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newDirtiedValuesStep(), opts...) - } -} - -// ByDirtiedValues orders the results by dirtied_values terms. -func ByDirtiedValues(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newDirtiedValuesStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByChangedValuesCount orders the results by changed_values count. -func ByChangedValuesCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newChangedValuesStep(), opts...) - } -} - -// ByChangedValues orders the results by changed_values terms. -func ByChangedValues(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newChangedValuesStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByBuiltValuesCount orders the results by built_values count. -func ByBuiltValuesCount(opts ...sql.OrderTermOption) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newBuiltValuesStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByBuiltValues orders the results by built_values terms. -func ByBuiltValues(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByDirtiedValuesField orders the results by dirtied_values field. +func ByDirtiedValuesField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newBuiltValuesStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newDirtiedValuesStep(), sql.OrderByField(field, opts...)) } } -// ByCleanedValuesCount orders the results by cleaned_values count. -func ByCleanedValuesCount(opts ...sql.OrderTermOption) OrderOption { +// ByChangedValuesField orders the results by changed_values field. +func ByChangedValuesField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newCleanedValuesStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newChangedValuesStep(), sql.OrderByField(field, opts...)) } } -// ByCleanedValues orders the results by cleaned_values terms. 
-func ByCleanedValues(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByBuiltValuesField orders the results by built_values field. +func ByBuiltValuesField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newCleanedValuesStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newBuiltValuesStep(), sql.OrderByField(field, opts...)) } } -// ByEvaluatedValuesCount orders the results by evaluated_values count. -func ByEvaluatedValuesCount(opts ...sql.OrderTermOption) OrderOption { +// ByCleanedValuesField orders the results by cleaned_values field. +func ByCleanedValuesField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newEvaluatedValuesStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newCleanedValuesStep(), sql.OrderByField(field, opts...)) } } -// ByEvaluatedValues orders the results by evaluated_values terms. -func ByEvaluatedValues(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByEvaluatedValuesField orders the results by evaluated_values field. +func ByEvaluatedValuesField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newEvaluatedValuesStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newEvaluatedValuesStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newDirtiedValuesStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(DirtiedValuesInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, DirtiedValuesTable, DirtiedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, DirtiedValuesTable, DirtiedValuesColumn), ) } func newChangedValuesStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ChangedValuesInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, ChangedValuesTable, ChangedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, ChangedValuesTable, ChangedValuesColumn), ) } func newBuiltValuesStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(BuiltValuesInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, BuiltValuesTable, BuiltValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, BuiltValuesTable, BuiltValuesColumn), ) } func newCleanedValuesStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(CleanedValuesInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, CleanedValuesTable, CleanedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, CleanedValuesTable, CleanedValuesColumn), ) } func newEvaluatedValuesStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(EvaluatedValuesInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, EvaluatedValuesTable, EvaluatedValuesPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, EvaluatedValuesTable, EvaluatedValuesColumn), ) } diff --git a/ent/gen/ent/buildgraphmetrics/where.go b/ent/gen/ent/buildgraphmetrics/where.go index 7780ef9..959ba7f 100644 --- a/ent/gen/ent/buildgraphmetrics/where.go +++ 
b/ent/gen/ent/buildgraphmetrics/where.go @@ -553,7 +553,7 @@ func HasMetrics() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -576,7 +576,7 @@ func HasDirtiedValues() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, DirtiedValuesTable, DirtiedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, DirtiedValuesTable, DirtiedValuesColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -599,7 +599,7 @@ func HasChangedValues() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, ChangedValuesTable, ChangedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, ChangedValuesTable, ChangedValuesColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -622,7 +622,7 @@ func HasBuiltValues() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, BuiltValuesTable, BuiltValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, BuiltValuesTable, BuiltValuesColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -645,7 +645,7 @@ func HasCleanedValues() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, CleanedValuesTable, CleanedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, CleanedValuesTable, CleanedValuesColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -668,7 +668,7 @@ func HasEvaluatedValues() predicate.BuildGraphMetrics { return predicate.BuildGraphMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, EvaluatedValuesTable, EvaluatedValuesPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, EvaluatedValuesTable, EvaluatedValuesColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/buildgraphmetrics_create.go b/ent/gen/ent/buildgraphmetrics_create.go index 2c77a10..0c886ce 100644 --- a/ent/gen/ent/buildgraphmetrics_create.go +++ b/ent/gen/ent/buildgraphmetrics_create.go @@ -146,94 +146,118 @@ func (bgmc *BuildGraphMetricsCreate) SetNillablePostInvocationSkyframeNodeCount( return bgmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddMetricIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetMetricsID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetMetricsID(id) return bgmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (bgmc *BuildGraphMetricsCreate) AddMetrics(m ...*Metrics) *BuildGraphMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. 
+func (bgmc *BuildGraphMetricsCreate) SetNillableMetricsID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetMetricsID(*id) } - return bgmc.AddMetricIDs(ids...) + return bgmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (bgmc *BuildGraphMetricsCreate) SetMetrics(m *Metrics) *BuildGraphMetricsCreate { + return bgmc.SetMetricsID(m.ID) } -// AddDirtiedValueIDs adds the "dirtied_values" edge to the EvaluationStat entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddDirtiedValueIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddDirtiedValueIDs(ids...) +// SetDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetDirtiedValuesID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetDirtiedValuesID(id) return bgmc } -// AddDirtiedValues adds the "dirtied_values" edges to the EvaluationStat entity. -func (bgmc *BuildGraphMetricsCreate) AddDirtiedValues(e ...*EvaluationStat) *BuildGraphMetricsCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmc *BuildGraphMetricsCreate) SetNillableDirtiedValuesID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetDirtiedValuesID(*id) } - return bgmc.AddDirtiedValueIDs(ids...) + return bgmc } -// AddChangedValueIDs adds the "changed_values" edge to the EvaluationStat entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddChangedValueIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddChangedValueIDs(ids...) +// SetDirtiedValues sets the "dirtied_values" edge to the EvaluationStat entity. +func (bgmc *BuildGraphMetricsCreate) SetDirtiedValues(e *EvaluationStat) *BuildGraphMetricsCreate { + return bgmc.SetDirtiedValuesID(e.ID) +} + +// SetChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetChangedValuesID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetChangedValuesID(id) return bgmc } -// AddChangedValues adds the "changed_values" edges to the EvaluationStat entity. -func (bgmc *BuildGraphMetricsCreate) AddChangedValues(e ...*EvaluationStat) *BuildGraphMetricsCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmc *BuildGraphMetricsCreate) SetNillableChangedValuesID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetChangedValuesID(*id) } - return bgmc.AddChangedValueIDs(ids...) + return bgmc +} + +// SetChangedValues sets the "changed_values" edge to the EvaluationStat entity. +func (bgmc *BuildGraphMetricsCreate) SetChangedValues(e *EvaluationStat) *BuildGraphMetricsCreate { + return bgmc.SetChangedValuesID(e.ID) } -// AddBuiltValueIDs adds the "built_values" edge to the EvaluationStat entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddBuiltValueIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddBuiltValueIDs(ids...) +// SetBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetBuiltValuesID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetBuiltValuesID(id) return bgmc } -// AddBuiltValues adds the "built_values" edges to the EvaluationStat entity. 
-func (bgmc *BuildGraphMetricsCreate) AddBuiltValues(e ...*EvaluationStat) *BuildGraphMetricsCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmc *BuildGraphMetricsCreate) SetNillableBuiltValuesID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetBuiltValuesID(*id) } - return bgmc.AddBuiltValueIDs(ids...) + return bgmc +} + +// SetBuiltValues sets the "built_values" edge to the EvaluationStat entity. +func (bgmc *BuildGraphMetricsCreate) SetBuiltValues(e *EvaluationStat) *BuildGraphMetricsCreate { + return bgmc.SetBuiltValuesID(e.ID) } -// AddCleanedValueIDs adds the "cleaned_values" edge to the EvaluationStat entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddCleanedValueIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddCleanedValueIDs(ids...) +// SetCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetCleanedValuesID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetCleanedValuesID(id) return bgmc } -// AddCleanedValues adds the "cleaned_values" edges to the EvaluationStat entity. -func (bgmc *BuildGraphMetricsCreate) AddCleanedValues(e ...*EvaluationStat) *BuildGraphMetricsCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmc *BuildGraphMetricsCreate) SetNillableCleanedValuesID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetCleanedValuesID(*id) } - return bgmc.AddCleanedValueIDs(ids...) + return bgmc } -// AddEvaluatedValueIDs adds the "evaluated_values" edge to the EvaluationStat entity by IDs. -func (bgmc *BuildGraphMetricsCreate) AddEvaluatedValueIDs(ids ...int) *BuildGraphMetricsCreate { - bgmc.mutation.AddEvaluatedValueIDs(ids...) +// SetCleanedValues sets the "cleaned_values" edge to the EvaluationStat entity. +func (bgmc *BuildGraphMetricsCreate) SetCleanedValues(e *EvaluationStat) *BuildGraphMetricsCreate { + return bgmc.SetCleanedValuesID(e.ID) +} + +// SetEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID. +func (bgmc *BuildGraphMetricsCreate) SetEvaluatedValuesID(id int) *BuildGraphMetricsCreate { + bgmc.mutation.SetEvaluatedValuesID(id) return bgmc } -// AddEvaluatedValues adds the "evaluated_values" edges to the EvaluationStat entity. -func (bgmc *BuildGraphMetricsCreate) AddEvaluatedValues(e ...*EvaluationStat) *BuildGraphMetricsCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmc *BuildGraphMetricsCreate) SetNillableEvaluatedValuesID(id *int) *BuildGraphMetricsCreate { + if id != nil { + bgmc = bgmc.SetEvaluatedValuesID(*id) } - return bgmc.AddEvaluatedValueIDs(ids...) + return bgmc +} + +// SetEvaluatedValues sets the "evaluated_values" edge to the EvaluationStat entity. +func (bgmc *BuildGraphMetricsCreate) SetEvaluatedValues(e *EvaluationStat) *BuildGraphMetricsCreate { + return bgmc.SetEvaluatedValuesID(e.ID) } // Mutation returns the BuildGraphMetricsMutation object of the builder. 
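Reviewer note (not part of the generated patch): the hunk above replaces the M2M `Add*`/`Add*IDs` helpers on `BuildGraphMetricsCreate` with unique `Set*`/`SetNillable*ID` setters, so a builder can attach at most one node per edge. A minimal usage sketch follows, assuming the usual ent-generated `client`, `ctx`, and `SaveX` helpers and an already-created `stat *EvaluationStat`; only the `Set*` methods shown in this hunk are taken from the patch itself.

    // Hypothetical caller code; everything except the Set* setters is the
    // standard ent client surface and is assumed, not defined in this diff.
    bgm := client.BuildGraphMetrics.Create().
        SetEvaluatedValues(stat).        // previously AddEvaluatedValues(stat)
        SetNillableDirtiedValuesID(nil). // optional unique edges may stay unset
        SaveX(ctx)
    _ = bgm
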
@@ -334,10 +358,10 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph } if nodes := bgmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, + Columns: []string{buildgraphmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -346,11 +370,12 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_build_graph_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := bgmc.mutation.DirtiedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.DirtiedValuesTable, Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, @@ -362,11 +387,12 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.build_graph_metrics_dirtied_values = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := bgmc.mutation.ChangedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.ChangedValuesTable, Columns: []string{buildgraphmetrics.ChangedValuesColumn}, @@ -378,11 +404,12 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.build_graph_metrics_changed_values = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := bgmc.mutation.BuiltValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.BuiltValuesTable, Columns: []string{buildgraphmetrics.BuiltValuesColumn}, @@ -394,11 +421,12 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.build_graph_metrics_built_values = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := bgmc.mutation.CleanedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.CleanedValuesTable, Columns: []string{buildgraphmetrics.CleanedValuesColumn}, @@ -410,14 +438,15 @@ func (bgmc *BuildGraphMetricsCreate) createSpec() (*BuildGraphMetrics, *sqlgraph for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.build_graph_metrics_cleaned_values = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := bgmc.mutation.EvaluatedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, + Columns: []string{buildgraphmetrics.EvaluatedValuesColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), diff --git a/ent/gen/ent/buildgraphmetrics_query.go b/ent/gen/ent/buildgraphmetrics_query.go index a6b1071..12df12c 100644 --- a/ent/gen/ent/buildgraphmetrics_query.go +++ b/ent/gen/ent/buildgraphmetrics_query.go @@ -20,24 +20,19 @@ import ( // 
BuildGraphMetricsQuery is the builder for querying BuildGraphMetrics entities. type BuildGraphMetricsQuery struct { config - ctx *QueryContext - order []buildgraphmetrics.OrderOption - inters []Interceptor - predicates []predicate.BuildGraphMetrics - withMetrics *MetricsQuery - withDirtiedValues *EvaluationStatQuery - withChangedValues *EvaluationStatQuery - withBuiltValues *EvaluationStatQuery - withCleanedValues *EvaluationStatQuery - withEvaluatedValues *EvaluationStatQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*BuildGraphMetrics) error - withNamedMetrics map[string]*MetricsQuery - withNamedDirtiedValues map[string]*EvaluationStatQuery - withNamedChangedValues map[string]*EvaluationStatQuery - withNamedBuiltValues map[string]*EvaluationStatQuery - withNamedCleanedValues map[string]*EvaluationStatQuery - withNamedEvaluatedValues map[string]*EvaluationStatQuery + ctx *QueryContext + order []buildgraphmetrics.OrderOption + inters []Interceptor + predicates []predicate.BuildGraphMetrics + withMetrics *MetricsQuery + withDirtiedValues *EvaluationStatQuery + withChangedValues *EvaluationStatQuery + withBuiltValues *EvaluationStatQuery + withCleanedValues *EvaluationStatQuery + withEvaluatedValues *EvaluationStatQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*BuildGraphMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -88,7 +83,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, buildgraphmetrics.MetricsTable, buildgraphmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, buildgraphmetrics.MetricsTable, buildgraphmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -110,7 +105,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryDirtiedValues() *EvaluationStatQuery { step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.DirtiedValuesTable, buildgraphmetrics.DirtiedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.DirtiedValuesTable, buildgraphmetrics.DirtiedValuesColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -132,7 +127,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryChangedValues() *EvaluationStatQuery { step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.ChangedValuesTable, buildgraphmetrics.ChangedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.ChangedValuesTable, buildgraphmetrics.ChangedValuesColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -154,7 +149,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryBuiltValues() *EvaluationStatQuery { step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.BuiltValuesTable, buildgraphmetrics.BuiltValuesColumn), + 
sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.BuiltValuesTable, buildgraphmetrics.BuiltValuesColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -176,7 +171,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryCleanedValues() *EvaluationStatQuery { step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.CleanedValuesTable, buildgraphmetrics.CleanedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.CleanedValuesTable, buildgraphmetrics.CleanedValuesColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -198,7 +193,7 @@ func (bgmq *BuildGraphMetricsQuery) QueryEvaluatedValues() *EvaluationStatQuery step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, selector), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, buildgraphmetrics.EvaluatedValuesTable, buildgraphmetrics.EvaluatedValuesPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, buildgraphmetrics.EvaluatedValuesTable, buildgraphmetrics.EvaluatedValuesColumn), ) fromU = sqlgraph.SetNeighbors(bgmq.driver.Dialect(), step) return fromU, nil @@ -553,6 +548,7 @@ func (bgmq *BuildGraphMetricsQuery) prepareQuery(ctx context.Context) error { func (bgmq *BuildGraphMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*BuildGraphMetrics, error) { var ( nodes = []*BuildGraphMetrics{} + withFKs = bgmq.withFKs _spec = bgmq.querySpec() loadedTypes = [6]bool{ bgmq.withMetrics != nil, @@ -563,6 +559,12 @@ func (bgmq *BuildGraphMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHo bgmq.withEvaluatedValues != nil, } ) + if bgmq.withMetrics != nil || bgmq.withDirtiedValues != nil || bgmq.withChangedValues != nil || bgmq.withBuiltValues != nil || bgmq.withCleanedValues != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, buildgraphmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*BuildGraphMetrics).scanValues(nil, columns) } @@ -585,94 +587,38 @@ func (bgmq *BuildGraphMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHo return nodes, nil } if query := bgmq.withMetrics; query != nil { - if err := bgmq.loadMetrics(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *BuildGraphMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := bgmq.loadMetrics(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } if query := bgmq.withDirtiedValues; query != nil { - if err := bgmq.loadDirtiedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.DirtiedValues = []*EvaluationStat{} }, - func(n *BuildGraphMetrics, e *EvaluationStat) { - n.Edges.DirtiedValues = append(n.Edges.DirtiedValues, e) - }); err != nil { + if err := bgmq.loadDirtiedValues(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.DirtiedValues = e }); err != nil { return nil, err } } if query := bgmq.withChangedValues; query != nil { - if err := bgmq.loadChangedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.ChangedValues = []*EvaluationStat{} }, - func(n *BuildGraphMetrics, e *EvaluationStat) { - n.Edges.ChangedValues = append(n.Edges.ChangedValues, e) - }); err != nil { + if err := bgmq.loadChangedValues(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.ChangedValues = e }); err != nil { return nil, err } } if query := bgmq.withBuiltValues; query != nil { - if err := bgmq.loadBuiltValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.BuiltValues = []*EvaluationStat{} }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.BuiltValues = append(n.Edges.BuiltValues, e) }); err != nil { + if err := bgmq.loadBuiltValues(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.BuiltValues = e }); err != nil { return nil, err } } if query := bgmq.withCleanedValues; query != nil { - if err := bgmq.loadCleanedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.CleanedValues = []*EvaluationStat{} }, - func(n *BuildGraphMetrics, e *EvaluationStat) { - n.Edges.CleanedValues = append(n.Edges.CleanedValues, e) - }); err != nil { + if err := bgmq.loadCleanedValues(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.CleanedValues = e }); err != nil { return nil, err } } if query := bgmq.withEvaluatedValues; query != nil { - if err := bgmq.loadEvaluatedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.Edges.EvaluatedValues = []*EvaluationStat{} }, - func(n *BuildGraphMetrics, e *EvaluationStat) { - n.Edges.EvaluatedValues = append(n.Edges.EvaluatedValues, e) - }); err != nil { - return nil, err - } - } - for name, query := range bgmq.withNamedMetrics { - if err := bgmq.loadMetrics(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedMetrics(name) }, - func(n *BuildGraphMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range bgmq.withNamedDirtiedValues { - if err := bgmq.loadDirtiedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedDirtiedValues(name) }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.appendNamedDirtiedValues(name, e) }); err != nil { - return nil, err - } - } - for name, query := range 
bgmq.withNamedChangedValues { - if err := bgmq.loadChangedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedChangedValues(name) }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.appendNamedChangedValues(name, e) }); err != nil { - return nil, err - } - } - for name, query := range bgmq.withNamedBuiltValues { - if err := bgmq.loadBuiltValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedBuiltValues(name) }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.appendNamedBuiltValues(name, e) }); err != nil { - return nil, err - } - } - for name, query := range bgmq.withNamedCleanedValues { - if err := bgmq.loadCleanedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedCleanedValues(name) }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.appendNamedCleanedValues(name, e) }); err != nil { - return nil, err - } - } - for name, query := range bgmq.withNamedEvaluatedValues { - if err := bgmq.loadEvaluatedValues(ctx, query, nodes, - func(n *BuildGraphMetrics) { n.appendNamedEvaluatedValues(name) }, - func(n *BuildGraphMetrics, e *EvaluationStat) { n.appendNamedEvaluatedValues(name, e) }); err != nil { + if err := bgmq.loadEvaluatedValues(ctx, query, nodes, nil, + func(n *BuildGraphMetrics, e *EvaluationStat) { n.Edges.EvaluatedValues = e }); err != nil { return nil, err } } @@ -685,248 +631,190 @@ func (bgmq *BuildGraphMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHo } func (bgmq *BuildGraphMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*BuildGraphMetrics) - nids := make(map[int]map[*BuildGraphMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*BuildGraphMetrics) + for i := range nodes { + if nodes[i].metrics_build_graph_metrics == nil { + continue + } + fk := *nodes[i].metrics_build_graph_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(buildgraphmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(buildgraphmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(buildgraphmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(buildgraphmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*BuildGraphMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_build_graph_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (bgmq *BuildGraphMetricsQuery) loadDirtiedValues(ctx context.Context, query *EvaluationStatQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *EvaluationStat)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*BuildGraphMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*BuildGraphMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].build_graph_metrics_dirtied_values == nil { + continue + } + fk := *nodes[i].build_graph_metrics_dirtied_values + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.EvaluationStat(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(buildgraphmetrics.DirtiedValuesColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(evaluationstat.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.build_graph_metrics_dirtied_values - if fk == nil { - return fmt.Errorf(`foreign-key "build_graph_metrics_dirtied_values" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "build_graph_metrics_dirtied_values" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "build_graph_metrics_dirtied_values" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (bgmq *BuildGraphMetricsQuery) loadChangedValues(ctx context.Context, query *EvaluationStatQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *EvaluationStat)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*BuildGraphMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*BuildGraphMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - 
nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].build_graph_metrics_changed_values == nil { + continue } + fk := *nodes[i].build_graph_metrics_changed_values + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.EvaluationStat(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(buildgraphmetrics.ChangedValuesColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(evaluationstat.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.build_graph_metrics_changed_values - if fk == nil { - return fmt.Errorf(`foreign-key "build_graph_metrics_changed_values" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "build_graph_metrics_changed_values" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "build_graph_metrics_changed_values" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (bgmq *BuildGraphMetricsQuery) loadBuiltValues(ctx context.Context, query *EvaluationStatQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *EvaluationStat)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*BuildGraphMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*BuildGraphMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].build_graph_metrics_built_values == nil { + continue + } + fk := *nodes[i].build_graph_metrics_built_values + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.EvaluationStat(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(buildgraphmetrics.BuiltValuesColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(evaluationstat.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.build_graph_metrics_built_values - if fk == nil { - return fmt.Errorf(`foreign-key "build_graph_metrics_built_values" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "build_graph_metrics_built_values" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "build_graph_metrics_built_values" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (bgmq *BuildGraphMetricsQuery) loadCleanedValues(ctx context.Context, query *EvaluationStatQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *EvaluationStat)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*BuildGraphMetrics) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*BuildGraphMetrics) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].build_graph_metrics_cleaned_values == nil { + continue } + fk := *nodes[i].build_graph_metrics_cleaned_values + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], 
nodes[i]) } - query.withFKs = true - query.Where(predicate.EvaluationStat(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(buildgraphmetrics.CleanedValuesColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(evaluationstat.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.build_graph_metrics_cleaned_values - if fk == nil { - return fmt.Errorf(`foreign-key "build_graph_metrics_cleaned_values" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "build_graph_metrics_cleaned_values" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "build_graph_metrics_cleaned_values" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (bgmq *BuildGraphMetricsQuery) loadEvaluatedValues(ctx context.Context, query *EvaluationStatQuery, nodes []*BuildGraphMetrics, init func(*BuildGraphMetrics), assign func(*BuildGraphMetrics, *EvaluationStat)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*BuildGraphMetrics) - nids := make(map[int]map[*BuildGraphMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(buildgraphmetrics.EvaluatedValuesTable) - s.Join(joinT).On(s.C(evaluationstat.FieldID), joinT.C(buildgraphmetrics.EvaluatedValuesPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(buildgraphmetrics.EvaluatedValuesPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(buildgraphmetrics.EvaluatedValuesPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*BuildGraphMetrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*BuildGraphMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*EvaluationStat](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.EvaluationStat(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(buildgraphmetrics.EvaluatedValuesColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "evaluated_values" node returned %v`, n.ID) + fk := n.build_graph_metrics_evaluated_values + if fk == nil { + return fmt.Errorf(`foreign-key "build_graph_metrics_evaluated_values" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "build_graph_metrics_evaluated_values" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -1015,90 +903,6 @@ func (bgmq *BuildGraphMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (bgmq *BuildGraphMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *BuildGraphMetricsQuery { - query := (&MetricsClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedMetrics == nil { - bgmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - bgmq.withNamedMetrics[name] = query - return bgmq -} - -// WithNamedDirtiedValues tells the query-builder to eager-load the nodes that are connected to the "dirtied_values" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (bgmq *BuildGraphMetricsQuery) WithNamedDirtiedValues(name string, opts ...func(*EvaluationStatQuery)) *BuildGraphMetricsQuery { - query := (&EvaluationStatClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedDirtiedValues == nil { - bgmq.withNamedDirtiedValues = make(map[string]*EvaluationStatQuery) - } - bgmq.withNamedDirtiedValues[name] = query - return bgmq -} - -// WithNamedChangedValues tells the query-builder to eager-load the nodes that are connected to the "changed_values" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
-func (bgmq *BuildGraphMetricsQuery) WithNamedChangedValues(name string, opts ...func(*EvaluationStatQuery)) *BuildGraphMetricsQuery { - query := (&EvaluationStatClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedChangedValues == nil { - bgmq.withNamedChangedValues = make(map[string]*EvaluationStatQuery) - } - bgmq.withNamedChangedValues[name] = query - return bgmq -} - -// WithNamedBuiltValues tells the query-builder to eager-load the nodes that are connected to the "built_values" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (bgmq *BuildGraphMetricsQuery) WithNamedBuiltValues(name string, opts ...func(*EvaluationStatQuery)) *BuildGraphMetricsQuery { - query := (&EvaluationStatClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedBuiltValues == nil { - bgmq.withNamedBuiltValues = make(map[string]*EvaluationStatQuery) - } - bgmq.withNamedBuiltValues[name] = query - return bgmq -} - -// WithNamedCleanedValues tells the query-builder to eager-load the nodes that are connected to the "cleaned_values" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (bgmq *BuildGraphMetricsQuery) WithNamedCleanedValues(name string, opts ...func(*EvaluationStatQuery)) *BuildGraphMetricsQuery { - query := (&EvaluationStatClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedCleanedValues == nil { - bgmq.withNamedCleanedValues = make(map[string]*EvaluationStatQuery) - } - bgmq.withNamedCleanedValues[name] = query - return bgmq -} - -// WithNamedEvaluatedValues tells the query-builder to eager-load the nodes that are connected to the "evaluated_values" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (bgmq *BuildGraphMetricsQuery) WithNamedEvaluatedValues(name string, opts ...func(*EvaluationStatQuery)) *BuildGraphMetricsQuery { - query := (&EvaluationStatClient{config: bgmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if bgmq.withNamedEvaluatedValues == nil { - bgmq.withNamedEvaluatedValues = make(map[string]*EvaluationStatQuery) - } - bgmq.withNamedEvaluatedValues[name] = query - return bgmq -} - // BuildGraphMetricsGroupBy is the group-by builder for BuildGraphMetrics entities. type BuildGraphMetricsGroupBy struct { selector diff --git a/ent/gen/ent/buildgraphmetrics_update.go b/ent/gen/ent/buildgraphmetrics_update.go index ed92285..6db1385 100644 --- a/ent/gen/ent/buildgraphmetrics_update.go +++ b/ent/gen/ent/buildgraphmetrics_update.go @@ -272,227 +272,161 @@ func (bgmu *BuildGraphMetricsUpdate) ClearPostInvocationSkyframeNodeCount() *Bui return bgmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddMetricIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetMetricsID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetMetricsID(id) return bgmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. 
-func (bgmu *BuildGraphMetricsUpdate) AddMetrics(m ...*Metrics) *BuildGraphMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableMetricsID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetMetricsID(*id) } - return bgmu.AddMetricIDs(ids...) -} - -// AddDirtiedValueIDs adds the "dirtied_values" edge to the EvaluationStat entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddDirtiedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddDirtiedValueIDs(ids...) return bgmu } -// AddDirtiedValues adds the "dirtied_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) AddDirtiedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.AddDirtiedValueIDs(ids...) +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (bgmu *BuildGraphMetricsUpdate) SetMetrics(m *Metrics) *BuildGraphMetricsUpdate { + return bgmu.SetMetricsID(m.ID) } -// AddChangedValueIDs adds the "changed_values" edge to the EvaluationStat entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddChangedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddChangedValueIDs(ids...) +// SetDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetDirtiedValuesID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetDirtiedValuesID(id) return bgmu } -// AddChangedValues adds the "changed_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) AddChangedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableDirtiedValuesID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetDirtiedValuesID(*id) } - return bgmu.AddChangedValueIDs(ids...) -} - -// AddBuiltValueIDs adds the "built_values" edge to the EvaluationStat entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddBuiltValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddBuiltValueIDs(ids...) return bgmu } -// AddBuiltValues adds the "built_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) AddBuiltValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.AddBuiltValueIDs(ids...) +// SetDirtiedValues sets the "dirtied_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) SetDirtiedValues(e *EvaluationStat) *BuildGraphMetricsUpdate { + return bgmu.SetDirtiedValuesID(e.ID) } -// AddCleanedValueIDs adds the "cleaned_values" edge to the EvaluationStat entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddCleanedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddCleanedValueIDs(ids...) +// SetChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetChangedValuesID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetChangedValuesID(id) return bgmu } -// AddCleanedValues adds the "cleaned_values" edges to the EvaluationStat entity. 
-func (bgmu *BuildGraphMetricsUpdate) AddCleanedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableChangedValuesID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetChangedValuesID(*id) } - return bgmu.AddCleanedValueIDs(ids...) -} - -// AddEvaluatedValueIDs adds the "evaluated_values" edge to the EvaluationStat entity by IDs. -func (bgmu *BuildGraphMetricsUpdate) AddEvaluatedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.AddEvaluatedValueIDs(ids...) return bgmu } -// AddEvaluatedValues adds the "evaluated_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) AddEvaluatedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.AddEvaluatedValueIDs(ids...) +// SetChangedValues sets the "changed_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) SetChangedValues(e *EvaluationStat) *BuildGraphMetricsUpdate { + return bgmu.SetChangedValuesID(e.ID) } -// Mutation returns the BuildGraphMetricsMutation object of the builder. -func (bgmu *BuildGraphMetricsUpdate) Mutation() *BuildGraphMetricsMutation { - return bgmu.mutation -} - -// ClearMetrics clears all "metrics" edges to the Metrics entity. -func (bgmu *BuildGraphMetricsUpdate) ClearMetrics() *BuildGraphMetricsUpdate { - bgmu.mutation.ClearMetrics() +// SetBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetBuiltValuesID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetBuiltValuesID(id) return bgmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveMetricIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveMetricIDs(ids...) +// SetNillableBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableBuiltValuesID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetBuiltValuesID(*id) + } return bgmu } -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveMetrics(m ...*Metrics) *BuildGraphMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return bgmu.RemoveMetricIDs(ids...) +// SetBuiltValues sets the "built_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) SetBuiltValues(e *EvaluationStat) *BuildGraphMetricsUpdate { + return bgmu.SetBuiltValuesID(e.ID) } -// ClearDirtiedValues clears all "dirtied_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) ClearDirtiedValues() *BuildGraphMetricsUpdate { - bgmu.mutation.ClearDirtiedValues() +// SetCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetCleanedValuesID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetCleanedValuesID(id) return bgmu } -// RemoveDirtiedValueIDs removes the "dirtied_values" edge to EvaluationStat entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveDirtiedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveDirtiedValueIDs(ids...) 
+// SetNillableCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableCleanedValuesID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetCleanedValuesID(*id) + } return bgmu } -// RemoveDirtiedValues removes "dirtied_values" edges to EvaluationStat entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveDirtiedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.RemoveDirtiedValueIDs(ids...) +// SetCleanedValues sets the "cleaned_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) SetCleanedValues(e *EvaluationStat) *BuildGraphMetricsUpdate { + return bgmu.SetCleanedValuesID(e.ID) } -// ClearChangedValues clears all "changed_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) ClearChangedValues() *BuildGraphMetricsUpdate { - bgmu.mutation.ClearChangedValues() +// SetEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID. +func (bgmu *BuildGraphMetricsUpdate) SetEvaluatedValuesID(id int) *BuildGraphMetricsUpdate { + bgmu.mutation.SetEvaluatedValuesID(id) return bgmu } -// RemoveChangedValueIDs removes the "changed_values" edge to EvaluationStat entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveChangedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveChangedValueIDs(ids...) +// SetNillableEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmu *BuildGraphMetricsUpdate) SetNillableEvaluatedValuesID(id *int) *BuildGraphMetricsUpdate { + if id != nil { + bgmu = bgmu.SetEvaluatedValuesID(*id) + } return bgmu } -// RemoveChangedValues removes "changed_values" edges to EvaluationStat entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveChangedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.RemoveChangedValueIDs(ids...) +// SetEvaluatedValues sets the "evaluated_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) SetEvaluatedValues(e *EvaluationStat) *BuildGraphMetricsUpdate { + return bgmu.SetEvaluatedValuesID(e.ID) } -// ClearBuiltValues clears all "built_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) ClearBuiltValues() *BuildGraphMetricsUpdate { - bgmu.mutation.ClearBuiltValues() - return bgmu +// Mutation returns the BuildGraphMetricsMutation object of the builder. +func (bgmu *BuildGraphMetricsUpdate) Mutation() *BuildGraphMetricsMutation { + return bgmu.mutation } -// RemoveBuiltValueIDs removes the "built_values" edge to EvaluationStat entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveBuiltValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveBuiltValueIDs(ids...) +// ClearMetrics clears the "metrics" edge to the Metrics entity. +func (bgmu *BuildGraphMetricsUpdate) ClearMetrics() *BuildGraphMetricsUpdate { + bgmu.mutation.ClearMetrics() return bgmu } -// RemoveBuiltValues removes "built_values" edges to EvaluationStat entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveBuiltValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.RemoveBuiltValueIDs(ids...) 
+// ClearDirtiedValues clears the "dirtied_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) ClearDirtiedValues() *BuildGraphMetricsUpdate { + bgmu.mutation.ClearDirtiedValues() + return bgmu } -// ClearCleanedValues clears all "cleaned_values" edges to the EvaluationStat entity. -func (bgmu *BuildGraphMetricsUpdate) ClearCleanedValues() *BuildGraphMetricsUpdate { - bgmu.mutation.ClearCleanedValues() +// ClearChangedValues clears the "changed_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) ClearChangedValues() *BuildGraphMetricsUpdate { + bgmu.mutation.ClearChangedValues() return bgmu } -// RemoveCleanedValueIDs removes the "cleaned_values" edge to EvaluationStat entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveCleanedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveCleanedValueIDs(ids...) +// ClearBuiltValues clears the "built_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) ClearBuiltValues() *BuildGraphMetricsUpdate { + bgmu.mutation.ClearBuiltValues() return bgmu } -// RemoveCleanedValues removes "cleaned_values" edges to EvaluationStat entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveCleanedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.RemoveCleanedValueIDs(ids...) +// ClearCleanedValues clears the "cleaned_values" edge to the EvaluationStat entity. +func (bgmu *BuildGraphMetricsUpdate) ClearCleanedValues() *BuildGraphMetricsUpdate { + bgmu.mutation.ClearCleanedValues() + return bgmu } -// ClearEvaluatedValues clears all "evaluated_values" edges to the EvaluationStat entity. +// ClearEvaluatedValues clears the "evaluated_values" edge to the EvaluationStat entity. func (bgmu *BuildGraphMetricsUpdate) ClearEvaluatedValues() *BuildGraphMetricsUpdate { bgmu.mutation.ClearEvaluatedValues() return bgmu } -// RemoveEvaluatedValueIDs removes the "evaluated_values" edge to EvaluationStat entities by IDs. -func (bgmu *BuildGraphMetricsUpdate) RemoveEvaluatedValueIDs(ids ...int) *BuildGraphMetricsUpdate { - bgmu.mutation.RemoveEvaluatedValueIDs(ids...) - return bgmu -} - -// RemoveEvaluatedValues removes "evaluated_values" edges to EvaluationStat entities. -func (bgmu *BuildGraphMetricsUpdate) RemoveEvaluatedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmu.RemoveEvaluatedValueIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (bgmu *BuildGraphMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, bgmu.sqlSave, bgmu.mutation, bgmu.hooks) @@ -612,10 +546,10 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, + Columns: []string{buildgraphmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -623,28 +557,12 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := bgmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !bgmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := bgmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, + Columns: []string{buildgraphmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -657,20 +575,7 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.DirtiedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.DirtiedValuesTable, - Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmu.mutation.RemovedDirtiedValuesIDs(); len(nodes) > 0 && !bgmu.mutation.DirtiedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.DirtiedValuesTable, Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, @@ -679,14 +584,11 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmu.mutation.DirtiedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.DirtiedValuesTable, Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, @@ -702,20 +604,7 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.ChangedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.ChangedValuesTable, - Columns: []string{buildgraphmetrics.ChangedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, 
edge) - } - if nodes := bgmu.mutation.RemovedChangedValuesIDs(); len(nodes) > 0 && !bgmu.mutation.ChangedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.ChangedValuesTable, Columns: []string{buildgraphmetrics.ChangedValuesColumn}, @@ -724,14 +613,11 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmu.mutation.ChangedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.ChangedValuesTable, Columns: []string{buildgraphmetrics.ChangedValuesColumn}, @@ -747,20 +633,7 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.BuiltValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.BuiltValuesTable, - Columns: []string{buildgraphmetrics.BuiltValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmu.mutation.RemovedBuiltValuesIDs(); len(nodes) > 0 && !bgmu.mutation.BuiltValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.BuiltValuesTable, Columns: []string{buildgraphmetrics.BuiltValuesColumn}, @@ -769,14 +642,11 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmu.mutation.BuiltValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.BuiltValuesTable, Columns: []string{buildgraphmetrics.BuiltValuesColumn}, @@ -792,20 +662,7 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.CleanedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.CleanedValuesTable, - Columns: []string{buildgraphmetrics.CleanedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmu.mutation.RemovedCleanedValuesIDs(); len(nodes) > 0 && !bgmu.mutation.CleanedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.CleanedValuesTable, Columns: []string{buildgraphmetrics.CleanedValuesColumn}, @@ -814,14 +671,11 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmu.mutation.CleanedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: 
buildgraphmetrics.CleanedValuesTable, Columns: []string{buildgraphmetrics.CleanedValuesColumn}, @@ -837,10 +691,10 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } if bgmu.mutation.EvaluatedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, + Columns: []string{buildgraphmetrics.EvaluatedValuesColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), @@ -848,28 +702,12 @@ func (bgmu *BuildGraphMetricsUpdate) sqlSave(ctx context.Context) (n int, err er } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := bgmu.mutation.RemovedEvaluatedValuesIDs(); len(nodes) > 0 && !bgmu.mutation.EvaluatedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := bgmu.mutation.EvaluatedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, + Columns: []string{buildgraphmetrics.EvaluatedValuesColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), @@ -1143,227 +981,161 @@ func (bgmuo *BuildGraphMetricsUpdateOne) ClearPostInvocationSkyframeNodeCount() return bgmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddMetricIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetMetricsID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetMetricsID(id) return bgmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (bgmuo *BuildGraphMetricsUpdateOne) AddMetrics(m ...*Metrics) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableMetricsID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetMetricsID(*id) } - return bgmuo.AddMetricIDs(ids...) -} - -// AddDirtiedValueIDs adds the "dirtied_values" edge to the EvaluationStat entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddDirtiedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddDirtiedValueIDs(ids...) return bgmuo } -// AddDirtiedValues adds the "dirtied_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) AddDirtiedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.AddDirtiedValueIDs(ids...) +// SetMetrics sets the "metrics" edge to the Metrics entity. 
+func (bgmuo *BuildGraphMetricsUpdateOne) SetMetrics(m *Metrics) *BuildGraphMetricsUpdateOne { + return bgmuo.SetMetricsID(m.ID) } -// AddChangedValueIDs adds the "changed_values" edge to the EvaluationStat entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddChangedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddChangedValueIDs(ids...) +// SetDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetDirtiedValuesID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetDirtiedValuesID(id) return bgmuo } -// AddChangedValues adds the "changed_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) AddChangedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableDirtiedValuesID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetDirtiedValuesID(*id) } - return bgmuo.AddChangedValueIDs(ids...) -} - -// AddBuiltValueIDs adds the "built_values" edge to the EvaluationStat entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddBuiltValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddBuiltValueIDs(ids...) return bgmuo } -// AddBuiltValues adds the "built_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) AddBuiltValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.AddBuiltValueIDs(ids...) +// SetDirtiedValues sets the "dirtied_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) SetDirtiedValues(e *EvaluationStat) *BuildGraphMetricsUpdateOne { + return bgmuo.SetDirtiedValuesID(e.ID) } -// AddCleanedValueIDs adds the "cleaned_values" edge to the EvaluationStat entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddCleanedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddCleanedValueIDs(ids...) +// SetChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetChangedValuesID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetChangedValuesID(id) return bgmuo } -// AddCleanedValues adds the "cleaned_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) AddCleanedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableChangedValuesID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetChangedValuesID(*id) } - return bgmuo.AddCleanedValueIDs(ids...) -} - -// AddEvaluatedValueIDs adds the "evaluated_values" edge to the EvaluationStat entity by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) AddEvaluatedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.AddEvaluatedValueIDs(ids...) return bgmuo } -// AddEvaluatedValues adds the "evaluated_values" edges to the EvaluationStat entity. 
-func (bgmuo *BuildGraphMetricsUpdateOne) AddEvaluatedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.AddEvaluatedValueIDs(ids...) +// SetChangedValues sets the "changed_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) SetChangedValues(e *EvaluationStat) *BuildGraphMetricsUpdateOne { + return bgmuo.SetChangedValuesID(e.ID) } -// Mutation returns the BuildGraphMetricsMutation object of the builder. -func (bgmuo *BuildGraphMetricsUpdateOne) Mutation() *BuildGraphMetricsMutation { - return bgmuo.mutation -} - -// ClearMetrics clears all "metrics" edges to the Metrics entity. -func (bgmuo *BuildGraphMetricsUpdateOne) ClearMetrics() *BuildGraphMetricsUpdateOne { - bgmuo.mutation.ClearMetrics() +// SetBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetBuiltValuesID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetBuiltValuesID(id) return bgmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveMetricIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveMetricIDs(ids...) +// SetNillableBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableBuiltValuesID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetBuiltValuesID(*id) + } return bgmuo } -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return bgmuo.RemoveMetricIDs(ids...) +// SetBuiltValues sets the "built_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) SetBuiltValues(e *EvaluationStat) *BuildGraphMetricsUpdateOne { + return bgmuo.SetBuiltValuesID(e.ID) } -// ClearDirtiedValues clears all "dirtied_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) ClearDirtiedValues() *BuildGraphMetricsUpdateOne { - bgmuo.mutation.ClearDirtiedValues() +// SetCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetCleanedValuesID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetCleanedValuesID(id) return bgmuo } -// RemoveDirtiedValueIDs removes the "dirtied_values" edge to EvaluationStat entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveDirtiedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveDirtiedValueIDs(ids...) +// SetNillableCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableCleanedValuesID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetCleanedValuesID(*id) + } return bgmuo } -// RemoveDirtiedValues removes "dirtied_values" edges to EvaluationStat entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveDirtiedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.RemoveDirtiedValueIDs(ids...) +// SetCleanedValues sets the "cleaned_values" edge to the EvaluationStat entity. 
+func (bgmuo *BuildGraphMetricsUpdateOne) SetCleanedValues(e *EvaluationStat) *BuildGraphMetricsUpdateOne { + return bgmuo.SetCleanedValuesID(e.ID) } -// ClearChangedValues clears all "changed_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) ClearChangedValues() *BuildGraphMetricsUpdateOne { - bgmuo.mutation.ClearChangedValues() +// SetEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID. +func (bgmuo *BuildGraphMetricsUpdateOne) SetEvaluatedValuesID(id int) *BuildGraphMetricsUpdateOne { + bgmuo.mutation.SetEvaluatedValuesID(id) return bgmuo } -// RemoveChangedValueIDs removes the "changed_values" edge to EvaluationStat entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveChangedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveChangedValueIDs(ids...) +// SetNillableEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by ID if the given value is not nil. +func (bgmuo *BuildGraphMetricsUpdateOne) SetNillableEvaluatedValuesID(id *int) *BuildGraphMetricsUpdateOne { + if id != nil { + bgmuo = bgmuo.SetEvaluatedValuesID(*id) + } return bgmuo } -// RemoveChangedValues removes "changed_values" edges to EvaluationStat entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveChangedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.RemoveChangedValueIDs(ids...) +// SetEvaluatedValues sets the "evaluated_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) SetEvaluatedValues(e *EvaluationStat) *BuildGraphMetricsUpdateOne { + return bgmuo.SetEvaluatedValuesID(e.ID) } -// ClearBuiltValues clears all "built_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) ClearBuiltValues() *BuildGraphMetricsUpdateOne { - bgmuo.mutation.ClearBuiltValues() - return bgmuo +// Mutation returns the BuildGraphMetricsMutation object of the builder. +func (bgmuo *BuildGraphMetricsUpdateOne) Mutation() *BuildGraphMetricsMutation { + return bgmuo.mutation } -// RemoveBuiltValueIDs removes the "built_values" edge to EvaluationStat entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveBuiltValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveBuiltValueIDs(ids...) +// ClearMetrics clears the "metrics" edge to the Metrics entity. +func (bgmuo *BuildGraphMetricsUpdateOne) ClearMetrics() *BuildGraphMetricsUpdateOne { + bgmuo.mutation.ClearMetrics() return bgmuo } -// RemoveBuiltValues removes "built_values" edges to EvaluationStat entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveBuiltValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.RemoveBuiltValueIDs(ids...) +// ClearDirtiedValues clears the "dirtied_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) ClearDirtiedValues() *BuildGraphMetricsUpdateOne { + bgmuo.mutation.ClearDirtiedValues() + return bgmuo } -// ClearCleanedValues clears all "cleaned_values" edges to the EvaluationStat entity. -func (bgmuo *BuildGraphMetricsUpdateOne) ClearCleanedValues() *BuildGraphMetricsUpdateOne { - bgmuo.mutation.ClearCleanedValues() +// ClearChangedValues clears the "changed_values" edge to the EvaluationStat entity. 
+func (bgmuo *BuildGraphMetricsUpdateOne) ClearChangedValues() *BuildGraphMetricsUpdateOne { + bgmuo.mutation.ClearChangedValues() return bgmuo } -// RemoveCleanedValueIDs removes the "cleaned_values" edge to EvaluationStat entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveCleanedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveCleanedValueIDs(ids...) +// ClearBuiltValues clears the "built_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) ClearBuiltValues() *BuildGraphMetricsUpdateOne { + bgmuo.mutation.ClearBuiltValues() return bgmuo } -// RemoveCleanedValues removes "cleaned_values" edges to EvaluationStat entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveCleanedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.RemoveCleanedValueIDs(ids...) +// ClearCleanedValues clears the "cleaned_values" edge to the EvaluationStat entity. +func (bgmuo *BuildGraphMetricsUpdateOne) ClearCleanedValues() *BuildGraphMetricsUpdateOne { + bgmuo.mutation.ClearCleanedValues() + return bgmuo } -// ClearEvaluatedValues clears all "evaluated_values" edges to the EvaluationStat entity. +// ClearEvaluatedValues clears the "evaluated_values" edge to the EvaluationStat entity. func (bgmuo *BuildGraphMetricsUpdateOne) ClearEvaluatedValues() *BuildGraphMetricsUpdateOne { bgmuo.mutation.ClearEvaluatedValues() return bgmuo } -// RemoveEvaluatedValueIDs removes the "evaluated_values" edge to EvaluationStat entities by IDs. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveEvaluatedValueIDs(ids ...int) *BuildGraphMetricsUpdateOne { - bgmuo.mutation.RemoveEvaluatedValueIDs(ids...) - return bgmuo -} - -// RemoveEvaluatedValues removes "evaluated_values" edges to EvaluationStat entities. -func (bgmuo *BuildGraphMetricsUpdateOne) RemoveEvaluatedValues(e ...*EvaluationStat) *BuildGraphMetricsUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return bgmuo.RemoveEvaluatedValueIDs(ids...) -} - // Where appends a list predicates to the BuildGraphMetricsUpdate builder. func (bgmuo *BuildGraphMetricsUpdateOne) Where(ps ...predicate.BuildGraphMetrics) *BuildGraphMetricsUpdateOne { bgmuo.mutation.Where(ps...) 
@@ -1513,10 +1285,10 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, + Columns: []string{buildgraphmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -1524,28 +1296,12 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := bgmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !bgmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := bgmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: buildgraphmetrics.MetricsTable, - Columns: buildgraphmetrics.MetricsPrimaryKey, + Columns: []string{buildgraphmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -1558,20 +1314,7 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.DirtiedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.DirtiedValuesTable, - Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmuo.mutation.RemovedDirtiedValuesIDs(); len(nodes) > 0 && !bgmuo.mutation.DirtiedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.DirtiedValuesTable, Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, @@ -1580,14 +1323,11 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmuo.mutation.DirtiedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.DirtiedValuesTable, Columns: []string{buildgraphmetrics.DirtiedValuesColumn}, @@ -1603,20 +1343,7 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.ChangedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.ChangedValuesTable, - Columns: []string{buildgraphmetrics.ChangedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmuo.mutation.RemovedChangedValuesIDs(); len(nodes) > 0 && 
!bgmuo.mutation.ChangedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.ChangedValuesTable, Columns: []string{buildgraphmetrics.ChangedValuesColumn}, @@ -1625,14 +1352,11 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmuo.mutation.ChangedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.ChangedValuesTable, Columns: []string{buildgraphmetrics.ChangedValuesColumn}, @@ -1648,20 +1372,7 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.BuiltValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.BuiltValuesTable, - Columns: []string{buildgraphmetrics.BuiltValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmuo.mutation.RemovedBuiltValuesIDs(); len(nodes) > 0 && !bgmuo.mutation.BuiltValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.BuiltValuesTable, Columns: []string{buildgraphmetrics.BuiltValuesColumn}, @@ -1670,14 +1381,11 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmuo.mutation.BuiltValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.BuiltValuesTable, Columns: []string{buildgraphmetrics.BuiltValuesColumn}, @@ -1693,20 +1401,7 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.CleanedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: buildgraphmetrics.CleanedValuesTable, - Columns: []string{buildgraphmetrics.CleanedValuesColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := bgmuo.mutation.RemovedCleanedValuesIDs(); len(nodes) > 0 && !bgmuo.mutation.CleanedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.CleanedValuesTable, Columns: []string{buildgraphmetrics.CleanedValuesColumn}, @@ -1715,14 +1410,11 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := bgmuo.mutation.CleanedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.M2O, Inverse: false, Table: buildgraphmetrics.CleanedValuesTable, Columns: []string{buildgraphmetrics.CleanedValuesColumn}, @@ 
-1738,10 +1430,10 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } if bgmuo.mutation.EvaluatedValuesCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, + Columns: []string{buildgraphmetrics.EvaluatedValuesColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), @@ -1749,28 +1441,12 @@ func (bgmuo *BuildGraphMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Bu } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := bgmuo.mutation.RemovedEvaluatedValuesIDs(); len(nodes) > 0 && !bgmuo.mutation.EvaluatedValuesCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := bgmuo.mutation.EvaluatedValuesIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: buildgraphmetrics.EvaluatedValuesTable, - Columns: buildgraphmetrics.EvaluatedValuesPrimaryKey, + Columns: []string{buildgraphmetrics.EvaluatedValuesColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(evaluationstat.FieldID, field.TypeInt), diff --git a/ent/gen/ent/client.go b/ent/gen/ent/client.go index 492f350..885f76f 100644 --- a/ent/gen/ent/client.go +++ b/ent/gen/ent/client.go @@ -645,7 +645,7 @@ func (c *ActionCacheStatisticsClient) QueryActionSummary(acs *ActionCacheStatist step := sqlgraph.NewStep( sqlgraph.From(actioncachestatistics.Table, actioncachestatistics.FieldID, id), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, actioncachestatistics.ActionSummaryTable, actioncachestatistics.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, actioncachestatistics.ActionSummaryTable, actioncachestatistics.ActionSummaryColumn), ) fromV = sqlgraph.Neighbors(acs.driver.Dialect(), step) return fromV, nil @@ -661,7 +661,7 @@ func (c *ActionCacheStatisticsClient) QueryMissDetails(acs *ActionCacheStatistic step := sqlgraph.NewStep( sqlgraph.From(actioncachestatistics.Table, actioncachestatistics.FieldID, id), sqlgraph.To(missdetail.Table, missdetail.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actioncachestatistics.MissDetailsTable, actioncachestatistics.MissDetailsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actioncachestatistics.MissDetailsTable, actioncachestatistics.MissDetailsColumn), ) fromV = sqlgraph.Neighbors(acs.driver.Dialect(), step) return fromV, nil @@ -810,7 +810,7 @@ func (c *ActionDataClient) QueryActionSummary(ad *ActionData) *ActionSummaryQuer step := sqlgraph.NewStep( sqlgraph.From(actiondata.Table, actiondata.FieldID, id), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, actiondata.ActionSummaryTable, actiondata.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, actiondata.ActionSummaryTable, actiondata.ActionSummaryColumn), ) fromV = sqlgraph.Neighbors(ad.driver.Dialect(), step) return fromV, nil @@ -959,7 +959,7 @@ func (c *ActionSummaryClient) 
QueryMetrics(as *ActionSummary) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, actionsummary.MetricsTable, actionsummary.MetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, actionsummary.MetricsTable, actionsummary.MetricsColumn), ) fromV = sqlgraph.Neighbors(as.driver.Dialect(), step) return fromV, nil @@ -975,7 +975,7 @@ func (c *ActionSummaryClient) QueryActionData(as *ActionSummary) *ActionDataQuer step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, id), sqlgraph.To(actiondata.Table, actiondata.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.ActionDataTable, actionsummary.ActionDataPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actionsummary.ActionDataTable, actionsummary.ActionDataColumn), ) fromV = sqlgraph.Neighbors(as.driver.Dialect(), step) return fromV, nil @@ -991,7 +991,7 @@ func (c *ActionSummaryClient) QueryRunnerCount(as *ActionSummary) *RunnerCountQu step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, id), sqlgraph.To(runnercount.Table, runnercount.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.RunnerCountTable, actionsummary.RunnerCountPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, actionsummary.RunnerCountTable, actionsummary.RunnerCountColumn), ) fromV = sqlgraph.Neighbors(as.driver.Dialect(), step) return fromV, nil @@ -1007,7 +1007,7 @@ func (c *ActionSummaryClient) QueryActionCacheStatistics(as *ActionSummary) *Act step := sqlgraph.NewStep( sqlgraph.From(actionsummary.Table, actionsummary.FieldID, id), sqlgraph.To(actioncachestatistics.Table, actioncachestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, actionsummary.ActionCacheStatisticsTable, actionsummary.ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, actionsummary.ActionCacheStatisticsTable, actionsummary.ActionCacheStatisticsColumn), ) fromV = sqlgraph.Neighbors(as.driver.Dialect(), step) return fromV, nil @@ -1156,7 +1156,7 @@ func (c *ArtifactMetricsClient) QueryMetrics(am *ArtifactMetrics) *MetricsQuery step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, artifactmetrics.MetricsTable, artifactmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, artifactmetrics.MetricsTable, artifactmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(am.driver.Dialect(), step) return fromV, nil @@ -1172,7 +1172,7 @@ func (c *ArtifactMetricsClient) QuerySourceArtifactsRead(am *ArtifactMetrics) *F step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, id), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.SourceArtifactsReadTable, artifactmetrics.SourceArtifactsReadColumn), + sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.SourceArtifactsReadTable, artifactmetrics.SourceArtifactsReadColumn), ) fromV = sqlgraph.Neighbors(am.driver.Dialect(), step) return fromV, nil @@ -1188,7 +1188,7 @@ func (c *ArtifactMetricsClient) QueryOutputArtifactsSeen(am *ArtifactMetrics) *F step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, id), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.OutputArtifactsSeenTable, artifactmetrics.OutputArtifactsSeenColumn), + 
sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.OutputArtifactsSeenTable, artifactmetrics.OutputArtifactsSeenColumn), ) fromV = sqlgraph.Neighbors(am.driver.Dialect(), step) return fromV, nil @@ -1204,7 +1204,7 @@ func (c *ArtifactMetricsClient) QueryOutputArtifactsFromActionCache(am *Artifact step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, id), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, artifactmetrics.OutputArtifactsFromActionCacheTable, artifactmetrics.OutputArtifactsFromActionCacheColumn), + sqlgraph.Edge(sqlgraph.M2O, false, artifactmetrics.OutputArtifactsFromActionCacheTable, artifactmetrics.OutputArtifactsFromActionCacheColumn), ) fromV = sqlgraph.Neighbors(am.driver.Dialect(), step) return fromV, nil @@ -1220,7 +1220,7 @@ func (c *ArtifactMetricsClient) QueryTopLevelArtifacts(am *ArtifactMetrics) *Fil step := sqlgraph.NewStep( sqlgraph.From(artifactmetrics.Table, artifactmetrics.FieldID, id), sqlgraph.To(filesmetric.Table, filesmetric.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, artifactmetrics.TopLevelArtifactsTable, artifactmetrics.TopLevelArtifactsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, artifactmetrics.TopLevelArtifactsTable, artifactmetrics.TopLevelArtifactsColumn), ) fromV = sqlgraph.Neighbors(am.driver.Dialect(), step) return fromV, nil @@ -1433,7 +1433,7 @@ func (c *BazelInvocationClient) QueryTestCollection(bi *BazelInvocation) *TestCo step := sqlgraph.NewStep( sqlgraph.From(bazelinvocation.Table, bazelinvocation.FieldID, id), sqlgraph.To(testcollection.Table, testcollection.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, bazelinvocation.TestCollectionTable, bazelinvocation.TestCollectionPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, bazelinvocation.TestCollectionTable, bazelinvocation.TestCollectionColumn), ) fromV = sqlgraph.Neighbors(bi.driver.Dialect(), step) return fromV, nil @@ -1449,7 +1449,7 @@ func (c *BazelInvocationClient) QueryTargets(bi *BazelInvocation) *TargetPairQue step := sqlgraph.NewStep( sqlgraph.From(bazelinvocation.Table, bazelinvocation.FieldID, id), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, bazelinvocation.TargetsTable, bazelinvocation.TargetsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, bazelinvocation.TargetsTable, bazelinvocation.TargetsColumn), ) fromV = sqlgraph.Neighbors(bi.driver.Dialect(), step) return fromV, nil @@ -2029,7 +2029,7 @@ func (c *BuildGraphMetricsClient) QueryMetrics(bgm *BuildGraphMetrics) *MetricsQ step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, buildgraphmetrics.MetricsTable, buildgraphmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, buildgraphmetrics.MetricsTable, buildgraphmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, nil @@ -2045,7 +2045,7 @@ func (c *BuildGraphMetricsClient) QueryDirtiedValues(bgm *BuildGraphMetrics) *Ev step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.DirtiedValuesTable, buildgraphmetrics.DirtiedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.DirtiedValuesTable, buildgraphmetrics.DirtiedValuesColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, 
nil @@ -2061,7 +2061,7 @@ func (c *BuildGraphMetricsClient) QueryChangedValues(bgm *BuildGraphMetrics) *Ev step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.ChangedValuesTable, buildgraphmetrics.ChangedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.ChangedValuesTable, buildgraphmetrics.ChangedValuesColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, nil @@ -2077,7 +2077,7 @@ func (c *BuildGraphMetricsClient) QueryBuiltValues(bgm *BuildGraphMetrics) *Eval step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.BuiltValuesTable, buildgraphmetrics.BuiltValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.BuiltValuesTable, buildgraphmetrics.BuiltValuesColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, nil @@ -2093,7 +2093,7 @@ func (c *BuildGraphMetricsClient) QueryCleanedValues(bgm *BuildGraphMetrics) *Ev step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, buildgraphmetrics.CleanedValuesTable, buildgraphmetrics.CleanedValuesColumn), + sqlgraph.Edge(sqlgraph.M2O, false, buildgraphmetrics.CleanedValuesTable, buildgraphmetrics.CleanedValuesColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, nil @@ -2109,7 +2109,7 @@ func (c *BuildGraphMetricsClient) QueryEvaluatedValues(bgm *BuildGraphMetrics) * step := sqlgraph.NewStep( sqlgraph.From(buildgraphmetrics.Table, buildgraphmetrics.FieldID, id), sqlgraph.To(evaluationstat.Table, evaluationstat.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, buildgraphmetrics.EvaluatedValuesTable, buildgraphmetrics.EvaluatedValuesPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, buildgraphmetrics.EvaluatedValuesTable, buildgraphmetrics.EvaluatedValuesColumn), ) fromV = sqlgraph.Neighbors(bgm.driver.Dialect(), step) return fromV, nil @@ -2258,7 +2258,7 @@ func (c *CumulativeMetricsClient) QueryMetrics(cm *CumulativeMetrics) *MetricsQu step := sqlgraph.NewStep( sqlgraph.From(cumulativemetrics.Table, cumulativemetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, cumulativemetrics.MetricsTable, cumulativemetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, cumulativemetrics.MetricsTable, cumulativemetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(cm.driver.Dialect(), step) return fromV, nil @@ -2407,7 +2407,7 @@ func (c *DynamicExecutionMetricsClient) QueryMetrics(dem *DynamicExecutionMetric step := sqlgraph.NewStep( sqlgraph.From(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, dynamicexecutionmetrics.MetricsTable, dynamicexecutionmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, dynamicexecutionmetrics.MetricsTable, dynamicexecutionmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(dem.driver.Dialect(), step) return fromV, nil @@ -2423,7 +2423,7 @@ func (c *DynamicExecutionMetricsClient) QueryRaceStatistics(dem *DynamicExecutio step := sqlgraph.NewStep( sqlgraph.From(dynamicexecutionmetrics.Table, 
dynamicexecutionmetrics.FieldID, id), sqlgraph.To(racestatistics.Table, racestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, dynamicexecutionmetrics.RaceStatisticsTable, dynamicexecutionmetrics.RaceStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, dynamicexecutionmetrics.RaceStatisticsTable, dynamicexecutionmetrics.RaceStatisticsColumn), ) fromV = sqlgraph.Neighbors(dem.driver.Dialect(), step) return fromV, nil @@ -2572,7 +2572,7 @@ func (c *EvaluationStatClient) QueryBuildGraphMetrics(es *EvaluationStat) *Build step := sqlgraph.NewStep( sqlgraph.From(evaluationstat.Table, evaluationstat.FieldID, id), sqlgraph.To(buildgraphmetrics.Table, buildgraphmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, evaluationstat.BuildGraphMetricsTable, evaluationstat.BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, evaluationstat.BuildGraphMetricsTable, evaluationstat.BuildGraphMetricsColumn), ) fromV = sqlgraph.Neighbors(es.driver.Dialect(), step) return fromV, nil @@ -2870,7 +2870,7 @@ func (c *ExectionInfoClient) QueryTestResult(ei *ExectionInfo) *TestResultBESQue step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, id), sqlgraph.To(testresultbes.Table, testresultbes.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, exectioninfo.TestResultTable, exectioninfo.TestResultColumn), + sqlgraph.Edge(sqlgraph.O2O, true, exectioninfo.TestResultTable, exectioninfo.TestResultColumn), ) fromV = sqlgraph.Neighbors(ei.driver.Dialect(), step) return fromV, nil @@ -2886,7 +2886,7 @@ func (c *ExectionInfoClient) QueryTimingBreakdown(ei *ExectionInfo) *TimingBreak step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, id), sqlgraph.To(timingbreakdown.Table, timingbreakdown.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, exectioninfo.TimingBreakdownTable, exectioninfo.TimingBreakdownColumn), + sqlgraph.Edge(sqlgraph.O2O, false, exectioninfo.TimingBreakdownTable, exectioninfo.TimingBreakdownColumn), ) fromV = sqlgraph.Neighbors(ei.driver.Dialect(), step) return fromV, nil @@ -2902,7 +2902,7 @@ func (c *ExectionInfoClient) QueryResourceUsage(ei *ExectionInfo) *ResourceUsage step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, id), sqlgraph.To(resourceusage.Table, resourceusage.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, exectioninfo.ResourceUsageTable, exectioninfo.ResourceUsagePrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, exectioninfo.ResourceUsageTable, exectioninfo.ResourceUsageColumn), ) fromV = sqlgraph.Neighbors(ei.driver.Dialect(), step) return fromV, nil @@ -3051,7 +3051,7 @@ func (c *FilesMetricClient) QueryArtifactMetrics(fm *FilesMetric) *ArtifactMetri step := sqlgraph.NewStep( sqlgraph.From(filesmetric.Table, filesmetric.FieldID, id), sqlgraph.To(artifactmetrics.Table, artifactmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, filesmetric.ArtifactMetricsTable, filesmetric.ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, filesmetric.ArtifactMetricsTable, filesmetric.ArtifactMetricsColumn), ) fromV = sqlgraph.Neighbors(fm.driver.Dialect(), step) return fromV, nil @@ -3200,7 +3200,7 @@ func (c *GarbageMetricsClient) QueryMemoryMetrics(gm *GarbageMetrics) *MemoryMet step := sqlgraph.NewStep( sqlgraph.From(garbagemetrics.Table, garbagemetrics.FieldID, id), sqlgraph.To(memorymetrics.Table, memorymetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, garbagemetrics.MemoryMetricsTable, garbagemetrics.MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, 
garbagemetrics.MemoryMetricsTable, garbagemetrics.MemoryMetricsColumn), ) fromV = sqlgraph.Neighbors(gm.driver.Dialect(), step) return fromV, nil @@ -3349,7 +3349,7 @@ func (c *MemoryMetricsClient) QueryMetrics(mm *MemoryMetrics) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(memorymetrics.Table, memorymetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, memorymetrics.MetricsTable, memorymetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, memorymetrics.MetricsTable, memorymetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(mm.driver.Dialect(), step) return fromV, nil @@ -3365,7 +3365,7 @@ func (c *MemoryMetricsClient) QueryGarbageMetrics(mm *MemoryMetrics) *GarbageMet step := sqlgraph.NewStep( sqlgraph.From(memorymetrics.Table, memorymetrics.FieldID, id), sqlgraph.To(garbagemetrics.Table, garbagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, memorymetrics.GarbageMetricsTable, memorymetrics.GarbageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, memorymetrics.GarbageMetricsTable, memorymetrics.GarbageMetricsColumn), ) fromV = sqlgraph.Neighbors(mm.driver.Dialect(), step) return fromV, nil @@ -3530,7 +3530,7 @@ func (c *MetricsClient) QueryActionSummary(m *Metrics) *ActionSummaryQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, metrics.ActionSummaryTable, metrics.ActionSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.ActionSummaryTable, metrics.ActionSummaryColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3546,7 +3546,7 @@ func (c *MetricsClient) QueryMemoryMetrics(m *Metrics) *MemoryMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(memorymetrics.Table, memorymetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.MemoryMetricsTable, metrics.MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.MemoryMetricsTable, metrics.MemoryMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3562,7 +3562,7 @@ func (c *MetricsClient) QueryTargetMetrics(m *Metrics) *TargetMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(targetmetrics.Table, targetmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.TargetMetricsTable, metrics.TargetMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.TargetMetricsTable, metrics.TargetMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3578,7 +3578,7 @@ func (c *MetricsClient) QueryPackageMetrics(m *Metrics) *PackageMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(packagemetrics.Table, packagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.PackageMetricsTable, metrics.PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.PackageMetricsTable, metrics.PackageMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3594,7 +3594,7 @@ func (c *MetricsClient) QueryTimingMetrics(m *Metrics) *TimingMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(timingmetrics.Table, timingmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.TimingMetricsTable, metrics.TimingMetricsPrimaryKey...), + 
sqlgraph.Edge(sqlgraph.O2O, false, metrics.TimingMetricsTable, metrics.TimingMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3610,7 +3610,7 @@ func (c *MetricsClient) QueryCumulativeMetrics(m *Metrics) *CumulativeMetricsQue step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(cumulativemetrics.Table, cumulativemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.CumulativeMetricsTable, metrics.CumulativeMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.CumulativeMetricsTable, metrics.CumulativeMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3626,7 +3626,7 @@ func (c *MetricsClient) QueryArtifactMetrics(m *Metrics) *ArtifactMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(artifactmetrics.Table, artifactmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.ArtifactMetricsTable, metrics.ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.ArtifactMetricsTable, metrics.ArtifactMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3642,7 +3642,7 @@ func (c *MetricsClient) QueryNetworkMetrics(m *Metrics) *NetworkMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(networkmetrics.Table, networkmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.NetworkMetricsTable, metrics.NetworkMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.NetworkMetricsTable, metrics.NetworkMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3658,7 +3658,7 @@ func (c *MetricsClient) QueryDynamicExecutionMetrics(m *Metrics) *DynamicExecuti step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.DynamicExecutionMetricsTable, metrics.DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.DynamicExecutionMetricsTable, metrics.DynamicExecutionMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3674,7 +3674,7 @@ func (c *MetricsClient) QueryBuildGraphMetrics(m *Metrics) *BuildGraphMetricsQue step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, id), sqlgraph.To(buildgraphmetrics.Table, buildgraphmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.BuildGraphMetricsTable, metrics.BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.BuildGraphMetricsTable, metrics.BuildGraphMetricsColumn), ) fromV = sqlgraph.Neighbors(m.driver.Dialect(), step) return fromV, nil @@ -3823,7 +3823,7 @@ func (c *MissDetailClient) QueryActionCacheStatistics(md *MissDetail) *ActionCac step := sqlgraph.NewStep( sqlgraph.From(missdetail.Table, missdetail.FieldID, id), sqlgraph.To(actioncachestatistics.Table, actioncachestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, missdetail.ActionCacheStatisticsTable, missdetail.ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, missdetail.ActionCacheStatisticsTable, missdetail.ActionCacheStatisticsColumn), ) fromV = sqlgraph.Neighbors(md.driver.Dialect(), step) return fromV, nil @@ -3972,7 +3972,7 @@ func (c *NamedSetOfFilesClient) QueryOutputGroup(nsof *NamedSetOfFiles) *OutputG step := sqlgraph.NewStep( 
sqlgraph.From(namedsetoffiles.Table, namedsetoffiles.FieldID, id), sqlgraph.To(outputgroup.Table, outputgroup.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, namedsetoffiles.OutputGroupTable, namedsetoffiles.OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, true, namedsetoffiles.OutputGroupTable, namedsetoffiles.OutputGroupColumn), ) fromV = sqlgraph.Neighbors(nsof.driver.Dialect(), step) return fromV, nil @@ -4153,7 +4153,7 @@ func (c *NetworkMetricsClient) QueryMetrics(nm *NetworkMetrics) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(networkmetrics.Table, networkmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, networkmetrics.MetricsTable, networkmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, networkmetrics.MetricsTable, networkmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(nm.driver.Dialect(), step) return fromV, nil @@ -4169,7 +4169,7 @@ func (c *NetworkMetricsClient) QuerySystemNetworkStats(nm *NetworkMetrics) *Syst step := sqlgraph.NewStep( sqlgraph.From(networkmetrics.Table, networkmetrics.FieldID, id), sqlgraph.To(systemnetworkstats.Table, systemnetworkstats.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, networkmetrics.SystemNetworkStatsTable, networkmetrics.SystemNetworkStatsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, networkmetrics.SystemNetworkStatsTable, networkmetrics.SystemNetworkStatsColumn), ) fromV = sqlgraph.Neighbors(nm.driver.Dialect(), step) return fromV, nil @@ -4318,7 +4318,7 @@ func (c *OutputGroupClient) QueryTargetComplete(og *OutputGroup) *TargetComplete step := sqlgraph.NewStep( sqlgraph.From(outputgroup.Table, outputgroup.FieldID, id), sqlgraph.To(targetcomplete.Table, targetcomplete.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, outputgroup.TargetCompleteTable, outputgroup.TargetCompleteColumn), + sqlgraph.Edge(sqlgraph.O2O, true, outputgroup.TargetCompleteTable, outputgroup.TargetCompleteColumn), ) fromV = sqlgraph.Neighbors(og.driver.Dialect(), step) return fromV, nil @@ -4350,7 +4350,7 @@ func (c *OutputGroupClient) QueryFileSets(og *OutputGroup) *NamedSetOfFilesQuery step := sqlgraph.NewStep( sqlgraph.From(outputgroup.Table, outputgroup.FieldID, id), sqlgraph.To(namedsetoffiles.Table, namedsetoffiles.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, outputgroup.FileSetsTable, outputgroup.FileSetsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, outputgroup.FileSetsTable, outputgroup.FileSetsColumn), ) fromV = sqlgraph.Neighbors(og.driver.Dialect(), step) return fromV, nil @@ -4499,7 +4499,7 @@ func (c *PackageLoadMetricsClient) QueryPackageMetrics(plm *PackageLoadMetrics) step := sqlgraph.NewStep( sqlgraph.From(packageloadmetrics.Table, packageloadmetrics.FieldID, id), sqlgraph.To(packagemetrics.Table, packagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, packageloadmetrics.PackageMetricsTable, packageloadmetrics.PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, packageloadmetrics.PackageMetricsTable, packageloadmetrics.PackageMetricsColumn), ) fromV = sqlgraph.Neighbors(plm.driver.Dialect(), step) return fromV, nil @@ -4648,7 +4648,7 @@ func (c *PackageMetricsClient) QueryMetrics(pm *PackageMetrics) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(packagemetrics.Table, packagemetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, packagemetrics.MetricsTable, packagemetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, packagemetrics.MetricsTable, packagemetrics.MetricsColumn), ) fromV = 
sqlgraph.Neighbors(pm.driver.Dialect(), step) return fromV, nil @@ -4664,7 +4664,7 @@ func (c *PackageMetricsClient) QueryPackageLoadMetrics(pm *PackageMetrics) *Pack step := sqlgraph.NewStep( sqlgraph.From(packagemetrics.Table, packagemetrics.FieldID, id), sqlgraph.To(packageloadmetrics.Table, packageloadmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, packagemetrics.PackageLoadMetricsTable, packagemetrics.PackageLoadMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, packagemetrics.PackageLoadMetricsTable, packagemetrics.PackageLoadMetricsColumn), ) fromV = sqlgraph.Neighbors(pm.driver.Dialect(), step) return fromV, nil @@ -4813,7 +4813,7 @@ func (c *RaceStatisticsClient) QueryDynamicExecutionMetrics(rs *RaceStatistics) step := sqlgraph.NewStep( sqlgraph.From(racestatistics.Table, racestatistics.FieldID, id), sqlgraph.To(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, racestatistics.DynamicExecutionMetricsTable, racestatistics.DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, racestatistics.DynamicExecutionMetricsTable, racestatistics.DynamicExecutionMetricsColumn), ) fromV = sqlgraph.Neighbors(rs.driver.Dialect(), step) return fromV, nil @@ -4962,7 +4962,7 @@ func (c *ResourceUsageClient) QueryExecutionInfo(ru *ResourceUsage) *ExectionInf step := sqlgraph.NewStep( sqlgraph.From(resourceusage.Table, resourceusage.FieldID, id), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, resourceusage.ExecutionInfoTable, resourceusage.ExecutionInfoPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, resourceusage.ExecutionInfoTable, resourceusage.ExecutionInfoColumn), ) fromV = sqlgraph.Neighbors(ru.driver.Dialect(), step) return fromV, nil @@ -5111,7 +5111,7 @@ func (c *RunnerCountClient) QueryActionSummary(rc *RunnerCount) *ActionSummaryQu step := sqlgraph.NewStep( sqlgraph.From(runnercount.Table, runnercount.FieldID, id), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, runnercount.ActionSummaryTable, runnercount.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, runnercount.ActionSummaryTable, runnercount.ActionSummaryColumn), ) fromV = sqlgraph.Neighbors(rc.driver.Dialect(), step) return fromV, nil @@ -5260,7 +5260,7 @@ func (c *SystemNetworkStatsClient) QueryNetworkMetrics(sns *SystemNetworkStats) step := sqlgraph.NewStep( sqlgraph.From(systemnetworkstats.Table, systemnetworkstats.FieldID, id), sqlgraph.To(networkmetrics.Table, networkmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, systemnetworkstats.NetworkMetricsTable, systemnetworkstats.NetworkMetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, systemnetworkstats.NetworkMetricsTable, systemnetworkstats.NetworkMetricsColumn), ) fromV = sqlgraph.Neighbors(sns.driver.Dialect(), step) return fromV, nil @@ -5409,7 +5409,7 @@ func (c *TargetCompleteClient) QueryTargetPair(tc *TargetComplete) *TargetPairQu step := sqlgraph.NewStep( sqlgraph.From(targetcomplete.Table, targetcomplete.FieldID, id), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, targetcomplete.TargetPairTable, targetcomplete.TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, targetcomplete.TargetPairTable, targetcomplete.TargetPairColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) return fromV, nil @@ -5457,7 +5457,7 @@ func (c *TargetCompleteClient) QueryOutputGroup(tc *TargetComplete) *OutputGroup step := sqlgraph.NewStep( 
sqlgraph.From(targetcomplete.Table, targetcomplete.FieldID, id), sqlgraph.To(outputgroup.Table, outputgroup.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetcomplete.OutputGroupTable, targetcomplete.OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetcomplete.OutputGroupTable, targetcomplete.OutputGroupColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) return fromV, nil @@ -5606,7 +5606,7 @@ func (c *TargetConfiguredClient) QueryTargetPair(tc *TargetConfigured) *TargetPa step := sqlgraph.NewStep( sqlgraph.From(targetconfigured.Table, targetconfigured.FieldID, id), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, targetconfigured.TargetPairTable, targetconfigured.TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, targetconfigured.TargetPairTable, targetconfigured.TargetPairColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) return fromV, nil @@ -5755,7 +5755,7 @@ func (c *TargetMetricsClient) QueryMetrics(tm *TargetMetrics) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(targetmetrics.Table, targetmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, targetmetrics.MetricsTable, targetmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, targetmetrics.MetricsTable, targetmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(tm.driver.Dialect(), step) return fromV, nil @@ -5904,7 +5904,7 @@ func (c *TargetPairClient) QueryBazelInvocation(tp *TargetPair) *BazelInvocation step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, id), sqlgraph.To(bazelinvocation.Table, bazelinvocation.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, targetpair.BazelInvocationTable, targetpair.BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, targetpair.BazelInvocationTable, targetpair.BazelInvocationColumn), ) fromV = sqlgraph.Neighbors(tp.driver.Dialect(), step) return fromV, nil @@ -5920,7 +5920,7 @@ func (c *TargetPairClient) QueryConfiguration(tp *TargetPair) *TargetConfiguredQ step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, id), sqlgraph.To(targetconfigured.Table, targetconfigured.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetpair.ConfigurationTable, targetpair.ConfigurationColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetpair.ConfigurationTable, targetpair.ConfigurationColumn), ) fromV = sqlgraph.Neighbors(tp.driver.Dialect(), step) return fromV, nil @@ -5936,7 +5936,7 @@ func (c *TargetPairClient) QueryCompletion(tp *TargetPair) *TargetCompleteQuery step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, id), sqlgraph.To(targetcomplete.Table, targetcomplete.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetpair.CompletionTable, targetpair.CompletionColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetpair.CompletionTable, targetpair.CompletionColumn), ) fromV = sqlgraph.Neighbors(tp.driver.Dialect(), step) return fromV, nil @@ -6085,7 +6085,7 @@ func (c *TestCollectionClient) QueryBazelInvocation(tc *TestCollection) *BazelIn step := sqlgraph.NewStep( sqlgraph.From(testcollection.Table, testcollection.FieldID, id), sqlgraph.To(bazelinvocation.Table, bazelinvocation.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, testcollection.BazelInvocationTable, testcollection.BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, testcollection.BazelInvocationTable, testcollection.BazelInvocationColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) 
return fromV, nil @@ -6101,7 +6101,7 @@ func (c *TestCollectionClient) QueryTestSummary(tc *TestCollection) *TestSummary step := sqlgraph.NewStep( sqlgraph.From(testcollection.Table, testcollection.FieldID, id), sqlgraph.To(testsummary.Table, testsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, testcollection.TestSummaryTable, testcollection.TestSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, testcollection.TestSummaryTable, testcollection.TestSummaryColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) return fromV, nil @@ -6266,7 +6266,7 @@ func (c *TestFileClient) QueryTestResult(tf *TestFile) *TestResultBESQuery { step := sqlgraph.NewStep( sqlgraph.From(testfile.Table, testfile.FieldID, id), sqlgraph.To(testresultbes.Table, testresultbes.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, testfile.TestResultTable, testfile.TestResultPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, testfile.TestResultTable, testfile.TestResultColumn), ) fromV = sqlgraph.Neighbors(tf.driver.Dialect(), step) return fromV, nil @@ -6431,7 +6431,7 @@ func (c *TestResultBESClient) QueryTestActionOutput(trb *TestResultBES) *TestFil step := sqlgraph.NewStep( sqlgraph.From(testresultbes.Table, testresultbes.FieldID, id), sqlgraph.To(testfile.Table, testfile.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, testresultbes.TestActionOutputTable, testresultbes.TestActionOutputPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, testresultbes.TestActionOutputTable, testresultbes.TestActionOutputColumn), ) fromV = sqlgraph.Neighbors(trb.driver.Dialect(), step) return fromV, nil @@ -6447,7 +6447,7 @@ func (c *TestResultBESClient) QueryExecutionInfo(trb *TestResultBES) *ExectionIn step := sqlgraph.NewStep( sqlgraph.From(testresultbes.Table, testresultbes.FieldID, id), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, testresultbes.ExecutionInfoTable, testresultbes.ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, false, testresultbes.ExecutionInfoTable, testresultbes.ExecutionInfoColumn), ) fromV = sqlgraph.Neighbors(trb.driver.Dialect(), step) return fromV, nil @@ -6596,7 +6596,7 @@ func (c *TestSummaryClient) QueryTestCollection(ts *TestSummary) *TestCollection step := sqlgraph.NewStep( sqlgraph.From(testsummary.Table, testsummary.FieldID, id), sqlgraph.To(testcollection.Table, testcollection.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, testsummary.TestCollectionTable, testsummary.TestCollectionColumn), + sqlgraph.Edge(sqlgraph.O2O, true, testsummary.TestCollectionTable, testsummary.TestCollectionColumn), ) fromV = sqlgraph.Neighbors(ts.driver.Dialect(), step) return fromV, nil @@ -6777,7 +6777,7 @@ func (c *TimingBreakdownClient) QueryExecutionInfo(tb *TimingBreakdown) *Exectio step := sqlgraph.NewStep( sqlgraph.From(timingbreakdown.Table, timingbreakdown.FieldID, id), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, timingbreakdown.ExecutionInfoTable, timingbreakdown.ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, true, timingbreakdown.ExecutionInfoTable, timingbreakdown.ExecutionInfoColumn), ) fromV = sqlgraph.Neighbors(tb.driver.Dialect(), step) return fromV, nil @@ -6793,7 +6793,7 @@ func (c *TimingBreakdownClient) QueryChild(tb *TimingBreakdown) *TimingChildQuer step := sqlgraph.NewStep( sqlgraph.From(timingbreakdown.Table, timingbreakdown.FieldID, id), sqlgraph.To(timingchild.Table, timingchild.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, timingbreakdown.ChildTable, timingbreakdown.ChildPrimaryKey...), + 
sqlgraph.Edge(sqlgraph.O2M, false, timingbreakdown.ChildTable, timingbreakdown.ChildColumn), ) fromV = sqlgraph.Neighbors(tb.driver.Dialect(), step) return fromV, nil @@ -6942,7 +6942,7 @@ func (c *TimingChildClient) QueryTimingBreakdown(tc *TimingChild) *TimingBreakdo step := sqlgraph.NewStep( sqlgraph.From(timingchild.Table, timingchild.FieldID, id), sqlgraph.To(timingbreakdown.Table, timingbreakdown.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, timingchild.TimingBreakdownTable, timingchild.TimingBreakdownPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, timingchild.TimingBreakdownTable, timingchild.TimingBreakdownColumn), ) fromV = sqlgraph.Neighbors(tc.driver.Dialect(), step) return fromV, nil @@ -7091,7 +7091,7 @@ func (c *TimingMetricsClient) QueryMetrics(tm *TimingMetrics) *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(timingmetrics.Table, timingmetrics.FieldID, id), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, timingmetrics.MetricsTable, timingmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, timingmetrics.MetricsTable, timingmetrics.MetricsColumn), ) fromV = sqlgraph.Neighbors(tm.driver.Dialect(), step) return fromV, nil diff --git a/ent/gen/ent/cumulativemetrics.go b/ent/gen/ent/cumulativemetrics.go index 6767966..a2b821b 100644 --- a/ent/gen/ent/cumulativemetrics.go +++ b/ent/gen/ent/cumulativemetrics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/cumulativemetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) // CumulativeMetrics is the model entity for the CumulativeMetrics schema. @@ -22,28 +23,29 @@ type CumulativeMetrics struct { NumBuilds int32 `json:"num_builds,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the CumulativeMetricsQuery when eager-loading is set. - Edges CumulativeMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges CumulativeMetricsEdges `json:"edges"` + metrics_cumulative_metrics *int + selectValues sql.SelectValues } // CumulativeMetricsEdges holds the relations/edges for other nodes in the graph. type CumulativeMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedMetrics map[string][]*Metrics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e CumulativeMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e CumulativeMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -55,6 +57,8 @@ func (*CumulativeMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case cumulativemetrics.FieldID, cumulativemetrics.FieldNumAnalyses, cumulativemetrics.FieldNumBuilds: values[i] = new(sql.NullInt64) + case cumulativemetrics.ForeignKeys[0]: // metrics_cumulative_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -88,6 +92,13 @@ func (cm *CumulativeMetrics) assignValues(columns []string, values []any) error } else if value.Valid { cm.NumBuilds = int32(value.Int64) } + case cumulativemetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_cumulative_metrics", value) + } else if value.Valid { + cm.metrics_cumulative_metrics = new(int) + *cm.metrics_cumulative_metrics = int(value.Int64) + } default: cm.selectValues.Set(columns[i], values[i]) } @@ -138,29 +149,5 @@ func (cm *CumulativeMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (cm *CumulativeMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if cm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := cm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (cm *CumulativeMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if cm.Edges.namedMetrics == nil { - cm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - cm.Edges.namedMetrics[name] = []*Metrics{} - } else { - cm.Edges.namedMetrics[name] = append(cm.Edges.namedMetrics[name], edges...) - } -} - // CumulativeMetricsSlice is a parsable slice of CumulativeMetrics. type CumulativeMetricsSlice []*CumulativeMetrics diff --git a/ent/gen/ent/cumulativemetrics/cumulativemetrics.go b/ent/gen/ent/cumulativemetrics/cumulativemetrics.go index a973491..a15801d 100644 --- a/ent/gen/ent/cumulativemetrics/cumulativemetrics.go +++ b/ent/gen/ent/cumulativemetrics/cumulativemetrics.go @@ -20,11 +20,13 @@ const ( EdgeMetrics = "metrics" // Table holds the table name of the cumulativemetrics in the database. Table = "cumulative_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_cumulative_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "cumulative_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_cumulative_metrics" ) // Columns holds all SQL columns for cumulativemetrics fields. @@ -34,11 +36,11 @@ var Columns = []string{ FieldNumBuilds, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). 
- MetricsPrimaryKey = []string{"metrics_id", "cumulative_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "cumulative_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_cumulative_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -47,6 +49,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -68,23 +75,16 @@ func ByNumBuilds(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldNumBuilds, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } diff --git a/ent/gen/ent/cumulativemetrics/where.go b/ent/gen/ent/cumulativemetrics/where.go index 25fc0a4..931d0e8 100644 --- a/ent/gen/ent/cumulativemetrics/where.go +++ b/ent/gen/ent/cumulativemetrics/where.go @@ -168,7 +168,7 @@ func HasMetrics() predicate.CumulativeMetrics { return predicate.CumulativeMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/cumulativemetrics_create.go b/ent/gen/ent/cumulativemetrics_create.go index 503064e..c470e28 100644 --- a/ent/gen/ent/cumulativemetrics_create.go +++ b/ent/gen/ent/cumulativemetrics_create.go @@ -47,19 +47,23 @@ func (cmc *CumulativeMetricsCreate) SetNillableNumBuilds(i *int32) *CumulativeMe return cmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (cmc *CumulativeMetricsCreate) AddMetricIDs(ids ...int) *CumulativeMetricsCreate { - cmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (cmc *CumulativeMetricsCreate) SetMetricsID(id int) *CumulativeMetricsCreate { + cmc.mutation.SetMetricsID(id) return cmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (cmc *CumulativeMetricsCreate) AddMetrics(m ...*Metrics) *CumulativeMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (cmc *CumulativeMetricsCreate) SetNillableMetricsID(id *int) *CumulativeMetricsCreate { + if id != nil { + cmc = cmc.SetMetricsID(*id) } - return cmc.AddMetricIDs(ids...) 
+ return cmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (cmc *CumulativeMetricsCreate) SetMetrics(m *Metrics) *CumulativeMetricsCreate { + return cmc.SetMetricsID(m.ID) } // Mutation returns the CumulativeMetricsMutation object of the builder. @@ -132,10 +136,10 @@ func (cmc *CumulativeMetricsCreate) createSpec() (*CumulativeMetrics, *sqlgraph. } if nodes := cmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, + Columns: []string{cumulativemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (cmc *CumulativeMetricsCreate) createSpec() (*CumulativeMetrics, *sqlgraph. for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_cumulative_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/cumulativemetrics_query.go b/ent/gen/ent/cumulativemetrics_query.go index fd491ec..6d3dc94 100644 --- a/ent/gen/ent/cumulativemetrics_query.go +++ b/ent/gen/ent/cumulativemetrics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // CumulativeMetricsQuery is the builder for querying CumulativeMetrics entities. type CumulativeMetricsQuery struct { config - ctx *QueryContext - order []cumulativemetrics.OrderOption - inters []Interceptor - predicates []predicate.CumulativeMetrics - withMetrics *MetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*CumulativeMetrics) error - withNamedMetrics map[string]*MetricsQuery + ctx *QueryContext + order []cumulativemetrics.OrderOption + inters []Interceptor + predicates []predicate.CumulativeMetrics + withMetrics *MetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*CumulativeMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (cmq *CumulativeMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(cumulativemetrics.Table, cumulativemetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, cumulativemetrics.MetricsTable, cumulativemetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, cumulativemetrics.MetricsTable, cumulativemetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(cmq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (cmq *CumulativeMetricsQuery) prepareQuery(ctx context.Context) error { func (cmq *CumulativeMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*CumulativeMetrics, error) { var ( nodes = []*CumulativeMetrics{} + withFKs = cmq.withFKs _spec = cmq.querySpec() loadedTypes = [1]bool{ cmq.withMetrics != nil, } ) + if cmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, cumulativemetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*CumulativeMetrics).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (cmq *CumulativeMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHoo return nodes, nil } if query := cmq.withMetrics; query != nil { - if err := cmq.loadMetrics(ctx, query, nodes, - func(n *CumulativeMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *CumulativeMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { - return nil, err - } - } - for name, query := range cmq.withNamedMetrics { - if err := cmq.loadMetrics(ctx, query, nodes, - func(n *CumulativeMetrics) { n.appendNamedMetrics(name) }, - func(n *CumulativeMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { + if err := cmq.loadMetrics(ctx, query, nodes, nil, + func(n *CumulativeMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (cmq *CumulativeMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHoo } func (cmq *CumulativeMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*CumulativeMetrics, init func(*CumulativeMetrics), assign func(*CumulativeMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*CumulativeMetrics) - nids := make(map[int]map[*CumulativeMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*CumulativeMetrics) + for i := range nodes { + if nodes[i].metrics_cumulative_metrics == nil { + continue } + fk := *nodes[i].metrics_cumulative_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(cumulativemetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(cumulativemetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(cumulativemetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(cumulativemetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*CumulativeMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_cumulative_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (cmq *CumulativeMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (cmq *CumulativeMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *CumulativeMetricsQuery { - query := (&MetricsClient{config: cmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if cmq.withNamedMetrics == nil { - cmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - cmq.withNamedMetrics[name] = query - return cmq -} - // CumulativeMetricsGroupBy is the group-by builder for CumulativeMetrics entities. type CumulativeMetricsGroupBy struct { selector diff --git a/ent/gen/ent/cumulativemetrics_update.go b/ent/gen/ent/cumulativemetrics_update.go index fb92ad9..2be4acc 100644 --- a/ent/gen/ent/cumulativemetrics_update.go +++ b/ent/gen/ent/cumulativemetrics_update.go @@ -82,19 +82,23 @@ func (cmu *CumulativeMetricsUpdate) ClearNumBuilds() *CumulativeMetricsUpdate { return cmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (cmu *CumulativeMetricsUpdate) AddMetricIDs(ids ...int) *CumulativeMetricsUpdate { - cmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (cmu *CumulativeMetricsUpdate) SetMetricsID(id int) *CumulativeMetricsUpdate { + cmu.mutation.SetMetricsID(id) return cmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (cmu *CumulativeMetricsUpdate) AddMetrics(m ...*Metrics) *CumulativeMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (cmu *CumulativeMetricsUpdate) SetNillableMetricsID(id *int) *CumulativeMetricsUpdate { + if id != nil { + cmu = cmu.SetMetricsID(*id) } - return cmu.AddMetricIDs(ids...) 
+ return cmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (cmu *CumulativeMetricsUpdate) SetMetrics(m *Metrics) *CumulativeMetricsUpdate { + return cmu.SetMetricsID(m.ID) } // Mutation returns the CumulativeMetricsMutation object of the builder. @@ -102,27 +106,12 @@ func (cmu *CumulativeMetricsUpdate) Mutation() *CumulativeMetricsMutation { return cmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (cmu *CumulativeMetricsUpdate) ClearMetrics() *CumulativeMetricsUpdate { cmu.mutation.ClearMetrics() return cmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (cmu *CumulativeMetricsUpdate) RemoveMetricIDs(ids ...int) *CumulativeMetricsUpdate { - cmu.mutation.RemoveMetricIDs(ids...) - return cmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (cmu *CumulativeMetricsUpdate) RemoveMetrics(m ...*Metrics) *CumulativeMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return cmu.RemoveMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (cmu *CumulativeMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, cmu.sqlSave, cmu.mutation, cmu.hooks) @@ -179,39 +168,23 @@ func (cmu *CumulativeMetricsUpdate) sqlSave(ctx context.Context) (n int, err err } if cmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := cmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !cmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, + Columns: []string{cumulativemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := cmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, + Columns: []string{cumulativemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -296,19 +269,23 @@ func (cmuo *CumulativeMetricsUpdateOne) ClearNumBuilds() *CumulativeMetricsUpdat return cmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (cmuo *CumulativeMetricsUpdateOne) AddMetricIDs(ids ...int) *CumulativeMetricsUpdateOne { - cmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (cmuo *CumulativeMetricsUpdateOne) SetMetricsID(id int) *CumulativeMetricsUpdateOne { + cmuo.mutation.SetMetricsID(id) return cmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. 
-func (cmuo *CumulativeMetricsUpdateOne) AddMetrics(m ...*Metrics) *CumulativeMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (cmuo *CumulativeMetricsUpdateOne) SetNillableMetricsID(id *int) *CumulativeMetricsUpdateOne { + if id != nil { + cmuo = cmuo.SetMetricsID(*id) } - return cmuo.AddMetricIDs(ids...) + return cmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (cmuo *CumulativeMetricsUpdateOne) SetMetrics(m *Metrics) *CumulativeMetricsUpdateOne { + return cmuo.SetMetricsID(m.ID) } // Mutation returns the CumulativeMetricsMutation object of the builder. @@ -316,27 +293,12 @@ func (cmuo *CumulativeMetricsUpdateOne) Mutation() *CumulativeMetricsMutation { return cmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (cmuo *CumulativeMetricsUpdateOne) ClearMetrics() *CumulativeMetricsUpdateOne { cmuo.mutation.ClearMetrics() return cmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (cmuo *CumulativeMetricsUpdateOne) RemoveMetricIDs(ids ...int) *CumulativeMetricsUpdateOne { - cmuo.mutation.RemoveMetricIDs(ids...) - return cmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (cmuo *CumulativeMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *CumulativeMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return cmuo.RemoveMetricIDs(ids...) -} - // Where appends a list predicates to the CumulativeMetricsUpdate builder. func (cmuo *CumulativeMetricsUpdateOne) Where(ps ...predicate.CumulativeMetrics) *CumulativeMetricsUpdateOne { cmuo.mutation.Where(ps...) 
@@ -423,39 +385,23 @@ func (cmuo *CumulativeMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Cum } if cmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := cmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !cmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, + Columns: []string{cumulativemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := cmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: cumulativemetrics.MetricsTable, - Columns: cumulativemetrics.MetricsPrimaryKey, + Columns: []string{cumulativemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/dynamicexecutionmetrics.go b/ent/gen/ent/dynamicexecutionmetrics.go index f6d5af9..28e2101 100644 --- a/ent/gen/ent/dynamicexecutionmetrics.go +++ b/ent/gen/ent/dynamicexecutionmetrics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/dynamicexecutionmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) // DynamicExecutionMetrics is the model entity for the DynamicExecutionMetrics schema. @@ -18,14 +19,15 @@ type DynamicExecutionMetrics struct { ID int `json:"id,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the DynamicExecutionMetricsQuery when eager-loading is set. - Edges DynamicExecutionMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges DynamicExecutionMetricsEdges `json:"edges"` + metrics_dynamic_execution_metrics *int + selectValues sql.SelectValues } // DynamicExecutionMetricsEdges holds the relations/edges for other nodes in the graph. type DynamicExecutionMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // RaceStatistics holds the value of the race_statistics edge. RaceStatistics []*RaceStatistics `json:"race_statistics,omitempty"` // loadedTypes holds the information for reporting if a @@ -34,15 +36,16 @@ type DynamicExecutionMetricsEdges struct { // totalCount holds the count of the edges above. totalCount [2]map[string]int - namedMetrics map[string][]*Metrics namedRaceStatistics map[string][]*RaceStatistics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e DynamicExecutionMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e DynamicExecutionMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -63,6 +66,8 @@ func (*DynamicExecutionMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case dynamicexecutionmetrics.FieldID: values[i] = new(sql.NullInt64) + case dynamicexecutionmetrics.ForeignKeys[0]: // metrics_dynamic_execution_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -84,6 +89,13 @@ func (dem *DynamicExecutionMetrics) assignValues(columns []string, values []any) return fmt.Errorf("unexpected type %T for field id", value) } dem.ID = int(value.Int64) + case dynamicexecutionmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_dynamic_execution_metrics", value) + } else if value.Valid { + dem.metrics_dynamic_execution_metrics = new(int) + *dem.metrics_dynamic_execution_metrics = int(value.Int64) + } default: dem.selectValues.Set(columns[i], values[i]) } @@ -134,30 +146,6 @@ func (dem *DynamicExecutionMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (dem *DynamicExecutionMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if dem.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := dem.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (dem *DynamicExecutionMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if dem.Edges.namedMetrics == nil { - dem.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - dem.Edges.namedMetrics[name] = []*Metrics{} - } else { - dem.Edges.namedMetrics[name] = append(dem.Edges.namedMetrics[name], edges...) - } -} - // NamedRaceStatistics returns the RaceStatistics named value or an error if the edge was not // loaded in eager-loading with this name. func (dem *DynamicExecutionMetrics) NamedRaceStatistics(name string) ([]*RaceStatistics, error) { diff --git a/ent/gen/ent/dynamicexecutionmetrics/dynamicexecutionmetrics.go b/ent/gen/ent/dynamicexecutionmetrics/dynamicexecutionmetrics.go index b6504d3..bd2fb25 100644 --- a/ent/gen/ent/dynamicexecutionmetrics/dynamicexecutionmetrics.go +++ b/ent/gen/ent/dynamicexecutionmetrics/dynamicexecutionmetrics.go @@ -18,16 +18,20 @@ const ( EdgeRaceStatistics = "race_statistics" // Table holds the table name of the dynamicexecutionmetrics in the database. Table = "dynamic_execution_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_dynamic_execution_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "dynamic_execution_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" - // RaceStatisticsTable is the table that holds the race_statistics relation/edge. The primary key declared below. - RaceStatisticsTable = "dynamic_execution_metrics_race_statistics" + // MetricsColumn is the table column denoting the metrics relation/edge. 
+ MetricsColumn = "metrics_dynamic_execution_metrics" + // RaceStatisticsTable is the table that holds the race_statistics relation/edge. + RaceStatisticsTable = "race_statistics" // RaceStatisticsInverseTable is the table name for the RaceStatistics entity. // It exists in this package in order to avoid circular dependency with the "racestatistics" package. RaceStatisticsInverseTable = "race_statistics" + // RaceStatisticsColumn is the table column denoting the race_statistics relation/edge. + RaceStatisticsColumn = "dynamic_execution_metrics_race_statistics" ) // Columns holds all SQL columns for dynamicexecutionmetrics fields. @@ -35,14 +39,11 @@ var Columns = []string{ FieldID, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "dynamic_execution_metrics_id"} - // RaceStatisticsPrimaryKey and RaceStatisticsColumn2 are the table columns denoting the - // primary key for the race_statistics relation (M2M). - RaceStatisticsPrimaryKey = []string{"dynamic_execution_metrics_id", "race_statistics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "dynamic_execution_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_dynamic_execution_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -51,6 +52,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -62,17 +68,10 @@ func ByID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldID, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } @@ -93,13 +92,13 @@ func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newRaceStatisticsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(RaceStatisticsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, RaceStatisticsTable, RaceStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, RaceStatisticsTable, RaceStatisticsColumn), ) } diff --git a/ent/gen/ent/dynamicexecutionmetrics/where.go b/ent/gen/ent/dynamicexecutionmetrics/where.go index 0b34f5b..657b9f1 100644 --- a/ent/gen/ent/dynamicexecutionmetrics/where.go +++ b/ent/gen/ent/dynamicexecutionmetrics/where.go @@ -58,7 +58,7 @@ func HasMetrics() predicate.DynamicExecutionMetrics { return predicate.DynamicExecutionMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -81,7 +81,7 @@ func HasRaceStatistics() predicate.DynamicExecutionMetrics { return predicate.DynamicExecutionMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, RaceStatisticsTable, RaceStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, RaceStatisticsTable, RaceStatisticsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/dynamicexecutionmetrics_create.go b/ent/gen/ent/dynamicexecutionmetrics_create.go index d55f4a4..61f6665 100644 --- a/ent/gen/ent/dynamicexecutionmetrics_create.go +++ b/ent/gen/ent/dynamicexecutionmetrics_create.go @@ -20,19 +20,23 @@ type DynamicExecutionMetricsCreate struct { hooks []Hook } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (demc *DynamicExecutionMetricsCreate) AddMetricIDs(ids ...int) *DynamicExecutionMetricsCreate { - demc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (demc *DynamicExecutionMetricsCreate) SetMetricsID(id int) *DynamicExecutionMetricsCreate { + demc.mutation.SetMetricsID(id) return demc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (demc *DynamicExecutionMetricsCreate) AddMetrics(m ...*Metrics) *DynamicExecutionMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (demc *DynamicExecutionMetricsCreate) SetNillableMetricsID(id *int) *DynamicExecutionMetricsCreate { + if id != nil { + demc = demc.SetMetricsID(*id) } - return demc.AddMetricIDs(ids...) + return demc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (demc *DynamicExecutionMetricsCreate) SetMetrics(m *Metrics) *DynamicExecutionMetricsCreate { + return demc.SetMetricsID(m.ID) } // AddRaceStatisticIDs adds the "race_statistics" edge to the RaceStatistics entity by IDs. 
@@ -112,10 +116,10 @@ func (demc *DynamicExecutionMetricsCreate) createSpec() (*DynamicExecutionMetric ) if nodes := demc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -124,14 +128,15 @@ func (demc *DynamicExecutionMetricsCreate) createSpec() (*DynamicExecutionMetric for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_dynamic_execution_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := demc.mutation.RaceStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/dynamicexecutionmetrics_query.go b/ent/gen/ent/dynamicexecutionmetrics_query.go index b62ed06..04baa57 100644 --- a/ent/gen/ent/dynamicexecutionmetrics_query.go +++ b/ent/gen/ent/dynamicexecutionmetrics_query.go @@ -26,9 +26,9 @@ type DynamicExecutionMetricsQuery struct { predicates []predicate.DynamicExecutionMetrics withMetrics *MetricsQuery withRaceStatistics *RaceStatisticsQuery + withFKs bool modifiers []func(*sql.Selector) loadTotal []func(context.Context, []*DynamicExecutionMetrics) error - withNamedMetrics map[string]*MetricsQuery withNamedRaceStatistics map[string]*RaceStatisticsQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector @@ -80,7 +80,7 @@ func (demq *DynamicExecutionMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, dynamicexecutionmetrics.MetricsTable, dynamicexecutionmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, dynamicexecutionmetrics.MetricsTable, dynamicexecutionmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(demq.driver.Dialect(), step) return fromU, nil @@ -102,7 +102,7 @@ func (demq *DynamicExecutionMetricsQuery) QueryRaceStatistics() *RaceStatisticsQ step := sqlgraph.NewStep( sqlgraph.From(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID, selector), sqlgraph.To(racestatistics.Table, racestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, dynamicexecutionmetrics.RaceStatisticsTable, dynamicexecutionmetrics.RaceStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, dynamicexecutionmetrics.RaceStatisticsTable, dynamicexecutionmetrics.RaceStatisticsColumn), ) fromU = sqlgraph.SetNeighbors(demq.driver.Dialect(), step) return fromU, nil @@ -387,12 +387,19 @@ func (demq *DynamicExecutionMetricsQuery) prepareQuery(ctx context.Context) erro func (demq *DynamicExecutionMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*DynamicExecutionMetrics, error) { var ( nodes = []*DynamicExecutionMetrics{} + withFKs = demq.withFKs _spec = demq.querySpec() loadedTypes = [2]bool{ demq.withMetrics != nil, demq.withRaceStatistics != nil, } ) + if demq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, dynamicexecutionmetrics.ForeignKeys...) + } _spec.ScanValues = func(columns []string) ([]any, error) { return (*DynamicExecutionMetrics).scanValues(nil, columns) } @@ -415,9 +422,8 @@ func (demq *DynamicExecutionMetricsQuery) sqlAll(ctx context.Context, hooks ...q return nodes, nil } if query := demq.withMetrics; query != nil { - if err := demq.loadMetrics(ctx, query, nodes, - func(n *DynamicExecutionMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *DynamicExecutionMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := demq.loadMetrics(ctx, query, nodes, nil, + func(n *DynamicExecutionMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -430,13 +436,6 @@ func (demq *DynamicExecutionMetricsQuery) sqlAll(ctx context.Context, hooks ...q return nil, err } } - for name, query := range demq.withNamedMetrics { - if err := demq.loadMetrics(ctx, query, nodes, - func(n *DynamicExecutionMetrics) { n.appendNamedMetrics(name) }, - func(n *DynamicExecutionMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } for name, query := range demq.withNamedRaceStatistics { if err := demq.loadRaceStatistics(ctx, query, nodes, func(n *DynamicExecutionMetrics) { n.appendNamedRaceStatistics(name) }, @@ -453,124 +452,65 @@ func (demq *DynamicExecutionMetricsQuery) sqlAll(ctx context.Context, hooks ...q } func (demq *DynamicExecutionMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*DynamicExecutionMetrics, init func(*DynamicExecutionMetrics), assign func(*DynamicExecutionMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*DynamicExecutionMetrics) - nids := make(map[int]map[*DynamicExecutionMetrics]struct{}) - for i, node := range nodes { 
- edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*DynamicExecutionMetrics) + for i := range nodes { + if nodes[i].metrics_dynamic_execution_metrics == nil { + continue + } + fk := *nodes[i].metrics_dynamic_execution_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(dynamicexecutionmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(dynamicexecutionmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(dynamicexecutionmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(dynamicexecutionmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*DynamicExecutionMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_dynamic_execution_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (demq *DynamicExecutionMetricsQuery) loadRaceStatistics(ctx context.Context, query *RaceStatisticsQuery, nodes []*DynamicExecutionMetrics, init func(*DynamicExecutionMetrics), assign func(*DynamicExecutionMetrics, *RaceStatistics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*DynamicExecutionMetrics) - nids := make(map[int]map[*DynamicExecutionMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*DynamicExecutionMetrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(dynamicexecutionmetrics.RaceStatisticsTable) - s.Join(joinT).On(s.C(racestatistics.FieldID), joinT.C(dynamicexecutionmetrics.RaceStatisticsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(dynamicexecutionmetrics.RaceStatisticsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(dynamicexecutionmetrics.RaceStatisticsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*DynamicExecutionMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*RaceStatistics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.RaceStatistics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(dynamicexecutionmetrics.RaceStatisticsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "race_statistics" node returned %v`, n.ID) + fk := n.dynamic_execution_metrics_race_statistics + if fk == nil { + return fmt.Errorf(`foreign-key "dynamic_execution_metrics_race_statistics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "dynamic_execution_metrics_race_statistics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -659,20 +599,6 @@ func (demq *DynamicExecutionMetricsQuery) sqlQuery(ctx context.Context) *sql.Sel return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (demq *DynamicExecutionMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *DynamicExecutionMetricsQuery { - query := (&MetricsClient{config: demq.config}).Query() - for _, opt := range opts { - opt(query) - } - if demq.withNamedMetrics == nil { - demq.withNamedMetrics = make(map[string]*MetricsQuery) - } - demq.withNamedMetrics[name] = query - return demq -} - // WithNamedRaceStatistics tells the query-builder to eager-load the nodes that are connected to the "race_statistics" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (demq *DynamicExecutionMetricsQuery) WithNamedRaceStatistics(name string, opts ...func(*RaceStatisticsQuery)) *DynamicExecutionMetricsQuery { diff --git a/ent/gen/ent/dynamicexecutionmetrics_update.go b/ent/gen/ent/dynamicexecutionmetrics_update.go index 37034dd..a48544a 100644 --- a/ent/gen/ent/dynamicexecutionmetrics_update.go +++ b/ent/gen/ent/dynamicexecutionmetrics_update.go @@ -29,19 +29,23 @@ func (demu *DynamicExecutionMetricsUpdate) Where(ps ...predicate.DynamicExecutio return demu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (demu *DynamicExecutionMetricsUpdate) AddMetricIDs(ids ...int) *DynamicExecutionMetricsUpdate { - demu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. 
+func (demu *DynamicExecutionMetricsUpdate) SetMetricsID(id int) *DynamicExecutionMetricsUpdate { + demu.mutation.SetMetricsID(id) return demu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (demu *DynamicExecutionMetricsUpdate) AddMetrics(m ...*Metrics) *DynamicExecutionMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (demu *DynamicExecutionMetricsUpdate) SetNillableMetricsID(id *int) *DynamicExecutionMetricsUpdate { + if id != nil { + demu = demu.SetMetricsID(*id) } - return demu.AddMetricIDs(ids...) + return demu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (demu *DynamicExecutionMetricsUpdate) SetMetrics(m *Metrics) *DynamicExecutionMetricsUpdate { + return demu.SetMetricsID(m.ID) } // AddRaceStatisticIDs adds the "race_statistics" edge to the RaceStatistics entity by IDs. @@ -64,27 +68,12 @@ func (demu *DynamicExecutionMetricsUpdate) Mutation() *DynamicExecutionMetricsMu return demu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (demu *DynamicExecutionMetricsUpdate) ClearMetrics() *DynamicExecutionMetricsUpdate { demu.mutation.ClearMetrics() return demu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (demu *DynamicExecutionMetricsUpdate) RemoveMetricIDs(ids ...int) *DynamicExecutionMetricsUpdate { - demu.mutation.RemoveMetricIDs(ids...) - return demu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (demu *DynamicExecutionMetricsUpdate) RemoveMetrics(m ...*Metrics) *DynamicExecutionMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return demu.RemoveMetricIDs(ids...) -} - // ClearRaceStatistics clears all "race_statistics" edges to the RaceStatistics entity. 
func (demu *DynamicExecutionMetricsUpdate) ClearRaceStatistics() *DynamicExecutionMetricsUpdate { demu.mutation.ClearRaceStatistics() @@ -144,39 +133,23 @@ func (demu *DynamicExecutionMetricsUpdate) sqlSave(ctx context.Context) (n int, } if demu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := demu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !demu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := demu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -189,10 +162,10 @@ func (demu *DynamicExecutionMetricsUpdate) sqlSave(ctx context.Context) (n int, } if demu.mutation.RaceStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), @@ -202,10 +175,10 @@ func (demu *DynamicExecutionMetricsUpdate) sqlSave(ctx context.Context) (n int, } if nodes := demu.mutation.RemovedRaceStatisticsIDs(); len(nodes) > 0 && !demu.mutation.RaceStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), @@ -218,10 +191,10 @@ func (demu *DynamicExecutionMetricsUpdate) sqlSave(ctx context.Context) (n int, } if nodes := demu.mutation.RaceStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), @@ -252,19 +225,23 @@ type DynamicExecutionMetricsUpdateOne struct { mutation *DynamicExecutionMetricsMutation } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. 
-func (demuo *DynamicExecutionMetricsUpdateOne) AddMetricIDs(ids ...int) *DynamicExecutionMetricsUpdateOne { - demuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (demuo *DynamicExecutionMetricsUpdateOne) SetMetricsID(id int) *DynamicExecutionMetricsUpdateOne { + demuo.mutation.SetMetricsID(id) return demuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (demuo *DynamicExecutionMetricsUpdateOne) AddMetrics(m ...*Metrics) *DynamicExecutionMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (demuo *DynamicExecutionMetricsUpdateOne) SetNillableMetricsID(id *int) *DynamicExecutionMetricsUpdateOne { + if id != nil { + demuo = demuo.SetMetricsID(*id) } - return demuo.AddMetricIDs(ids...) + return demuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (demuo *DynamicExecutionMetricsUpdateOne) SetMetrics(m *Metrics) *DynamicExecutionMetricsUpdateOne { + return demuo.SetMetricsID(m.ID) } // AddRaceStatisticIDs adds the "race_statistics" edge to the RaceStatistics entity by IDs. @@ -287,27 +264,12 @@ func (demuo *DynamicExecutionMetricsUpdateOne) Mutation() *DynamicExecutionMetri return demuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (demuo *DynamicExecutionMetricsUpdateOne) ClearMetrics() *DynamicExecutionMetricsUpdateOne { demuo.mutation.ClearMetrics() return demuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (demuo *DynamicExecutionMetricsUpdateOne) RemoveMetricIDs(ids ...int) *DynamicExecutionMetricsUpdateOne { - demuo.mutation.RemoveMetricIDs(ids...) - return demuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (demuo *DynamicExecutionMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *DynamicExecutionMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return demuo.RemoveMetricIDs(ids...) -} - // ClearRaceStatistics clears all "race_statistics" edges to the RaceStatistics entity. 
func (demuo *DynamicExecutionMetricsUpdateOne) ClearRaceStatistics() *DynamicExecutionMetricsUpdateOne { demuo.mutation.ClearRaceStatistics() @@ -397,39 +359,23 @@ func (demuo *DynamicExecutionMetricsUpdateOne) sqlSave(ctx context.Context) (_no } if demuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := demuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !demuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := demuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: dynamicexecutionmetrics.MetricsTable, - Columns: dynamicexecutionmetrics.MetricsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -442,10 +388,10 @@ func (demuo *DynamicExecutionMetricsUpdateOne) sqlSave(ctx context.Context) (_no } if demuo.mutation.RaceStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), @@ -455,10 +401,10 @@ func (demuo *DynamicExecutionMetricsUpdateOne) sqlSave(ctx context.Context) (_no } if nodes := demuo.mutation.RemovedRaceStatisticsIDs(); len(nodes) > 0 && !demuo.mutation.RaceStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), @@ -471,10 +417,10 @@ func (demuo *DynamicExecutionMetricsUpdateOne) sqlSave(ctx context.Context) (_no } if nodes := demuo.mutation.RaceStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: dynamicexecutionmetrics.RaceStatisticsTable, - Columns: dynamicexecutionmetrics.RaceStatisticsPrimaryKey, + Columns: []string{dynamicexecutionmetrics.RaceStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(racestatistics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/evaluationstat.go b/ent/gen/ent/evaluationstat.go index 1c636c1..7cb8cd2 100644 --- a/ent/gen/ent/evaluationstat.go +++ b/ent/gen/ent/evaluationstat.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" 
+ "github.com/buildbarn/bb-portal/ent/gen/ent/buildgraphmetrics" "github.com/buildbarn/bb-portal/ent/gen/ent/evaluationstat" ) @@ -22,32 +23,29 @@ type EvaluationStat struct { Count int64 `json:"count,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the EvaluationStatQuery when eager-loading is set. - Edges EvaluationStatEdges `json:"edges"` - build_graph_metrics_dirtied_values *int - build_graph_metrics_changed_values *int - build_graph_metrics_built_values *int - build_graph_metrics_cleaned_values *int - selectValues sql.SelectValues + Edges EvaluationStatEdges `json:"edges"` + build_graph_metrics_evaluated_values *int + selectValues sql.SelectValues } // EvaluationStatEdges holds the relations/edges for other nodes in the graph. type EvaluationStatEdges struct { // BuildGraphMetrics holds the value of the build_graph_metrics edge. - BuildGraphMetrics []*BuildGraphMetrics `json:"build_graph_metrics,omitempty"` + BuildGraphMetrics *BuildGraphMetrics `json:"build_graph_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedBuildGraphMetrics map[string][]*BuildGraphMetrics } // BuildGraphMetricsOrErr returns the BuildGraphMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e EvaluationStatEdges) BuildGraphMetricsOrErr() ([]*BuildGraphMetrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e EvaluationStatEdges) BuildGraphMetricsOrErr() (*BuildGraphMetrics, error) { + if e.BuildGraphMetrics != nil { return e.BuildGraphMetrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: buildgraphmetrics.Label} } return nil, &NotLoadedError{edge: "build_graph_metrics"} } @@ -61,13 +59,7 @@ func (*EvaluationStat) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case evaluationstat.FieldSkyfunctionName: values[i] = new(sql.NullString) - case evaluationstat.ForeignKeys[0]: // build_graph_metrics_dirtied_values - values[i] = new(sql.NullInt64) - case evaluationstat.ForeignKeys[1]: // build_graph_metrics_changed_values - values[i] = new(sql.NullInt64) - case evaluationstat.ForeignKeys[2]: // build_graph_metrics_built_values - values[i] = new(sql.NullInt64) - case evaluationstat.ForeignKeys[3]: // build_graph_metrics_cleaned_values + case evaluationstat.ForeignKeys[0]: // build_graph_metrics_evaluated_values values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -104,31 +96,10 @@ func (es *EvaluationStat) assignValues(columns []string, values []any) error { } case evaluationstat.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_dirtied_values", value) - } else if value.Valid { - es.build_graph_metrics_dirtied_values = new(int) - *es.build_graph_metrics_dirtied_values = int(value.Int64) - } - case evaluationstat.ForeignKeys[1]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_changed_values", value) - } else if value.Valid { - es.build_graph_metrics_changed_values = new(int) - *es.build_graph_metrics_changed_values = int(value.Int64) - } - case evaluationstat.ForeignKeys[2]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return 
fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_built_values", value) - } else if value.Valid { - es.build_graph_metrics_built_values = new(int) - *es.build_graph_metrics_built_values = int(value.Int64) - } - case evaluationstat.ForeignKeys[3]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_cleaned_values", value) + return fmt.Errorf("unexpected type %T for edge-field build_graph_metrics_evaluated_values", value) } else if value.Valid { - es.build_graph_metrics_cleaned_values = new(int) - *es.build_graph_metrics_cleaned_values = int(value.Int64) + es.build_graph_metrics_evaluated_values = new(int) + *es.build_graph_metrics_evaluated_values = int(value.Int64) } default: es.selectValues.Set(columns[i], values[i]) @@ -180,29 +151,5 @@ func (es *EvaluationStat) String() string { return builder.String() } -// NamedBuildGraphMetrics returns the BuildGraphMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (es *EvaluationStat) NamedBuildGraphMetrics(name string) ([]*BuildGraphMetrics, error) { - if es.Edges.namedBuildGraphMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := es.Edges.namedBuildGraphMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (es *EvaluationStat) appendNamedBuildGraphMetrics(name string, edges ...*BuildGraphMetrics) { - if es.Edges.namedBuildGraphMetrics == nil { - es.Edges.namedBuildGraphMetrics = make(map[string][]*BuildGraphMetrics) - } - if len(edges) == 0 { - es.Edges.namedBuildGraphMetrics[name] = []*BuildGraphMetrics{} - } else { - es.Edges.namedBuildGraphMetrics[name] = append(es.Edges.namedBuildGraphMetrics[name], edges...) - } -} - // EvaluationStats is a parsable slice of EvaluationStat. type EvaluationStats []*EvaluationStat diff --git a/ent/gen/ent/evaluationstat/evaluationstat.go b/ent/gen/ent/evaluationstat/evaluationstat.go index 1b6fce8..ef4091d 100644 --- a/ent/gen/ent/evaluationstat/evaluationstat.go +++ b/ent/gen/ent/evaluationstat/evaluationstat.go @@ -20,11 +20,13 @@ const ( EdgeBuildGraphMetrics = "build_graph_metrics" // Table holds the table name of the evaluationstat in the database. Table = "evaluation_stats" - // BuildGraphMetricsTable is the table that holds the build_graph_metrics relation/edge. The primary key declared below. - BuildGraphMetricsTable = "build_graph_metrics_evaluated_values" + // BuildGraphMetricsTable is the table that holds the build_graph_metrics relation/edge. + BuildGraphMetricsTable = "evaluation_stats" // BuildGraphMetricsInverseTable is the table name for the BuildGraphMetrics entity. // It exists in this package in order to avoid circular dependency with the "buildgraphmetrics" package. BuildGraphMetricsInverseTable = "build_graph_metrics" + // BuildGraphMetricsColumn is the table column denoting the build_graph_metrics relation/edge. + BuildGraphMetricsColumn = "build_graph_metrics_evaluated_values" ) // Columns holds all SQL columns for evaluationstat fields. @@ -37,18 +39,9 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "evaluation_stats" // table and are not defined as standalone fields in the schema. 
var ForeignKeys = []string{ - "build_graph_metrics_dirtied_values", - "build_graph_metrics_changed_values", - "build_graph_metrics_built_values", - "build_graph_metrics_cleaned_values", + "build_graph_metrics_evaluated_values", } -var ( - // BuildGraphMetricsPrimaryKey and BuildGraphMetricsColumn2 are the table columns denoting the - // primary key for the build_graph_metrics relation (M2M). - BuildGraphMetricsPrimaryKey = []string{"build_graph_metrics_id", "evaluation_stat_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -82,23 +75,16 @@ func ByCount(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldCount, opts...).ToFunc() } -// ByBuildGraphMetricsCount orders the results by build_graph_metrics count. -func ByBuildGraphMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newBuildGraphMetricsStep(), opts...) - } -} - -// ByBuildGraphMetrics orders the results by build_graph_metrics terms. -func ByBuildGraphMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByBuildGraphMetricsField orders the results by build_graph_metrics field. +func ByBuildGraphMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newBuildGraphMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newBuildGraphMetricsStep(), sql.OrderByField(field, opts...)) } } func newBuildGraphMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(BuildGraphMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BuildGraphMetricsTable, BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, BuildGraphMetricsTable, BuildGraphMetricsColumn), ) } diff --git a/ent/gen/ent/evaluationstat/where.go b/ent/gen/ent/evaluationstat/where.go index 8f2e0be..2e59bc3 100644 --- a/ent/gen/ent/evaluationstat/where.go +++ b/ent/gen/ent/evaluationstat/where.go @@ -193,7 +193,7 @@ func HasBuildGraphMetrics() predicate.EvaluationStat { return predicate.EvaluationStat(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BuildGraphMetricsTable, BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, BuildGraphMetricsTable, BuildGraphMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/evaluationstat_create.go b/ent/gen/ent/evaluationstat_create.go index 35bbb12..0a499e7 100644 --- a/ent/gen/ent/evaluationstat_create.go +++ b/ent/gen/ent/evaluationstat_create.go @@ -47,19 +47,23 @@ func (esc *EvaluationStatCreate) SetNillableCount(i *int64) *EvaluationStatCreat return esc } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (esc *EvaluationStatCreate) AddBuildGraphMetricIDs(ids ...int) *EvaluationStatCreate { - esc.mutation.AddBuildGraphMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (esc *EvaluationStatCreate) SetBuildGraphMetricsID(id int) *EvaluationStatCreate { + esc.mutation.SetBuildGraphMetricsID(id) return esc } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. 
-func (esc *EvaluationStatCreate) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *EvaluationStatCreate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (esc *EvaluationStatCreate) SetNillableBuildGraphMetricsID(id *int) *EvaluationStatCreate { + if id != nil { + esc = esc.SetBuildGraphMetricsID(*id) } - return esc.AddBuildGraphMetricIDs(ids...) + return esc +} + +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. +func (esc *EvaluationStatCreate) SetBuildGraphMetrics(b *BuildGraphMetrics) *EvaluationStatCreate { + return esc.SetBuildGraphMetricsID(b.ID) } // Mutation returns the EvaluationStatMutation object of the builder. @@ -132,10 +136,10 @@ func (esc *EvaluationStatCreate) createSpec() (*EvaluationStat, *sqlgraph.Create } if nodes := esc.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, + Columns: []string{evaluationstat.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (esc *EvaluationStatCreate) createSpec() (*EvaluationStat, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.build_graph_metrics_evaluated_values = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/evaluationstat_query.go b/ent/gen/ent/evaluationstat_query.go index 2fbfa45..260acbc 100644 --- a/ent/gen/ent/evaluationstat_query.go +++ b/ent/gen/ent/evaluationstat_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,15 +18,14 @@ import ( // EvaluationStatQuery is the builder for querying EvaluationStat entities. type EvaluationStatQuery struct { config - ctx *QueryContext - order []evaluationstat.OrderOption - inters []Interceptor - predicates []predicate.EvaluationStat - withBuildGraphMetrics *BuildGraphMetricsQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*EvaluationStat) error - withNamedBuildGraphMetrics map[string]*BuildGraphMetricsQuery + ctx *QueryContext + order []evaluationstat.OrderOption + inters []Interceptor + predicates []predicate.EvaluationStat + withBuildGraphMetrics *BuildGraphMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*EvaluationStat) error // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -78,7 +76,7 @@ func (esq *EvaluationStatQuery) QueryBuildGraphMetrics() *BuildGraphMetricsQuery step := sqlgraph.NewStep( sqlgraph.From(evaluationstat.Table, evaluationstat.FieldID, selector), sqlgraph.To(buildgraphmetrics.Table, buildgraphmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, evaluationstat.BuildGraphMetricsTable, evaluationstat.BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, evaluationstat.BuildGraphMetricsTable, evaluationstat.BuildGraphMetricsColumn), ) fromU = sqlgraph.SetNeighbors(esq.driver.Dialect(), step) return fromU, nil @@ -379,6 +377,9 @@ func (esq *EvaluationStatQuery) sqlAll(ctx context.Context, hooks ...queryHook) esq.withBuildGraphMetrics != nil, } ) + if esq.withBuildGraphMetrics != nil { + withFKs = true + } if withFKs { _spec.Node.Columns = append(_spec.Node.Columns, evaluationstat.ForeignKeys...) } @@ -404,18 +405,8 @@ func (esq *EvaluationStatQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := esq.withBuildGraphMetrics; query != nil { - if err := esq.loadBuildGraphMetrics(ctx, query, nodes, - func(n *EvaluationStat) { n.Edges.BuildGraphMetrics = []*BuildGraphMetrics{} }, - func(n *EvaluationStat, e *BuildGraphMetrics) { - n.Edges.BuildGraphMetrics = append(n.Edges.BuildGraphMetrics, e) - }); err != nil { - return nil, err - } - } - for name, query := range esq.withNamedBuildGraphMetrics { - if err := esq.loadBuildGraphMetrics(ctx, query, nodes, - func(n *EvaluationStat) { n.appendNamedBuildGraphMetrics(name) }, - func(n *EvaluationStat, e *BuildGraphMetrics) { n.appendNamedBuildGraphMetrics(name, e) }); err != nil { + if err := esq.loadBuildGraphMetrics(ctx, query, nodes, nil, + func(n *EvaluationStat, e *BuildGraphMetrics) { n.Edges.BuildGraphMetrics = e }); err != nil { return nil, err } } @@ -428,62 +419,33 @@ func (esq *EvaluationStatQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (esq *EvaluationStatQuery) loadBuildGraphMetrics(ctx context.Context, query *BuildGraphMetricsQuery, nodes []*EvaluationStat, init func(*EvaluationStat), assign func(*EvaluationStat, *BuildGraphMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*EvaluationStat) - nids := make(map[int]map[*EvaluationStat]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*EvaluationStat) + for i := range nodes { + if nodes[i].build_graph_metrics_evaluated_values == nil { + continue + } + fk := *nodes[i].build_graph_metrics_evaluated_values + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(evaluationstat.BuildGraphMetricsTable) - s.Join(joinT).On(s.C(buildgraphmetrics.FieldID), joinT.C(evaluationstat.BuildGraphMetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(evaluationstat.BuildGraphMetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(evaluationstat.BuildGraphMetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*EvaluationStat]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*BuildGraphMetrics](ctx, query, qr, query.inters) + query.Where(buildgraphmetrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "build_graph_metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "build_graph_metrics_evaluated_values" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -573,20 +535,6 @@ func (esq *EvaluationStatQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedBuildGraphMetrics tells the query-builder to eager-load the nodes that are connected to the "build_graph_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (esq *EvaluationStatQuery) WithNamedBuildGraphMetrics(name string, opts ...func(*BuildGraphMetricsQuery)) *EvaluationStatQuery { - query := (&BuildGraphMetricsClient{config: esq.config}).Query() - for _, opt := range opts { - opt(query) - } - if esq.withNamedBuildGraphMetrics == nil { - esq.withNamedBuildGraphMetrics = make(map[string]*BuildGraphMetricsQuery) - } - esq.withNamedBuildGraphMetrics[name] = query - return esq -} - // EvaluationStatGroupBy is the group-by builder for EvaluationStat entities. type EvaluationStatGroupBy struct { selector diff --git a/ent/gen/ent/evaluationstat_update.go b/ent/gen/ent/evaluationstat_update.go index 7e68064..0f14d3e 100644 --- a/ent/gen/ent/evaluationstat_update.go +++ b/ent/gen/ent/evaluationstat_update.go @@ -75,19 +75,23 @@ func (esu *EvaluationStatUpdate) ClearCount() *EvaluationStatUpdate { return esu } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (esu *EvaluationStatUpdate) AddBuildGraphMetricIDs(ids ...int) *EvaluationStatUpdate { - esu.mutation.AddBuildGraphMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (esu *EvaluationStatUpdate) SetBuildGraphMetricsID(id int) *EvaluationStatUpdate { + esu.mutation.SetBuildGraphMetricsID(id) return esu } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. 
-func (esu *EvaluationStatUpdate) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *EvaluationStatUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (esu *EvaluationStatUpdate) SetNillableBuildGraphMetricsID(id *int) *EvaluationStatUpdate { + if id != nil { + esu = esu.SetBuildGraphMetricsID(*id) } - return esu.AddBuildGraphMetricIDs(ids...) + return esu +} + +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. +func (esu *EvaluationStatUpdate) SetBuildGraphMetrics(b *BuildGraphMetrics) *EvaluationStatUpdate { + return esu.SetBuildGraphMetricsID(b.ID) } // Mutation returns the EvaluationStatMutation object of the builder. @@ -95,27 +99,12 @@ func (esu *EvaluationStatUpdate) Mutation() *EvaluationStatMutation { return esu.mutation } -// ClearBuildGraphMetrics clears all "build_graph_metrics" edges to the BuildGraphMetrics entity. +// ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. func (esu *EvaluationStatUpdate) ClearBuildGraphMetrics() *EvaluationStatUpdate { esu.mutation.ClearBuildGraphMetrics() return esu } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to BuildGraphMetrics entities by IDs. -func (esu *EvaluationStatUpdate) RemoveBuildGraphMetricIDs(ids ...int) *EvaluationStatUpdate { - esu.mutation.RemoveBuildGraphMetricIDs(ids...) - return esu -} - -// RemoveBuildGraphMetrics removes "build_graph_metrics" edges to BuildGraphMetrics entities. -func (esu *EvaluationStatUpdate) RemoveBuildGraphMetrics(b ...*BuildGraphMetrics) *EvaluationStatUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return esu.RemoveBuildGraphMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (esu *EvaluationStatUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, esu.sqlSave, esu.mutation, esu.hooks) @@ -169,39 +158,23 @@ func (esu *EvaluationStatUpdate) sqlSave(ctx context.Context) (n int, err error) } if esu.mutation.BuildGraphMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := esu.mutation.RemovedBuildGraphMetricsIDs(); len(nodes) > 0 && !esu.mutation.BuildGraphMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, + Columns: []string{evaluationstat.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := esu.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, + Columns: []string{evaluationstat.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), @@ -279,19 +252,23 @@ func (esuo *EvaluationStatUpdateOne) ClearCount() *EvaluationStatUpdateOne { return esuo } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (esuo *EvaluationStatUpdateOne) AddBuildGraphMetricIDs(ids ...int) *EvaluationStatUpdateOne { - esuo.mutation.AddBuildGraphMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (esuo *EvaluationStatUpdateOne) SetBuildGraphMetricsID(id int) *EvaluationStatUpdateOne { + esuo.mutation.SetBuildGraphMetricsID(id) return esuo } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. -func (esuo *EvaluationStatUpdateOne) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *EvaluationStatUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (esuo *EvaluationStatUpdateOne) SetNillableBuildGraphMetricsID(id *int) *EvaluationStatUpdateOne { + if id != nil { + esuo = esuo.SetBuildGraphMetricsID(*id) } - return esuo.AddBuildGraphMetricIDs(ids...) + return esuo +} + +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. +func (esuo *EvaluationStatUpdateOne) SetBuildGraphMetrics(b *BuildGraphMetrics) *EvaluationStatUpdateOne { + return esuo.SetBuildGraphMetricsID(b.ID) } // Mutation returns the EvaluationStatMutation object of the builder. @@ -299,27 +276,12 @@ func (esuo *EvaluationStatUpdateOne) Mutation() *EvaluationStatMutation { return esuo.mutation } -// ClearBuildGraphMetrics clears all "build_graph_metrics" edges to the BuildGraphMetrics entity. 
+// ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. func (esuo *EvaluationStatUpdateOne) ClearBuildGraphMetrics() *EvaluationStatUpdateOne { esuo.mutation.ClearBuildGraphMetrics() return esuo } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to BuildGraphMetrics entities by IDs. -func (esuo *EvaluationStatUpdateOne) RemoveBuildGraphMetricIDs(ids ...int) *EvaluationStatUpdateOne { - esuo.mutation.RemoveBuildGraphMetricIDs(ids...) - return esuo -} - -// RemoveBuildGraphMetrics removes "build_graph_metrics" edges to BuildGraphMetrics entities. -func (esuo *EvaluationStatUpdateOne) RemoveBuildGraphMetrics(b ...*BuildGraphMetrics) *EvaluationStatUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return esuo.RemoveBuildGraphMetricIDs(ids...) -} - // Where appends a list predicates to the EvaluationStatUpdate builder. func (esuo *EvaluationStatUpdateOne) Where(ps ...predicate.EvaluationStat) *EvaluationStatUpdateOne { esuo.mutation.Where(ps...) @@ -403,39 +365,23 @@ func (esuo *EvaluationStatUpdateOne) sqlSave(ctx context.Context) (_node *Evalua } if esuo.mutation.BuildGraphMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := esuo.mutation.RemovedBuildGraphMetricsIDs(); len(nodes) > 0 && !esuo.mutation.BuildGraphMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, + Columns: []string{evaluationstat.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := esuo.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: evaluationstat.BuildGraphMetricsTable, - Columns: evaluationstat.BuildGraphMetricsPrimaryKey, + Columns: []string{evaluationstat.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/exectioninfo.go b/ent/gen/ent/exectioninfo.go index 7b67163..83a56f2 100644 --- a/ent/gen/ent/exectioninfo.go +++ b/ent/gen/ent/exectioninfo.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/exectioninfo" + "github.com/buildbarn/bb-portal/ent/gen/ent/testresultbes" "github.com/buildbarn/bb-portal/ent/gen/ent/timingbreakdown" ) @@ -30,14 +31,14 @@ type ExectionInfo struct { // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the ExectionInfoQuery when eager-loading is set. Edges ExectionInfoEdges `json:"edges"` - exection_info_timing_breakdown *int + test_result_bes_execution_info *int selectValues sql.SelectValues } // ExectionInfoEdges holds the relations/edges for other nodes in the graph. type ExectionInfoEdges struct { // TestResult holds the value of the test_result edge. 
- TestResult []*TestResultBES `json:"test_result,omitempty"` + TestResult *TestResultBES `json:"test_result,omitempty"` // TimingBreakdown holds the value of the timing_breakdown edge. TimingBreakdown *TimingBreakdown `json:"timing_breakdown,omitempty"` // ResourceUsage holds the value of the resource_usage edge. @@ -48,15 +49,16 @@ type ExectionInfoEdges struct { // totalCount holds the count of the edges above. totalCount [3]map[string]int - namedTestResult map[string][]*TestResultBES namedResourceUsage map[string][]*ResourceUsage } // TestResultOrErr returns the TestResult value or an error if the edge -// was not loaded in eager-loading. -func (e ExectionInfoEdges) TestResultOrErr() ([]*TestResultBES, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ExectionInfoEdges) TestResultOrErr() (*TestResultBES, error) { + if e.TestResult != nil { return e.TestResult, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: testresultbes.Label} } return nil, &NotLoadedError{edge: "test_result"} } @@ -92,7 +94,7 @@ func (*ExectionInfo) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case exectioninfo.FieldStrategy, exectioninfo.FieldHostname: values[i] = new(sql.NullString) - case exectioninfo.ForeignKeys[0]: // exection_info_timing_breakdown + case exectioninfo.ForeignKeys[0]: // test_result_bes_execution_info values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -147,10 +149,10 @@ func (ei *ExectionInfo) assignValues(columns []string, values []any) error { } case exectioninfo.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field exection_info_timing_breakdown", value) + return fmt.Errorf("unexpected type %T for edge-field test_result_bes_execution_info", value) } else if value.Valid { - ei.exection_info_timing_breakdown = new(int) - *ei.exection_info_timing_breakdown = int(value.Int64) + ei.test_result_bes_execution_info = new(int) + *ei.test_result_bes_execution_info = int(value.Int64) } default: ei.selectValues.Set(columns[i], values[i]) @@ -221,30 +223,6 @@ func (ei *ExectionInfo) String() string { return builder.String() } -// NamedTestResult returns the TestResult named value or an error if the edge was not -// loaded in eager-loading with this name. -func (ei *ExectionInfo) NamedTestResult(name string) ([]*TestResultBES, error) { - if ei.Edges.namedTestResult == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := ei.Edges.namedTestResult[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (ei *ExectionInfo) appendNamedTestResult(name string, edges ...*TestResultBES) { - if ei.Edges.namedTestResult == nil { - ei.Edges.namedTestResult = make(map[string][]*TestResultBES) - } - if len(edges) == 0 { - ei.Edges.namedTestResult[name] = []*TestResultBES{} - } else { - ei.Edges.namedTestResult[name] = append(ei.Edges.namedTestResult[name], edges...) - } -} - // NamedResourceUsage returns the ResourceUsage named value or an error if the edge was not // loaded in eager-loading with this name. 
func (ei *ExectionInfo) NamedResourceUsage(name string) ([]*ResourceUsage, error) { diff --git a/ent/gen/ent/exectioninfo/exectioninfo.go b/ent/gen/ent/exectioninfo/exectioninfo.go index 134f2dd..b35bcb2 100644 --- a/ent/gen/ent/exectioninfo/exectioninfo.go +++ b/ent/gen/ent/exectioninfo/exectioninfo.go @@ -31,24 +31,26 @@ const ( // Table holds the table name of the exectioninfo in the database. Table = "exection_infos" // TestResultTable is the table that holds the test_result relation/edge. - TestResultTable = "test_result_be_ss" + TestResultTable = "exection_infos" // TestResultInverseTable is the table name for the TestResultBES entity. // It exists in this package in order to avoid circular dependency with the "testresultbes" package. TestResultInverseTable = "test_result_be_ss" // TestResultColumn is the table column denoting the test_result relation/edge. TestResultColumn = "test_result_bes_execution_info" // TimingBreakdownTable is the table that holds the timing_breakdown relation/edge. - TimingBreakdownTable = "exection_infos" + TimingBreakdownTable = "timing_breakdowns" // TimingBreakdownInverseTable is the table name for the TimingBreakdown entity. // It exists in this package in order to avoid circular dependency with the "timingbreakdown" package. TimingBreakdownInverseTable = "timing_breakdowns" // TimingBreakdownColumn is the table column denoting the timing_breakdown relation/edge. TimingBreakdownColumn = "exection_info_timing_breakdown" - // ResourceUsageTable is the table that holds the resource_usage relation/edge. The primary key declared below. - ResourceUsageTable = "exection_info_resource_usage" + // ResourceUsageTable is the table that holds the resource_usage relation/edge. + ResourceUsageTable = "resource_usages" // ResourceUsageInverseTable is the table name for the ResourceUsage entity. // It exists in this package in order to avoid circular dependency with the "resourceusage" package. ResourceUsageInverseTable = "resource_usages" + // ResourceUsageColumn is the table column denoting the resource_usage relation/edge. + ResourceUsageColumn = "exection_info_resource_usage" ) // Columns holds all SQL columns for exectioninfo fields. @@ -64,15 +66,9 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "exection_infos" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "exection_info_timing_breakdown", + "test_result_bes_execution_info", } -var ( - // ResourceUsagePrimaryKey and ResourceUsageColumn2 are the table columns denoting the - // primary key for the resource_usage relation (M2M). - ResourceUsagePrimaryKey = []string{"exection_info_id", "resource_usage_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -121,17 +117,10 @@ func ByHostname(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldHostname, opts...).ToFunc() } -// ByTestResultCount orders the results by test_result count. -func ByTestResultCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTestResultStep(), opts...) - } -} - -// ByTestResult orders the results by test_result terms. -func ByTestResult(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTestResultField orders the results by test_result field. 
+func ByTestResultField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTestResultStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newTestResultStep(), sql.OrderByField(field, opts...)) } } @@ -159,20 +148,20 @@ func newTestResultStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestResultInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TestResultTable, TestResultColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TestResultTable, TestResultColumn), ) } func newTimingBreakdownStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TimingBreakdownInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, TimingBreakdownTable, TimingBreakdownColumn), + sqlgraph.Edge(sqlgraph.O2O, false, TimingBreakdownTable, TimingBreakdownColumn), ) } func newResourceUsageStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ResourceUsageInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ResourceUsageTable, ResourceUsagePrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ResourceUsageTable, ResourceUsageColumn), ) } diff --git a/ent/gen/ent/exectioninfo/where.go b/ent/gen/ent/exectioninfo/where.go index 178295b..63653a0 100644 --- a/ent/gen/ent/exectioninfo/where.go +++ b/ent/gen/ent/exectioninfo/where.go @@ -353,7 +353,7 @@ func HasTestResult() predicate.ExectionInfo { return predicate.ExectionInfo(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TestResultTable, TestResultColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TestResultTable, TestResultColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -376,7 +376,7 @@ func HasTimingBreakdown() predicate.ExectionInfo { return predicate.ExectionInfo(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, TimingBreakdownTable, TimingBreakdownColumn), + sqlgraph.Edge(sqlgraph.O2O, false, TimingBreakdownTable, TimingBreakdownColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -399,7 +399,7 @@ func HasResourceUsage() predicate.ExectionInfo { return predicate.ExectionInfo(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ResourceUsageTable, ResourceUsagePrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ResourceUsageTable, ResourceUsageColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/exectioninfo_create.go b/ent/gen/ent/exectioninfo_create.go index 0cb8b18..a1abc5b 100644 --- a/ent/gen/ent/exectioninfo_create.go +++ b/ent/gen/ent/exectioninfo_create.go @@ -91,19 +91,23 @@ func (eic *ExectionInfoCreate) SetNillableHostname(s *string) *ExectionInfoCreat return eic } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (eic *ExectionInfoCreate) AddTestResultIDs(ids ...int) *ExectionInfoCreate { - eic.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (eic *ExectionInfoCreate) SetTestResultID(id int) *ExectionInfoCreate { + eic.mutation.SetTestResultID(id) return eic } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. 
-func (eic *ExectionInfoCreate) AddTestResult(t ...*TestResultBES) *ExectionInfoCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (eic *ExectionInfoCreate) SetNillableTestResultID(id *int) *ExectionInfoCreate { + if id != nil { + eic = eic.SetTestResultID(*id) } - return eic.AddTestResultIDs(ids...) + return eic +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (eic *ExectionInfoCreate) SetTestResult(t *TestResultBES) *ExectionInfoCreate { + return eic.SetTestResultID(t.ID) } // SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. @@ -222,7 +226,7 @@ func (eic *ExectionInfoCreate) createSpec() (*ExectionInfo, *sqlgraph.CreateSpec } if nodes := eic.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: exectioninfo.TestResultTable, Columns: []string{exectioninfo.TestResultColumn}, @@ -234,11 +238,12 @@ func (eic *ExectionInfoCreate) createSpec() (*ExectionInfo, *sqlgraph.CreateSpec for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.test_result_bes_execution_info = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := eic.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: exectioninfo.TimingBreakdownTable, Columns: []string{exectioninfo.TimingBreakdownColumn}, @@ -250,15 +255,14 @@ func (eic *ExectionInfoCreate) createSpec() (*ExectionInfo, *sqlgraph.CreateSpec for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.exection_info_timing_breakdown = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := eic.mutation.ResourceUsageIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), diff --git a/ent/gen/ent/exectioninfo_query.go b/ent/gen/ent/exectioninfo_query.go index 58b4fe3..63d2dbc 100644 --- a/ent/gen/ent/exectioninfo_query.go +++ b/ent/gen/ent/exectioninfo_query.go @@ -31,7 +31,6 @@ type ExectionInfoQuery struct { withFKs bool modifiers []func(*sql.Selector) loadTotal []func(context.Context, []*ExectionInfo) error - withNamedTestResult map[string]*TestResultBESQuery withNamedResourceUsage map[string]*ResourceUsageQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector @@ -83,7 +82,7 @@ func (eiq *ExectionInfoQuery) QueryTestResult() *TestResultBESQuery { step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, selector), sqlgraph.To(testresultbes.Table, testresultbes.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, exectioninfo.TestResultTable, exectioninfo.TestResultColumn), + sqlgraph.Edge(sqlgraph.O2O, true, exectioninfo.TestResultTable, exectioninfo.TestResultColumn), ) fromU = sqlgraph.SetNeighbors(eiq.driver.Dialect(), step) return fromU, nil @@ -105,7 +104,7 @@ func (eiq *ExectionInfoQuery) QueryTimingBreakdown() *TimingBreakdownQuery { step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, selector), sqlgraph.To(timingbreakdown.Table, timingbreakdown.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, exectioninfo.TimingBreakdownTable, exectioninfo.TimingBreakdownColumn), + sqlgraph.Edge(sqlgraph.O2O, false, exectioninfo.TimingBreakdownTable, exectioninfo.TimingBreakdownColumn), ) fromU = sqlgraph.SetNeighbors(eiq.driver.Dialect(), step) return fromU, nil @@ -127,7 +126,7 @@ func (eiq *ExectionInfoQuery) QueryResourceUsage() *ResourceUsageQuery { step := sqlgraph.NewStep( sqlgraph.From(exectioninfo.Table, exectioninfo.FieldID, selector), sqlgraph.To(resourceusage.Table, resourceusage.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, exectioninfo.ResourceUsageTable, exectioninfo.ResourceUsagePrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, exectioninfo.ResourceUsageTable, exectioninfo.ResourceUsageColumn), ) fromU = sqlgraph.SetNeighbors(eiq.driver.Dialect(), step) return fromU, nil @@ -454,7 +453,7 @@ func (eiq *ExectionInfoQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([ eiq.withResourceUsage != nil, } ) - if eiq.withTimingBreakdown != nil { + if eiq.withTestResult != nil { withFKs = true } if withFKs { @@ -482,9 +481,8 @@ func (eiq *ExectionInfoQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([ return nodes, nil } if query := eiq.withTestResult; query != nil { - if err := eiq.loadTestResult(ctx, query, nodes, - func(n *ExectionInfo) { n.Edges.TestResult = []*TestResultBES{} }, - func(n *ExectionInfo, e *TestResultBES) { n.Edges.TestResult = append(n.Edges.TestResult, e) }); err != nil { + if err := eiq.loadTestResult(ctx, query, nodes, nil, + func(n *ExectionInfo, e *TestResultBES) { n.Edges.TestResult = e }); err != nil { return nil, err } } @@ -501,13 +499,6 @@ func (eiq *ExectionInfoQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([ return nil, err } } - for name, query := range eiq.withNamedTestResult { - if err := eiq.loadTestResult(ctx, query, nodes, - func(n *ExectionInfo) { n.appendNamedTestResult(name) }, - func(n *ExectionInfo, e *TestResultBES) { n.appendNamedTestResult(name, e) }); err != nil { - return nil, err - } - } for name, query := range eiq.withNamedResourceUsage { if err := eiq.loadResourceUsage(ctx, query, nodes, func(n *ExectionInfo) { n.appendNamedResourceUsage(name) }, @@ -524,44 +515,13 @@ func (eiq *ExectionInfoQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([ } func (eiq *ExectionInfoQuery) loadTestResult(ctx context.Context, query *TestResultBESQuery, nodes []*ExectionInfo, init func(*ExectionInfo), assign func(*ExectionInfo, *TestResultBES)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*ExectionInfo) - for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) - } - } - query.withFKs = true - 
query.Where(predicate.TestResultBES(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(exectioninfo.TestResultColumn), fks...)) - })) - neighbors, err := query.All(ctx) - if err != nil { - return err - } - for _, n := range neighbors { - fk := n.test_result_bes_execution_info - if fk == nil { - return fmt.Errorf(`foreign-key "test_result_bes_execution_info" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] - if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "test_result_bes_execution_info" returned %v for node %v`, *fk, n.ID) - } - assign(node, n) - } - return nil -} -func (eiq *ExectionInfoQuery) loadTimingBreakdown(ctx context.Context, query *TimingBreakdownQuery, nodes []*ExectionInfo, init func(*ExectionInfo), assign func(*ExectionInfo, *TimingBreakdown)) error { ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*ExectionInfo) for i := range nodes { - if nodes[i].exection_info_timing_breakdown == nil { + if nodes[i].test_result_bes_execution_info == nil { continue } - fk := *nodes[i].exection_info_timing_breakdown + fk := *nodes[i].test_result_bes_execution_info if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } @@ -570,7 +530,7 @@ func (eiq *ExectionInfoQuery) loadTimingBreakdown(ctx context.Context, query *Ti if len(ids) == 0 { return nil } - query.Where(timingbreakdown.IDIn(ids...)) + query.Where(testresultbes.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err @@ -578,7 +538,7 @@ func (eiq *ExectionInfoQuery) loadTimingBreakdown(ctx context.Context, query *Ti for _, n := range neighbors { nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected foreign-key "exection_info_timing_breakdown" returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "test_result_bes_execution_info" returned %v`, n.ID) } for i := range nodes { assign(nodes[i], n) @@ -586,64 +546,62 @@ func (eiq *ExectionInfoQuery) loadTimingBreakdown(ctx context.Context, query *Ti } return nil } +func (eiq *ExectionInfoQuery) loadTimingBreakdown(ctx context.Context, query *TimingBreakdownQuery, nodes []*ExectionInfo, init func(*ExectionInfo), assign func(*ExectionInfo, *TimingBreakdown)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ExectionInfo) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + } + query.withFKs = true + query.Where(predicate.TimingBreakdown(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(exectioninfo.TimingBreakdownColumn), fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.exection_info_timing_breakdown + if fk == nil { + return fmt.Errorf(`foreign-key "exection_info_timing_breakdown" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "exection_info_timing_breakdown" returned %v for node %v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} func (eiq *ExectionInfoQuery) loadResourceUsage(ctx context.Context, query *ResourceUsageQuery, nodes []*ExectionInfo, init func(*ExectionInfo), assign func(*ExectionInfo, *ResourceUsage)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ExectionInfo) - nids := make(map[int]map[*ExectionInfo]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*ExectionInfo) + for i := range nodes { + fks = append(fks, nodes[i].ID) + 
nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(exectioninfo.ResourceUsageTable) - s.Join(joinT).On(s.C(resourceusage.FieldID), joinT.C(exectioninfo.ResourceUsagePrimaryKey[1])) - s.Where(sql.InValues(joinT.C(exectioninfo.ResourceUsagePrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(exectioninfo.ResourceUsagePrimaryKey[0])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ExectionInfo]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ResourceUsage](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.ResourceUsage(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(exectioninfo.ResourceUsageColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "resource_usage" node returned %v`, n.ID) + fk := n.exection_info_resource_usage + if fk == nil { + return fmt.Errorf(`foreign-key "exection_info_resource_usage" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "exection_info_resource_usage" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -732,20 +690,6 @@ func (eiq *ExectionInfoQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTestResult tells the query-builder to eager-load the nodes that are connected to the "test_result" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (eiq *ExectionInfoQuery) WithNamedTestResult(name string, opts ...func(*TestResultBESQuery)) *ExectionInfoQuery { - query := (&TestResultBESClient{config: eiq.config}).Query() - for _, opt := range opts { - opt(query) - } - if eiq.withNamedTestResult == nil { - eiq.withNamedTestResult = make(map[string]*TestResultBESQuery) - } - eiq.withNamedTestResult[name] = query - return eiq -} - // WithNamedResourceUsage tells the query-builder to eager-load the nodes that are connected to the "resource_usage" // edge with the given name. The optional arguments are used to configure the query builder of the edge. 
func (eiq *ExectionInfoQuery) WithNamedResourceUsage(name string, opts ...func(*ResourceUsageQuery)) *ExectionInfoQuery { diff --git a/ent/gen/ent/exectioninfo_update.go b/ent/gen/ent/exectioninfo_update.go index f0a1d09..d1fa7fe 100644 --- a/ent/gen/ent/exectioninfo_update.go +++ b/ent/gen/ent/exectioninfo_update.go @@ -144,19 +144,23 @@ func (eiu *ExectionInfoUpdate) ClearHostname() *ExectionInfoUpdate { return eiu } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (eiu *ExectionInfoUpdate) AddTestResultIDs(ids ...int) *ExectionInfoUpdate { - eiu.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (eiu *ExectionInfoUpdate) SetTestResultID(id int) *ExectionInfoUpdate { + eiu.mutation.SetTestResultID(id) return eiu } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. -func (eiu *ExectionInfoUpdate) AddTestResult(t ...*TestResultBES) *ExectionInfoUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (eiu *ExectionInfoUpdate) SetNillableTestResultID(id *int) *ExectionInfoUpdate { + if id != nil { + eiu = eiu.SetTestResultID(*id) } - return eiu.AddTestResultIDs(ids...) + return eiu +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (eiu *ExectionInfoUpdate) SetTestResult(t *TestResultBES) *ExectionInfoUpdate { + return eiu.SetTestResultID(t.ID) } // SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. @@ -198,27 +202,12 @@ func (eiu *ExectionInfoUpdate) Mutation() *ExectionInfoMutation { return eiu.mutation } -// ClearTestResult clears all "test_result" edges to the TestResultBES entity. +// ClearTestResult clears the "test_result" edge to the TestResultBES entity. func (eiu *ExectionInfoUpdate) ClearTestResult() *ExectionInfoUpdate { eiu.mutation.ClearTestResult() return eiu } -// RemoveTestResultIDs removes the "test_result" edge to TestResultBES entities by IDs. -func (eiu *ExectionInfoUpdate) RemoveTestResultIDs(ids ...int) *ExectionInfoUpdate { - eiu.mutation.RemoveTestResultIDs(ids...) - return eiu -} - -// RemoveTestResult removes "test_result" edges to TestResultBES entities. -func (eiu *ExectionInfoUpdate) RemoveTestResult(t ...*TestResultBES) *ExectionInfoUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return eiu.RemoveTestResultIDs(ids...) -} - // ClearTimingBreakdown clears the "timing_breakdown" edge to the TimingBreakdown entity. 
func (eiu *ExectionInfoUpdate) ClearTimingBreakdown() *ExectionInfoUpdate { eiu.mutation.ClearTimingBreakdown() @@ -320,20 +309,7 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if eiu.mutation.TestResultCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: exectioninfo.TestResultTable, - Columns: []string{exectioninfo.TestResultColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := eiu.mutation.RemovedTestResultIDs(); len(nodes) > 0 && !eiu.mutation.TestResultCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: exectioninfo.TestResultTable, Columns: []string{exectioninfo.TestResultColumn}, @@ -342,14 +318,11 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := eiu.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: exectioninfo.TestResultTable, Columns: []string{exectioninfo.TestResultColumn}, @@ -365,7 +338,7 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if eiu.mutation.TimingBreakdownCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: exectioninfo.TimingBreakdownTable, Columns: []string{exectioninfo.TimingBreakdownColumn}, @@ -378,7 +351,7 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := eiu.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: exectioninfo.TimingBreakdownTable, Columns: []string{exectioninfo.TimingBreakdownColumn}, @@ -394,10 +367,10 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if eiu.mutation.ResourceUsageCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), @@ -407,10 +380,10 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := eiu.mutation.RemovedResourceUsageIDs(); len(nodes) > 0 && !eiu.mutation.ResourceUsageCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), @@ -423,10 +396,10 @@ func (eiu *ExectionInfoUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := eiu.mutation.ResourceUsageIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, 
Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), @@ -571,19 +544,23 @@ func (eiuo *ExectionInfoUpdateOne) ClearHostname() *ExectionInfoUpdateOne { return eiuo } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (eiuo *ExectionInfoUpdateOne) AddTestResultIDs(ids ...int) *ExectionInfoUpdateOne { - eiuo.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (eiuo *ExectionInfoUpdateOne) SetTestResultID(id int) *ExectionInfoUpdateOne { + eiuo.mutation.SetTestResultID(id) return eiuo } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. -func (eiuo *ExectionInfoUpdateOne) AddTestResult(t ...*TestResultBES) *ExectionInfoUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (eiuo *ExectionInfoUpdateOne) SetNillableTestResultID(id *int) *ExectionInfoUpdateOne { + if id != nil { + eiuo = eiuo.SetTestResultID(*id) } - return eiuo.AddTestResultIDs(ids...) + return eiuo +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (eiuo *ExectionInfoUpdateOne) SetTestResult(t *TestResultBES) *ExectionInfoUpdateOne { + return eiuo.SetTestResultID(t.ID) } // SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. @@ -625,27 +602,12 @@ func (eiuo *ExectionInfoUpdateOne) Mutation() *ExectionInfoMutation { return eiuo.mutation } -// ClearTestResult clears all "test_result" edges to the TestResultBES entity. +// ClearTestResult clears the "test_result" edge to the TestResultBES entity. func (eiuo *ExectionInfoUpdateOne) ClearTestResult() *ExectionInfoUpdateOne { eiuo.mutation.ClearTestResult() return eiuo } -// RemoveTestResultIDs removes the "test_result" edge to TestResultBES entities by IDs. -func (eiuo *ExectionInfoUpdateOne) RemoveTestResultIDs(ids ...int) *ExectionInfoUpdateOne { - eiuo.mutation.RemoveTestResultIDs(ids...) - return eiuo -} - -// RemoveTestResult removes "test_result" edges to TestResultBES entities. -func (eiuo *ExectionInfoUpdateOne) RemoveTestResult(t ...*TestResultBES) *ExectionInfoUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return eiuo.RemoveTestResultIDs(ids...) -} - // ClearTimingBreakdown clears the "timing_breakdown" edge to the TimingBreakdown entity. 
func (eiuo *ExectionInfoUpdateOne) ClearTimingBreakdown() *ExectionInfoUpdateOne { eiuo.mutation.ClearTimingBreakdown() @@ -777,20 +739,7 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if eiuo.mutation.TestResultCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: exectioninfo.TestResultTable, - Columns: []string{exectioninfo.TestResultColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := eiuo.mutation.RemovedTestResultIDs(); len(nodes) > 0 && !eiuo.mutation.TestResultCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: exectioninfo.TestResultTable, Columns: []string{exectioninfo.TestResultColumn}, @@ -799,14 +748,11 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := eiuo.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: exectioninfo.TestResultTable, Columns: []string{exectioninfo.TestResultColumn}, @@ -822,7 +768,7 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if eiuo.mutation.TimingBreakdownCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: exectioninfo.TimingBreakdownTable, Columns: []string{exectioninfo.TimingBreakdownColumn}, @@ -835,7 +781,7 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if nodes := eiuo.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: exectioninfo.TimingBreakdownTable, Columns: []string{exectioninfo.TimingBreakdownColumn}, @@ -851,10 +797,10 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if eiuo.mutation.ResourceUsageCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), @@ -864,10 +810,10 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if nodes := eiuo.mutation.RemovedResourceUsageIDs(); len(nodes) > 0 && !eiuo.mutation.ResourceUsageCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: []string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), @@ -880,10 +826,10 @@ func (eiuo *ExectionInfoUpdateOne) sqlSave(ctx context.Context) (_node *Exection } if nodes := eiuo.mutation.ResourceUsageIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: exectioninfo.ResourceUsageTable, - Columns: exectioninfo.ResourceUsagePrimaryKey, + Columns: 
[]string{exectioninfo.ResourceUsageColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(resourceusage.FieldID, field.TypeInt), diff --git a/ent/gen/ent/filesmetric.go b/ent/gen/ent/filesmetric.go index b70dd0e..8c44f82 100644 --- a/ent/gen/ent/filesmetric.go +++ b/ent/gen/ent/filesmetric.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/artifactmetrics" "github.com/buildbarn/bb-portal/ent/gen/ent/filesmetric" ) @@ -22,31 +23,29 @@ type FilesMetric struct { Count int32 `json:"count,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the FilesMetricQuery when eager-loading is set. - Edges FilesMetricEdges `json:"edges"` - artifact_metrics_source_artifacts_read *int - artifact_metrics_output_artifacts_seen *int - artifact_metrics_output_artifacts_from_action_cache *int - selectValues sql.SelectValues + Edges FilesMetricEdges `json:"edges"` + artifact_metrics_top_level_artifacts *int + selectValues sql.SelectValues } // FilesMetricEdges holds the relations/edges for other nodes in the graph. type FilesMetricEdges struct { // ArtifactMetrics holds the value of the artifact_metrics edge. - ArtifactMetrics []*ArtifactMetrics `json:"artifact_metrics,omitempty"` + ArtifactMetrics *ArtifactMetrics `json:"artifact_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedArtifactMetrics map[string][]*ArtifactMetrics } // ArtifactMetricsOrErr returns the ArtifactMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e FilesMetricEdges) ArtifactMetricsOrErr() ([]*ArtifactMetrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e FilesMetricEdges) ArtifactMetricsOrErr() (*ArtifactMetrics, error) { + if e.ArtifactMetrics != nil { return e.ArtifactMetrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: artifactmetrics.Label} } return nil, &NotLoadedError{edge: "artifact_metrics"} } @@ -58,11 +57,7 @@ func (*FilesMetric) scanValues(columns []string) ([]any, error) { switch columns[i] { case filesmetric.FieldID, filesmetric.FieldSizeInBytes, filesmetric.FieldCount: values[i] = new(sql.NullInt64) - case filesmetric.ForeignKeys[0]: // artifact_metrics_source_artifacts_read - values[i] = new(sql.NullInt64) - case filesmetric.ForeignKeys[1]: // artifact_metrics_output_artifacts_seen - values[i] = new(sql.NullInt64) - case filesmetric.ForeignKeys[2]: // artifact_metrics_output_artifacts_from_action_cache + case filesmetric.ForeignKeys[0]: // artifact_metrics_top_level_artifacts values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -99,24 +94,10 @@ func (fm *FilesMetric) assignValues(columns []string, values []any) error { } case filesmetric.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_source_artifacts_read", value) - } else if value.Valid { - fm.artifact_metrics_source_artifacts_read = new(int) - *fm.artifact_metrics_source_artifacts_read = int(value.Int64) - } - case filesmetric.ForeignKeys[1]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_output_artifacts_seen", value) - } else if value.Valid { - fm.artifact_metrics_output_artifacts_seen = new(int) - *fm.artifact_metrics_output_artifacts_seen = int(value.Int64) - } - case filesmetric.ForeignKeys[2]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_output_artifacts_from_action_cache", value) + return fmt.Errorf("unexpected type %T for edge-field artifact_metrics_top_level_artifacts", value) } else if value.Valid { - fm.artifact_metrics_output_artifacts_from_action_cache = new(int) - *fm.artifact_metrics_output_artifacts_from_action_cache = int(value.Int64) + fm.artifact_metrics_top_level_artifacts = new(int) + *fm.artifact_metrics_top_level_artifacts = int(value.Int64) } default: fm.selectValues.Set(columns[i], values[i]) @@ -168,29 +149,5 @@ func (fm *FilesMetric) String() string { return builder.String() } -// NamedArtifactMetrics returns the ArtifactMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (fm *FilesMetric) NamedArtifactMetrics(name string) ([]*ArtifactMetrics, error) { - if fm.Edges.namedArtifactMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := fm.Edges.namedArtifactMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (fm *FilesMetric) appendNamedArtifactMetrics(name string, edges ...*ArtifactMetrics) { - if fm.Edges.namedArtifactMetrics == nil { - fm.Edges.namedArtifactMetrics = make(map[string][]*ArtifactMetrics) - } - if len(edges) == 0 { - fm.Edges.namedArtifactMetrics[name] = []*ArtifactMetrics{} - } else { - fm.Edges.namedArtifactMetrics[name] = append(fm.Edges.namedArtifactMetrics[name], edges...) - } -} - // FilesMetrics is a parsable slice of FilesMetric. 
type FilesMetrics []*FilesMetric diff --git a/ent/gen/ent/filesmetric/filesmetric.go b/ent/gen/ent/filesmetric/filesmetric.go index c46297e..b0fff92 100644 --- a/ent/gen/ent/filesmetric/filesmetric.go +++ b/ent/gen/ent/filesmetric/filesmetric.go @@ -20,11 +20,13 @@ const ( EdgeArtifactMetrics = "artifact_metrics" // Table holds the table name of the filesmetric in the database. Table = "files_metrics" - // ArtifactMetricsTable is the table that holds the artifact_metrics relation/edge. The primary key declared below. - ArtifactMetricsTable = "artifact_metrics_top_level_artifacts" + // ArtifactMetricsTable is the table that holds the artifact_metrics relation/edge. + ArtifactMetricsTable = "files_metrics" // ArtifactMetricsInverseTable is the table name for the ArtifactMetrics entity. // It exists in this package in order to avoid circular dependency with the "artifactmetrics" package. ArtifactMetricsInverseTable = "artifact_metrics" + // ArtifactMetricsColumn is the table column denoting the artifact_metrics relation/edge. + ArtifactMetricsColumn = "artifact_metrics_top_level_artifacts" ) // Columns holds all SQL columns for filesmetric fields. @@ -37,17 +39,9 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "files_metrics" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "artifact_metrics_source_artifacts_read", - "artifact_metrics_output_artifacts_seen", - "artifact_metrics_output_artifacts_from_action_cache", + "artifact_metrics_top_level_artifacts", } -var ( - // ArtifactMetricsPrimaryKey and ArtifactMetricsColumn2 are the table columns denoting the - // primary key for the artifact_metrics relation (M2M). - ArtifactMetricsPrimaryKey = []string{"artifact_metrics_id", "files_metric_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -81,23 +75,16 @@ func ByCount(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldCount, opts...).ToFunc() } -// ByArtifactMetricsCount orders the results by artifact_metrics count. -func ByArtifactMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newArtifactMetricsStep(), opts...) - } -} - -// ByArtifactMetrics orders the results by artifact_metrics terms. -func ByArtifactMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByArtifactMetricsField orders the results by artifact_metrics field. +func ByArtifactMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newArtifactMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newArtifactMetricsStep(), sql.OrderByField(field, opts...)) } } func newArtifactMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ArtifactMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ArtifactMetricsTable, ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, ArtifactMetricsTable, ArtifactMetricsColumn), ) } diff --git a/ent/gen/ent/filesmetric/where.go b/ent/gen/ent/filesmetric/where.go index 16cfb11..e715ab2 100644 --- a/ent/gen/ent/filesmetric/where.go +++ b/ent/gen/ent/filesmetric/where.go @@ -168,7 +168,7 @@ func HasArtifactMetrics() predicate.FilesMetric { return predicate.FilesMetric(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ArtifactMetricsTable, ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, ArtifactMetricsTable, ArtifactMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/filesmetric_create.go b/ent/gen/ent/filesmetric_create.go index 85f9ffd..04dff6a 100644 --- a/ent/gen/ent/filesmetric_create.go +++ b/ent/gen/ent/filesmetric_create.go @@ -47,19 +47,23 @@ func (fmc *FilesMetricCreate) SetNillableCount(i *int32) *FilesMetricCreate { return fmc } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (fmc *FilesMetricCreate) AddArtifactMetricIDs(ids ...int) *FilesMetricCreate { - fmc.mutation.AddArtifactMetricIDs(ids...) +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (fmc *FilesMetricCreate) SetArtifactMetricsID(id int) *FilesMetricCreate { + fmc.mutation.SetArtifactMetricsID(id) return fmc } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (fmc *FilesMetricCreate) AddArtifactMetrics(a ...*ArtifactMetrics) *FilesMetricCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. +func (fmc *FilesMetricCreate) SetNillableArtifactMetricsID(id *int) *FilesMetricCreate { + if id != nil { + fmc = fmc.SetArtifactMetricsID(*id) } - return fmc.AddArtifactMetricIDs(ids...) + return fmc +} + +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (fmc *FilesMetricCreate) SetArtifactMetrics(a *ArtifactMetrics) *FilesMetricCreate { + return fmc.SetArtifactMetricsID(a.ID) } // Mutation returns the FilesMetricMutation object of the builder. 
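// ---------------------------------------------------------------------------
// Editorial note (sketch, not part of the generated patch): the hunks in this
// change consistently replace ent's M2M join-table edges with O2O/O2M edges
// backed by a single foreign-key column, which is why the *PrimaryKey slices
// become *Column constants and the Add*/Remove* edge builders collapse into
// Set*/SetNillable*/Clear*. ent emits generated code of this shape when the
// schema marks an edge and its back-reference as Unique(). The schema files
// themselves are not part of this patch, so the following is an assumed
// reconstruction of the ArtifactMetrics <-> FilesMetric relation for
// illustration only, not the authoritative definition.
package schema // hypothetical location: ent/schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// ArtifactMetrics and FilesMetric stand in for the real schema types, which
// embed ent.Schema in the usual ent fashion.
type ArtifactMetrics struct{ ent.Schema }
type FilesMetric struct{ ent.Schema }

// ArtifactMetrics owns the edge; with Unique() the foreign key lands on the
// files_metrics table as "artifact_metrics_top_level_artifacts", matching the
// generated ForeignKeys slice in filesmetric/filesmetric.go above.
func (ArtifactMetrics) Edges() []ent.Edge {
	return []ent.Edge{
		edge.To("top_level_artifacts", FilesMetric.Type).Unique(),
	}
}

// FilesMetric holds the unique back-reference, so its generated builders now
// expose SetArtifactMetricsID/SetArtifactMetrics/ClearArtifactMetrics instead
// of AddArtifactMetricIDs/RemoveArtifactMetricIDs.
func (FilesMetric) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("artifact_metrics", ArtifactMetrics.Type).
			Ref("top_level_artifacts").
			Unique(),
	}
}

// Callers of the generated client migrate accordingly, e.g. (assumed usage):
//
//	client.FilesMetric.Create().
//		SetCount(42).
//		SetArtifactMetrics(am). // was: AddArtifactMetrics(am)
//		Save(ctx)
// ---------------------------------------------------------------------------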
@@ -132,10 +136,10 @@ func (fmc *FilesMetricCreate) createSpec() (*FilesMetric, *sqlgraph.CreateSpec) } if nodes := fmc.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, + Columns: []string{filesmetric.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (fmc *FilesMetricCreate) createSpec() (*FilesMetric, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.artifact_metrics_top_level_artifacts = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/filesmetric_query.go b/ent/gen/ent/filesmetric_query.go index 224c328..c73c257 100644 --- a/ent/gen/ent/filesmetric_query.go +++ b/ent/gen/ent/filesmetric_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,15 +18,14 @@ import ( // FilesMetricQuery is the builder for querying FilesMetric entities. type FilesMetricQuery struct { config - ctx *QueryContext - order []filesmetric.OrderOption - inters []Interceptor - predicates []predicate.FilesMetric - withArtifactMetrics *ArtifactMetricsQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*FilesMetric) error - withNamedArtifactMetrics map[string]*ArtifactMetricsQuery + ctx *QueryContext + order []filesmetric.OrderOption + inters []Interceptor + predicates []predicate.FilesMetric + withArtifactMetrics *ArtifactMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*FilesMetric) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -78,7 +76,7 @@ func (fmq *FilesMetricQuery) QueryArtifactMetrics() *ArtifactMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(filesmetric.Table, filesmetric.FieldID, selector), sqlgraph.To(artifactmetrics.Table, artifactmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, filesmetric.ArtifactMetricsTable, filesmetric.ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, filesmetric.ArtifactMetricsTable, filesmetric.ArtifactMetricsColumn), ) fromU = sqlgraph.SetNeighbors(fmq.driver.Dialect(), step) return fromU, nil @@ -379,6 +377,9 @@ func (fmq *FilesMetricQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] fmq.withArtifactMetrics != nil, } ) + if fmq.withArtifactMetrics != nil { + withFKs = true + } if withFKs { _spec.Node.Columns = append(_spec.Node.Columns, filesmetric.ForeignKeys...) 
} @@ -404,16 +405,8 @@ func (fmq *FilesMetricQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nodes, nil } if query := fmq.withArtifactMetrics; query != nil { - if err := fmq.loadArtifactMetrics(ctx, query, nodes, - func(n *FilesMetric) { n.Edges.ArtifactMetrics = []*ArtifactMetrics{} }, - func(n *FilesMetric, e *ArtifactMetrics) { n.Edges.ArtifactMetrics = append(n.Edges.ArtifactMetrics, e) }); err != nil { - return nil, err - } - } - for name, query := range fmq.withNamedArtifactMetrics { - if err := fmq.loadArtifactMetrics(ctx, query, nodes, - func(n *FilesMetric) { n.appendNamedArtifactMetrics(name) }, - func(n *FilesMetric, e *ArtifactMetrics) { n.appendNamedArtifactMetrics(name, e) }); err != nil { + if err := fmq.loadArtifactMetrics(ctx, query, nodes, nil, + func(n *FilesMetric, e *ArtifactMetrics) { n.Edges.ArtifactMetrics = e }); err != nil { return nil, err } } @@ -426,62 +419,33 @@ func (fmq *FilesMetricQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] } func (fmq *FilesMetricQuery) loadArtifactMetrics(ctx context.Context, query *ArtifactMetricsQuery, nodes []*FilesMetric, init func(*FilesMetric), assign func(*FilesMetric, *ArtifactMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*FilesMetric) - nids := make(map[int]map[*FilesMetric]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*FilesMetric) + for i := range nodes { + if nodes[i].artifact_metrics_top_level_artifacts == nil { + continue + } + fk := *nodes[i].artifact_metrics_top_level_artifacts + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(filesmetric.ArtifactMetricsTable) - s.Join(joinT).On(s.C(artifactmetrics.FieldID), joinT.C(filesmetric.ArtifactMetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(filesmetric.ArtifactMetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(filesmetric.ArtifactMetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*FilesMetric]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ArtifactMetrics](ctx, query, qr, query.inters) + query.Where(artifactmetrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "artifact_metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "artifact_metrics_top_level_artifacts" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -571,20 +535,6 @@ func (fmq *FilesMetricQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedArtifactMetrics tells the query-builder to eager-load the nodes that are connected to the "artifact_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (fmq *FilesMetricQuery) WithNamedArtifactMetrics(name string, opts ...func(*ArtifactMetricsQuery)) *FilesMetricQuery { - query := (&ArtifactMetricsClient{config: fmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if fmq.withNamedArtifactMetrics == nil { - fmq.withNamedArtifactMetrics = make(map[string]*ArtifactMetricsQuery) - } - fmq.withNamedArtifactMetrics[name] = query - return fmq -} - // FilesMetricGroupBy is the group-by builder for FilesMetric entities. type FilesMetricGroupBy struct { selector diff --git a/ent/gen/ent/filesmetric_update.go b/ent/gen/ent/filesmetric_update.go index 7818d8d..a87491c 100644 --- a/ent/gen/ent/filesmetric_update.go +++ b/ent/gen/ent/filesmetric_update.go @@ -82,19 +82,23 @@ func (fmu *FilesMetricUpdate) ClearCount() *FilesMetricUpdate { return fmu } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (fmu *FilesMetricUpdate) AddArtifactMetricIDs(ids ...int) *FilesMetricUpdate { - fmu.mutation.AddArtifactMetricIDs(ids...) +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (fmu *FilesMetricUpdate) SetArtifactMetricsID(id int) *FilesMetricUpdate { + fmu.mutation.SetArtifactMetricsID(id) return fmu } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (fmu *FilesMetricUpdate) AddArtifactMetrics(a ...*ArtifactMetrics) *FilesMetricUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. 
+func (fmu *FilesMetricUpdate) SetNillableArtifactMetricsID(id *int) *FilesMetricUpdate { + if id != nil { + fmu = fmu.SetArtifactMetricsID(*id) } - return fmu.AddArtifactMetricIDs(ids...) + return fmu +} + +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (fmu *FilesMetricUpdate) SetArtifactMetrics(a *ArtifactMetrics) *FilesMetricUpdate { + return fmu.SetArtifactMetricsID(a.ID) } // Mutation returns the FilesMetricMutation object of the builder. @@ -102,27 +106,12 @@ func (fmu *FilesMetricUpdate) Mutation() *FilesMetricMutation { return fmu.mutation } -// ClearArtifactMetrics clears all "artifact_metrics" edges to the ArtifactMetrics entity. +// ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. func (fmu *FilesMetricUpdate) ClearArtifactMetrics() *FilesMetricUpdate { fmu.mutation.ClearArtifactMetrics() return fmu } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to ArtifactMetrics entities by IDs. -func (fmu *FilesMetricUpdate) RemoveArtifactMetricIDs(ids ...int) *FilesMetricUpdate { - fmu.mutation.RemoveArtifactMetricIDs(ids...) - return fmu -} - -// RemoveArtifactMetrics removes "artifact_metrics" edges to ArtifactMetrics entities. -func (fmu *FilesMetricUpdate) RemoveArtifactMetrics(a ...*ArtifactMetrics) *FilesMetricUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return fmu.RemoveArtifactMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (fmu *FilesMetricUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, fmu.sqlSave, fmu.mutation, fmu.hooks) @@ -179,39 +168,23 @@ func (fmu *FilesMetricUpdate) sqlSave(ctx context.Context) (n int, err error) { } if fmu.mutation.ArtifactMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := fmu.mutation.RemovedArtifactMetricsIDs(); len(nodes) > 0 && !fmu.mutation.ArtifactMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, + Columns: []string{filesmetric.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := fmu.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, + Columns: []string{filesmetric.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), @@ -296,19 +269,23 @@ func (fmuo *FilesMetricUpdateOne) ClearCount() *FilesMetricUpdateOne { return fmuo } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (fmuo *FilesMetricUpdateOne) AddArtifactMetricIDs(ids ...int) *FilesMetricUpdateOne { - fmuo.mutation.AddArtifactMetricIDs(ids...) 
+// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (fmuo *FilesMetricUpdateOne) SetArtifactMetricsID(id int) *FilesMetricUpdateOne { + fmuo.mutation.SetArtifactMetricsID(id) return fmuo } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (fmuo *FilesMetricUpdateOne) AddArtifactMetrics(a ...*ArtifactMetrics) *FilesMetricUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. +func (fmuo *FilesMetricUpdateOne) SetNillableArtifactMetricsID(id *int) *FilesMetricUpdateOne { + if id != nil { + fmuo = fmuo.SetArtifactMetricsID(*id) } - return fmuo.AddArtifactMetricIDs(ids...) + return fmuo +} + +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (fmuo *FilesMetricUpdateOne) SetArtifactMetrics(a *ArtifactMetrics) *FilesMetricUpdateOne { + return fmuo.SetArtifactMetricsID(a.ID) } // Mutation returns the FilesMetricMutation object of the builder. @@ -316,27 +293,12 @@ func (fmuo *FilesMetricUpdateOne) Mutation() *FilesMetricMutation { return fmuo.mutation } -// ClearArtifactMetrics clears all "artifact_metrics" edges to the ArtifactMetrics entity. +// ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. func (fmuo *FilesMetricUpdateOne) ClearArtifactMetrics() *FilesMetricUpdateOne { fmuo.mutation.ClearArtifactMetrics() return fmuo } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to ArtifactMetrics entities by IDs. -func (fmuo *FilesMetricUpdateOne) RemoveArtifactMetricIDs(ids ...int) *FilesMetricUpdateOne { - fmuo.mutation.RemoveArtifactMetricIDs(ids...) - return fmuo -} - -// RemoveArtifactMetrics removes "artifact_metrics" edges to ArtifactMetrics entities. -func (fmuo *FilesMetricUpdateOne) RemoveArtifactMetrics(a ...*ArtifactMetrics) *FilesMetricUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return fmuo.RemoveArtifactMetricIDs(ids...) -} - // Where appends a list predicates to the FilesMetricUpdate builder. func (fmuo *FilesMetricUpdateOne) Where(ps ...predicate.FilesMetric) *FilesMetricUpdateOne { fmuo.mutation.Where(ps...) 
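On the consumer side, the regenerated FilesMetric builders replace the slice-based Add*/Remove* edge methods with single-value setters, and eager-loading now yields one ArtifactMetrics node instead of a slice. A hedged usage sketch under those assumptions follows; the client wiring, function name, and field values are illustrative, while the builder methods shown are the ones this patch generates.

package example

import (
	"context"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
	"github.com/buildbarn/bb-portal/ent/gen/ent/filesmetric"
)

// exampleFilesMetricUsage shows the new single-edge API for FilesMetric.
func exampleFilesMetricUsage(ctx context.Context, client *ent.Client, am *ent.ArtifactMetrics) error {
	// Create a FilesMetric that points at exactly one ArtifactMetrics row.
	fm, err := client.FilesMetric.Create().
		SetCount(42).
		SetArtifactMetrics(am). // previously AddArtifactMetrics(am)
		Save(ctx)
	if err != nil {
		return err
	}

	// Detach the edge; RemoveArtifactMetrics no longer exists for a unique edge.
	if err := client.FilesMetric.UpdateOne(fm).
		ClearArtifactMetrics().
		Exec(ctx); err != nil {
		return err
	}

	// Eager-loading now populates a single node rather than a slice.
	fm, err = client.FilesMetric.Query().
		Where(filesmetric.ID(fm.ID)).
		WithArtifactMetrics().
		Only(ctx)
	if err != nil {
		return err
	}
	_ = fm.Edges.ArtifactMetrics // *ent.ArtifactMetrics, nil when unset
	return nil
}
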
@@ -423,39 +385,23 @@ func (fmuo *FilesMetricUpdateOne) sqlSave(ctx context.Context) (_node *FilesMetr } if fmuo.mutation.ArtifactMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := fmuo.mutation.RemovedArtifactMetricsIDs(); len(nodes) > 0 && !fmuo.mutation.ArtifactMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, + Columns: []string{filesmetric.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := fmuo.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: filesmetric.ArtifactMetricsTable, - Columns: filesmetric.ArtifactMetricsPrimaryKey, + Columns: []string{filesmetric.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/garbagemetrics.go b/ent/gen/ent/garbagemetrics.go index ae810b2..48ae88b 100644 --- a/ent/gen/ent/garbagemetrics.go +++ b/ent/gen/ent/garbagemetrics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/garbagemetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/memorymetrics" ) // GarbageMetrics is the model entity for the GarbageMetrics schema. @@ -22,28 +23,29 @@ type GarbageMetrics struct { GarbageCollected int64 `json:"garbage_collected,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the GarbageMetricsQuery when eager-loading is set. - Edges GarbageMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges GarbageMetricsEdges `json:"edges"` + memory_metrics_garbage_metrics *int + selectValues sql.SelectValues } // GarbageMetricsEdges holds the relations/edges for other nodes in the graph. type GarbageMetricsEdges struct { // MemoryMetrics holds the value of the memory_metrics edge. - MemoryMetrics []*MemoryMetrics `json:"memory_metrics,omitempty"` + MemoryMetrics *MemoryMetrics `json:"memory_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedMemoryMetrics map[string][]*MemoryMetrics } // MemoryMetricsOrErr returns the MemoryMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e GarbageMetricsEdges) MemoryMetricsOrErr() ([]*MemoryMetrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e GarbageMetricsEdges) MemoryMetricsOrErr() (*MemoryMetrics, error) { + if e.MemoryMetrics != nil { return e.MemoryMetrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: memorymetrics.Label} } return nil, &NotLoadedError{edge: "memory_metrics"} } @@ -57,6 +59,8 @@ func (*GarbageMetrics) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case garbagemetrics.FieldType: values[i] = new(sql.NullString) + case garbagemetrics.ForeignKeys[0]: // memory_metrics_garbage_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -90,6 +94,13 @@ func (gm *GarbageMetrics) assignValues(columns []string, values []any) error { } else if value.Valid { gm.GarbageCollected = value.Int64 } + case garbagemetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field memory_metrics_garbage_metrics", value) + } else if value.Valid { + gm.memory_metrics_garbage_metrics = new(int) + *gm.memory_metrics_garbage_metrics = int(value.Int64) + } default: gm.selectValues.Set(columns[i], values[i]) } @@ -140,29 +151,5 @@ func (gm *GarbageMetrics) String() string { return builder.String() } -// NamedMemoryMetrics returns the MemoryMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (gm *GarbageMetrics) NamedMemoryMetrics(name string) ([]*MemoryMetrics, error) { - if gm.Edges.namedMemoryMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := gm.Edges.namedMemoryMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (gm *GarbageMetrics) appendNamedMemoryMetrics(name string, edges ...*MemoryMetrics) { - if gm.Edges.namedMemoryMetrics == nil { - gm.Edges.namedMemoryMetrics = make(map[string][]*MemoryMetrics) - } - if len(edges) == 0 { - gm.Edges.namedMemoryMetrics[name] = []*MemoryMetrics{} - } else { - gm.Edges.namedMemoryMetrics[name] = append(gm.Edges.namedMemoryMetrics[name], edges...) - } -} - // GarbageMetricsSlice is a parsable slice of GarbageMetrics. type GarbageMetricsSlice []*GarbageMetrics diff --git a/ent/gen/ent/garbagemetrics/garbagemetrics.go b/ent/gen/ent/garbagemetrics/garbagemetrics.go index 3459109..26bd88d 100644 --- a/ent/gen/ent/garbagemetrics/garbagemetrics.go +++ b/ent/gen/ent/garbagemetrics/garbagemetrics.go @@ -20,11 +20,13 @@ const ( EdgeMemoryMetrics = "memory_metrics" // Table holds the table name of the garbagemetrics in the database. Table = "garbage_metrics" - // MemoryMetricsTable is the table that holds the memory_metrics relation/edge. The primary key declared below. - MemoryMetricsTable = "memory_metrics_garbage_metrics" + // MemoryMetricsTable is the table that holds the memory_metrics relation/edge. + MemoryMetricsTable = "garbage_metrics" // MemoryMetricsInverseTable is the table name for the MemoryMetrics entity. // It exists in this package in order to avoid circular dependency with the "memorymetrics" package. MemoryMetricsInverseTable = "memory_metrics" + // MemoryMetricsColumn is the table column denoting the memory_metrics relation/edge. + MemoryMetricsColumn = "memory_metrics_garbage_metrics" ) // Columns holds all SQL columns for garbagemetrics fields. @@ -34,11 +36,11 @@ var Columns = []string{ FieldGarbageCollected, } -var ( - // MemoryMetricsPrimaryKey and MemoryMetricsColumn2 are the table columns denoting the - // primary key for the memory_metrics relation (M2M). 
- MemoryMetricsPrimaryKey = []string{"memory_metrics_id", "garbage_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "garbage_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "memory_metrics_garbage_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -47,6 +49,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -68,23 +75,16 @@ func ByGarbageCollected(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldGarbageCollected, opts...).ToFunc() } -// ByMemoryMetricsCount orders the results by memory_metrics count. -func ByMemoryMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMemoryMetricsStep(), opts...) - } -} - -// ByMemoryMetrics orders the results by memory_metrics terms. -func ByMemoryMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMemoryMetricsField orders the results by memory_metrics field. +func ByMemoryMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMemoryMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMemoryMetricsStep(), sql.OrderByField(field, opts...)) } } func newMemoryMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MemoryMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MemoryMetricsTable, MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, MemoryMetricsTable, MemoryMetricsColumn), ) } diff --git a/ent/gen/ent/garbagemetrics/where.go b/ent/gen/ent/garbagemetrics/where.go index fb40702..5d21cb1 100644 --- a/ent/gen/ent/garbagemetrics/where.go +++ b/ent/gen/ent/garbagemetrics/where.go @@ -193,7 +193,7 @@ func HasMemoryMetrics() predicate.GarbageMetrics { return predicate.GarbageMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MemoryMetricsTable, MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, MemoryMetricsTable, MemoryMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/garbagemetrics_create.go b/ent/gen/ent/garbagemetrics_create.go index 738ee34..5b6e392 100644 --- a/ent/gen/ent/garbagemetrics_create.go +++ b/ent/gen/ent/garbagemetrics_create.go @@ -47,19 +47,23 @@ func (gmc *GarbageMetricsCreate) SetNillableGarbageCollected(i *int64) *GarbageM return gmc } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (gmc *GarbageMetricsCreate) AddMemoryMetricIDs(ids ...int) *GarbageMetricsCreate { - gmc.mutation.AddMemoryMetricIDs(ids...) +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. +func (gmc *GarbageMetricsCreate) SetMemoryMetricsID(id int) *GarbageMetricsCreate { + gmc.mutation.SetMemoryMetricsID(id) return gmc } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. 
-func (gmc *GarbageMetricsCreate) AddMemoryMetrics(m ...*MemoryMetrics) *GarbageMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. +func (gmc *GarbageMetricsCreate) SetNillableMemoryMetricsID(id *int) *GarbageMetricsCreate { + if id != nil { + gmc = gmc.SetMemoryMetricsID(*id) } - return gmc.AddMemoryMetricIDs(ids...) + return gmc +} + +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (gmc *GarbageMetricsCreate) SetMemoryMetrics(m *MemoryMetrics) *GarbageMetricsCreate { + return gmc.SetMemoryMetricsID(m.ID) } // Mutation returns the GarbageMetricsMutation object of the builder. @@ -132,10 +136,10 @@ func (gmc *GarbageMetricsCreate) createSpec() (*GarbageMetrics, *sqlgraph.Create } if nodes := gmc.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, + Columns: []string{garbagemetrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (gmc *GarbageMetricsCreate) createSpec() (*GarbageMetrics, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.memory_metrics_garbage_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/garbagemetrics_query.go b/ent/gen/ent/garbagemetrics_query.go index 6571208..ac73940 100644 --- a/ent/gen/ent/garbagemetrics_query.go +++ b/ent/gen/ent/garbagemetrics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // GarbageMetricsQuery is the builder for querying GarbageMetrics entities. type GarbageMetricsQuery struct { config - ctx *QueryContext - order []garbagemetrics.OrderOption - inters []Interceptor - predicates []predicate.GarbageMetrics - withMemoryMetrics *MemoryMetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*GarbageMetrics) error - withNamedMemoryMetrics map[string]*MemoryMetricsQuery + ctx *QueryContext + order []garbagemetrics.OrderOption + inters []Interceptor + predicates []predicate.GarbageMetrics + withMemoryMetrics *MemoryMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*GarbageMetrics) error // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (gmq *GarbageMetricsQuery) QueryMemoryMetrics() *MemoryMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(garbagemetrics.Table, garbagemetrics.FieldID, selector), sqlgraph.To(memorymetrics.Table, memorymetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, garbagemetrics.MemoryMetricsTable, garbagemetrics.MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, garbagemetrics.MemoryMetricsTable, garbagemetrics.MemoryMetricsColumn), ) fromU = sqlgraph.SetNeighbors(gmq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (gmq *GarbageMetricsQuery) prepareQuery(ctx context.Context) error { func (gmq *GarbageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*GarbageMetrics, error) { var ( nodes = []*GarbageMetrics{} + withFKs = gmq.withFKs _spec = gmq.querySpec() loadedTypes = [1]bool{ gmq.withMemoryMetrics != nil, } ) + if gmq.withMemoryMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, garbagemetrics.ForeignKeys...) + } _spec.ScanValues = func(columns []string) ([]any, error) { return (*GarbageMetrics).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (gmq *GarbageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := gmq.withMemoryMetrics; query != nil { - if err := gmq.loadMemoryMetrics(ctx, query, nodes, - func(n *GarbageMetrics) { n.Edges.MemoryMetrics = []*MemoryMetrics{} }, - func(n *GarbageMetrics, e *MemoryMetrics) { n.Edges.MemoryMetrics = append(n.Edges.MemoryMetrics, e) }); err != nil { - return nil, err - } - } - for name, query := range gmq.withNamedMemoryMetrics { - if err := gmq.loadMemoryMetrics(ctx, query, nodes, - func(n *GarbageMetrics) { n.appendNamedMemoryMetrics(name) }, - func(n *GarbageMetrics, e *MemoryMetrics) { n.appendNamedMemoryMetrics(name, e) }); err != nil { + if err := gmq.loadMemoryMetrics(ctx, query, nodes, nil, + func(n *GarbageMetrics, e *MemoryMetrics) { n.Edges.MemoryMetrics = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (gmq *GarbageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (gmq *GarbageMetricsQuery) loadMemoryMetrics(ctx context.Context, query *MemoryMetricsQuery, nodes []*GarbageMetrics, init func(*GarbageMetrics), assign func(*GarbageMetrics, *MemoryMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*GarbageMetrics) - nids := make(map[int]map[*GarbageMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*GarbageMetrics) + for i := range nodes { + if nodes[i].memory_metrics_garbage_metrics == nil { + continue } + fk := *nodes[i].memory_metrics_garbage_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(garbagemetrics.MemoryMetricsTable) - s.Join(joinT).On(s.C(memorymetrics.FieldID), joinT.C(garbagemetrics.MemoryMetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(garbagemetrics.MemoryMetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(garbagemetrics.MemoryMetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*GarbageMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*MemoryMetrics](ctx, query, qr, query.inters) + query.Where(memorymetrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "memory_metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "memory_metrics_garbage_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (gmq *GarbageMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMemoryMetrics tells the query-builder to eager-load the nodes that are connected to the "memory_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (gmq *GarbageMetricsQuery) WithNamedMemoryMetrics(name string, opts ...func(*MemoryMetricsQuery)) *GarbageMetricsQuery { - query := (&MemoryMetricsClient{config: gmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if gmq.withNamedMemoryMetrics == nil { - gmq.withNamedMemoryMetrics = make(map[string]*MemoryMetricsQuery) - } - gmq.withNamedMemoryMetrics[name] = query - return gmq -} - // GarbageMetricsGroupBy is the group-by builder for GarbageMetrics entities. type GarbageMetricsGroupBy struct { selector diff --git a/ent/gen/ent/garbagemetrics_update.go b/ent/gen/ent/garbagemetrics_update.go index df213c3..8246068 100644 --- a/ent/gen/ent/garbagemetrics_update.go +++ b/ent/gen/ent/garbagemetrics_update.go @@ -75,19 +75,23 @@ func (gmu *GarbageMetricsUpdate) ClearGarbageCollected() *GarbageMetricsUpdate { return gmu } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (gmu *GarbageMetricsUpdate) AddMemoryMetricIDs(ids ...int) *GarbageMetricsUpdate { - gmu.mutation.AddMemoryMetricIDs(ids...) +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. +func (gmu *GarbageMetricsUpdate) SetMemoryMetricsID(id int) *GarbageMetricsUpdate { + gmu.mutation.SetMemoryMetricsID(id) return gmu } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. -func (gmu *GarbageMetricsUpdate) AddMemoryMetrics(m ...*MemoryMetrics) *GarbageMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. 
+func (gmu *GarbageMetricsUpdate) SetNillableMemoryMetricsID(id *int) *GarbageMetricsUpdate { + if id != nil { + gmu = gmu.SetMemoryMetricsID(*id) } - return gmu.AddMemoryMetricIDs(ids...) + return gmu +} + +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (gmu *GarbageMetricsUpdate) SetMemoryMetrics(m *MemoryMetrics) *GarbageMetricsUpdate { + return gmu.SetMemoryMetricsID(m.ID) } // Mutation returns the GarbageMetricsMutation object of the builder. @@ -95,27 +99,12 @@ func (gmu *GarbageMetricsUpdate) Mutation() *GarbageMetricsMutation { return gmu.mutation } -// ClearMemoryMetrics clears all "memory_metrics" edges to the MemoryMetrics entity. +// ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. func (gmu *GarbageMetricsUpdate) ClearMemoryMetrics() *GarbageMetricsUpdate { gmu.mutation.ClearMemoryMetrics() return gmu } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to MemoryMetrics entities by IDs. -func (gmu *GarbageMetricsUpdate) RemoveMemoryMetricIDs(ids ...int) *GarbageMetricsUpdate { - gmu.mutation.RemoveMemoryMetricIDs(ids...) - return gmu -} - -// RemoveMemoryMetrics removes "memory_metrics" edges to MemoryMetrics entities. -func (gmu *GarbageMetricsUpdate) RemoveMemoryMetrics(m ...*MemoryMetrics) *GarbageMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return gmu.RemoveMemoryMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (gmu *GarbageMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, gmu.sqlSave, gmu.mutation, gmu.hooks) @@ -169,39 +158,23 @@ func (gmu *GarbageMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if gmu.mutation.MemoryMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := gmu.mutation.RemovedMemoryMetricsIDs(); len(nodes) > 0 && !gmu.mutation.MemoryMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, + Columns: []string{garbagemetrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := gmu.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, + Columns: []string{garbagemetrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), @@ -279,19 +252,23 @@ func (gmuo *GarbageMetricsUpdateOne) ClearGarbageCollected() *GarbageMetricsUpda return gmuo } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (gmuo *GarbageMetricsUpdateOne) AddMemoryMetricIDs(ids ...int) *GarbageMetricsUpdateOne { - gmuo.mutation.AddMemoryMetricIDs(ids...) 
+// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. +func (gmuo *GarbageMetricsUpdateOne) SetMemoryMetricsID(id int) *GarbageMetricsUpdateOne { + gmuo.mutation.SetMemoryMetricsID(id) return gmuo } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. -func (gmuo *GarbageMetricsUpdateOne) AddMemoryMetrics(m ...*MemoryMetrics) *GarbageMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. +func (gmuo *GarbageMetricsUpdateOne) SetNillableMemoryMetricsID(id *int) *GarbageMetricsUpdateOne { + if id != nil { + gmuo = gmuo.SetMemoryMetricsID(*id) } - return gmuo.AddMemoryMetricIDs(ids...) + return gmuo +} + +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (gmuo *GarbageMetricsUpdateOne) SetMemoryMetrics(m *MemoryMetrics) *GarbageMetricsUpdateOne { + return gmuo.SetMemoryMetricsID(m.ID) } // Mutation returns the GarbageMetricsMutation object of the builder. @@ -299,27 +276,12 @@ func (gmuo *GarbageMetricsUpdateOne) Mutation() *GarbageMetricsMutation { return gmuo.mutation } -// ClearMemoryMetrics clears all "memory_metrics" edges to the MemoryMetrics entity. +// ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. func (gmuo *GarbageMetricsUpdateOne) ClearMemoryMetrics() *GarbageMetricsUpdateOne { gmuo.mutation.ClearMemoryMetrics() return gmuo } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to MemoryMetrics entities by IDs. -func (gmuo *GarbageMetricsUpdateOne) RemoveMemoryMetricIDs(ids ...int) *GarbageMetricsUpdateOne { - gmuo.mutation.RemoveMemoryMetricIDs(ids...) - return gmuo -} - -// RemoveMemoryMetrics removes "memory_metrics" edges to MemoryMetrics entities. -func (gmuo *GarbageMetricsUpdateOne) RemoveMemoryMetrics(m ...*MemoryMetrics) *GarbageMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return gmuo.RemoveMemoryMetricIDs(ids...) -} - // Where appends a list predicates to the GarbageMetricsUpdate builder. func (gmuo *GarbageMetricsUpdateOne) Where(ps ...predicate.GarbageMetrics) *GarbageMetricsUpdateOne { gmuo.mutation.Where(ps...) 
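The GarbageMetrics side follows the same pattern, except the regenerated edge is M2O: each GarbageMetrics row belongs to at most one MemoryMetrics parent through the memory_metrics_garbage_metrics column, while a MemoryMetrics may still own many GarbageMetrics. A brief sketch, with the client wiring, function name, and field values assumed for illustration:

package example

import (
	"context"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
)

// attachGarbageMetrics creates a GarbageMetrics row owned by one MemoryMetrics
// parent; the collector name and byte count are illustrative values.
func attachGarbageMetrics(ctx context.Context, client *ent.Client, mm *ent.MemoryMetrics) (*ent.GarbageMetrics, error) {
	return client.GarbageMetrics.Create().
		SetType("PS Old Gen").     // garbage collector type (string field)
		SetGarbageCollected(1024). // bytes collected (int64 field)
		SetMemoryMetrics(mm).      // previously AddMemoryMetrics(mm)
		Save(ctx)
}

// Reading back from the child side, gm.Edges.MemoryMetrics is now a single
// *ent.MemoryMetrics (nil when the edge is unset) instead of a slice.
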
@@ -403,39 +365,23 @@ func (gmuo *GarbageMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Garbag } if gmuo.mutation.MemoryMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := gmuo.mutation.RemovedMemoryMetricsIDs(); len(nodes) > 0 && !gmuo.mutation.MemoryMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, + Columns: []string{garbagemetrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := gmuo.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: garbagemetrics.MemoryMetricsTable, - Columns: garbagemetrics.MemoryMetricsPrimaryKey, + Columns: []string{garbagemetrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/gql_collection.go b/ent/gen/ent/gql_collection.go index 3cd8f05..40445ea 100644 --- a/ent/gen/ent/gql_collection.go +++ b/ent/gen/ent/gql_collection.go @@ -5,6 +5,7 @@ package ent import ( "context" + "entgo.io/contrib/entgql" "github.com/99designs/gqlgen/graphql" "github.com/buildbarn/bb-portal/ent/gen/ent/actioncachestatistics" "github.com/buildbarn/bb-portal/ent/gen/ent/actiondata" @@ -70,12 +71,10 @@ func (acs *ActionCacheStatisticsQuery) collectField(ctx context.Context, oneNode path = append(path, alias) query = (&ActionSummaryClient{config: acs.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { return err } - acs.WithNamedActionSummary(alias, func(wq *ActionSummaryQuery) { - *wq = *query - }) + acs.withActionSummary = query case "missDetails": var ( @@ -183,12 +182,10 @@ func (ad *ActionDataQuery) collectField(ctx context.Context, oneNode bool, opCtx path = append(path, alias) query = (&ActionSummaryClient{config: ad.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { return err } - ad.WithNamedActionSummary(alias, func(wq *ActionSummaryQuery) { - *wq = *query - }) + ad.withActionSummary = query case "mnemonic": if _, ok := fieldSeen[actiondata.FieldMnemonic]; !ok { selectedFields = append(selectedFields, actiondata.FieldMnemonic) @@ -330,12 +327,10 @@ func (as *ActionSummaryQuery) collectField(ctx context.Context, oneNode bool, op path = append(path, alias) query = (&ActionCacheStatisticsClient{config: as.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, 
mayAddCondition(satisfies, actioncachestatisticsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actioncachestatisticsImplementors)...); err != nil { return err } - as.WithNamedActionCacheStatistics(alias, func(wq *ActionCacheStatisticsQuery) { - *wq = *query - }) + as.withActionCacheStatistics = query case "actionsCreated": if _, ok := fieldSeen[actionsummary.FieldActionsCreated]; !ok { selectedFields = append(selectedFields, actionsummary.FieldActionsCreated) @@ -420,12 +415,10 @@ func (am *ArtifactMetricsQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&MetricsClient{config: am.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - am.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + am.withMetrics = query case "sourceArtifactsRead": var ( @@ -433,12 +426,10 @@ func (am *ArtifactMetricsQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&FilesMetricClient{config: am.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { return err } - am.WithNamedSourceArtifactsRead(alias, func(wq *FilesMetricQuery) { - *wq = *query - }) + am.withSourceArtifactsRead = query case "outputArtifactsSeen": var ( @@ -446,12 +437,10 @@ func (am *ArtifactMetricsQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&FilesMetricClient{config: am.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { return err } - am.WithNamedOutputArtifactsSeen(alias, func(wq *FilesMetricQuery) { - *wq = *query - }) + am.withOutputArtifactsSeen = query case "outputArtifactsFromActionCache": var ( @@ -459,12 +448,10 @@ func (am *ArtifactMetricsQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&FilesMetricClient{config: am.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { return err } - am.WithNamedOutputArtifactsFromActionCache(alias, func(wq *FilesMetricQuery) { - *wq = *query - }) + am.withOutputArtifactsFromActionCache = query case "topLevelArtifacts": var ( @@ -472,12 +459,10 @@ func (am *ArtifactMetricsQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&FilesMetricClient{config: am.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, filesmetricImplementors)...); err != nil { return err } - am.WithNamedTopLevelArtifacts(alias, 
func(wq *FilesMetricQuery) { - *wq = *query - }) + am.withTopLevelArtifacts = query } } return nil @@ -697,6 +682,28 @@ func newBazelInvocationPaginateArgs(rv map[string]any) *bazelinvocationPaginateA if v := rv[beforeField]; v != nil { args.before = v.(*Cursor) } + if v, ok := rv[orderByField]; ok { + switch v := v.(type) { + case map[string]any: + var ( + err1, err2 error + order = &BazelInvocationOrder{Field: &BazelInvocationOrderField{}, Direction: entgql.OrderDirectionAsc} + ) + if d, ok := v[directionField]; ok { + err1 = order.Direction.UnmarshalGQL(d) + } + if f, ok := v[fieldField]; ok { + err2 = order.Field.UnmarshalGQL(f) + } + if err1 == nil && err2 == nil { + args.opts = append(args.opts, WithBazelInvocationOrder(order)) + } + case *BazelInvocationOrder: + if v != nil { + args.opts = append(args.opts, WithBazelInvocationOrder(v)) + } + } + } if v, ok := rv[whereField].(*BazelInvocationWhereInput); ok { args.opts = append(args.opts, WithBazelInvocationFilter(v.Filter)) } @@ -986,12 +993,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx context.Context, oneNode boo path = append(path, alias) query = (&MetricsClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - bgm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + bgm.withMetrics = query case "dirtiedValues": var ( @@ -999,12 +1004,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx context.Context, oneNode boo path = append(path, alias) query = (&EvaluationStatClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { return err } - bgm.WithNamedDirtiedValues(alias, func(wq *EvaluationStatQuery) { - *wq = *query - }) + bgm.withDirtiedValues = query case "changedValues": var ( @@ -1012,12 +1015,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx context.Context, oneNode boo path = append(path, alias) query = (&EvaluationStatClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { return err } - bgm.WithNamedChangedValues(alias, func(wq *EvaluationStatQuery) { - *wq = *query - }) + bgm.withChangedValues = query case "builtValues": var ( @@ -1025,12 +1026,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx context.Context, oneNode boo path = append(path, alias) query = (&EvaluationStatClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { return err } - bgm.WithNamedBuiltValues(alias, func(wq *EvaluationStatQuery) { - *wq = *query - }) + bgm.withBuiltValues = query case "cleanedValues": var ( @@ -1038,12 +1037,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx 
context.Context, oneNode boo path = append(path, alias) query = (&EvaluationStatClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { return err } - bgm.WithNamedCleanedValues(alias, func(wq *EvaluationStatQuery) { - *wq = *query - }) + bgm.withCleanedValues = query case "evaluatedValues": var ( @@ -1051,12 +1048,10 @@ func (bgm *BuildGraphMetricsQuery) collectField(ctx context.Context, oneNode boo path = append(path, alias) query = (&EvaluationStatClient{config: bgm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, evaluationstatImplementors)...); err != nil { return err } - bgm.WithNamedEvaluatedValues(alias, func(wq *EvaluationStatQuery) { - *wq = *query - }) + bgm.withEvaluatedValues = query case "actionLookupValueCount": if _, ok := fieldSeen[buildgraphmetrics.FieldActionLookupValueCount]; !ok { selectedFields = append(selectedFields, buildgraphmetrics.FieldActionLookupValueCount) @@ -1171,12 +1166,10 @@ func (cm *CumulativeMetricsQuery) collectField(ctx context.Context, oneNode bool path = append(path, alias) query = (&MetricsClient{config: cm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - cm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + cm.withMetrics = query case "numAnalyses": if _, ok := fieldSeen[cumulativemetrics.FieldNumAnalyses]; !ok { selectedFields = append(selectedFields, cumulativemetrics.FieldNumAnalyses) @@ -1251,12 +1244,10 @@ func (dem *DynamicExecutionMetricsQuery) collectField(ctx context.Context, oneNo path = append(path, alias) query = (&MetricsClient{config: dem.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - dem.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + dem.withMetrics = query case "raceStatistics": var ( @@ -1332,12 +1323,10 @@ func (es *EvaluationStatQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&BuildGraphMetricsClient{config: es.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, buildgraphmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, buildgraphmetricsImplementors)...); err != nil { return err } - es.WithNamedBuildGraphMetrics(alias, func(wq *BuildGraphMetricsQuery) { - *wq = *query - }) + es.withBuildGraphMetrics = query case "skyfunctionName": if _, ok := fieldSeen[evaluationstat.FieldSkyfunctionName]; !ok { selectedFields = append(selectedFields, evaluationstat.FieldSkyfunctionName) @@ -1520,12 +1509,10 @@ func (ei *ExectionInfoQuery) collectField(ctx context.Context, oneNode bool, opC 
path = append(path, alias) query = (&TestResultBESClient{config: ei.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, testresultbesImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, testresultbesImplementors)...); err != nil { return err } - ei.WithNamedTestResult(alias, func(wq *TestResultBESQuery) { - *wq = *query - }) + ei.withTestResult = query case "timingBreakdown": var ( @@ -1644,12 +1631,10 @@ func (fm *FilesMetricQuery) collectField(ctx context.Context, oneNode bool, opCt path = append(path, alias) query = (&ArtifactMetricsClient{config: fm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, artifactmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, artifactmetricsImplementors)...); err != nil { return err } - fm.WithNamedArtifactMetrics(alias, func(wq *ArtifactMetricsQuery) { - *wq = *query - }) + fm.withArtifactMetrics = query case "sizeInBytes": if _, ok := fieldSeen[filesmetric.FieldSizeInBytes]; !ok { selectedFields = append(selectedFields, filesmetric.FieldSizeInBytes) @@ -1729,12 +1714,10 @@ func (gm *GarbageMetricsQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&MemoryMetricsClient{config: gm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, memorymetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, memorymetricsImplementors)...); err != nil { return err } - gm.WithNamedMemoryMetrics(alias, func(wq *MemoryMetricsQuery) { - *wq = *query - }) + gm.withMemoryMetrics = query case "type": if _, ok := fieldSeen[garbagemetrics.FieldType]; !ok { selectedFields = append(selectedFields, garbagemetrics.FieldType) @@ -1814,12 +1797,10 @@ func (mm *MemoryMetricsQuery) collectField(ctx context.Context, oneNode bool, op path = append(path, alias) query = (&MetricsClient{config: mm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - mm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + mm.withMetrics = query case "garbageMetrics": var ( @@ -1923,12 +1904,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&ActionSummaryClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { return err } - m.WithNamedActionSummary(alias, func(wq *ActionSummaryQuery) { - *wq = *query - }) + m.withActionSummary = query case "memoryMetrics": var ( @@ -1936,12 +1915,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&MemoryMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, memorymetricsImplementors)...); err != nil { + if err := 
query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, memorymetricsImplementors)...); err != nil { return err } - m.WithNamedMemoryMetrics(alias, func(wq *MemoryMetricsQuery) { - *wq = *query - }) + m.withMemoryMetrics = query case "targetMetrics": var ( @@ -1949,12 +1926,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&TargetMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, targetmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, targetmetricsImplementors)...); err != nil { return err } - m.WithNamedTargetMetrics(alias, func(wq *TargetMetricsQuery) { - *wq = *query - }) + m.withTargetMetrics = query case "packageMetrics": var ( @@ -1962,12 +1937,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&PackageMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, packagemetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, packagemetricsImplementors)...); err != nil { return err } - m.WithNamedPackageMetrics(alias, func(wq *PackageMetricsQuery) { - *wq = *query - }) + m.withPackageMetrics = query case "timingMetrics": var ( @@ -1975,12 +1948,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&TimingMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, timingmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, timingmetricsImplementors)...); err != nil { return err } - m.WithNamedTimingMetrics(alias, func(wq *TimingMetricsQuery) { - *wq = *query - }) + m.withTimingMetrics = query case "cumulativeMetrics": var ( @@ -1988,12 +1959,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&CumulativeMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, cumulativemetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, cumulativemetricsImplementors)...); err != nil { return err } - m.WithNamedCumulativeMetrics(alias, func(wq *CumulativeMetricsQuery) { - *wq = *query - }) + m.withCumulativeMetrics = query case "artifactMetrics": var ( @@ -2001,12 +1970,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&ArtifactMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, artifactmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, artifactmetricsImplementors)...); err != nil { return err } - m.WithNamedArtifactMetrics(alias, func(wq *ArtifactMetricsQuery) { - *wq = *query - }) + m.withArtifactMetrics = query case "networkMetrics": var ( @@ -2014,12 +1981,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, 
alias) query = (&NetworkMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, networkmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, networkmetricsImplementors)...); err != nil { return err } - m.WithNamedNetworkMetrics(alias, func(wq *NetworkMetricsQuery) { - *wq = *query - }) + m.withNetworkMetrics = query case "dynamicExecutionMetrics": var ( @@ -2027,12 +1992,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&DynamicExecutionMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, dynamicexecutionmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, dynamicexecutionmetricsImplementors)...); err != nil { return err } - m.WithNamedDynamicExecutionMetrics(alias, func(wq *DynamicExecutionMetricsQuery) { - *wq = *query - }) + m.withDynamicExecutionMetrics = query case "buildGraphMetrics": var ( @@ -2040,12 +2003,10 @@ func (m *MetricsQuery) collectField(ctx context.Context, oneNode bool, opCtx *gr path = append(path, alias) query = (&BuildGraphMetricsClient{config: m.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, buildgraphmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, buildgraphmetricsImplementors)...); err != nil { return err } - m.WithNamedBuildGraphMetrics(alias, func(wq *BuildGraphMetricsQuery) { - *wq = *query - }) + m.withBuildGraphMetrics = query } } return nil @@ -2108,12 +2069,10 @@ func (md *MissDetailQuery) collectField(ctx context.Context, oneNode bool, opCtx path = append(path, alias) query = (&ActionCacheStatisticsClient{config: md.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, actioncachestatisticsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actioncachestatisticsImplementors)...); err != nil { return err } - md.WithNamedActionCacheStatistics(alias, func(wq *ActionCacheStatisticsQuery) { - *wq = *query - }) + md.withActionCacheStatistics = query case "reason": if _, ok := fieldSeen[missdetail.FieldReason]; !ok { selectedFields = append(selectedFields, missdetail.FieldReason) @@ -2188,12 +2147,10 @@ func (nsof *NamedSetOfFilesQuery) collectField(ctx context.Context, oneNode bool path = append(path, alias) query = (&OutputGroupClient{config: nsof.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, outputgroupImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, outputgroupImplementors)...); err != nil { return err } - nsof.WithNamedOutputGroup(alias, func(wq *OutputGroupQuery) { - *wq = *query - }) + nsof.withOutputGroup = query case "files": var ( @@ -2275,12 +2232,10 @@ func (nm *NetworkMetricsQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&MetricsClient{config: nm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := 
query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - nm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + nm.withMetrics = query case "systemNetworkStats": var ( @@ -2288,12 +2243,10 @@ func (nm *NetworkMetricsQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&SystemNetworkStatsClient{config: nm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, systemnetworkstatsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, systemnetworkstatsImplementors)...); err != nil { return err } - nm.WithNamedSystemNetworkStats(alias, func(wq *SystemNetworkStatsQuery) { - *wq = *query - }) + nm.withSystemNetworkStats = query } } return nil @@ -2356,12 +2309,10 @@ func (og *OutputGroupQuery) collectField(ctx context.Context, oneNode bool, opCt path = append(path, alias) query = (&TargetCompleteClient{config: og.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, targetcompleteImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, targetcompleteImplementors)...); err != nil { return err } - og.WithNamedTargetComplete(alias, func(wq *TargetCompleteQuery) { - *wq = *query - }) + og.withTargetComplete = query case "inlineFiles": var ( @@ -2465,12 +2416,10 @@ func (plm *PackageLoadMetricsQuery) collectField(ctx context.Context, oneNode bo path = append(path, alias) query = (&PackageMetricsClient{config: plm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, packagemetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, packagemetricsImplementors)...); err != nil { return err } - plm.WithNamedPackageMetrics(alias, func(wq *PackageMetricsQuery) { - *wq = *query - }) + plm.withPackageMetrics = query case "name": if _, ok := fieldSeen[packageloadmetrics.FieldName]; !ok { selectedFields = append(selectedFields, packageloadmetrics.FieldName) @@ -2570,12 +2519,10 @@ func (pm *PackageMetricsQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&MetricsClient{config: pm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - pm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + pm.withMetrics = query case "packageLoadMetrics": var ( @@ -2663,12 +2610,10 @@ func (rs *RaceStatisticsQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&DynamicExecutionMetricsClient{config: rs.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, dynamicexecutionmetricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, dynamicexecutionmetricsImplementors)...); err != nil { return err } - rs.WithNamedDynamicExecutionMetrics(alias, func(wq *DynamicExecutionMetricsQuery) { - *wq = *query - }) + rs.withDynamicExecutionMetrics = query case "mnemonic": if _, ok := 
fieldSeen[racestatistics.FieldMnemonic]; !ok { selectedFields = append(selectedFields, racestatistics.FieldMnemonic) @@ -2763,12 +2708,10 @@ func (ru *ResourceUsageQuery) collectField(ctx context.Context, oneNode bool, op path = append(path, alias) query = (&ExectionInfoClient{config: ru.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, exectioninfoImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, exectioninfoImplementors)...); err != nil { return err } - ru.WithNamedExecutionInfo(alias, func(wq *ExectionInfoQuery) { - *wq = *query - }) + ru.withExecutionInfo = query case "name": if _, ok := fieldSeen[resourceusage.FieldName]; !ok { selectedFields = append(selectedFields, resourceusage.FieldName) @@ -2848,12 +2791,10 @@ func (rc *RunnerCountQuery) collectField(ctx context.Context, oneNode bool, opCt path = append(path, alias) query = (&ActionSummaryClient{config: rc.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, actionsummaryImplementors)...); err != nil { return err } - rc.WithNamedActionSummary(alias, func(wq *ActionSummaryQuery) { - *wq = *query - }) + rc.withActionSummary = query case "name": if _, ok := fieldSeen[runnercount.FieldName]; !ok { selectedFields = append(selectedFields, runnercount.FieldName) @@ -3051,12 +2992,10 @@ func (tc *TargetCompleteQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&TargetPairClient{config: tc.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, targetpairImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, targetpairImplementors)...); err != nil { return err } - tc.WithNamedTargetPair(alias, func(wq *TargetPairQuery) { - *wq = *query - }) + tc.withTargetPair = query case "importantOutput": var ( @@ -3198,12 +3137,10 @@ func (tc *TargetConfiguredQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&TargetPairClient{config: tc.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, targetpairImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, targetpairImplementors)...); err != nil { return err } - tc.WithNamedTargetPair(alias, func(wq *TargetPairQuery) { - *wq = *query - }) + tc.withTargetPair = query case "tag": if _, ok := fieldSeen[targetconfigured.FieldTag]; !ok { selectedFields = append(selectedFields, targetconfigured.FieldTag) @@ -3293,12 +3230,10 @@ func (tm *TargetMetricsQuery) collectField(ctx context.Context, oneNode bool, op path = append(path, alias) query = (&MetricsClient{config: tm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - tm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + tm.withMetrics = query case "targetsLoaded": if _, ok := fieldSeen[targetmetrics.FieldTargetsLoaded]; !ok { selectedFields = 
append(selectedFields, targetmetrics.FieldTargetsLoaded) @@ -3383,12 +3318,10 @@ func (tp *TargetPairQuery) collectField(ctx context.Context, oneNode bool, opCtx path = append(path, alias) query = (&BazelInvocationClient{config: tp.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, bazelinvocationImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, bazelinvocationImplementors)...); err != nil { return err } - tp.WithNamedBazelInvocation(alias, func(wq *BazelInvocationQuery) { - *wq = *query - }) + tp.withBazelInvocation = query case "configuration": var ( @@ -3510,12 +3443,10 @@ func (tc *TestCollectionQuery) collectField(ctx context.Context, oneNode bool, o path = append(path, alias) query = (&BazelInvocationClient{config: tc.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, bazelinvocationImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, bazelinvocationImplementors)...); err != nil { return err } - tc.WithNamedBazelInvocation(alias, func(wq *BazelInvocationQuery) { - *wq = *query - }) + tc.withBazelInvocation = query case "testSummary": var ( @@ -3565,6 +3496,11 @@ func (tc *TestCollectionQuery) collectField(ctx context.Context, oneNode bool, o selectedFields = append(selectedFields, testcollection.FieldCachedRemotely) fieldSeen[testcollection.FieldCachedRemotely] = struct{}{} } + case "firstSeen": + if _, ok := fieldSeen[testcollection.FieldFirstSeen]; !ok { + selectedFields = append(selectedFields, testcollection.FieldFirstSeen) + fieldSeen[testcollection.FieldFirstSeen] = struct{}{} + } case "durationMs": if _, ok := fieldSeen[testcollection.FieldDurationMs]; !ok { selectedFields = append(selectedFields, testcollection.FieldDurationMs) @@ -3605,6 +3541,28 @@ func newTestCollectionPaginateArgs(rv map[string]any) *testcollectionPaginateArg if v := rv[beforeField]; v != nil { args.before = v.(*Cursor) } + if v, ok := rv[orderByField]; ok { + switch v := v.(type) { + case map[string]any: + var ( + err1, err2 error + order = &TestCollectionOrder{Field: &TestCollectionOrderField{}, Direction: entgql.OrderDirectionAsc} + ) + if d, ok := v[directionField]; ok { + err1 = order.Direction.UnmarshalGQL(d) + } + if f, ok := v[fieldField]; ok { + err2 = order.Field.UnmarshalGQL(f) + } + if err1 == nil && err2 == nil { + args.opts = append(args.opts, WithTestCollectionOrder(order)) + } + case *TestCollectionOrder: + if v != nil { + args.opts = append(args.opts, WithTestCollectionOrder(v)) + } + } + } if v, ok := rv[whereField].(*TestCollectionWhereInput); ok { args.opts = append(args.opts, WithTestCollectionFilter(v.Filter)) } @@ -3639,12 +3597,10 @@ func (tf *TestFileQuery) collectField(ctx context.Context, oneNode bool, opCtx * path = append(path, alias) query = (&TestResultBESClient{config: tf.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, testresultbesImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, testresultbesImplementors)...); err != nil { return err } - tf.WithNamedTestResult(alias, func(wq *TestResultBESQuery) { - *wq = *query - }) + tf.withTestResult = query case "digest": if _, ok := fieldSeen[testfile.FieldDigest]; !ok { selectedFields = append(selectedFields, testfile.FieldDigest) @@ -3881,12 
+3837,10 @@ func (ts *TestSummaryQuery) collectField(ctx context.Context, oneNode bool, opCt path = append(path, alias) query = (&TestCollectionClient{config: ts.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, testcollectionImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, testcollectionImplementors)...); err != nil { return err } - ts.WithNamedTestCollection(alias, func(wq *TestCollectionQuery) { - *wq = *query - }) + ts.withTestCollection = query case "passed": var ( @@ -4032,12 +3986,10 @@ func (tb *TimingBreakdownQuery) collectField(ctx context.Context, oneNode bool, path = append(path, alias) query = (&ExectionInfoClient{config: tb.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, exectioninfoImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, exectioninfoImplementors)...); err != nil { return err } - tb.WithNamedExecutionInfo(alias, func(wq *ExectionInfoQuery) { - *wq = *query - }) + tb.withExecutionInfo = query case "child": var ( @@ -4130,12 +4082,10 @@ func (tc *TimingChildQuery) collectField(ctx context.Context, oneNode bool, opCt path = append(path, alias) query = (&TimingBreakdownClient{config: tc.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, timingbreakdownImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, timingbreakdownImplementors)...); err != nil { return err } - tc.WithNamedTimingBreakdown(alias, func(wq *TimingBreakdownQuery) { - *wq = *query - }) + tc.withTimingBreakdown = query case "name": if _, ok := fieldSeen[timingchild.FieldName]; !ok { selectedFields = append(selectedFields, timingchild.FieldName) @@ -4215,12 +4165,10 @@ func (tm *TimingMetricsQuery) collectField(ctx context.Context, oneNode bool, op path = append(path, alias) query = (&MetricsClient{config: tm.config}).Query() ) - if err := query.collectField(ctx, false, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { + if err := query.collectField(ctx, oneNode, opCtx, field, path, mayAddCondition(satisfies, metricsImplementors)...); err != nil { return err } - tm.WithNamedMetrics(alias, func(wq *MetricsQuery) { - *wq = *query - }) + tm.withMetrics = query case "cpuTimeInMs": if _, ok := fieldSeen[timingmetrics.FieldCPUTimeInMs]; !ok { selectedFields = append(selectedFields, timingmetrics.FieldCPUTimeInMs) diff --git a/ent/gen/ent/gql_edge.go b/ent/gen/ent/gql_edge.go index c9b7e4e..11ae294 100644 --- a/ent/gen/ent/gql_edge.go +++ b/ent/gen/ent/gql_edge.go @@ -8,16 +8,12 @@ import ( "github.com/99designs/gqlgen/graphql" ) -func (acs *ActionCacheStatistics) ActionSummary(ctx context.Context) (result []*ActionSummary, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = acs.NamedActionSummary(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = acs.Edges.ActionSummaryOrErr() - } +func (acs *ActionCacheStatistics) ActionSummary(ctx context.Context) (*ActionSummary, error) { + result, err := acs.Edges.ActionSummaryOrErr() if IsNotLoaded(err) { - result, err = acs.QueryActionSummary().All(ctx) + result, err = acs.QueryActionSummary().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (acs 
*ActionCacheStatistics) MissDetails(ctx context.Context) (result []*MissDetail, err error) { @@ -32,16 +28,12 @@ func (acs *ActionCacheStatistics) MissDetails(ctx context.Context) (result []*Mi return result, err } -func (ad *ActionData) ActionSummary(ctx context.Context) (result []*ActionSummary, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = ad.NamedActionSummary(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = ad.Edges.ActionSummaryOrErr() - } +func (ad *ActionData) ActionSummary(ctx context.Context) (*ActionSummary, error) { + result, err := ad.Edges.ActionSummaryOrErr() if IsNotLoaded(err) { - result, err = ad.QueryActionSummary().All(ctx) + result, err = ad.QueryActionSummary().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (as *ActionSummary) Metrics(ctx context.Context) (*Metrics, error) { @@ -76,76 +68,52 @@ func (as *ActionSummary) RunnerCount(ctx context.Context) (result []*RunnerCount return result, err } -func (as *ActionSummary) ActionCacheStatistics(ctx context.Context) (result []*ActionCacheStatistics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = as.NamedActionCacheStatistics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = as.Edges.ActionCacheStatisticsOrErr() - } +func (as *ActionSummary) ActionCacheStatistics(ctx context.Context) (*ActionCacheStatistics, error) { + result, err := as.Edges.ActionCacheStatisticsOrErr() if IsNotLoaded(err) { - result, err = as.QueryActionCacheStatistics().All(ctx) + result, err = as.QueryActionCacheStatistics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (am *ArtifactMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = am.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = am.Edges.MetricsOrErr() - } +func (am *ArtifactMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := am.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = am.QueryMetrics().All(ctx) + result, err = am.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (am *ArtifactMetrics) SourceArtifactsRead(ctx context.Context) (result []*FilesMetric, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = am.NamedSourceArtifactsRead(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = am.Edges.SourceArtifactsReadOrErr() - } +func (am *ArtifactMetrics) SourceArtifactsRead(ctx context.Context) (*FilesMetric, error) { + result, err := am.Edges.SourceArtifactsReadOrErr() if IsNotLoaded(err) { - result, err = am.QuerySourceArtifactsRead().All(ctx) + result, err = am.QuerySourceArtifactsRead().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (am *ArtifactMetrics) OutputArtifactsSeen(ctx context.Context) (result []*FilesMetric, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = am.NamedOutputArtifactsSeen(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = am.Edges.OutputArtifactsSeenOrErr() - } +func (am *ArtifactMetrics) OutputArtifactsSeen(ctx context.Context) (*FilesMetric, error) { + result, err := am.Edges.OutputArtifactsSeenOrErr() if IsNotLoaded(err) { - result, err = 
am.QueryOutputArtifactsSeen().All(ctx) + result, err = am.QueryOutputArtifactsSeen().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (am *ArtifactMetrics) OutputArtifactsFromActionCache(ctx context.Context) (result []*FilesMetric, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = am.NamedOutputArtifactsFromActionCache(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = am.Edges.OutputArtifactsFromActionCacheOrErr() - } +func (am *ArtifactMetrics) OutputArtifactsFromActionCache(ctx context.Context) (*FilesMetric, error) { + result, err := am.Edges.OutputArtifactsFromActionCacheOrErr() if IsNotLoaded(err) { - result, err = am.QueryOutputArtifactsFromActionCache().All(ctx) + result, err = am.QueryOutputArtifactsFromActionCache().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (am *ArtifactMetrics) TopLevelArtifacts(ctx context.Context) (result []*FilesMetric, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = am.NamedTopLevelArtifacts(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = am.Edges.TopLevelArtifactsOrErr() - } +func (am *ArtifactMetrics) TopLevelArtifacts(ctx context.Context) (*FilesMetric, error) { + result, err := am.Edges.TopLevelArtifactsOrErr() if IsNotLoaded(err) { - result, err = am.QueryTopLevelArtifacts().All(ctx) + result, err = am.QueryTopLevelArtifacts().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (bi *BazelInvocation) EventFile(ctx context.Context) (*EventFile, error) { @@ -216,100 +184,68 @@ func (b *Build) Invocations(ctx context.Context) (result []*BazelInvocation, err return result, err } -func (bgm *BuildGraphMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.MetricsOrErr() - } +func (bgm *BuildGraphMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := bgm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = bgm.QueryMetrics().All(ctx) + result, err = bgm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (bgm *BuildGraphMetrics) DirtiedValues(ctx context.Context) (result []*EvaluationStat, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedDirtiedValues(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.DirtiedValuesOrErr() - } +func (bgm *BuildGraphMetrics) DirtiedValues(ctx context.Context) (*EvaluationStat, error) { + result, err := bgm.Edges.DirtiedValuesOrErr() if IsNotLoaded(err) { - result, err = bgm.QueryDirtiedValues().All(ctx) + result, err = bgm.QueryDirtiedValues().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (bgm *BuildGraphMetrics) ChangedValues(ctx context.Context) (result []*EvaluationStat, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedChangedValues(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.ChangedValuesOrErr() - } +func (bgm *BuildGraphMetrics) ChangedValues(ctx context.Context) (*EvaluationStat, error) { + result, err := bgm.Edges.ChangedValuesOrErr() if IsNotLoaded(err) { - result, err = 
bgm.QueryChangedValues().All(ctx) + result, err = bgm.QueryChangedValues().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (bgm *BuildGraphMetrics) BuiltValues(ctx context.Context) (result []*EvaluationStat, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedBuiltValues(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.BuiltValuesOrErr() - } +func (bgm *BuildGraphMetrics) BuiltValues(ctx context.Context) (*EvaluationStat, error) { + result, err := bgm.Edges.BuiltValuesOrErr() if IsNotLoaded(err) { - result, err = bgm.QueryBuiltValues().All(ctx) + result, err = bgm.QueryBuiltValues().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (bgm *BuildGraphMetrics) CleanedValues(ctx context.Context) (result []*EvaluationStat, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedCleanedValues(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.CleanedValuesOrErr() - } +func (bgm *BuildGraphMetrics) CleanedValues(ctx context.Context) (*EvaluationStat, error) { + result, err := bgm.Edges.CleanedValuesOrErr() if IsNotLoaded(err) { - result, err = bgm.QueryCleanedValues().All(ctx) + result, err = bgm.QueryCleanedValues().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (bgm *BuildGraphMetrics) EvaluatedValues(ctx context.Context) (result []*EvaluationStat, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = bgm.NamedEvaluatedValues(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = bgm.Edges.EvaluatedValuesOrErr() - } +func (bgm *BuildGraphMetrics) EvaluatedValues(ctx context.Context) (*EvaluationStat, error) { + result, err := bgm.Edges.EvaluatedValuesOrErr() if IsNotLoaded(err) { - result, err = bgm.QueryEvaluatedValues().All(ctx) + result, err = bgm.QueryEvaluatedValues().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (cm *CumulativeMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = cm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = cm.Edges.MetricsOrErr() - } +func (cm *CumulativeMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := cm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = cm.QueryMetrics().All(ctx) + result, err = cm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (dem *DynamicExecutionMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = dem.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = dem.Edges.MetricsOrErr() - } +func (dem *DynamicExecutionMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := dem.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = dem.QueryMetrics().All(ctx) + result, err = dem.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (dem *DynamicExecutionMetrics) RaceStatistics(ctx context.Context) (result []*RaceStatistics, err error) { @@ -324,16 +260,12 @@ func (dem *DynamicExecutionMetrics) RaceStatistics(ctx context.Context) (result return result, err } -func (es *EvaluationStat) 
BuildGraphMetrics(ctx context.Context) (result []*BuildGraphMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = es.NamedBuildGraphMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = es.Edges.BuildGraphMetricsOrErr() - } +func (es *EvaluationStat) BuildGraphMetrics(ctx context.Context) (*BuildGraphMetrics, error) { + result, err := es.Edges.BuildGraphMetricsOrErr() if IsNotLoaded(err) { - result, err = es.QueryBuildGraphMetrics().All(ctx) + result, err = es.QueryBuildGraphMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (ef *EventFile) BazelInvocation(ctx context.Context) (*BazelInvocation, error) { @@ -344,16 +276,12 @@ func (ef *EventFile) BazelInvocation(ctx context.Context) (*BazelInvocation, err return result, MaskNotFound(err) } -func (ei *ExectionInfo) TestResult(ctx context.Context) (result []*TestResultBES, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = ei.NamedTestResult(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = ei.Edges.TestResultOrErr() - } +func (ei *ExectionInfo) TestResult(ctx context.Context) (*TestResultBES, error) { + result, err := ei.Edges.TestResultOrErr() if IsNotLoaded(err) { - result, err = ei.QueryTestResult().All(ctx) + result, err = ei.QueryTestResult().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (ei *ExectionInfo) TimingBreakdown(ctx context.Context) (*TimingBreakdown, error) { @@ -376,40 +304,28 @@ func (ei *ExectionInfo) ResourceUsage(ctx context.Context) (result []*ResourceUs return result, err } -func (fm *FilesMetric) ArtifactMetrics(ctx context.Context) (result []*ArtifactMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = fm.NamedArtifactMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = fm.Edges.ArtifactMetricsOrErr() - } +func (fm *FilesMetric) ArtifactMetrics(ctx context.Context) (*ArtifactMetrics, error) { + result, err := fm.Edges.ArtifactMetricsOrErr() if IsNotLoaded(err) { - result, err = fm.QueryArtifactMetrics().All(ctx) + result, err = fm.QueryArtifactMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (gm *GarbageMetrics) MemoryMetrics(ctx context.Context) (result []*MemoryMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = gm.NamedMemoryMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = gm.Edges.MemoryMetricsOrErr() - } +func (gm *GarbageMetrics) MemoryMetrics(ctx context.Context) (*MemoryMetrics, error) { + result, err := gm.Edges.MemoryMetricsOrErr() if IsNotLoaded(err) { - result, err = gm.QueryMemoryMetrics().All(ctx) + result, err = gm.QueryMemoryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (mm *MemoryMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = mm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = mm.Edges.MetricsOrErr() - } +func (mm *MemoryMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := mm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = mm.QueryMetrics().All(ctx) + result, err = mm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } 
func (mm *MemoryMetrics) GarbageMetrics(ctx context.Context) (result []*GarbageMetrics, err error) { @@ -432,148 +348,100 @@ func (m *Metrics) BazelInvocation(ctx context.Context) (*BazelInvocation, error) return result, MaskNotFound(err) } -func (m *Metrics) ActionSummary(ctx context.Context) (result []*ActionSummary, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedActionSummary(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.ActionSummaryOrErr() - } +func (m *Metrics) ActionSummary(ctx context.Context) (*ActionSummary, error) { + result, err := m.Edges.ActionSummaryOrErr() if IsNotLoaded(err) { - result, err = m.QueryActionSummary().All(ctx) + result, err = m.QueryActionSummary().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) MemoryMetrics(ctx context.Context) (result []*MemoryMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedMemoryMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.MemoryMetricsOrErr() - } +func (m *Metrics) MemoryMetrics(ctx context.Context) (*MemoryMetrics, error) { + result, err := m.Edges.MemoryMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryMemoryMetrics().All(ctx) + result, err = m.QueryMemoryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) TargetMetrics(ctx context.Context) (result []*TargetMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedTargetMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.TargetMetricsOrErr() - } +func (m *Metrics) TargetMetrics(ctx context.Context) (*TargetMetrics, error) { + result, err := m.Edges.TargetMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryTargetMetrics().All(ctx) + result, err = m.QueryTargetMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) PackageMetrics(ctx context.Context) (result []*PackageMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedPackageMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.PackageMetricsOrErr() - } +func (m *Metrics) PackageMetrics(ctx context.Context) (*PackageMetrics, error) { + result, err := m.Edges.PackageMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryPackageMetrics().All(ctx) + result, err = m.QueryPackageMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) TimingMetrics(ctx context.Context) (result []*TimingMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedTimingMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.TimingMetricsOrErr() - } +func (m *Metrics) TimingMetrics(ctx context.Context) (*TimingMetrics, error) { + result, err := m.Edges.TimingMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryTimingMetrics().All(ctx) + result, err = m.QueryTimingMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) CumulativeMetrics(ctx context.Context) (result []*CumulativeMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = 
m.NamedCumulativeMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.CumulativeMetricsOrErr() - } +func (m *Metrics) CumulativeMetrics(ctx context.Context) (*CumulativeMetrics, error) { + result, err := m.Edges.CumulativeMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryCumulativeMetrics().All(ctx) + result, err = m.QueryCumulativeMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) ArtifactMetrics(ctx context.Context) (result []*ArtifactMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedArtifactMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.ArtifactMetricsOrErr() - } +func (m *Metrics) ArtifactMetrics(ctx context.Context) (*ArtifactMetrics, error) { + result, err := m.Edges.ArtifactMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryArtifactMetrics().All(ctx) + result, err = m.QueryArtifactMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) NetworkMetrics(ctx context.Context) (result []*NetworkMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedNetworkMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.NetworkMetricsOrErr() - } +func (m *Metrics) NetworkMetrics(ctx context.Context) (*NetworkMetrics, error) { + result, err := m.Edges.NetworkMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryNetworkMetrics().All(ctx) + result, err = m.QueryNetworkMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) DynamicExecutionMetrics(ctx context.Context) (result []*DynamicExecutionMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedDynamicExecutionMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.DynamicExecutionMetricsOrErr() - } +func (m *Metrics) DynamicExecutionMetrics(ctx context.Context) (*DynamicExecutionMetrics, error) { + result, err := m.Edges.DynamicExecutionMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryDynamicExecutionMetrics().All(ctx) + result, err = m.QueryDynamicExecutionMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (m *Metrics) BuildGraphMetrics(ctx context.Context) (result []*BuildGraphMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = m.NamedBuildGraphMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = m.Edges.BuildGraphMetricsOrErr() - } +func (m *Metrics) BuildGraphMetrics(ctx context.Context) (*BuildGraphMetrics, error) { + result, err := m.Edges.BuildGraphMetricsOrErr() if IsNotLoaded(err) { - result, err = m.QueryBuildGraphMetrics().All(ctx) + result, err = m.QueryBuildGraphMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (md *MissDetail) ActionCacheStatistics(ctx context.Context) (result []*ActionCacheStatistics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = md.NamedActionCacheStatistics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = md.Edges.ActionCacheStatisticsOrErr() - } +func (md *MissDetail) ActionCacheStatistics(ctx context.Context) (*ActionCacheStatistics, error) { + result, err := 
md.Edges.ActionCacheStatisticsOrErr() if IsNotLoaded(err) { - result, err = md.QueryActionCacheStatistics().All(ctx) + result, err = md.QueryActionCacheStatistics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (nsof *NamedSetOfFiles) OutputGroup(ctx context.Context) (result []*OutputGroup, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = nsof.NamedOutputGroup(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = nsof.Edges.OutputGroupOrErr() - } +func (nsof *NamedSetOfFiles) OutputGroup(ctx context.Context) (*OutputGroup, error) { + result, err := nsof.Edges.OutputGroupOrErr() if IsNotLoaded(err) { - result, err = nsof.QueryOutputGroup().All(ctx) + result, err = nsof.QueryOutputGroup().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (nsof *NamedSetOfFiles) Files(ctx context.Context) (result []*TestFile, err error) { @@ -596,40 +464,28 @@ func (nsof *NamedSetOfFiles) FileSets(ctx context.Context) (*NamedSetOfFiles, er return result, MaskNotFound(err) } -func (nm *NetworkMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = nm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = nm.Edges.MetricsOrErr() - } +func (nm *NetworkMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := nm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = nm.QueryMetrics().All(ctx) + result, err = nm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (nm *NetworkMetrics) SystemNetworkStats(ctx context.Context) (result []*SystemNetworkStats, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = nm.NamedSystemNetworkStats(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = nm.Edges.SystemNetworkStatsOrErr() - } +func (nm *NetworkMetrics) SystemNetworkStats(ctx context.Context) (*SystemNetworkStats, error) { + result, err := nm.Edges.SystemNetworkStatsOrErr() if IsNotLoaded(err) { - result, err = nm.QuerySystemNetworkStats().All(ctx) + result, err = nm.QuerySystemNetworkStats().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (og *OutputGroup) TargetComplete(ctx context.Context) (result []*TargetComplete, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = og.NamedTargetComplete(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = og.Edges.TargetCompleteOrErr() - } +func (og *OutputGroup) TargetComplete(ctx context.Context) (*TargetComplete, error) { + result, err := og.Edges.TargetCompleteOrErr() if IsNotLoaded(err) { - result, err = og.QueryTargetComplete().All(ctx) + result, err = og.QueryTargetComplete().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (og *OutputGroup) InlineFiles(ctx context.Context) (result []*TestFile, err error) { @@ -652,28 +508,20 @@ func (og *OutputGroup) FileSets(ctx context.Context) (*NamedSetOfFiles, error) { return result, MaskNotFound(err) } -func (plm *PackageLoadMetrics) PackageMetrics(ctx context.Context) (result []*PackageMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = plm.NamedPackageMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = plm.Edges.PackageMetricsOrErr() - } 
+func (plm *PackageLoadMetrics) PackageMetrics(ctx context.Context) (*PackageMetrics, error) { + result, err := plm.Edges.PackageMetricsOrErr() if IsNotLoaded(err) { - result, err = plm.QueryPackageMetrics().All(ctx) + result, err = plm.QueryPackageMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (pm *PackageMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = pm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = pm.Edges.MetricsOrErr() - } +func (pm *PackageMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := pm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = pm.QueryMetrics().All(ctx) + result, err = pm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (pm *PackageMetrics) PackageLoadMetrics(ctx context.Context) (result []*PackageLoadMetrics, err error) { @@ -688,40 +536,28 @@ func (pm *PackageMetrics) PackageLoadMetrics(ctx context.Context) (result []*Pac return result, err } -func (rs *RaceStatistics) DynamicExecutionMetrics(ctx context.Context) (result []*DynamicExecutionMetrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = rs.NamedDynamicExecutionMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = rs.Edges.DynamicExecutionMetricsOrErr() - } +func (rs *RaceStatistics) DynamicExecutionMetrics(ctx context.Context) (*DynamicExecutionMetrics, error) { + result, err := rs.Edges.DynamicExecutionMetricsOrErr() if IsNotLoaded(err) { - result, err = rs.QueryDynamicExecutionMetrics().All(ctx) + result, err = rs.QueryDynamicExecutionMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (ru *ResourceUsage) ExecutionInfo(ctx context.Context) (result []*ExectionInfo, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = ru.NamedExecutionInfo(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = ru.Edges.ExecutionInfoOrErr() - } +func (ru *ResourceUsage) ExecutionInfo(ctx context.Context) (*ExectionInfo, error) { + result, err := ru.Edges.ExecutionInfoOrErr() if IsNotLoaded(err) { - result, err = ru.QueryExecutionInfo().All(ctx) + result, err = ru.QueryExecutionInfo().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (rc *RunnerCount) ActionSummary(ctx context.Context) (result []*ActionSummary, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = rc.NamedActionSummary(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = rc.Edges.ActionSummaryOrErr() - } +func (rc *RunnerCount) ActionSummary(ctx context.Context) (*ActionSummary, error) { + result, err := rc.Edges.ActionSummaryOrErr() if IsNotLoaded(err) { - result, err = rc.QueryActionSummary().All(ctx) + result, err = rc.QueryActionSummary().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (sns *SystemNetworkStats) NetworkMetrics(ctx context.Context) (*NetworkMetrics, error) { @@ -732,16 +568,12 @@ func (sns *SystemNetworkStats) NetworkMetrics(ctx context.Context) (*NetworkMetr return result, MaskNotFound(err) } -func (tc *TargetComplete) TargetPair(ctx context.Context) (result []*TargetPair, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = 
tc.NamedTargetPair(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tc.Edges.TargetPairOrErr() - } +func (tc *TargetComplete) TargetPair(ctx context.Context) (*TargetPair, error) { + result, err := tc.Edges.TargetPairOrErr() if IsNotLoaded(err) { - result, err = tc.QueryTargetPair().All(ctx) + result, err = tc.QueryTargetPair().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (tc *TargetComplete) ImportantOutput(ctx context.Context) (result []*TestFile, err error) { @@ -776,40 +608,28 @@ func (tc *TargetComplete) OutputGroup(ctx context.Context) (*OutputGroup, error) return result, MaskNotFound(err) } -func (tc *TargetConfigured) TargetPair(ctx context.Context) (result []*TargetPair, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tc.NamedTargetPair(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tc.Edges.TargetPairOrErr() - } +func (tc *TargetConfigured) TargetPair(ctx context.Context) (*TargetPair, error) { + result, err := tc.Edges.TargetPairOrErr() if IsNotLoaded(err) { - result, err = tc.QueryTargetPair().All(ctx) + result, err = tc.QueryTargetPair().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (tm *TargetMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tm.Edges.MetricsOrErr() - } +func (tm *TargetMetrics) Metrics(ctx context.Context) (*Metrics, error) { + result, err := tm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = tm.QueryMetrics().All(ctx) + result, err = tm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (tp *TargetPair) BazelInvocation(ctx context.Context) (result []*BazelInvocation, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tp.NamedBazelInvocation(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tp.Edges.BazelInvocationOrErr() - } +func (tp *TargetPair) BazelInvocation(ctx context.Context) (*BazelInvocation, error) { + result, err := tp.Edges.BazelInvocationOrErr() if IsNotLoaded(err) { - result, err = tp.QueryBazelInvocation().All(ctx) + result, err = tp.QueryBazelInvocation().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (tp *TargetPair) Configuration(ctx context.Context) (*TargetConfigured, error) { @@ -828,16 +648,12 @@ func (tp *TargetPair) Completion(ctx context.Context) (*TargetComplete, error) { return result, MaskNotFound(err) } -func (tc *TestCollection) BazelInvocation(ctx context.Context) (result []*BazelInvocation, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tc.NamedBazelInvocation(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tc.Edges.BazelInvocationOrErr() - } +func (tc *TestCollection) BazelInvocation(ctx context.Context) (*BazelInvocation, error) { + result, err := tc.Edges.BazelInvocationOrErr() if IsNotLoaded(err) { - result, err = tc.QueryBazelInvocation().All(ctx) + result, err = tc.QueryBazelInvocation().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (tc *TestCollection) TestSummary(ctx context.Context) (*TestSummary, error) { @@ -860,16 +676,12 @@ func (tc *TestCollection) TestResults(ctx context.Context) (result []*TestResult 
return result, err } -func (tf *TestFile) TestResult(ctx context.Context) (result []*TestResultBES, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tf.NamedTestResult(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tf.Edges.TestResultOrErr() - } +func (tf *TestFile) TestResult(ctx context.Context) (*TestResultBES, error) { + result, err := tf.Edges.TestResultOrErr() if IsNotLoaded(err) { - result, err = tf.QueryTestResult().All(ctx) + result, err = tf.QueryTestResult().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (trb *TestResultBES) TestCollection(ctx context.Context) (*TestCollection, error) { @@ -900,16 +712,12 @@ func (trb *TestResultBES) ExecutionInfo(ctx context.Context) (*ExectionInfo, err return result, MaskNotFound(err) } -func (ts *TestSummary) TestCollection(ctx context.Context) (result []*TestCollection, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = ts.NamedTestCollection(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = ts.Edges.TestCollectionOrErr() - } +func (ts *TestSummary) TestCollection(ctx context.Context) (*TestCollection, error) { + result, err := ts.Edges.TestCollectionOrErr() if IsNotLoaded(err) { - result, err = ts.QueryTestCollection().All(ctx) + result, err = ts.QueryTestCollection().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (ts *TestSummary) Passed(ctx context.Context) (result []*TestFile, err error) { @@ -936,16 +744,12 @@ func (ts *TestSummary) Failed(ctx context.Context) (result []*TestFile, err erro return result, err } -func (tb *TimingBreakdown) ExecutionInfo(ctx context.Context) (result []*ExectionInfo, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tb.NamedExecutionInfo(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tb.Edges.ExecutionInfoOrErr() - } +func (tb *TimingBreakdown) ExecutionInfo(ctx context.Context) (*ExectionInfo, error) { + result, err := tb.Edges.ExecutionInfoOrErr() if IsNotLoaded(err) { - result, err = tb.QueryExecutionInfo().All(ctx) + result, err = tb.QueryExecutionInfo().Only(ctx) } - return result, err + return result, MaskNotFound(err) } func (tb *TimingBreakdown) Child(ctx context.Context) (result []*TimingChild, err error) { @@ -960,26 +764,18 @@ func (tb *TimingBreakdown) Child(ctx context.Context) (result []*TimingChild, er return result, err } -func (tc *TimingChild) TimingBreakdown(ctx context.Context) (result []*TimingBreakdown, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tc.NamedTimingBreakdown(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tc.Edges.TimingBreakdownOrErr() - } +func (tc *TimingChild) TimingBreakdown(ctx context.Context) (*TimingBreakdown, error) { + result, err := tc.Edges.TimingBreakdownOrErr() if IsNotLoaded(err) { - result, err = tc.QueryTimingBreakdown().All(ctx) + result, err = tc.QueryTimingBreakdown().Only(ctx) } - return result, err + return result, MaskNotFound(err) } -func (tm *TimingMetrics) Metrics(ctx context.Context) (result []*Metrics, err error) { - if fc := graphql.GetFieldContext(ctx); fc != nil && fc.Field.Alias != "" { - result, err = tm.NamedMetrics(graphql.GetFieldContext(ctx).Field.Alias) - } else { - result, err = tm.Edges.MetricsOrErr() - } +func (tm *TimingMetrics) Metrics(ctx context.Context) (*Metrics, 
error) { + result, err := tm.Edges.MetricsOrErr() if IsNotLoaded(err) { - result, err = tm.QueryMetrics().All(ctx) + result, err = tm.QueryMetrics().Only(ctx) } - return result, err + return result, MaskNotFound(err) } diff --git a/ent/gen/ent/gql_pagination.go b/ent/gen/ent/gql_pagination.go index f26b96d..c0c7d46 100644 --- a/ent/gen/ent/gql_pagination.go +++ b/ent/gen/ent/gql_pagination.go @@ -5,6 +5,9 @@ package ent import ( "context" "errors" + "fmt" + "io" + "strconv" "entgo.io/contrib/entgql" "entgo.io/ent" @@ -1337,6 +1340,71 @@ func (bi *BazelInvocationQuery) Paginate( return conn, nil } +var ( + // BazelInvocationOrderFieldStartedAt orders BazelInvocation by started_at. + BazelInvocationOrderFieldStartedAt = &BazelInvocationOrderField{ + Value: func(bi *BazelInvocation) (ent.Value, error) { + return bi.StartedAt, nil + }, + column: bazelinvocation.FieldStartedAt, + toTerm: bazelinvocation.ByStartedAt, + toCursor: func(bi *BazelInvocation) Cursor { + return Cursor{ + ID: bi.ID, + Value: bi.StartedAt, + } + }, + } + // BazelInvocationOrderFieldUserLdap orders BazelInvocation by user_ldap. + BazelInvocationOrderFieldUserLdap = &BazelInvocationOrderField{ + Value: func(bi *BazelInvocation) (ent.Value, error) { + return bi.UserLdap, nil + }, + column: bazelinvocation.FieldUserLdap, + toTerm: bazelinvocation.ByUserLdap, + toCursor: func(bi *BazelInvocation) Cursor { + return Cursor{ + ID: bi.ID, + Value: bi.UserLdap, + } + }, + } +) + +// String implement fmt.Stringer interface. +func (f BazelInvocationOrderField) String() string { + var str string + switch f.column { + case BazelInvocationOrderFieldStartedAt.column: + str = "STARTED_AT" + case BazelInvocationOrderFieldUserLdap.column: + str = "USER_LDAP" + } + return str +} + +// MarshalGQL implements graphql.Marshaler interface. +func (f BazelInvocationOrderField) MarshalGQL(w io.Writer) { + io.WriteString(w, strconv.Quote(f.String())) +} + +// UnmarshalGQL implements graphql.Unmarshaler interface. +func (f *BazelInvocationOrderField) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("BazelInvocationOrderField %T must be a string", v) + } + switch str { + case "STARTED_AT": + *f = *BazelInvocationOrderFieldStartedAt + case "USER_LDAP": + *f = *BazelInvocationOrderFieldUserLdap + default: + return fmt.Errorf("%s is not a valid BazelInvocationOrderField", str) + } + return nil +} + // BazelInvocationOrderField defines the ordering field of BazelInvocation. type BazelInvocationOrderField struct { // Value extracts the ordering value from the given BazelInvocation. @@ -8309,6 +8377,71 @@ func (tc *TestCollectionQuery) Paginate( return conn, nil } +var ( + // TestCollectionOrderFieldFirstSeen orders TestCollection by first_seen. + TestCollectionOrderFieldFirstSeen = &TestCollectionOrderField{ + Value: func(tc *TestCollection) (ent.Value, error) { + return tc.FirstSeen, nil + }, + column: testcollection.FieldFirstSeen, + toTerm: testcollection.ByFirstSeen, + toCursor: func(tc *TestCollection) Cursor { + return Cursor{ + ID: tc.ID, + Value: tc.FirstSeen, + } + }, + } + // TestCollectionOrderFieldDurationMs orders TestCollection by duration_ms. 
+ TestCollectionOrderFieldDurationMs = &TestCollectionOrderField{ + Value: func(tc *TestCollection) (ent.Value, error) { + return tc.DurationMs, nil + }, + column: testcollection.FieldDurationMs, + toTerm: testcollection.ByDurationMs, + toCursor: func(tc *TestCollection) Cursor { + return Cursor{ + ID: tc.ID, + Value: tc.DurationMs, + } + }, + } +) + +// String implement fmt.Stringer interface. +func (f TestCollectionOrderField) String() string { + var str string + switch f.column { + case TestCollectionOrderFieldFirstSeen.column: + str = "FIRST_SEEN" + case TestCollectionOrderFieldDurationMs.column: + str = "DURATION" + } + return str +} + +// MarshalGQL implements graphql.Marshaler interface. +func (f TestCollectionOrderField) MarshalGQL(w io.Writer) { + io.WriteString(w, strconv.Quote(f.String())) +} + +// UnmarshalGQL implements graphql.Unmarshaler interface. +func (f *TestCollectionOrderField) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("TestCollectionOrderField %T must be a string", v) + } + switch str { + case "FIRST_SEEN": + *f = *TestCollectionOrderFieldFirstSeen + case "DURATION": + *f = *TestCollectionOrderFieldDurationMs + default: + return fmt.Errorf("%s is not a valid TestCollectionOrderField", str) + } + return nil +} + // TestCollectionOrderField defines the ordering field of TestCollection. type TestCollectionOrderField struct { // Value extracts the ordering value from the given TestCollection. diff --git a/ent/gen/ent/gql_where_input.go b/ent/gen/ent/gql_where_input.go index 6dc8282..345bb61 100644 --- a/ent/gen/ent/gql_where_input.go +++ b/ent/gen/ent/gql_where_input.go @@ -11077,6 +11077,18 @@ type TestCollectionWhereInput struct { CachedRemotelyIsNil bool `json:"cachedRemotelyIsNil,omitempty"` CachedRemotelyNotNil bool `json:"cachedRemotelyNotNil,omitempty"` + // "first_seen" field predicates. + FirstSeen *time.Time `json:"firstSeen,omitempty"` + FirstSeenNEQ *time.Time `json:"firstSeenNEQ,omitempty"` + FirstSeenIn []time.Time `json:"firstSeenIn,omitempty"` + FirstSeenNotIn []time.Time `json:"firstSeenNotIn,omitempty"` + FirstSeenGT *time.Time `json:"firstSeenGT,omitempty"` + FirstSeenGTE *time.Time `json:"firstSeenGTE,omitempty"` + FirstSeenLT *time.Time `json:"firstSeenLT,omitempty"` + FirstSeenLTE *time.Time `json:"firstSeenLTE,omitempty"` + FirstSeenIsNil bool `json:"firstSeenIsNil,omitempty"` + FirstSeenNotNil bool `json:"firstSeenNotNil,omitempty"` + // "duration_ms" field predicates. 
DurationMs *int64 `json:"durationMs,omitempty"` DurationMsNEQ *int64 `json:"durationMsNEQ,omitempty"` @@ -11329,6 +11341,36 @@ func (i *TestCollectionWhereInput) P() (predicate.TestCollection, error) { if i.CachedRemotelyNotNil { predicates = append(predicates, testcollection.CachedRemotelyNotNil()) } + if i.FirstSeen != nil { + predicates = append(predicates, testcollection.FirstSeenEQ(*i.FirstSeen)) + } + if i.FirstSeenNEQ != nil { + predicates = append(predicates, testcollection.FirstSeenNEQ(*i.FirstSeenNEQ)) + } + if len(i.FirstSeenIn) > 0 { + predicates = append(predicates, testcollection.FirstSeenIn(i.FirstSeenIn...)) + } + if len(i.FirstSeenNotIn) > 0 { + predicates = append(predicates, testcollection.FirstSeenNotIn(i.FirstSeenNotIn...)) + } + if i.FirstSeenGT != nil { + predicates = append(predicates, testcollection.FirstSeenGT(*i.FirstSeenGT)) + } + if i.FirstSeenGTE != nil { + predicates = append(predicates, testcollection.FirstSeenGTE(*i.FirstSeenGTE)) + } + if i.FirstSeenLT != nil { + predicates = append(predicates, testcollection.FirstSeenLT(*i.FirstSeenLT)) + } + if i.FirstSeenLTE != nil { + predicates = append(predicates, testcollection.FirstSeenLTE(*i.FirstSeenLTE)) + } + if i.FirstSeenIsNil { + predicates = append(predicates, testcollection.FirstSeenIsNil()) + } + if i.FirstSeenNotNil { + predicates = append(predicates, testcollection.FirstSeenNotNil()) + } if i.DurationMs != nil { predicates = append(predicates, testcollection.DurationMsEQ(*i.DurationMs)) } diff --git a/ent/gen/ent/memorymetrics.go b/ent/gen/ent/memorymetrics.go index 604c0f1..70f7bca 100644 --- a/ent/gen/ent/memorymetrics.go +++ b/ent/gen/ent/memorymetrics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/memorymetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" ) // MemoryMetrics is the model entity for the MemoryMetrics schema. @@ -24,14 +25,15 @@ type MemoryMetrics struct { PeakPostGcTenuredSpaceHeapSize int64 `json:"peak_post_gc_tenured_space_heap_size,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the MemoryMetricsQuery when eager-loading is set. - Edges MemoryMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges MemoryMetricsEdges `json:"edges"` + metrics_memory_metrics *int + selectValues sql.SelectValues } // MemoryMetricsEdges holds the relations/edges for other nodes in the graph. type MemoryMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // GarbageMetrics holds the value of the garbage_metrics edge. GarbageMetrics []*GarbageMetrics `json:"garbage_metrics,omitempty"` // loadedTypes holds the information for reporting if a @@ -40,15 +42,16 @@ type MemoryMetricsEdges struct { // totalCount holds the count of the edges above. totalCount [2]map[string]int - namedMetrics map[string][]*Metrics namedGarbageMetrics map[string][]*GarbageMetrics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e MemoryMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e MemoryMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -69,6 +72,8 @@ func (*MemoryMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case memorymetrics.FieldID, memorymetrics.FieldPeakPostGcHeapSize, memorymetrics.FieldUsedHeapSizePostBuild, memorymetrics.FieldPeakPostGcTenuredSpaceHeapSize: values[i] = new(sql.NullInt64) + case memorymetrics.ForeignKeys[0]: // metrics_memory_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -108,6 +113,13 @@ func (mm *MemoryMetrics) assignValues(columns []string, values []any) error { } else if value.Valid { mm.PeakPostGcTenuredSpaceHeapSize = value.Int64 } + case memorymetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_memory_metrics", value) + } else if value.Valid { + mm.metrics_memory_metrics = new(int) + *mm.metrics_memory_metrics = int(value.Int64) + } default: mm.selectValues.Set(columns[i], values[i]) } @@ -166,30 +178,6 @@ func (mm *MemoryMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (mm *MemoryMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if mm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := mm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (mm *MemoryMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if mm.Edges.namedMetrics == nil { - mm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - mm.Edges.namedMetrics[name] = []*Metrics{} - } else { - mm.Edges.namedMetrics[name] = append(mm.Edges.namedMetrics[name], edges...) - } -} - // NamedGarbageMetrics returns the GarbageMetrics named value or an error if the edge was not // loaded in eager-loading with this name. func (mm *MemoryMetrics) NamedGarbageMetrics(name string) ([]*GarbageMetrics, error) { diff --git a/ent/gen/ent/memorymetrics/memorymetrics.go b/ent/gen/ent/memorymetrics/memorymetrics.go index 76b67eb..ce480f1 100644 --- a/ent/gen/ent/memorymetrics/memorymetrics.go +++ b/ent/gen/ent/memorymetrics/memorymetrics.go @@ -24,16 +24,20 @@ const ( EdgeGarbageMetrics = "garbage_metrics" // Table holds the table name of the memorymetrics in the database. Table = "memory_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_memory_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "memory_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" - // GarbageMetricsTable is the table that holds the garbage_metrics relation/edge. The primary key declared below. - GarbageMetricsTable = "memory_metrics_garbage_metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_memory_metrics" + // GarbageMetricsTable is the table that holds the garbage_metrics relation/edge. 
+ GarbageMetricsTable = "garbage_metrics" // GarbageMetricsInverseTable is the table name for the GarbageMetrics entity. // It exists in this package in order to avoid circular dependency with the "garbagemetrics" package. GarbageMetricsInverseTable = "garbage_metrics" + // GarbageMetricsColumn is the table column denoting the garbage_metrics relation/edge. + GarbageMetricsColumn = "memory_metrics_garbage_metrics" ) // Columns holds all SQL columns for memorymetrics fields. @@ -44,14 +48,11 @@ var Columns = []string{ FieldPeakPostGcTenuredSpaceHeapSize, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "memory_metrics_id"} - // GarbageMetricsPrimaryKey and GarbageMetricsColumn2 are the table columns denoting the - // primary key for the garbage_metrics relation (M2M). - GarbageMetricsPrimaryKey = []string{"memory_metrics_id", "garbage_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "memory_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_memory_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -60,6 +61,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -86,17 +92,10 @@ func ByPeakPostGcTenuredSpaceHeapSize(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldPeakPostGcTenuredSpaceHeapSize, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } @@ -117,13 +116,13 @@ func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newGarbageMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(GarbageMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, GarbageMetricsTable, GarbageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, GarbageMetricsTable, GarbageMetricsColumn), ) } diff --git a/ent/gen/ent/memorymetrics/where.go b/ent/gen/ent/memorymetrics/where.go index 2cfaa7b..b1bdaaa 100644 --- a/ent/gen/ent/memorymetrics/where.go +++ b/ent/gen/ent/memorymetrics/where.go @@ -223,7 +223,7 @@ func HasMetrics() predicate.MemoryMetrics { return predicate.MemoryMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -246,7 +246,7 @@ func HasGarbageMetrics() predicate.MemoryMetrics { return predicate.MemoryMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, GarbageMetricsTable, GarbageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, GarbageMetricsTable, GarbageMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/memorymetrics_create.go b/ent/gen/ent/memorymetrics_create.go index 1b7427a..c9cb51a 100644 --- a/ent/gen/ent/memorymetrics_create.go +++ b/ent/gen/ent/memorymetrics_create.go @@ -62,19 +62,23 @@ func (mmc *MemoryMetricsCreate) SetNillablePeakPostGcTenuredSpaceHeapSize(i *int return mmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (mmc *MemoryMetricsCreate) AddMetricIDs(ids ...int) *MemoryMetricsCreate { - mmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (mmc *MemoryMetricsCreate) SetMetricsID(id int) *MemoryMetricsCreate { + mmc.mutation.SetMetricsID(id) return mmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (mmc *MemoryMetricsCreate) AddMetrics(m ...*Metrics) *MemoryMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (mmc *MemoryMetricsCreate) SetNillableMetricsID(id *int) *MemoryMetricsCreate { + if id != nil { + mmc = mmc.SetMetricsID(*id) } - return mmc.AddMetricIDs(ids...) + return mmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (mmc *MemoryMetricsCreate) SetMetrics(m *Metrics) *MemoryMetricsCreate { + return mmc.SetMetricsID(m.ID) } // AddGarbageMetricIDs adds the "garbage_metrics" edge to the GarbageMetrics entity by IDs. 
@@ -166,10 +170,10 @@ func (mmc *MemoryMetricsCreate) createSpec() (*MemoryMetrics, *sqlgraph.CreateSp } if nodes := mmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, + Columns: []string{memorymetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -178,14 +182,15 @@ func (mmc *MemoryMetricsCreate) createSpec() (*MemoryMetrics, *sqlgraph.CreateSp for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_memory_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := mmc.mutation.GarbageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/memorymetrics_query.go b/ent/gen/ent/memorymetrics_query.go index e2f5b7c..8abb160 100644 --- a/ent/gen/ent/memorymetrics_query.go +++ b/ent/gen/ent/memorymetrics_query.go @@ -26,9 +26,9 @@ type MemoryMetricsQuery struct { predicates []predicate.MemoryMetrics withMetrics *MetricsQuery withGarbageMetrics *GarbageMetricsQuery + withFKs bool modifiers []func(*sql.Selector) loadTotal []func(context.Context, []*MemoryMetrics) error - withNamedMetrics map[string]*MetricsQuery withNamedGarbageMetrics map[string]*GarbageMetricsQuery // intermediate query (i.e. traversal path). sql *sql.Selector @@ -80,7 +80,7 @@ func (mmq *MemoryMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(memorymetrics.Table, memorymetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, memorymetrics.MetricsTable, memorymetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, memorymetrics.MetricsTable, memorymetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(mmq.driver.Dialect(), step) return fromU, nil @@ -102,7 +102,7 @@ func (mmq *MemoryMetricsQuery) QueryGarbageMetrics() *GarbageMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(memorymetrics.Table, memorymetrics.FieldID, selector), sqlgraph.To(garbagemetrics.Table, garbagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, memorymetrics.GarbageMetricsTable, memorymetrics.GarbageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, memorymetrics.GarbageMetricsTable, memorymetrics.GarbageMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mmq.driver.Dialect(), step) return fromU, nil @@ -409,12 +409,19 @@ func (mmq *MemoryMetricsQuery) prepareQuery(ctx context.Context) error { func (mmq *MemoryMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*MemoryMetrics, error) { var ( nodes = []*MemoryMetrics{} + withFKs = mmq.withFKs _spec = mmq.querySpec() loadedTypes = [2]bool{ mmq.withMetrics != nil, mmq.withGarbageMetrics != nil, } ) + if mmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, memorymetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*MemoryMetrics).scanValues(nil, columns) } @@ -437,9 +444,8 @@ func (mmq *MemoryMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nodes, nil } if query := mmq.withMetrics; query != nil { - if err := mmq.loadMetrics(ctx, query, nodes, - func(n *MemoryMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *MemoryMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := mmq.loadMetrics(ctx, query, nodes, nil, + func(n *MemoryMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -450,13 +456,6 @@ func (mmq *MemoryMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nil, err } } - for name, query := range mmq.withNamedMetrics { - if err := mmq.loadMetrics(ctx, query, nodes, - func(n *MemoryMetrics) { n.appendNamedMetrics(name) }, - func(n *MemoryMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } for name, query := range mmq.withNamedGarbageMetrics { if err := mmq.loadGarbageMetrics(ctx, query, nodes, func(n *MemoryMetrics) { n.appendNamedGarbageMetrics(name) }, @@ -473,124 +472,65 @@ func (mmq *MemoryMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( } func (mmq *MemoryMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*MemoryMetrics, init func(*MemoryMetrics), assign func(*MemoryMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*MemoryMetrics) - nids := make(map[int]map[*MemoryMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*MemoryMetrics) + for i := range nodes { + if nodes[i].metrics_memory_metrics == nil { + continue + } + fk := *nodes[i].metrics_memory_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(memorymetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(memorymetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(memorymetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(memorymetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*MemoryMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_memory_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (mmq *MemoryMetricsQuery) loadGarbageMetrics(ctx context.Context, query *GarbageMetricsQuery, nodes []*MemoryMetrics, init func(*MemoryMetrics), assign func(*MemoryMetrics, *GarbageMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*MemoryMetrics) - nids := make(map[int]map[*MemoryMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*MemoryMetrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(memorymetrics.GarbageMetricsTable) - s.Join(joinT).On(s.C(garbagemetrics.FieldID), joinT.C(memorymetrics.GarbageMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(memorymetrics.GarbageMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(memorymetrics.GarbageMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*MemoryMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*GarbageMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.GarbageMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(memorymetrics.GarbageMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "garbage_metrics" node returned %v`, n.ID) + fk := n.memory_metrics_garbage_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "memory_metrics_garbage_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "memory_metrics_garbage_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -679,20 +619,6 @@ func (mmq *MemoryMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mmq *MemoryMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *MemoryMetricsQuery { - query := (&MetricsClient{config: mmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mmq.withNamedMetrics == nil { - mmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - mmq.withNamedMetrics[name] = query - return mmq -} - // WithNamedGarbageMetrics tells the query-builder to eager-load the nodes that are connected to the "garbage_metrics" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (mmq *MemoryMetricsQuery) WithNamedGarbageMetrics(name string, opts ...func(*GarbageMetricsQuery)) *MemoryMetricsQuery { diff --git a/ent/gen/ent/memorymetrics_update.go b/ent/gen/ent/memorymetrics_update.go index e0cb32e..e5421da 100644 --- a/ent/gen/ent/memorymetrics_update.go +++ b/ent/gen/ent/memorymetrics_update.go @@ -110,19 +110,23 @@ func (mmu *MemoryMetricsUpdate) ClearPeakPostGcTenuredSpaceHeapSize() *MemoryMet return mmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (mmu *MemoryMetricsUpdate) AddMetricIDs(ids ...int) *MemoryMetricsUpdate { - mmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (mmu *MemoryMetricsUpdate) SetMetricsID(id int) *MemoryMetricsUpdate { + mmu.mutation.SetMetricsID(id) return mmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. 
-func (mmu *MemoryMetricsUpdate) AddMetrics(m ...*Metrics) *MemoryMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (mmu *MemoryMetricsUpdate) SetNillableMetricsID(id *int) *MemoryMetricsUpdate { + if id != nil { + mmu = mmu.SetMetricsID(*id) } - return mmu.AddMetricIDs(ids...) + return mmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (mmu *MemoryMetricsUpdate) SetMetrics(m *Metrics) *MemoryMetricsUpdate { + return mmu.SetMetricsID(m.ID) } // AddGarbageMetricIDs adds the "garbage_metrics" edge to the GarbageMetrics entity by IDs. @@ -145,27 +149,12 @@ func (mmu *MemoryMetricsUpdate) Mutation() *MemoryMetricsMutation { return mmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (mmu *MemoryMetricsUpdate) ClearMetrics() *MemoryMetricsUpdate { mmu.mutation.ClearMetrics() return mmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (mmu *MemoryMetricsUpdate) RemoveMetricIDs(ids ...int) *MemoryMetricsUpdate { - mmu.mutation.RemoveMetricIDs(ids...) - return mmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (mmu *MemoryMetricsUpdate) RemoveMetrics(m ...*Metrics) *MemoryMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return mmu.RemoveMetricIDs(ids...) -} - // ClearGarbageMetrics clears all "garbage_metrics" edges to the GarbageMetrics entity. func (mmu *MemoryMetricsUpdate) ClearGarbageMetrics() *MemoryMetricsUpdate { mmu.mutation.ClearGarbageMetrics() @@ -252,39 +241,23 @@ func (mmu *MemoryMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if mmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !mmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, + Columns: []string{memorymetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, + Columns: []string{memorymetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -297,10 +270,10 @@ func (mmu *MemoryMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if mmu.mutation.GarbageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: 
&sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), @@ -310,10 +283,10 @@ func (mmu *MemoryMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := mmu.mutation.RemovedGarbageMetricsIDs(); len(nodes) > 0 && !mmu.mutation.GarbageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), @@ -326,10 +299,10 @@ func (mmu *MemoryMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := mmu.mutation.GarbageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), @@ -441,19 +414,23 @@ func (mmuo *MemoryMetricsUpdateOne) ClearPeakPostGcTenuredSpaceHeapSize() *Memor return mmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (mmuo *MemoryMetricsUpdateOne) AddMetricIDs(ids ...int) *MemoryMetricsUpdateOne { - mmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (mmuo *MemoryMetricsUpdateOne) SetMetricsID(id int) *MemoryMetricsUpdateOne { + mmuo.mutation.SetMetricsID(id) return mmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (mmuo *MemoryMetricsUpdateOne) AddMetrics(m ...*Metrics) *MemoryMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (mmuo *MemoryMetricsUpdateOne) SetNillableMetricsID(id *int) *MemoryMetricsUpdateOne { + if id != nil { + mmuo = mmuo.SetMetricsID(*id) } - return mmuo.AddMetricIDs(ids...) + return mmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (mmuo *MemoryMetricsUpdateOne) SetMetrics(m *Metrics) *MemoryMetricsUpdateOne { + return mmuo.SetMetricsID(m.ID) } // AddGarbageMetricIDs adds the "garbage_metrics" edge to the GarbageMetrics entity by IDs. @@ -476,27 +453,12 @@ func (mmuo *MemoryMetricsUpdateOne) Mutation() *MemoryMetricsMutation { return mmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (mmuo *MemoryMetricsUpdateOne) ClearMetrics() *MemoryMetricsUpdateOne { mmuo.mutation.ClearMetrics() return mmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (mmuo *MemoryMetricsUpdateOne) RemoveMetricIDs(ids ...int) *MemoryMetricsUpdateOne { - mmuo.mutation.RemoveMetricIDs(ids...) - return mmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (mmuo *MemoryMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *MemoryMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return mmuo.RemoveMetricIDs(ids...) -} - // ClearGarbageMetrics clears all "garbage_metrics" edges to the GarbageMetrics entity. 
func (mmuo *MemoryMetricsUpdateOne) ClearGarbageMetrics() *MemoryMetricsUpdateOne { mmuo.mutation.ClearGarbageMetrics() @@ -613,39 +575,23 @@ func (mmuo *MemoryMetricsUpdateOne) sqlSave(ctx context.Context) (_node *MemoryM } if mmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !mmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, + Columns: []string{memorymetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: memorymetrics.MetricsTable, - Columns: memorymetrics.MetricsPrimaryKey, + Columns: []string{memorymetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -658,10 +604,10 @@ func (mmuo *MemoryMetricsUpdateOne) sqlSave(ctx context.Context) (_node *MemoryM } if mmuo.mutation.GarbageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), @@ -671,10 +617,10 @@ func (mmuo *MemoryMetricsUpdateOne) sqlSave(ctx context.Context) (_node *MemoryM } if nodes := mmuo.mutation.RemovedGarbageMetricsIDs(); len(nodes) > 0 && !mmuo.mutation.GarbageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), @@ -687,10 +633,10 @@ func (mmuo *MemoryMetricsUpdateOne) sqlSave(ctx context.Context) (_node *MemoryM } if nodes := mmuo.mutation.GarbageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: memorymetrics.GarbageMetricsTable, - Columns: memorymetrics.GarbageMetricsPrimaryKey, + Columns: []string{memorymetrics.GarbageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(garbagemetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/metrics.go b/ent/gen/ent/metrics.go index ab2e305..c63d767 100644 --- a/ent/gen/ent/metrics.go +++ b/ent/gen/ent/metrics.go @@ -8,8 +8,18 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/actionsummary" + "github.com/buildbarn/bb-portal/ent/gen/ent/artifactmetrics" "github.com/buildbarn/bb-portal/ent/gen/ent/bazelinvocation" + 
"github.com/buildbarn/bb-portal/ent/gen/ent/buildgraphmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/cumulativemetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/dynamicexecutionmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/memorymetrics" "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/networkmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/packagemetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/targetmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/timingmetrics" ) // Metrics is the model entity for the Metrics schema. @@ -29,41 +39,30 @@ type MetricsEdges struct { // BazelInvocation holds the value of the bazel_invocation edge. BazelInvocation *BazelInvocation `json:"bazel_invocation,omitempty"` // ActionSummary holds the value of the action_summary edge. - ActionSummary []*ActionSummary `json:"action_summary,omitempty"` + ActionSummary *ActionSummary `json:"action_summary,omitempty"` // MemoryMetrics holds the value of the memory_metrics edge. - MemoryMetrics []*MemoryMetrics `json:"memory_metrics,omitempty"` + MemoryMetrics *MemoryMetrics `json:"memory_metrics,omitempty"` // TargetMetrics holds the value of the target_metrics edge. - TargetMetrics []*TargetMetrics `json:"target_metrics,omitempty"` + TargetMetrics *TargetMetrics `json:"target_metrics,omitempty"` // PackageMetrics holds the value of the package_metrics edge. - PackageMetrics []*PackageMetrics `json:"package_metrics,omitempty"` + PackageMetrics *PackageMetrics `json:"package_metrics,omitempty"` // TimingMetrics holds the value of the timing_metrics edge. - TimingMetrics []*TimingMetrics `json:"timing_metrics,omitempty"` + TimingMetrics *TimingMetrics `json:"timing_metrics,omitempty"` // CumulativeMetrics holds the value of the cumulative_metrics edge. - CumulativeMetrics []*CumulativeMetrics `json:"cumulative_metrics,omitempty"` + CumulativeMetrics *CumulativeMetrics `json:"cumulative_metrics,omitempty"` // ArtifactMetrics holds the value of the artifact_metrics edge. - ArtifactMetrics []*ArtifactMetrics `json:"artifact_metrics,omitempty"` + ArtifactMetrics *ArtifactMetrics `json:"artifact_metrics,omitempty"` // NetworkMetrics holds the value of the network_metrics edge. - NetworkMetrics []*NetworkMetrics `json:"network_metrics,omitempty"` + NetworkMetrics *NetworkMetrics `json:"network_metrics,omitempty"` // DynamicExecutionMetrics holds the value of the dynamic_execution_metrics edge. - DynamicExecutionMetrics []*DynamicExecutionMetrics `json:"dynamic_execution_metrics,omitempty"` + DynamicExecutionMetrics *DynamicExecutionMetrics `json:"dynamic_execution_metrics,omitempty"` // BuildGraphMetrics holds the value of the build_graph_metrics edge. - BuildGraphMetrics []*BuildGraphMetrics `json:"build_graph_metrics,omitempty"` + BuildGraphMetrics *BuildGraphMetrics `json:"build_graph_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [11]bool // totalCount holds the count of the edges above. 
totalCount [11]map[string]int - - namedActionSummary map[string][]*ActionSummary - namedMemoryMetrics map[string][]*MemoryMetrics - namedTargetMetrics map[string][]*TargetMetrics - namedPackageMetrics map[string][]*PackageMetrics - namedTimingMetrics map[string][]*TimingMetrics - namedCumulativeMetrics map[string][]*CumulativeMetrics - namedArtifactMetrics map[string][]*ArtifactMetrics - namedNetworkMetrics map[string][]*NetworkMetrics - namedDynamicExecutionMetrics map[string][]*DynamicExecutionMetrics - namedBuildGraphMetrics map[string][]*BuildGraphMetrics } // BazelInvocationOrErr returns the BazelInvocation value or an error if the edge @@ -78,91 +77,111 @@ func (e MetricsEdges) BazelInvocationOrErr() (*BazelInvocation, error) { } // ActionSummaryOrErr returns the ActionSummary value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) ActionSummaryOrErr() ([]*ActionSummary, error) { - if e.loadedTypes[1] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) ActionSummaryOrErr() (*ActionSummary, error) { + if e.ActionSummary != nil { return e.ActionSummary, nil + } else if e.loadedTypes[1] { + return nil, &NotFoundError{label: actionsummary.Label} } return nil, &NotLoadedError{edge: "action_summary"} } // MemoryMetricsOrErr returns the MemoryMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) MemoryMetricsOrErr() ([]*MemoryMetrics, error) { - if e.loadedTypes[2] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) MemoryMetricsOrErr() (*MemoryMetrics, error) { + if e.MemoryMetrics != nil { return e.MemoryMetrics, nil + } else if e.loadedTypes[2] { + return nil, &NotFoundError{label: memorymetrics.Label} } return nil, &NotLoadedError{edge: "memory_metrics"} } // TargetMetricsOrErr returns the TargetMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) TargetMetricsOrErr() ([]*TargetMetrics, error) { - if e.loadedTypes[3] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) TargetMetricsOrErr() (*TargetMetrics, error) { + if e.TargetMetrics != nil { return e.TargetMetrics, nil + } else if e.loadedTypes[3] { + return nil, &NotFoundError{label: targetmetrics.Label} } return nil, &NotLoadedError{edge: "target_metrics"} } // PackageMetricsOrErr returns the PackageMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) PackageMetricsOrErr() ([]*PackageMetrics, error) { - if e.loadedTypes[4] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) PackageMetricsOrErr() (*PackageMetrics, error) { + if e.PackageMetrics != nil { return e.PackageMetrics, nil + } else if e.loadedTypes[4] { + return nil, &NotFoundError{label: packagemetrics.Label} } return nil, &NotLoadedError{edge: "package_metrics"} } // TimingMetricsOrErr returns the TimingMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) TimingMetricsOrErr() ([]*TimingMetrics, error) { - if e.loadedTypes[5] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e MetricsEdges) TimingMetricsOrErr() (*TimingMetrics, error) { + if e.TimingMetrics != nil { return e.TimingMetrics, nil + } else if e.loadedTypes[5] { + return nil, &NotFoundError{label: timingmetrics.Label} } return nil, &NotLoadedError{edge: "timing_metrics"} } // CumulativeMetricsOrErr returns the CumulativeMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) CumulativeMetricsOrErr() ([]*CumulativeMetrics, error) { - if e.loadedTypes[6] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) CumulativeMetricsOrErr() (*CumulativeMetrics, error) { + if e.CumulativeMetrics != nil { return e.CumulativeMetrics, nil + } else if e.loadedTypes[6] { + return nil, &NotFoundError{label: cumulativemetrics.Label} } return nil, &NotLoadedError{edge: "cumulative_metrics"} } // ArtifactMetricsOrErr returns the ArtifactMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) ArtifactMetricsOrErr() ([]*ArtifactMetrics, error) { - if e.loadedTypes[7] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) ArtifactMetricsOrErr() (*ArtifactMetrics, error) { + if e.ArtifactMetrics != nil { return e.ArtifactMetrics, nil + } else if e.loadedTypes[7] { + return nil, &NotFoundError{label: artifactmetrics.Label} } return nil, &NotLoadedError{edge: "artifact_metrics"} } // NetworkMetricsOrErr returns the NetworkMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) NetworkMetricsOrErr() ([]*NetworkMetrics, error) { - if e.loadedTypes[8] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) NetworkMetricsOrErr() (*NetworkMetrics, error) { + if e.NetworkMetrics != nil { return e.NetworkMetrics, nil + } else if e.loadedTypes[8] { + return nil, &NotFoundError{label: networkmetrics.Label} } return nil, &NotLoadedError{edge: "network_metrics"} } // DynamicExecutionMetricsOrErr returns the DynamicExecutionMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) DynamicExecutionMetricsOrErr() ([]*DynamicExecutionMetrics, error) { - if e.loadedTypes[9] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) DynamicExecutionMetricsOrErr() (*DynamicExecutionMetrics, error) { + if e.DynamicExecutionMetrics != nil { return e.DynamicExecutionMetrics, nil + } else if e.loadedTypes[9] { + return nil, &NotFoundError{label: dynamicexecutionmetrics.Label} } return nil, &NotLoadedError{edge: "dynamic_execution_metrics"} } // BuildGraphMetricsOrErr returns the BuildGraphMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e MetricsEdges) BuildGraphMetricsOrErr() ([]*BuildGraphMetrics, error) { - if e.loadedTypes[10] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MetricsEdges) BuildGraphMetricsOrErr() (*BuildGraphMetrics, error) { + if e.BuildGraphMetrics != nil { return e.BuildGraphMetrics, nil + } else if e.loadedTypes[10] { + return nil, &NotFoundError{label: buildgraphmetrics.Label} } return nil, &NotLoadedError{edge: "build_graph_metrics"} } @@ -299,245 +318,5 @@ func (m *Metrics) String() string { return builder.String() } -// NamedActionSummary returns the ActionSummary named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (m *Metrics) NamedActionSummary(name string) ([]*ActionSummary, error) { - if m.Edges.namedActionSummary == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedActionSummary[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedActionSummary(name string, edges ...*ActionSummary) { - if m.Edges.namedActionSummary == nil { - m.Edges.namedActionSummary = make(map[string][]*ActionSummary) - } - if len(edges) == 0 { - m.Edges.namedActionSummary[name] = []*ActionSummary{} - } else { - m.Edges.namedActionSummary[name] = append(m.Edges.namedActionSummary[name], edges...) - } -} - -// NamedMemoryMetrics returns the MemoryMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedMemoryMetrics(name string) ([]*MemoryMetrics, error) { - if m.Edges.namedMemoryMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedMemoryMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedMemoryMetrics(name string, edges ...*MemoryMetrics) { - if m.Edges.namedMemoryMetrics == nil { - m.Edges.namedMemoryMetrics = make(map[string][]*MemoryMetrics) - } - if len(edges) == 0 { - m.Edges.namedMemoryMetrics[name] = []*MemoryMetrics{} - } else { - m.Edges.namedMemoryMetrics[name] = append(m.Edges.namedMemoryMetrics[name], edges...) - } -} - -// NamedTargetMetrics returns the TargetMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedTargetMetrics(name string) ([]*TargetMetrics, error) { - if m.Edges.namedTargetMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedTargetMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedTargetMetrics(name string, edges ...*TargetMetrics) { - if m.Edges.namedTargetMetrics == nil { - m.Edges.namedTargetMetrics = make(map[string][]*TargetMetrics) - } - if len(edges) == 0 { - m.Edges.namedTargetMetrics[name] = []*TargetMetrics{} - } else { - m.Edges.namedTargetMetrics[name] = append(m.Edges.namedTargetMetrics[name], edges...) - } -} - -// NamedPackageMetrics returns the PackageMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedPackageMetrics(name string) ([]*PackageMetrics, error) { - if m.Edges.namedPackageMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedPackageMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedPackageMetrics(name string, edges ...*PackageMetrics) { - if m.Edges.namedPackageMetrics == nil { - m.Edges.namedPackageMetrics = make(map[string][]*PackageMetrics) - } - if len(edges) == 0 { - m.Edges.namedPackageMetrics[name] = []*PackageMetrics{} - } else { - m.Edges.namedPackageMetrics[name] = append(m.Edges.namedPackageMetrics[name], edges...) - } -} - -// NamedTimingMetrics returns the TimingMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (m *Metrics) NamedTimingMetrics(name string) ([]*TimingMetrics, error) { - if m.Edges.namedTimingMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedTimingMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedTimingMetrics(name string, edges ...*TimingMetrics) { - if m.Edges.namedTimingMetrics == nil { - m.Edges.namedTimingMetrics = make(map[string][]*TimingMetrics) - } - if len(edges) == 0 { - m.Edges.namedTimingMetrics[name] = []*TimingMetrics{} - } else { - m.Edges.namedTimingMetrics[name] = append(m.Edges.namedTimingMetrics[name], edges...) - } -} - -// NamedCumulativeMetrics returns the CumulativeMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedCumulativeMetrics(name string) ([]*CumulativeMetrics, error) { - if m.Edges.namedCumulativeMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedCumulativeMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedCumulativeMetrics(name string, edges ...*CumulativeMetrics) { - if m.Edges.namedCumulativeMetrics == nil { - m.Edges.namedCumulativeMetrics = make(map[string][]*CumulativeMetrics) - } - if len(edges) == 0 { - m.Edges.namedCumulativeMetrics[name] = []*CumulativeMetrics{} - } else { - m.Edges.namedCumulativeMetrics[name] = append(m.Edges.namedCumulativeMetrics[name], edges...) - } -} - -// NamedArtifactMetrics returns the ArtifactMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedArtifactMetrics(name string) ([]*ArtifactMetrics, error) { - if m.Edges.namedArtifactMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedArtifactMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedArtifactMetrics(name string, edges ...*ArtifactMetrics) { - if m.Edges.namedArtifactMetrics == nil { - m.Edges.namedArtifactMetrics = make(map[string][]*ArtifactMetrics) - } - if len(edges) == 0 { - m.Edges.namedArtifactMetrics[name] = []*ArtifactMetrics{} - } else { - m.Edges.namedArtifactMetrics[name] = append(m.Edges.namedArtifactMetrics[name], edges...) - } -} - -// NamedNetworkMetrics returns the NetworkMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedNetworkMetrics(name string) ([]*NetworkMetrics, error) { - if m.Edges.namedNetworkMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedNetworkMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedNetworkMetrics(name string, edges ...*NetworkMetrics) { - if m.Edges.namedNetworkMetrics == nil { - m.Edges.namedNetworkMetrics = make(map[string][]*NetworkMetrics) - } - if len(edges) == 0 { - m.Edges.namedNetworkMetrics[name] = []*NetworkMetrics{} - } else { - m.Edges.namedNetworkMetrics[name] = append(m.Edges.namedNetworkMetrics[name], edges...) - } -} - -// NamedDynamicExecutionMetrics returns the DynamicExecutionMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (m *Metrics) NamedDynamicExecutionMetrics(name string) ([]*DynamicExecutionMetrics, error) { - if m.Edges.namedDynamicExecutionMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedDynamicExecutionMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedDynamicExecutionMetrics(name string, edges ...*DynamicExecutionMetrics) { - if m.Edges.namedDynamicExecutionMetrics == nil { - m.Edges.namedDynamicExecutionMetrics = make(map[string][]*DynamicExecutionMetrics) - } - if len(edges) == 0 { - m.Edges.namedDynamicExecutionMetrics[name] = []*DynamicExecutionMetrics{} - } else { - m.Edges.namedDynamicExecutionMetrics[name] = append(m.Edges.namedDynamicExecutionMetrics[name], edges...) - } -} - -// NamedBuildGraphMetrics returns the BuildGraphMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (m *Metrics) NamedBuildGraphMetrics(name string) ([]*BuildGraphMetrics, error) { - if m.Edges.namedBuildGraphMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := m.Edges.namedBuildGraphMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (m *Metrics) appendNamedBuildGraphMetrics(name string, edges ...*BuildGraphMetrics) { - if m.Edges.namedBuildGraphMetrics == nil { - m.Edges.namedBuildGraphMetrics = make(map[string][]*BuildGraphMetrics) - } - if len(edges) == 0 { - m.Edges.namedBuildGraphMetrics[name] = []*BuildGraphMetrics{} - } else { - m.Edges.namedBuildGraphMetrics[name] = append(m.Edges.namedBuildGraphMetrics[name], edges...) - } -} - // MetricsSlice is a parsable slice of Metrics. type MetricsSlice []*Metrics diff --git a/ent/gen/ent/metrics/metrics.go b/ent/gen/ent/metrics/metrics.go index b66a1d0..4f6eb93 100644 --- a/ent/gen/ent/metrics/metrics.go +++ b/ent/gen/ent/metrics/metrics.go @@ -50,51 +50,69 @@ const ( ActionSummaryInverseTable = "action_summaries" // ActionSummaryColumn is the table column denoting the action_summary relation/edge. ActionSummaryColumn = "metrics_action_summary" - // MemoryMetricsTable is the table that holds the memory_metrics relation/edge. The primary key declared below. - MemoryMetricsTable = "metrics_memory_metrics" + // MemoryMetricsTable is the table that holds the memory_metrics relation/edge. + MemoryMetricsTable = "memory_metrics" // MemoryMetricsInverseTable is the table name for the MemoryMetrics entity. // It exists in this package in order to avoid circular dependency with the "memorymetrics" package. MemoryMetricsInverseTable = "memory_metrics" - // TargetMetricsTable is the table that holds the target_metrics relation/edge. The primary key declared below. - TargetMetricsTable = "metrics_target_metrics" + // MemoryMetricsColumn is the table column denoting the memory_metrics relation/edge. + MemoryMetricsColumn = "metrics_memory_metrics" + // TargetMetricsTable is the table that holds the target_metrics relation/edge. + TargetMetricsTable = "target_metrics" // TargetMetricsInverseTable is the table name for the TargetMetrics entity. // It exists in this package in order to avoid circular dependency with the "targetmetrics" package. TargetMetricsInverseTable = "target_metrics" - // PackageMetricsTable is the table that holds the package_metrics relation/edge. The primary key declared below. - PackageMetricsTable = "metrics_package_metrics" + // TargetMetricsColumn is the table column denoting the target_metrics relation/edge. 
+ TargetMetricsColumn = "metrics_target_metrics" + // PackageMetricsTable is the table that holds the package_metrics relation/edge. + PackageMetricsTable = "package_metrics" // PackageMetricsInverseTable is the table name for the PackageMetrics entity. // It exists in this package in order to avoid circular dependency with the "packagemetrics" package. PackageMetricsInverseTable = "package_metrics" - // TimingMetricsTable is the table that holds the timing_metrics relation/edge. The primary key declared below. - TimingMetricsTable = "metrics_timing_metrics" + // PackageMetricsColumn is the table column denoting the package_metrics relation/edge. + PackageMetricsColumn = "metrics_package_metrics" + // TimingMetricsTable is the table that holds the timing_metrics relation/edge. + TimingMetricsTable = "timing_metrics" // TimingMetricsInverseTable is the table name for the TimingMetrics entity. // It exists in this package in order to avoid circular dependency with the "timingmetrics" package. TimingMetricsInverseTable = "timing_metrics" - // CumulativeMetricsTable is the table that holds the cumulative_metrics relation/edge. The primary key declared below. - CumulativeMetricsTable = "metrics_cumulative_metrics" + // TimingMetricsColumn is the table column denoting the timing_metrics relation/edge. + TimingMetricsColumn = "metrics_timing_metrics" + // CumulativeMetricsTable is the table that holds the cumulative_metrics relation/edge. + CumulativeMetricsTable = "cumulative_metrics" // CumulativeMetricsInverseTable is the table name for the CumulativeMetrics entity. // It exists in this package in order to avoid circular dependency with the "cumulativemetrics" package. CumulativeMetricsInverseTable = "cumulative_metrics" - // ArtifactMetricsTable is the table that holds the artifact_metrics relation/edge. The primary key declared below. - ArtifactMetricsTable = "metrics_artifact_metrics" + // CumulativeMetricsColumn is the table column denoting the cumulative_metrics relation/edge. + CumulativeMetricsColumn = "metrics_cumulative_metrics" + // ArtifactMetricsTable is the table that holds the artifact_metrics relation/edge. + ArtifactMetricsTable = "artifact_metrics" // ArtifactMetricsInverseTable is the table name for the ArtifactMetrics entity. // It exists in this package in order to avoid circular dependency with the "artifactmetrics" package. ArtifactMetricsInverseTable = "artifact_metrics" - // NetworkMetricsTable is the table that holds the network_metrics relation/edge. The primary key declared below. - NetworkMetricsTable = "metrics_network_metrics" + // ArtifactMetricsColumn is the table column denoting the artifact_metrics relation/edge. + ArtifactMetricsColumn = "metrics_artifact_metrics" + // NetworkMetricsTable is the table that holds the network_metrics relation/edge. + NetworkMetricsTable = "network_metrics" // NetworkMetricsInverseTable is the table name for the NetworkMetrics entity. // It exists in this package in order to avoid circular dependency with the "networkmetrics" package. NetworkMetricsInverseTable = "network_metrics" - // DynamicExecutionMetricsTable is the table that holds the dynamic_execution_metrics relation/edge. The primary key declared below. - DynamicExecutionMetricsTable = "metrics_dynamic_execution_metrics" + // NetworkMetricsColumn is the table column denoting the network_metrics relation/edge. + NetworkMetricsColumn = "metrics_network_metrics" + // DynamicExecutionMetricsTable is the table that holds the dynamic_execution_metrics relation/edge. 
+ DynamicExecutionMetricsTable = "dynamic_execution_metrics" // DynamicExecutionMetricsInverseTable is the table name for the DynamicExecutionMetrics entity. // It exists in this package in order to avoid circular dependency with the "dynamicexecutionmetrics" package. DynamicExecutionMetricsInverseTable = "dynamic_execution_metrics" - // BuildGraphMetricsTable is the table that holds the build_graph_metrics relation/edge. The primary key declared below. - BuildGraphMetricsTable = "metrics_build_graph_metrics" + // DynamicExecutionMetricsColumn is the table column denoting the dynamic_execution_metrics relation/edge. + DynamicExecutionMetricsColumn = "metrics_dynamic_execution_metrics" + // BuildGraphMetricsTable is the table that holds the build_graph_metrics relation/edge. + BuildGraphMetricsTable = "build_graph_metrics" // BuildGraphMetricsInverseTable is the table name for the BuildGraphMetrics entity. // It exists in this package in order to avoid circular dependency with the "buildgraphmetrics" package. BuildGraphMetricsInverseTable = "build_graph_metrics" + // BuildGraphMetricsColumn is the table column denoting the build_graph_metrics relation/edge. + BuildGraphMetricsColumn = "metrics_build_graph_metrics" ) // Columns holds all SQL columns for metrics fields. @@ -108,36 +126,6 @@ var ForeignKeys = []string{ "bazel_invocation_metrics", } -var ( - // MemoryMetricsPrimaryKey and MemoryMetricsColumn2 are the table columns denoting the - // primary key for the memory_metrics relation (M2M). - MemoryMetricsPrimaryKey = []string{"metrics_id", "memory_metrics_id"} - // TargetMetricsPrimaryKey and TargetMetricsColumn2 are the table columns denoting the - // primary key for the target_metrics relation (M2M). - TargetMetricsPrimaryKey = []string{"metrics_id", "target_metrics_id"} - // PackageMetricsPrimaryKey and PackageMetricsColumn2 are the table columns denoting the - // primary key for the package_metrics relation (M2M). - PackageMetricsPrimaryKey = []string{"metrics_id", "package_metrics_id"} - // TimingMetricsPrimaryKey and TimingMetricsColumn2 are the table columns denoting the - // primary key for the timing_metrics relation (M2M). - TimingMetricsPrimaryKey = []string{"metrics_id", "timing_metrics_id"} - // CumulativeMetricsPrimaryKey and CumulativeMetricsColumn2 are the table columns denoting the - // primary key for the cumulative_metrics relation (M2M). - CumulativeMetricsPrimaryKey = []string{"metrics_id", "cumulative_metrics_id"} - // ArtifactMetricsPrimaryKey and ArtifactMetricsColumn2 are the table columns denoting the - // primary key for the artifact_metrics relation (M2M). - ArtifactMetricsPrimaryKey = []string{"metrics_id", "artifact_metrics_id"} - // NetworkMetricsPrimaryKey and NetworkMetricsColumn2 are the table columns denoting the - // primary key for the network_metrics relation (M2M). - NetworkMetricsPrimaryKey = []string{"metrics_id", "network_metrics_id"} - // DynamicExecutionMetricsPrimaryKey and DynamicExecutionMetricsColumn2 are the table columns denoting the - // primary key for the dynamic_execution_metrics relation (M2M). - DynamicExecutionMetricsPrimaryKey = []string{"metrics_id", "dynamic_execution_metrics_id"} - // BuildGraphMetricsPrimaryKey and BuildGraphMetricsColumn2 are the table columns denoting the - // primary key for the build_graph_metrics relation (M2M). - BuildGraphMetricsPrimaryKey = []string{"metrics_id", "build_graph_metrics_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). 
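Aside, not part of the generated diff: replacing the M2M PrimaryKey vars with per-edge Column constants is what entc emits once the Metrics edges are declared as unique (one-to-one). A minimal schema sketch of that declaration, assuming ent's standard edge builder and edge names matching the generated constants; the actual schema file is not included in this patch.

// Hypothetical schema sketch only; the real schema definitions are not shown here.
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// Metrics holds the schema definition for the Metrics entity.
type Metrics struct {
	ent.Schema
}

// Edges declares each sub-metric as a unique (one-to-one) edge, which makes entc
// generate a foreign-key column constant instead of an M2M join-table primary key.
func (Metrics) Edges() []ent.Edge {
	return []ent.Edge{
		edge.To("memory_metrics", MemoryMetrics.Type).Unique(),
		edge.To("artifact_metrics", ArtifactMetrics.Type).Unique(),
		// ...the remaining metric edges would follow the same pattern...
	}
}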
func ValidColumn(column string) bool { for i := range Columns { @@ -168,143 +156,73 @@ func ByBazelInvocationField(field string, opts ...sql.OrderTermOption) OrderOpti } } -// ByActionSummaryCount orders the results by action_summary count. -func ByActionSummaryCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionSummaryStep(), opts...) - } -} - -// ByActionSummary orders the results by action_summary terms. -func ByActionSummary(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByMemoryMetricsCount orders the results by memory_metrics count. -func ByMemoryMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMemoryMetricsStep(), opts...) - } -} - -// ByMemoryMetrics orders the results by memory_metrics terms. -func ByMemoryMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMemoryMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByTargetMetricsCount orders the results by target_metrics count. -func ByTargetMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTargetMetricsStep(), opts...) - } -} - -// ByTargetMetrics orders the results by target_metrics terms. -func ByTargetMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTargetMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByPackageMetricsCount orders the results by package_metrics count. -func ByPackageMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newPackageMetricsStep(), opts...) - } -} - -// ByPackageMetrics orders the results by package_metrics terms. -func ByPackageMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newPackageMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByTimingMetricsCount orders the results by timing_metrics count. -func ByTimingMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTimingMetricsStep(), opts...) - } -} - -// ByTimingMetrics orders the results by timing_metrics terms. -func ByTimingMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTimingMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// ByCumulativeMetricsCount orders the results by cumulative_metrics count. -func ByCumulativeMetricsCount(opts ...sql.OrderTermOption) OrderOption { +// ByActionSummaryField orders the results by action_summary field. +func ByActionSummaryField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newCumulativeMetricsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), sql.OrderByField(field, opts...)) } } -// ByCumulativeMetrics orders the results by cumulative_metrics terms. 
-func ByCumulativeMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMemoryMetricsField orders the results by memory_metrics field. +func ByMemoryMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newCumulativeMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMemoryMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByArtifactMetricsCount orders the results by artifact_metrics count. -func ByArtifactMetricsCount(opts ...sql.OrderTermOption) OrderOption { +// ByTargetMetricsField orders the results by target_metrics field. +func ByTargetMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newArtifactMetricsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newTargetMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByArtifactMetrics orders the results by artifact_metrics terms. -func ByArtifactMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByPackageMetricsField orders the results by package_metrics field. +func ByPackageMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newArtifactMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newPackageMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByNetworkMetricsCount orders the results by network_metrics count. -func ByNetworkMetricsCount(opts ...sql.OrderTermOption) OrderOption { +// ByTimingMetricsField orders the results by timing_metrics field. +func ByTimingMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newNetworkMetricsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newTimingMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByNetworkMetrics orders the results by network_metrics terms. -func ByNetworkMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByCumulativeMetricsField orders the results by cumulative_metrics field. +func ByCumulativeMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newNetworkMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newCumulativeMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByDynamicExecutionMetricsCount orders the results by dynamic_execution_metrics count. -func ByDynamicExecutionMetricsCount(opts ...sql.OrderTermOption) OrderOption { +// ByArtifactMetricsField orders the results by artifact_metrics field. +func ByArtifactMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newDynamicExecutionMetricsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newArtifactMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByDynamicExecutionMetrics orders the results by dynamic_execution_metrics terms. -func ByDynamicExecutionMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByNetworkMetricsField orders the results by network_metrics field. 
+func ByNetworkMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newDynamicExecutionMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newNetworkMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByBuildGraphMetricsCount orders the results by build_graph_metrics count. -func ByBuildGraphMetricsCount(opts ...sql.OrderTermOption) OrderOption { +// ByDynamicExecutionMetricsField orders the results by dynamic_execution_metrics field. +func ByDynamicExecutionMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newBuildGraphMetricsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newDynamicExecutionMetricsStep(), sql.OrderByField(field, opts...)) } } -// ByBuildGraphMetrics orders the results by build_graph_metrics terms. -func ByBuildGraphMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByBuildGraphMetricsField orders the results by build_graph_metrics field. +func ByBuildGraphMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newBuildGraphMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newBuildGraphMetricsStep(), sql.OrderByField(field, opts...)) } } func newBazelInvocationStep() *sqlgraph.Step { @@ -318,69 +236,69 @@ func newActionSummaryStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionSummaryInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, ActionSummaryTable, ActionSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ActionSummaryTable, ActionSummaryColumn), ) } func newMemoryMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MemoryMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, MemoryMetricsTable, MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, MemoryMetricsTable, MemoryMetricsColumn), ) } func newTargetMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TargetMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TargetMetricsTable, TargetMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TargetMetricsTable, TargetMetricsColumn), ) } func newPackageMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(PackageMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, PackageMetricsTable, PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, PackageMetricsTable, PackageMetricsColumn), ) } func newTimingMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TimingMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TimingMetricsTable, TimingMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TimingMetricsTable, TimingMetricsColumn), ) } func newCumulativeMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(CumulativeMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, CumulativeMetricsTable, CumulativeMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, CumulativeMetricsTable, CumulativeMetricsColumn), ) } func newArtifactMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), 
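Aside, illustrative only: with unique edges, the generated ordering helpers change from By<Edge>Count/By<Edge> term variants to By<Edge>Field. A sketch of calling one of them; the import path and column name are placeholders, not values taken from this patch.

// Hypothetical usage sketch (not part of this patch).
package main

import (
	"context"

	"entgo.io/ent/dialect/sql"

	"<module path>/ent/gen/ent"         // placeholder for the repository module path
	"<module path>/ent/gen/ent/metrics" // placeholder for the repository module path
)

// sortedMetrics orders Metrics rows by a column on the one-to-one memory_metrics edge.
func sortedMetrics(ctx context.Context, client *ent.Client) ([]*ent.Metrics, error) {
	return client.Metrics.Query().
		Order(metrics.ByMemoryMetricsField("peak_post_gc_heap_size", sql.OrderDesc())). // placeholder field name
		All(ctx)
}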
sqlgraph.To(ArtifactMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ArtifactMetricsTable, ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, ArtifactMetricsTable, ArtifactMetricsColumn), ) } func newNetworkMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(NetworkMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, NetworkMetricsTable, NetworkMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, NetworkMetricsTable, NetworkMetricsColumn), ) } func newDynamicExecutionMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(DynamicExecutionMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, DynamicExecutionMetricsTable, DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, DynamicExecutionMetricsTable, DynamicExecutionMetricsColumn), ) } func newBuildGraphMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(BuildGraphMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, BuildGraphMetricsTable, BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, BuildGraphMetricsTable, BuildGraphMetricsColumn), ) } diff --git a/ent/gen/ent/metrics/where.go b/ent/gen/ent/metrics/where.go index 5862a05..f134407 100644 --- a/ent/gen/ent/metrics/where.go +++ b/ent/gen/ent/metrics/where.go @@ -81,7 +81,7 @@ func HasActionSummary() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, ActionSummaryTable, ActionSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ActionSummaryTable, ActionSummaryColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -104,7 +104,7 @@ func HasMemoryMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, MemoryMetricsTable, MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, MemoryMetricsTable, MemoryMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -127,7 +127,7 @@ func HasTargetMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TargetMetricsTable, TargetMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TargetMetricsTable, TargetMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -150,7 +150,7 @@ func HasPackageMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, PackageMetricsTable, PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, PackageMetricsTable, PackageMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -173,7 +173,7 @@ func HasTimingMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TimingMetricsTable, TimingMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, TimingMetricsTable, TimingMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -196,7 +196,7 @@ func HasCumulativeMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, CumulativeMetricsTable, 
CumulativeMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, CumulativeMetricsTable, CumulativeMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -219,7 +219,7 @@ func HasArtifactMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ArtifactMetricsTable, ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, ArtifactMetricsTable, ArtifactMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -242,7 +242,7 @@ func HasNetworkMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, NetworkMetricsTable, NetworkMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, NetworkMetricsTable, NetworkMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -265,7 +265,7 @@ func HasDynamicExecutionMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, DynamicExecutionMetricsTable, DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, DynamicExecutionMetricsTable, DynamicExecutionMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -288,7 +288,7 @@ func HasBuildGraphMetrics() predicate.Metrics { return predicate.Metrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, BuildGraphMetricsTable, BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, BuildGraphMetricsTable, BuildGraphMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/metrics_create.go b/ent/gen/ent/metrics_create.go index e20a820..106e19c 100644 --- a/ent/gen/ent/metrics_create.go +++ b/ent/gen/ent/metrics_create.go @@ -48,154 +48,194 @@ func (mc *MetricsCreate) SetBazelInvocation(b *BazelInvocation) *MetricsCreate { return mc.SetBazelInvocationID(b.ID) } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (mc *MetricsCreate) AddActionSummaryIDs(ids ...int) *MetricsCreate { - mc.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (mc *MetricsCreate) SetActionSummaryID(id int) *MetricsCreate { + mc.mutation.SetActionSummaryID(id) return mc } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (mc *MetricsCreate) AddActionSummary(a ...*ActionSummary) *MetricsCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableActionSummaryID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetActionSummaryID(*id) } - return mc.AddActionSummaryIDs(ids...) + return mc +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (mc *MetricsCreate) SetActionSummary(a *ActionSummary) *MetricsCreate { + return mc.SetActionSummaryID(a.ID) } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (mc *MetricsCreate) AddMemoryMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddMemoryMetricIDs(ids...) +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. 
+func (mc *MetricsCreate) SetMemoryMetricsID(id int) *MetricsCreate { + mc.mutation.SetMemoryMetricsID(id) return mc } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. -func (mc *MetricsCreate) AddMemoryMetrics(m ...*MemoryMetrics) *MetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableMemoryMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetMemoryMetricsID(*id) } - return mc.AddMemoryMetricIDs(ids...) + return mc } -// AddTargetMetricIDs adds the "target_metrics" edge to the TargetMetrics entity by IDs. -func (mc *MetricsCreate) AddTargetMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddTargetMetricIDs(ids...) +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (mc *MetricsCreate) SetMemoryMetrics(m *MemoryMetrics) *MetricsCreate { + return mc.SetMemoryMetricsID(m.ID) +} + +// SetTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID. +func (mc *MetricsCreate) SetTargetMetricsID(id int) *MetricsCreate { + mc.mutation.SetTargetMetricsID(id) return mc } -// AddTargetMetrics adds the "target_metrics" edges to the TargetMetrics entity. -func (mc *MetricsCreate) AddTargetMetrics(t ...*TargetMetrics) *MetricsCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableTargetMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetTargetMetricsID(*id) } - return mc.AddTargetMetricIDs(ids...) + return mc } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (mc *MetricsCreate) AddPackageMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddPackageMetricIDs(ids...) +// SetTargetMetrics sets the "target_metrics" edge to the TargetMetrics entity. +func (mc *MetricsCreate) SetTargetMetrics(t *TargetMetrics) *MetricsCreate { + return mc.SetTargetMetricsID(t.ID) +} + +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (mc *MetricsCreate) SetPackageMetricsID(id int) *MetricsCreate { + mc.mutation.SetPackageMetricsID(id) return mc } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. -func (mc *MetricsCreate) AddPackageMetrics(p ...*PackageMetrics) *MetricsCreate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillablePackageMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetPackageMetricsID(*id) } - return mc.AddPackageMetricIDs(ids...) + return mc +} + +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (mc *MetricsCreate) SetPackageMetrics(p *PackageMetrics) *MetricsCreate { + return mc.SetPackageMetricsID(p.ID) } -// AddTimingMetricIDs adds the "timing_metrics" edge to the TimingMetrics entity by IDs. -func (mc *MetricsCreate) AddTimingMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddTimingMetricIDs(ids...) +// SetTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID. 
+func (mc *MetricsCreate) SetTimingMetricsID(id int) *MetricsCreate { + mc.mutation.SetTimingMetricsID(id) return mc } -// AddTimingMetrics adds the "timing_metrics" edges to the TimingMetrics entity. -func (mc *MetricsCreate) AddTimingMetrics(t ...*TimingMetrics) *MetricsCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableTimingMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetTimingMetricsID(*id) } - return mc.AddTimingMetricIDs(ids...) + return mc +} + +// SetTimingMetrics sets the "timing_metrics" edge to the TimingMetrics entity. +func (mc *MetricsCreate) SetTimingMetrics(t *TimingMetrics) *MetricsCreate { + return mc.SetTimingMetricsID(t.ID) } -// AddCumulativeMetricIDs adds the "cumulative_metrics" edge to the CumulativeMetrics entity by IDs. -func (mc *MetricsCreate) AddCumulativeMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddCumulativeMetricIDs(ids...) +// SetCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID. +func (mc *MetricsCreate) SetCumulativeMetricsID(id int) *MetricsCreate { + mc.mutation.SetCumulativeMetricsID(id) return mc } -// AddCumulativeMetrics adds the "cumulative_metrics" edges to the CumulativeMetrics entity. -func (mc *MetricsCreate) AddCumulativeMetrics(c ...*CumulativeMetrics) *MetricsCreate { - ids := make([]int, len(c)) - for i := range c { - ids[i] = c[i].ID +// SetNillableCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableCumulativeMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetCumulativeMetricsID(*id) } - return mc.AddCumulativeMetricIDs(ids...) + return mc } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (mc *MetricsCreate) AddArtifactMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddArtifactMetricIDs(ids...) +// SetCumulativeMetrics sets the "cumulative_metrics" edge to the CumulativeMetrics entity. +func (mc *MetricsCreate) SetCumulativeMetrics(c *CumulativeMetrics) *MetricsCreate { + return mc.SetCumulativeMetricsID(c.ID) +} + +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (mc *MetricsCreate) SetArtifactMetricsID(id int) *MetricsCreate { + mc.mutation.SetArtifactMetricsID(id) return mc } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (mc *MetricsCreate) AddArtifactMetrics(a ...*ArtifactMetrics) *MetricsCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableArtifactMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetArtifactMetricsID(*id) } - return mc.AddArtifactMetricIDs(ids...) + return mc +} + +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (mc *MetricsCreate) SetArtifactMetrics(a *ArtifactMetrics) *MetricsCreate { + return mc.SetArtifactMetricsID(a.ID) } -// AddNetworkMetricIDs adds the "network_metrics" edge to the NetworkMetrics entity by IDs. -func (mc *MetricsCreate) AddNetworkMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddNetworkMetricIDs(ids...) 
+// SetNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID. +func (mc *MetricsCreate) SetNetworkMetricsID(id int) *MetricsCreate { + mc.mutation.SetNetworkMetricsID(id) return mc } -// AddNetworkMetrics adds the "network_metrics" edges to the NetworkMetrics entity. -func (mc *MetricsCreate) AddNetworkMetrics(n ...*NetworkMetrics) *MetricsCreate { - ids := make([]int, len(n)) - for i := range n { - ids[i] = n[i].ID +// SetNillableNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableNetworkMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetNetworkMetricsID(*id) } - return mc.AddNetworkMetricIDs(ids...) + return mc +} + +// SetNetworkMetrics sets the "network_metrics" edge to the NetworkMetrics entity. +func (mc *MetricsCreate) SetNetworkMetrics(n *NetworkMetrics) *MetricsCreate { + return mc.SetNetworkMetricsID(n.ID) } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (mc *MetricsCreate) AddDynamicExecutionMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (mc *MetricsCreate) SetDynamicExecutionMetricsID(id int) *MetricsCreate { + mc.mutation.SetDynamicExecutionMetricsID(id) return mc } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. -func (mc *MetricsCreate) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *MetricsCreate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableDynamicExecutionMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetDynamicExecutionMetricsID(*id) } - return mc.AddDynamicExecutionMetricIDs(ids...) + return mc +} + +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (mc *MetricsCreate) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *MetricsCreate { + return mc.SetDynamicExecutionMetricsID(d.ID) } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (mc *MetricsCreate) AddBuildGraphMetricIDs(ids ...int) *MetricsCreate { - mc.mutation.AddBuildGraphMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (mc *MetricsCreate) SetBuildGraphMetricsID(id int) *MetricsCreate { + mc.mutation.SetBuildGraphMetricsID(id) return mc } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. -func (mc *MetricsCreate) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *MetricsCreate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (mc *MetricsCreate) SetNillableBuildGraphMetricsID(id *int) *MetricsCreate { + if id != nil { + mc = mc.SetBuildGraphMetricsID(*id) } - return mc.AddBuildGraphMetricIDs(ids...) + return mc +} + +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. 
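Aside, illustrative only: the create builder now exposes Set*/SetNillable* methods in place of the old Add* methods, so each sub-metric is attached at most once. A minimal sketch of the new call shape, assuming a client and already-created sub-metric rows; the import path is a placeholder.

// Hypothetical usage sketch (not part of this patch).
package main

import (
	"context"

	"<module path>/ent/gen/ent" // placeholder for the repository module path
)

// storeMetrics attaches one-to-one sub-metrics to a new Metrics row.
func storeMetrics(ctx context.Context, client *ent.Client, inv *ent.BazelInvocation,
	as *ent.ActionSummary, mm *ent.MemoryMetrics, netID *int) (*ent.Metrics, error) {
	return client.Metrics.Create().
		SetBazelInvocation(inv).
		SetActionSummary(as). // one-to-one: a single value, not a slice
		SetMemoryMetrics(mm).
		SetNillableNetworkMetricsID(netID). // optional edge: nil leaves it unset
		Save(ctx)
}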
+func (mc *MetricsCreate) SetBuildGraphMetrics(b *BuildGraphMetrics) *MetricsCreate { + return mc.SetBuildGraphMetricsID(b.ID) } // Mutation returns the MetricsMutation object of the builder. @@ -277,7 +317,7 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ActionSummaryTable, Columns: []string{metrics.ActionSummaryColumn}, @@ -293,10 +333,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, + Columns: []string{metrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), @@ -309,10 +349,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.TargetMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, + Columns: []string{metrics.TargetMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), @@ -325,10 +365,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, + Columns: []string{metrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -341,10 +381,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.TimingMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, + Columns: []string{metrics.TimingMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), @@ -357,10 +397,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.CumulativeMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, + Columns: []string{metrics.CumulativeMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), @@ -373,10 +413,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, + Columns: []string{metrics.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), @@ -389,10 +429,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) 
{ } if nodes := mc.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, + Columns: []string{metrics.NetworkMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), @@ -405,10 +445,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{metrics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), @@ -421,10 +461,10 @@ func (mc *MetricsCreate) createSpec() (*Metrics, *sqlgraph.CreateSpec) { } if nodes := mc.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, + Columns: []string{metrics.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/metrics_query.go b/ent/gen/ent/metrics_query.go index 66e1065..665863e 100644 --- a/ent/gen/ent/metrics_query.go +++ b/ent/gen/ent/metrics_query.go @@ -29,34 +29,24 @@ import ( // MetricsQuery is the builder for querying Metrics entities. type MetricsQuery struct { config - ctx *QueryContext - order []metrics.OrderOption - inters []Interceptor - predicates []predicate.Metrics - withBazelInvocation *BazelInvocationQuery - withActionSummary *ActionSummaryQuery - withMemoryMetrics *MemoryMetricsQuery - withTargetMetrics *TargetMetricsQuery - withPackageMetrics *PackageMetricsQuery - withTimingMetrics *TimingMetricsQuery - withCumulativeMetrics *CumulativeMetricsQuery - withArtifactMetrics *ArtifactMetricsQuery - withNetworkMetrics *NetworkMetricsQuery - withDynamicExecutionMetrics *DynamicExecutionMetricsQuery - withBuildGraphMetrics *BuildGraphMetricsQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*Metrics) error - withNamedActionSummary map[string]*ActionSummaryQuery - withNamedMemoryMetrics map[string]*MemoryMetricsQuery - withNamedTargetMetrics map[string]*TargetMetricsQuery - withNamedPackageMetrics map[string]*PackageMetricsQuery - withNamedTimingMetrics map[string]*TimingMetricsQuery - withNamedCumulativeMetrics map[string]*CumulativeMetricsQuery - withNamedArtifactMetrics map[string]*ArtifactMetricsQuery - withNamedNetworkMetrics map[string]*NetworkMetricsQuery - withNamedDynamicExecutionMetrics map[string]*DynamicExecutionMetricsQuery - withNamedBuildGraphMetrics map[string]*BuildGraphMetricsQuery + ctx *QueryContext + order []metrics.OrderOption + inters []Interceptor + predicates []predicate.Metrics + withBazelInvocation *BazelInvocationQuery + withActionSummary *ActionSummaryQuery + withMemoryMetrics *MemoryMetricsQuery + withTargetMetrics *TargetMetricsQuery + withPackageMetrics *PackageMetricsQuery + withTimingMetrics *TimingMetricsQuery + withCumulativeMetrics *CumulativeMetricsQuery + withArtifactMetrics *ArtifactMetricsQuery + withNetworkMetrics *NetworkMetricsQuery + withDynamicExecutionMetrics 
*DynamicExecutionMetricsQuery + withBuildGraphMetrics *BuildGraphMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*Metrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -129,7 +119,7 @@ func (mq *MetricsQuery) QueryActionSummary() *ActionSummaryQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, metrics.ActionSummaryTable, metrics.ActionSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.ActionSummaryTable, metrics.ActionSummaryColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -151,7 +141,7 @@ func (mq *MetricsQuery) QueryMemoryMetrics() *MemoryMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(memorymetrics.Table, memorymetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.MemoryMetricsTable, metrics.MemoryMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.MemoryMetricsTable, metrics.MemoryMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -173,7 +163,7 @@ func (mq *MetricsQuery) QueryTargetMetrics() *TargetMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(targetmetrics.Table, targetmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.TargetMetricsTable, metrics.TargetMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.TargetMetricsTable, metrics.TargetMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -195,7 +185,7 @@ func (mq *MetricsQuery) QueryPackageMetrics() *PackageMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(packagemetrics.Table, packagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.PackageMetricsTable, metrics.PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.PackageMetricsTable, metrics.PackageMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -217,7 +207,7 @@ func (mq *MetricsQuery) QueryTimingMetrics() *TimingMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(timingmetrics.Table, timingmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.TimingMetricsTable, metrics.TimingMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.TimingMetricsTable, metrics.TimingMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -239,7 +229,7 @@ func (mq *MetricsQuery) QueryCumulativeMetrics() *CumulativeMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(cumulativemetrics.Table, cumulativemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.CumulativeMetricsTable, metrics.CumulativeMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.CumulativeMetricsTable, metrics.CumulativeMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -261,7 +251,7 @@ func (mq *MetricsQuery) QueryArtifactMetrics() *ArtifactMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(artifactmetrics.Table, 
artifactmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.ArtifactMetricsTable, metrics.ArtifactMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.ArtifactMetricsTable, metrics.ArtifactMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -283,7 +273,7 @@ func (mq *MetricsQuery) QueryNetworkMetrics() *NetworkMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(networkmetrics.Table, networkmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.NetworkMetricsTable, metrics.NetworkMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.NetworkMetricsTable, metrics.NetworkMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -305,7 +295,7 @@ func (mq *MetricsQuery) QueryDynamicExecutionMetrics() *DynamicExecutionMetricsQ step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.DynamicExecutionMetricsTable, metrics.DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.DynamicExecutionMetricsTable, metrics.DynamicExecutionMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -327,7 +317,7 @@ func (mq *MetricsQuery) QueryBuildGraphMetrics() *BuildGraphMetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(metrics.Table, metrics.FieldID, selector), sqlgraph.To(buildgraphmetrics.Table, buildgraphmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, metrics.BuildGraphMetricsTable, metrics.BuildGraphMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, false, metrics.BuildGraphMetricsTable, metrics.BuildGraphMetricsColumn), ) fromU = sqlgraph.SetNeighbors(mq.driver.Dialect(), step) return fromU, nil @@ -770,148 +760,62 @@ func (mq *MetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Metr } } if query := mq.withActionSummary; query != nil { - if err := mq.loadActionSummary(ctx, query, nodes, - func(n *Metrics) { n.Edges.ActionSummary = []*ActionSummary{} }, - func(n *Metrics, e *ActionSummary) { n.Edges.ActionSummary = append(n.Edges.ActionSummary, e) }); err != nil { + if err := mq.loadActionSummary(ctx, query, nodes, nil, + func(n *Metrics, e *ActionSummary) { n.Edges.ActionSummary = e }); err != nil { return nil, err } } if query := mq.withMemoryMetrics; query != nil { - if err := mq.loadMemoryMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.MemoryMetrics = []*MemoryMetrics{} }, - func(n *Metrics, e *MemoryMetrics) { n.Edges.MemoryMetrics = append(n.Edges.MemoryMetrics, e) }); err != nil { + if err := mq.loadMemoryMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *MemoryMetrics) { n.Edges.MemoryMetrics = e }); err != nil { return nil, err } } if query := mq.withTargetMetrics; query != nil { - if err := mq.loadTargetMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.TargetMetrics = []*TargetMetrics{} }, - func(n *Metrics, e *TargetMetrics) { n.Edges.TargetMetrics = append(n.Edges.TargetMetrics, e) }); err != nil { + if err := mq.loadTargetMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *TargetMetrics) { n.Edges.TargetMetrics = e }); err != nil { return nil, err } } if query := mq.withPackageMetrics; query != nil { - if err := mq.loadPackageMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.PackageMetrics = []*PackageMetrics{} }, - func(n *Metrics, e 
*PackageMetrics) { n.Edges.PackageMetrics = append(n.Edges.PackageMetrics, e) }); err != nil { + if err := mq.loadPackageMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *PackageMetrics) { n.Edges.PackageMetrics = e }); err != nil { return nil, err } } if query := mq.withTimingMetrics; query != nil { - if err := mq.loadTimingMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.TimingMetrics = []*TimingMetrics{} }, - func(n *Metrics, e *TimingMetrics) { n.Edges.TimingMetrics = append(n.Edges.TimingMetrics, e) }); err != nil { + if err := mq.loadTimingMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *TimingMetrics) { n.Edges.TimingMetrics = e }); err != nil { return nil, err } } if query := mq.withCumulativeMetrics; query != nil { - if err := mq.loadCumulativeMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.CumulativeMetrics = []*CumulativeMetrics{} }, - func(n *Metrics, e *CumulativeMetrics) { - n.Edges.CumulativeMetrics = append(n.Edges.CumulativeMetrics, e) - }); err != nil { + if err := mq.loadCumulativeMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *CumulativeMetrics) { n.Edges.CumulativeMetrics = e }); err != nil { return nil, err } } if query := mq.withArtifactMetrics; query != nil { - if err := mq.loadArtifactMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.ArtifactMetrics = []*ArtifactMetrics{} }, - func(n *Metrics, e *ArtifactMetrics) { n.Edges.ArtifactMetrics = append(n.Edges.ArtifactMetrics, e) }); err != nil { + if err := mq.loadArtifactMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *ArtifactMetrics) { n.Edges.ArtifactMetrics = e }); err != nil { return nil, err } } if query := mq.withNetworkMetrics; query != nil { - if err := mq.loadNetworkMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.NetworkMetrics = []*NetworkMetrics{} }, - func(n *Metrics, e *NetworkMetrics) { n.Edges.NetworkMetrics = append(n.Edges.NetworkMetrics, e) }); err != nil { + if err := mq.loadNetworkMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *NetworkMetrics) { n.Edges.NetworkMetrics = e }); err != nil { return nil, err } } if query := mq.withDynamicExecutionMetrics; query != nil { - if err := mq.loadDynamicExecutionMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.DynamicExecutionMetrics = []*DynamicExecutionMetrics{} }, - func(n *Metrics, e *DynamicExecutionMetrics) { - n.Edges.DynamicExecutionMetrics = append(n.Edges.DynamicExecutionMetrics, e) - }); err != nil { + if err := mq.loadDynamicExecutionMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *DynamicExecutionMetrics) { n.Edges.DynamicExecutionMetrics = e }); err != nil { return nil, err } } if query := mq.withBuildGraphMetrics; query != nil { - if err := mq.loadBuildGraphMetrics(ctx, query, nodes, - func(n *Metrics) { n.Edges.BuildGraphMetrics = []*BuildGraphMetrics{} }, - func(n *Metrics, e *BuildGraphMetrics) { - n.Edges.BuildGraphMetrics = append(n.Edges.BuildGraphMetrics, e) - }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedActionSummary { - if err := mq.loadActionSummary(ctx, query, nodes, - func(n *Metrics) { n.appendNamedActionSummary(name) }, - func(n *Metrics, e *ActionSummary) { n.appendNamedActionSummary(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedMemoryMetrics { - if err := mq.loadMemoryMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedMemoryMetrics(name) }, - func(n *Metrics, e *MemoryMetrics) { n.appendNamedMemoryMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, 
query := range mq.withNamedTargetMetrics { - if err := mq.loadTargetMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedTargetMetrics(name) }, - func(n *Metrics, e *TargetMetrics) { n.appendNamedTargetMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedPackageMetrics { - if err := mq.loadPackageMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedPackageMetrics(name) }, - func(n *Metrics, e *PackageMetrics) { n.appendNamedPackageMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedTimingMetrics { - if err := mq.loadTimingMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedTimingMetrics(name) }, - func(n *Metrics, e *TimingMetrics) { n.appendNamedTimingMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedCumulativeMetrics { - if err := mq.loadCumulativeMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedCumulativeMetrics(name) }, - func(n *Metrics, e *CumulativeMetrics) { n.appendNamedCumulativeMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedArtifactMetrics { - if err := mq.loadArtifactMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedArtifactMetrics(name) }, - func(n *Metrics, e *ArtifactMetrics) { n.appendNamedArtifactMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedNetworkMetrics { - if err := mq.loadNetworkMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedNetworkMetrics(name) }, - func(n *Metrics, e *NetworkMetrics) { n.appendNamedNetworkMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedDynamicExecutionMetrics { - if err := mq.loadDynamicExecutionMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedDynamicExecutionMetrics(name) }, - func(n *Metrics, e *DynamicExecutionMetrics) { n.appendNamedDynamicExecutionMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range mq.withNamedBuildGraphMetrics { - if err := mq.loadBuildGraphMetrics(ctx, query, nodes, - func(n *Metrics) { n.appendNamedBuildGraphMetrics(name) }, - func(n *Metrics, e *BuildGraphMetrics) { n.appendNamedBuildGraphMetrics(name, e) }); err != nil { + if err := mq.loadBuildGraphMetrics(ctx, query, nodes, nil, + func(n *Metrics, e *BuildGraphMetrics) { n.Edges.BuildGraphMetrics = e }); err != nil { return nil, err } } @@ -961,9 +865,6 @@ func (mq *MetricsQuery) loadActionSummary(ctx context.Context, query *ActionSumm for i := range nodes { fks = append(fks, nodes[i].ID) nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) - } } query.withFKs = true query.Where(predicate.ActionSummary(func(s *sql.Selector) { @@ -987,551 +888,254 @@ func (mq *MetricsQuery) loadActionSummary(ctx context.Context, query *ActionSumm return nil } func (mq *MetricsQuery) loadMemoryMetrics(ctx context.Context, query *MemoryMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *MemoryMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.MemoryMetricsTable) - s.Join(joinT).On(s.C(memorymetrics.FieldID), joinT.C(metrics.MemoryMetricsPrimaryKey[1])) - 
s.Where(sql.InValues(joinT.C(metrics.MemoryMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.MemoryMetricsPrimaryKey[0])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*MemoryMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.MemoryMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.MemoryMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "memory_metrics" node returned %v`, n.ID) + fk := n.metrics_memory_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_memory_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_memory_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadTargetMetrics(ctx context.Context, query *TargetMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *TargetMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.TargetMetricsTable) - s.Join(joinT).On(s.C(targetmetrics.FieldID), joinT.C(metrics.TargetMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.TargetMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.TargetMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TargetMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TargetMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.TargetMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "target_metrics" node returned %v`, n.ID) + fk := n.metrics_target_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_target_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_target_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadPackageMetrics(ctx context.Context, query *PackageMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *PackageMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.PackageMetricsTable) - s.Join(joinT).On(s.C(packagemetrics.FieldID), joinT.C(metrics.PackageMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.PackageMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.PackageMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*PackageMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.PackageMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.PackageMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "package_metrics" node returned %v`, n.ID) + fk := n.metrics_package_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_package_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_package_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadTimingMetrics(ctx context.Context, query *TimingMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *TimingMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.TimingMetricsTable) - s.Join(joinT).On(s.C(timingmetrics.FieldID), joinT.C(metrics.TimingMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.TimingMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.TimingMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TimingMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TimingMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.TimingMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "timing_metrics" node returned %v`, n.ID) + fk := n.metrics_timing_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_timing_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_timing_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadCumulativeMetrics(ctx context.Context, query *CumulativeMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *CumulativeMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.CumulativeMetricsTable) - s.Join(joinT).On(s.C(cumulativemetrics.FieldID), joinT.C(metrics.CumulativeMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.CumulativeMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.CumulativeMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*CumulativeMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.CumulativeMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.CumulativeMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "cumulative_metrics" node returned %v`, n.ID) + fk := n.metrics_cumulative_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_cumulative_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_cumulative_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadArtifactMetrics(ctx context.Context, query *ArtifactMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *ArtifactMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.ArtifactMetricsTable) - s.Join(joinT).On(s.C(artifactmetrics.FieldID), joinT.C(metrics.ArtifactMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.ArtifactMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.ArtifactMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
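Each converted loader above now filters the child table directly on the parent foreign-key column instead of joining through a link table, so the eager-load query collapses to a single IN clause. A small sketch of that predicate built with ent's dialect/sql package, assuming the default generated table and column names (memory_metrics, metrics_memory_metrics) and illustrative parent IDs:

package main

import (
	"fmt"

	"entgo.io/ent/dialect/sql"
)

func main() {
	// Same shape as the generated predicate: restrict child rows to the parent IDs.
	sel := sql.Select("*").From(sql.Table("memory_metrics"))
	sel.Where(sql.InValues(sel.C("metrics_memory_metrics"), 1, 2, 3))
	query, args := sel.Query()
	// Roughly: SELECT * FROM memory_metrics WHERE metrics_memory_metrics IN (?, ?, ?)
	fmt.Println(query, args)
}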
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ArtifactMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.ArtifactMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.ArtifactMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "artifact_metrics" node returned %v`, n.ID) + fk := n.metrics_artifact_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_artifact_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_artifact_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadNetworkMetrics(ctx context.Context, query *NetworkMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *NetworkMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.NetworkMetricsTable) - s.Join(joinT).On(s.C(networkmetrics.FieldID), joinT.C(metrics.NetworkMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.NetworkMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.NetworkMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*NetworkMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.NetworkMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.NetworkMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "network_metrics" node returned %v`, n.ID) + fk := n.metrics_network_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_network_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_network_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadDynamicExecutionMetrics(ctx context.Context, query *DynamicExecutionMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *DynamicExecutionMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.DynamicExecutionMetricsTable) - s.Join(joinT).On(s.C(dynamicexecutionmetrics.FieldID), joinT.C(metrics.DynamicExecutionMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.DynamicExecutionMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.DynamicExecutionMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*DynamicExecutionMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.DynamicExecutionMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.DynamicExecutionMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "dynamic_execution_metrics" node returned %v`, n.ID) + fk := n.metrics_dynamic_execution_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_dynamic_execution_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_dynamic_execution_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (mq *MetricsQuery) loadBuildGraphMetrics(ctx context.Context, query *BuildGraphMetricsQuery, nodes []*Metrics, init func(*Metrics), assign func(*Metrics, *BuildGraphMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*Metrics) - nids := make(map[int]map[*Metrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(metrics.BuildGraphMetricsTable) - s.Join(joinT).On(s.C(buildgraphmetrics.FieldID), joinT.C(metrics.BuildGraphMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(metrics.BuildGraphMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(metrics.BuildGraphMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*Metrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*Metrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*BuildGraphMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.BuildGraphMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(metrics.BuildGraphMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "build_graph_metrics" node returned %v`, n.ID) + fk := n.metrics_build_graph_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "metrics_build_graph_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "metrics_build_graph_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -1620,146 +1224,6 @@ func (mq *MetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedActionSummary tells the query-builder to eager-load the nodes that are connected to the "action_summary" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedActionSummary(name string, opts ...func(*ActionSummaryQuery)) *MetricsQuery { - query := (&ActionSummaryClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedActionSummary == nil { - mq.withNamedActionSummary = make(map[string]*ActionSummaryQuery) - } - mq.withNamedActionSummary[name] = query - return mq -} - -// WithNamedMemoryMetrics tells the query-builder to eager-load the nodes that are connected to the "memory_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedMemoryMetrics(name string, opts ...func(*MemoryMetricsQuery)) *MetricsQuery { - query := (&MemoryMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedMemoryMetrics == nil { - mq.withNamedMemoryMetrics = make(map[string]*MemoryMetricsQuery) - } - mq.withNamedMemoryMetrics[name] = query - return mq -} - -// WithNamedTargetMetrics tells the query-builder to eager-load the nodes that are connected to the "target_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
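With every edge on Metrics now unique, the nine loaders above share one shape: collect the parent IDs, run a single child query restricted to those IDs via the foreign key, then attach each child to its parent through a map lookup. A self-contained sketch of that pattern; Parent, Child, FK and the callbacks are illustrative names, not identifiers from the generated code:

package main

import (
	"context"
	"fmt"
)

type Parent struct{ ID int }

type Child struct {
	ID int
	FK *int // nil when the row is not attached to any parent
}

// loadUniqueEdge mirrors the generated loadXxx helpers: one query, then a map lookup per child.
func loadUniqueEdge(ctx context.Context, parents []*Parent,
	fetch func(ctx context.Context, parentIDs []int) ([]*Child, error),
	assign func(*Parent, *Child)) error {
	byID := make(map[int]*Parent, len(parents))
	ids := make([]int, 0, len(parents))
	for _, p := range parents {
		byID[p.ID] = p
		ids = append(ids, p.ID)
	}
	children, err := fetch(ctx, ids) // stands in for: SELECT ... WHERE fk IN (ids...)
	if err != nil {
		return err
	}
	for _, c := range children {
		if c.FK == nil {
			return fmt.Errorf("foreign key is nil for child %d", c.ID)
		}
		p, ok := byID[*c.FK]
		if !ok {
			return fmt.Errorf("unexpected foreign key %d for child %d", *c.FK, c.ID)
		}
		assign(p, c)
	}
	return nil
}

func main() {
	parents := []*Parent{{ID: 1}, {ID: 2}}
	one := 1
	fetch := func(ctx context.Context, ids []int) ([]*Child, error) {
		return []*Child{{ID: 10, FK: &one}}, nil // stands in for the real SQL query
	}
	attached := map[int]*Child{}
	if err := loadUniqueEdge(context.Background(), parents, fetch, func(p *Parent, c *Child) { attached[p.ID] = c }); err != nil {
		panic(err)
	}
	fmt.Println(attached[1].ID) // 10
}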
-func (mq *MetricsQuery) WithNamedTargetMetrics(name string, opts ...func(*TargetMetricsQuery)) *MetricsQuery { - query := (&TargetMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedTargetMetrics == nil { - mq.withNamedTargetMetrics = make(map[string]*TargetMetricsQuery) - } - mq.withNamedTargetMetrics[name] = query - return mq -} - -// WithNamedPackageMetrics tells the query-builder to eager-load the nodes that are connected to the "package_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedPackageMetrics(name string, opts ...func(*PackageMetricsQuery)) *MetricsQuery { - query := (&PackageMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedPackageMetrics == nil { - mq.withNamedPackageMetrics = make(map[string]*PackageMetricsQuery) - } - mq.withNamedPackageMetrics[name] = query - return mq -} - -// WithNamedTimingMetrics tells the query-builder to eager-load the nodes that are connected to the "timing_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedTimingMetrics(name string, opts ...func(*TimingMetricsQuery)) *MetricsQuery { - query := (&TimingMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedTimingMetrics == nil { - mq.withNamedTimingMetrics = make(map[string]*TimingMetricsQuery) - } - mq.withNamedTimingMetrics[name] = query - return mq -} - -// WithNamedCumulativeMetrics tells the query-builder to eager-load the nodes that are connected to the "cumulative_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedCumulativeMetrics(name string, opts ...func(*CumulativeMetricsQuery)) *MetricsQuery { - query := (&CumulativeMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedCumulativeMetrics == nil { - mq.withNamedCumulativeMetrics = make(map[string]*CumulativeMetricsQuery) - } - mq.withNamedCumulativeMetrics[name] = query - return mq -} - -// WithNamedArtifactMetrics tells the query-builder to eager-load the nodes that are connected to the "artifact_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedArtifactMetrics(name string, opts ...func(*ArtifactMetricsQuery)) *MetricsQuery { - query := (&ArtifactMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedArtifactMetrics == nil { - mq.withNamedArtifactMetrics = make(map[string]*ArtifactMetricsQuery) - } - mq.withNamedArtifactMetrics[name] = query - return mq -} - -// WithNamedNetworkMetrics tells the query-builder to eager-load the nodes that are connected to the "network_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
-func (mq *MetricsQuery) WithNamedNetworkMetrics(name string, opts ...func(*NetworkMetricsQuery)) *MetricsQuery { - query := (&NetworkMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedNetworkMetrics == nil { - mq.withNamedNetworkMetrics = make(map[string]*NetworkMetricsQuery) - } - mq.withNamedNetworkMetrics[name] = query - return mq -} - -// WithNamedDynamicExecutionMetrics tells the query-builder to eager-load the nodes that are connected to the "dynamic_execution_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedDynamicExecutionMetrics(name string, opts ...func(*DynamicExecutionMetricsQuery)) *MetricsQuery { - query := (&DynamicExecutionMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedDynamicExecutionMetrics == nil { - mq.withNamedDynamicExecutionMetrics = make(map[string]*DynamicExecutionMetricsQuery) - } - mq.withNamedDynamicExecutionMetrics[name] = query - return mq -} - -// WithNamedBuildGraphMetrics tells the query-builder to eager-load the nodes that are connected to the "build_graph_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mq *MetricsQuery) WithNamedBuildGraphMetrics(name string, opts ...func(*BuildGraphMetricsQuery)) *MetricsQuery { - query := (&BuildGraphMetricsClient{config: mq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mq.withNamedBuildGraphMetrics == nil { - mq.withNamedBuildGraphMetrics = make(map[string]*BuildGraphMetricsQuery) - } - mq.withNamedBuildGraphMetrics[name] = query - return mq -} - // MetricsGroupBy is the group-by builder for Metrics entities. type MetricsGroupBy struct { selector diff --git a/ent/gen/ent/metrics_update.go b/ent/gen/ent/metrics_update.go index 34fe38a..507ecb0 100644 --- a/ent/gen/ent/metrics_update.go +++ b/ent/gen/ent/metrics_update.go @@ -57,377 +57,267 @@ func (mu *MetricsUpdate) SetBazelInvocation(b *BazelInvocation) *MetricsUpdate { return mu.SetBazelInvocationID(b.ID) } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (mu *MetricsUpdate) AddActionSummaryIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (mu *MetricsUpdate) SetActionSummaryID(id int) *MetricsUpdate { + mu.mutation.SetActionSummaryID(id) return mu } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (mu *MetricsUpdate) AddActionSummary(a ...*ActionSummary) *MetricsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableActionSummaryID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetActionSummaryID(*id) } - return mu.AddActionSummaryIDs(ids...) -} - -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (mu *MetricsUpdate) AddMemoryMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddMemoryMetricIDs(ids...) return mu } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. 
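The WithNamed* helpers being deleted above are only generated for to-many edges (they let the GraphQL field-collection logic eager-load the same edge under different aliases), so they drop out once the edges become unique; the plain With* builders remain. A hedged usage sketch, assuming the generated package is imported as ent and a client has already been constructed:

// Eager-load two of the now-unique edges. For a unique edge the generated
// Edges struct is expected to hold a single pointer rather than a slice
// (assumption, following ent's usual convention).
func firstWithEdges(ctx context.Context, client *ent.Client) (*ent.Metrics, error) {
	return client.Metrics.Query().
		WithMemoryMetrics().
		WithActionSummary().
		First(ctx)
}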
-func (mu *MetricsUpdate) AddMemoryMetrics(m ...*MemoryMetrics) *MetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return mu.AddMemoryMetricIDs(ids...) +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (mu *MetricsUpdate) SetActionSummary(a *ActionSummary) *MetricsUpdate { + return mu.SetActionSummaryID(a.ID) } -// AddTargetMetricIDs adds the "target_metrics" edge to the TargetMetrics entity by IDs. -func (mu *MetricsUpdate) AddTargetMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddTargetMetricIDs(ids...) +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. +func (mu *MetricsUpdate) SetMemoryMetricsID(id int) *MetricsUpdate { + mu.mutation.SetMemoryMetricsID(id) return mu } -// AddTargetMetrics adds the "target_metrics" edges to the TargetMetrics entity. -func (mu *MetricsUpdate) AddTargetMetrics(t ...*TargetMetrics) *MetricsUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableMemoryMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetMemoryMetricsID(*id) } - return mu.AddTargetMetricIDs(ids...) -} - -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (mu *MetricsUpdate) AddPackageMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddPackageMetricIDs(ids...) return mu } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. -func (mu *MetricsUpdate) AddPackageMetrics(p ...*PackageMetrics) *MetricsUpdate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID - } - return mu.AddPackageMetricIDs(ids...) +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (mu *MetricsUpdate) SetMemoryMetrics(m *MemoryMetrics) *MetricsUpdate { + return mu.SetMemoryMetricsID(m.ID) } -// AddTimingMetricIDs adds the "timing_metrics" edge to the TimingMetrics entity by IDs. -func (mu *MetricsUpdate) AddTimingMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddTimingMetricIDs(ids...) +// SetTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID. +func (mu *MetricsUpdate) SetTargetMetricsID(id int) *MetricsUpdate { + mu.mutation.SetTargetMetricsID(id) return mu } -// AddTimingMetrics adds the "timing_metrics" edges to the TimingMetrics entity. -func (mu *MetricsUpdate) AddTimingMetrics(t ...*TimingMetrics) *MetricsUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableTargetMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetTargetMetricsID(*id) } - return mu.AddTimingMetricIDs(ids...) -} - -// AddCumulativeMetricIDs adds the "cumulative_metrics" edge to the CumulativeMetrics entity by IDs. -func (mu *MetricsUpdate) AddCumulativeMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddCumulativeMetricIDs(ids...) return mu } -// AddCumulativeMetrics adds the "cumulative_metrics" edges to the CumulativeMetrics entity. -func (mu *MetricsUpdate) AddCumulativeMetrics(c ...*CumulativeMetrics) *MetricsUpdate { - ids := make([]int, len(c)) - for i := range c { - ids[i] = c[i].ID - } - return mu.AddCumulativeMetricIDs(ids...) 
+// SetTargetMetrics sets the "target_metrics" edge to the TargetMetrics entity. +func (mu *MetricsUpdate) SetTargetMetrics(t *TargetMetrics) *MetricsUpdate { + return mu.SetTargetMetricsID(t.ID) } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (mu *MetricsUpdate) AddArtifactMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddArtifactMetricIDs(ids...) +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (mu *MetricsUpdate) SetPackageMetricsID(id int) *MetricsUpdate { + mu.mutation.SetPackageMetricsID(id) return mu } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (mu *MetricsUpdate) AddArtifactMetrics(a ...*ArtifactMetrics) *MetricsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillablePackageMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetPackageMetricsID(*id) } - return mu.AddArtifactMetricIDs(ids...) -} - -// AddNetworkMetricIDs adds the "network_metrics" edge to the NetworkMetrics entity by IDs. -func (mu *MetricsUpdate) AddNetworkMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddNetworkMetricIDs(ids...) return mu } -// AddNetworkMetrics adds the "network_metrics" edges to the NetworkMetrics entity. -func (mu *MetricsUpdate) AddNetworkMetrics(n ...*NetworkMetrics) *MetricsUpdate { - ids := make([]int, len(n)) - for i := range n { - ids[i] = n[i].ID - } - return mu.AddNetworkMetricIDs(ids...) +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (mu *MetricsUpdate) SetPackageMetrics(p *PackageMetrics) *MetricsUpdate { + return mu.SetPackageMetricsID(p.ID) } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (mu *MetricsUpdate) AddDynamicExecutionMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID. +func (mu *MetricsUpdate) SetTimingMetricsID(id int) *MetricsUpdate { + mu.mutation.SetTimingMetricsID(id) return mu } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. -func (mu *MetricsUpdate) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *MetricsUpdate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableTimingMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetTimingMetricsID(*id) } - return mu.AddDynamicExecutionMetricIDs(ids...) -} - -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (mu *MetricsUpdate) AddBuildGraphMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.AddBuildGraphMetricIDs(ids...) return mu } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. -func (mu *MetricsUpdate) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *MetricsUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return mu.AddBuildGraphMetricIDs(ids...) 
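On the update builder the to-many Add*/Add*IDs methods are replaced by Set<Edge>, Set<Edge>ID and SetNillable<Edge>ID, since at most one child row can be attached. A hedged call-site sketch; the client wiring and generated import path are assumed, and the same methods are expected on the UpdateOne builder as on MetricsUpdate:

// Attach a single MemoryMetrics row; before this change the equivalent call
// would have been AddMemoryMetrics(mm).
func attachMemoryMetrics(ctx context.Context, client *ent.Client, metricsID int, mm *ent.MemoryMetrics) error {
	return client.Metrics.
		UpdateOneID(metricsID).
		SetMemoryMetrics(mm).
		Exec(ctx)
}

// SetNillable<Edge>ID keeps call sites simple when the child ID is optional.
func maybeAttachMemoryMetrics(ctx context.Context, client *ent.Client, metricsID int, childID *int) error {
	return client.Metrics.
		UpdateOneID(metricsID).
		SetNillableMemoryMetricsID(childID).
		Exec(ctx)
}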
+// SetTimingMetrics sets the "timing_metrics" edge to the TimingMetrics entity. +func (mu *MetricsUpdate) SetTimingMetrics(t *TimingMetrics) *MetricsUpdate { + return mu.SetTimingMetricsID(t.ID) } -// Mutation returns the MetricsMutation object of the builder. -func (mu *MetricsUpdate) Mutation() *MetricsMutation { - return mu.mutation +// SetCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID. +func (mu *MetricsUpdate) SetCumulativeMetricsID(id int) *MetricsUpdate { + mu.mutation.SetCumulativeMetricsID(id) + return mu } -// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. -func (mu *MetricsUpdate) ClearBazelInvocation() *MetricsUpdate { - mu.mutation.ClearBazelInvocation() +// SetNillableCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableCumulativeMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetCumulativeMetricsID(*id) + } return mu } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. -func (mu *MetricsUpdate) ClearActionSummary() *MetricsUpdate { - mu.mutation.ClearActionSummary() - return mu +// SetCumulativeMetrics sets the "cumulative_metrics" edge to the CumulativeMetrics entity. +func (mu *MetricsUpdate) SetCumulativeMetrics(c *CumulativeMetrics) *MetricsUpdate { + return mu.SetCumulativeMetricsID(c.ID) } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (mu *MetricsUpdate) RemoveActionSummaryIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveActionSummaryIDs(ids...) +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (mu *MetricsUpdate) SetArtifactMetricsID(id int) *MetricsUpdate { + mu.mutation.SetArtifactMetricsID(id) return mu } -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (mu *MetricsUpdate) RemoveActionSummary(a ...*ActionSummary) *MetricsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableArtifactMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetArtifactMetricsID(*id) } - return mu.RemoveActionSummaryIDs(ids...) + return mu } -// ClearMemoryMetrics clears all "memory_metrics" edges to the MemoryMetrics entity. -func (mu *MetricsUpdate) ClearMemoryMetrics() *MetricsUpdate { - mu.mutation.ClearMemoryMetrics() - return mu +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (mu *MetricsUpdate) SetArtifactMetrics(a *ArtifactMetrics) *MetricsUpdate { + return mu.SetArtifactMetricsID(a.ID) } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to MemoryMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveMemoryMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveMemoryMetricIDs(ids...) +// SetNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID. +func (mu *MetricsUpdate) SetNetworkMetricsID(id int) *MetricsUpdate { + mu.mutation.SetNetworkMetricsID(id) return mu } -// RemoveMemoryMetrics removes "memory_metrics" edges to MemoryMetrics entities. 
-func (mu *MetricsUpdate) RemoveMemoryMetrics(m ...*MemoryMetrics) *MetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableNetworkMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetNetworkMetricsID(*id) } - return mu.RemoveMemoryMetricIDs(ids...) + return mu } -// ClearTargetMetrics clears all "target_metrics" edges to the TargetMetrics entity. -func (mu *MetricsUpdate) ClearTargetMetrics() *MetricsUpdate { - mu.mutation.ClearTargetMetrics() - return mu +// SetNetworkMetrics sets the "network_metrics" edge to the NetworkMetrics entity. +func (mu *MetricsUpdate) SetNetworkMetrics(n *NetworkMetrics) *MetricsUpdate { + return mu.SetNetworkMetricsID(n.ID) } -// RemoveTargetMetricIDs removes the "target_metrics" edge to TargetMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveTargetMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveTargetMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (mu *MetricsUpdate) SetDynamicExecutionMetricsID(id int) *MetricsUpdate { + mu.mutation.SetDynamicExecutionMetricsID(id) return mu } -// RemoveTargetMetrics removes "target_metrics" edges to TargetMetrics entities. -func (mu *MetricsUpdate) RemoveTargetMetrics(t ...*TargetMetrics) *MetricsUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableDynamicExecutionMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetDynamicExecutionMetricsID(*id) } - return mu.RemoveTargetMetricIDs(ids...) + return mu } -// ClearPackageMetrics clears all "package_metrics" edges to the PackageMetrics entity. -func (mu *MetricsUpdate) ClearPackageMetrics() *MetricsUpdate { - mu.mutation.ClearPackageMetrics() - return mu +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (mu *MetricsUpdate) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *MetricsUpdate { + return mu.SetDynamicExecutionMetricsID(d.ID) } -// RemovePackageMetricIDs removes the "package_metrics" edge to PackageMetrics entities by IDs. -func (mu *MetricsUpdate) RemovePackageMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemovePackageMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (mu *MetricsUpdate) SetBuildGraphMetricsID(id int) *MetricsUpdate { + mu.mutation.SetBuildGraphMetricsID(id) return mu } -// RemovePackageMetrics removes "package_metrics" edges to PackageMetrics entities. -func (mu *MetricsUpdate) RemovePackageMetrics(p ...*PackageMetrics) *MetricsUpdate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (mu *MetricsUpdate) SetNillableBuildGraphMetricsID(id *int) *MetricsUpdate { + if id != nil { + mu = mu.SetBuildGraphMetricsID(*id) } - return mu.RemovePackageMetricIDs(ids...) -} - -// ClearTimingMetrics clears all "timing_metrics" edges to the TimingMetrics entity. 
-func (mu *MetricsUpdate) ClearTimingMetrics() *MetricsUpdate { - mu.mutation.ClearTimingMetrics() return mu } -// RemoveTimingMetricIDs removes the "timing_metrics" edge to TimingMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveTimingMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveTimingMetricIDs(ids...) - return mu +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. +func (mu *MetricsUpdate) SetBuildGraphMetrics(b *BuildGraphMetrics) *MetricsUpdate { + return mu.SetBuildGraphMetricsID(b.ID) } -// RemoveTimingMetrics removes "timing_metrics" edges to TimingMetrics entities. -func (mu *MetricsUpdate) RemoveTimingMetrics(t ...*TimingMetrics) *MetricsUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return mu.RemoveTimingMetricIDs(ids...) +// Mutation returns the MetricsMutation object of the builder. +func (mu *MetricsUpdate) Mutation() *MetricsMutation { + return mu.mutation } -// ClearCumulativeMetrics clears all "cumulative_metrics" edges to the CumulativeMetrics entity. -func (mu *MetricsUpdate) ClearCumulativeMetrics() *MetricsUpdate { - mu.mutation.ClearCumulativeMetrics() +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. +func (mu *MetricsUpdate) ClearBazelInvocation() *MetricsUpdate { + mu.mutation.ClearBazelInvocation() return mu } -// RemoveCumulativeMetricIDs removes the "cumulative_metrics" edge to CumulativeMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveCumulativeMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveCumulativeMetricIDs(ids...) +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. +func (mu *MetricsUpdate) ClearActionSummary() *MetricsUpdate { + mu.mutation.ClearActionSummary() return mu } -// RemoveCumulativeMetrics removes "cumulative_metrics" edges to CumulativeMetrics entities. -func (mu *MetricsUpdate) RemoveCumulativeMetrics(c ...*CumulativeMetrics) *MetricsUpdate { - ids := make([]int, len(c)) - for i := range c { - ids[i] = c[i].ID - } - return mu.RemoveCumulativeMetricIDs(ids...) +// ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. +func (mu *MetricsUpdate) ClearMemoryMetrics() *MetricsUpdate { + mu.mutation.ClearMemoryMetrics() + return mu } -// ClearArtifactMetrics clears all "artifact_metrics" edges to the ArtifactMetrics entity. -func (mu *MetricsUpdate) ClearArtifactMetrics() *MetricsUpdate { - mu.mutation.ClearArtifactMetrics() +// ClearTargetMetrics clears the "target_metrics" edge to the TargetMetrics entity. +func (mu *MetricsUpdate) ClearTargetMetrics() *MetricsUpdate { + mu.mutation.ClearTargetMetrics() return mu } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to ArtifactMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveArtifactMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveArtifactMetricIDs(ids...) +// ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. +func (mu *MetricsUpdate) ClearPackageMetrics() *MetricsUpdate { + mu.mutation.ClearPackageMetrics() return mu } -// RemoveArtifactMetrics removes "artifact_metrics" edges to ArtifactMetrics entities. -func (mu *MetricsUpdate) RemoveArtifactMetrics(a ...*ArtifactMetrics) *MetricsUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return mu.RemoveArtifactMetricIDs(ids...) +// ClearTimingMetrics clears the "timing_metrics" edge to the TimingMetrics entity. 
+func (mu *MetricsUpdate) ClearTimingMetrics() *MetricsUpdate { + mu.mutation.ClearTimingMetrics() + return mu } -// ClearNetworkMetrics clears all "network_metrics" edges to the NetworkMetrics entity. -func (mu *MetricsUpdate) ClearNetworkMetrics() *MetricsUpdate { - mu.mutation.ClearNetworkMetrics() +// ClearCumulativeMetrics clears the "cumulative_metrics" edge to the CumulativeMetrics entity. +func (mu *MetricsUpdate) ClearCumulativeMetrics() *MetricsUpdate { + mu.mutation.ClearCumulativeMetrics() return mu } -// RemoveNetworkMetricIDs removes the "network_metrics" edge to NetworkMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveNetworkMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveNetworkMetricIDs(ids...) +// ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. +func (mu *MetricsUpdate) ClearArtifactMetrics() *MetricsUpdate { + mu.mutation.ClearArtifactMetrics() return mu } -// RemoveNetworkMetrics removes "network_metrics" edges to NetworkMetrics entities. -func (mu *MetricsUpdate) RemoveNetworkMetrics(n ...*NetworkMetrics) *MetricsUpdate { - ids := make([]int, len(n)) - for i := range n { - ids[i] = n[i].ID - } - return mu.RemoveNetworkMetricIDs(ids...) +// ClearNetworkMetrics clears the "network_metrics" edge to the NetworkMetrics entity. +func (mu *MetricsUpdate) ClearNetworkMetrics() *MetricsUpdate { + mu.mutation.ClearNetworkMetrics() + return mu } -// ClearDynamicExecutionMetrics clears all "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. +// ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. func (mu *MetricsUpdate) ClearDynamicExecutionMetrics() *MetricsUpdate { mu.mutation.ClearDynamicExecutionMetrics() return mu } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to DynamicExecutionMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveDynamicExecutionMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveDynamicExecutionMetricIDs(ids...) - return mu -} - -// RemoveDynamicExecutionMetrics removes "dynamic_execution_metrics" edges to DynamicExecutionMetrics entities. -func (mu *MetricsUpdate) RemoveDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *MetricsUpdate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID - } - return mu.RemoveDynamicExecutionMetricIDs(ids...) -} - -// ClearBuildGraphMetrics clears all "build_graph_metrics" edges to the BuildGraphMetrics entity. +// ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. func (mu *MetricsUpdate) ClearBuildGraphMetrics() *MetricsUpdate { mu.mutation.ClearBuildGraphMetrics() return mu } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to BuildGraphMetrics entities by IDs. -func (mu *MetricsUpdate) RemoveBuildGraphMetricIDs(ids ...int) *MetricsUpdate { - mu.mutation.RemoveBuildGraphMetricIDs(ids...) - return mu -} - -// RemoveBuildGraphMetrics removes "build_graph_metrics" edges to BuildGraphMetrics entities. -func (mu *MetricsUpdate) RemoveBuildGraphMetrics(b ...*BuildGraphMetrics) *MetricsUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return mu.RemoveBuildGraphMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
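Because a unique edge holds at most one reference, the Remove*IDs/Remove* pairs disappear and Clear<Edge> becomes the only way to detach. All of this is what ent's code generator emits once the Metrics edges are declared unique in the schema; a hedged sketch of that kind of declaration, with edge and type names inferred from the generated constants (the schema file itself is not part of these hunks):

package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// Metrics sketches only the edge block relevant to this diff.
type Metrics struct {
	ent.Schema
}

func (Metrics) Edges() []ent.Edge {
	return []ent.Edge{
		// Unique() makes these edges O2O; for the formerly M2M edges the link
		// table is replaced by a metrics_<edge> foreign-key column on the child table.
		edge.To("action_summary", ActionSummary.Type).Unique(),
		edge.To("memory_metrics", MemoryMetrics.Type).Unique(),
		edge.To("timing_metrics", TimingMetrics.Type).Unique(),
		// ...the remaining metrics edges follow the same pattern
	}
}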
func (mu *MetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, mu.sqlSave, mu.mutation, mu.hooks) @@ -495,20 +385,7 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: metrics.ActionSummaryTable, - Columns: []string{metrics.ActionSummaryColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !mu.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ActionSummaryTable, Columns: []string{metrics.ActionSummaryColumn}, @@ -517,14 +394,11 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ActionSummaryTable, Columns: []string{metrics.ActionSummaryColumn}, @@ -540,39 +414,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.MemoryMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedMemoryMetricsIDs(); len(nodes) > 0 && !mu.mutation.MemoryMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, + Columns: []string{metrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, + Columns: []string{metrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), @@ -585,39 +443,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.TargetMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedTargetMetricsIDs(); len(nodes) > 0 && !mu.mutation.TargetMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: 
metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, + Columns: []string{metrics.TargetMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.TargetMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, + Columns: []string{metrics.TargetMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), @@ -630,10 +472,10 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.PackageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, + Columns: []string{metrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -641,28 +483,12 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := mu.mutation.RemovedPackageMetricsIDs(); len(nodes) > 0 && !mu.mutation.PackageMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := mu.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, + Columns: []string{metrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -675,10 +501,10 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.TimingMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, + Columns: []string{metrics.TimingMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), @@ -686,28 +512,12 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := mu.mutation.RemovedTimingMetricsIDs(); len(nodes) > 0 && !mu.mutation.TimingMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := mu.mutation.TimingMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: 
sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, + Columns: []string{metrics.TimingMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), @@ -720,10 +530,10 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.CumulativeMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, + Columns: []string{metrics.CumulativeMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), @@ -731,28 +541,12 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := mu.mutation.RemovedCumulativeMetricsIDs(); len(nodes) > 0 && !mu.mutation.CumulativeMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := mu.mutation.CumulativeMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, + Columns: []string{metrics.CumulativeMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), @@ -765,39 +559,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.ArtifactMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedArtifactMetricsIDs(); len(nodes) > 0 && !mu.mutation.ArtifactMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, + Columns: []string{metrics.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, + Columns: []string{metrics.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), @@ -810,39 +588,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.NetworkMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - 
Inverse: false, - Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedNetworkMetricsIDs(); len(nodes) > 0 && !mu.mutation.NetworkMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, + Columns: []string{metrics.NetworkMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, + Columns: []string{metrics.NetworkMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), @@ -855,39 +617,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.DynamicExecutionMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedDynamicExecutionMetricsIDs(); len(nodes) > 0 && !mu.mutation.DynamicExecutionMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{metrics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{metrics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), @@ -900,39 +646,23 @@ func (mu *MetricsUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mu.mutation.BuildGraphMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mu.mutation.RemovedBuildGraphMetricsIDs(); len(nodes) > 0 && !mu.mutation.BuildGraphMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: 
metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, + Columns: []string{metrics.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mu.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, + Columns: []string{metrics.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), @@ -982,377 +712,267 @@ func (muo *MetricsUpdateOne) SetBazelInvocation(b *BazelInvocation) *MetricsUpda return muo.SetBazelInvocationID(b.ID) } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (muo *MetricsUpdateOne) AddActionSummaryIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (muo *MetricsUpdateOne) SetActionSummaryID(id int) *MetricsUpdateOne { + muo.mutation.SetActionSummaryID(id) return muo } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (muo *MetricsUpdateOne) AddActionSummary(a ...*ActionSummary) *MetricsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableActionSummaryID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetActionSummaryID(*id) } - return muo.AddActionSummaryIDs(ids...) -} - -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddMemoryMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddMemoryMetricIDs(ids...) return muo } -// AddMemoryMetrics adds the "memory_metrics" edges to the MemoryMetrics entity. -func (muo *MetricsUpdateOne) AddMemoryMetrics(m ...*MemoryMetrics) *MetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return muo.AddMemoryMetricIDs(ids...) +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (muo *MetricsUpdateOne) SetActionSummary(a *ActionSummary) *MetricsUpdateOne { + return muo.SetActionSummaryID(a.ID) } -// AddTargetMetricIDs adds the "target_metrics" edge to the TargetMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddTargetMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddTargetMetricIDs(ids...) +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID. +func (muo *MetricsUpdateOne) SetMemoryMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetMemoryMetricsID(id) return muo } -// AddTargetMetrics adds the "target_metrics" edges to the TargetMetrics entity. -func (muo *MetricsUpdateOne) AddTargetMetrics(t ...*TargetMetrics) *MetricsUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by ID if the given value is not nil. 
+func (muo *MetricsUpdateOne) SetNillableMemoryMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetMemoryMetricsID(*id) } - return muo.AddTargetMetricIDs(ids...) -} - -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddPackageMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddPackageMetricIDs(ids...) return muo } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. -func (muo *MetricsUpdateOne) AddPackageMetrics(p ...*PackageMetrics) *MetricsUpdateOne { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID - } - return muo.AddPackageMetricIDs(ids...) +// SetMemoryMetrics sets the "memory_metrics" edge to the MemoryMetrics entity. +func (muo *MetricsUpdateOne) SetMemoryMetrics(m *MemoryMetrics) *MetricsUpdateOne { + return muo.SetMemoryMetricsID(m.ID) } -// AddTimingMetricIDs adds the "timing_metrics" edge to the TimingMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddTimingMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddTimingMetricIDs(ids...) +// SetTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID. +func (muo *MetricsUpdateOne) SetTargetMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetTargetMetricsID(id) return muo } -// AddTimingMetrics adds the "timing_metrics" edges to the TimingMetrics entity. -func (muo *MetricsUpdateOne) AddTimingMetrics(t ...*TimingMetrics) *MetricsUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableTargetMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetTargetMetricsID(*id) } - return muo.AddTimingMetricIDs(ids...) -} - -// AddCumulativeMetricIDs adds the "cumulative_metrics" edge to the CumulativeMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddCumulativeMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddCumulativeMetricIDs(ids...) return muo } -// AddCumulativeMetrics adds the "cumulative_metrics" edges to the CumulativeMetrics entity. -func (muo *MetricsUpdateOne) AddCumulativeMetrics(c ...*CumulativeMetrics) *MetricsUpdateOne { - ids := make([]int, len(c)) - for i := range c { - ids[i] = c[i].ID - } - return muo.AddCumulativeMetricIDs(ids...) +// SetTargetMetrics sets the "target_metrics" edge to the TargetMetrics entity. +func (muo *MetricsUpdateOne) SetTargetMetrics(t *TargetMetrics) *MetricsUpdateOne { + return muo.SetTargetMetricsID(t.ID) } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddArtifactMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddArtifactMetricIDs(ids...) +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (muo *MetricsUpdateOne) SetPackageMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetPackageMetricsID(id) return muo } -// AddArtifactMetrics adds the "artifact_metrics" edges to the ArtifactMetrics entity. -func (muo *MetricsUpdateOne) AddArtifactMetrics(a ...*ArtifactMetrics) *MetricsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. 
+func (muo *MetricsUpdateOne) SetNillablePackageMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetPackageMetricsID(*id) } - return muo.AddArtifactMetricIDs(ids...) -} - -// AddNetworkMetricIDs adds the "network_metrics" edge to the NetworkMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddNetworkMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddNetworkMetricIDs(ids...) return muo } -// AddNetworkMetrics adds the "network_metrics" edges to the NetworkMetrics entity. -func (muo *MetricsUpdateOne) AddNetworkMetrics(n ...*NetworkMetrics) *MetricsUpdateOne { - ids := make([]int, len(n)) - for i := range n { - ids[i] = n[i].ID - } - return muo.AddNetworkMetricIDs(ids...) +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (muo *MetricsUpdateOne) SetPackageMetrics(p *PackageMetrics) *MetricsUpdateOne { + return muo.SetPackageMetricsID(p.ID) } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddDynamicExecutionMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID. +func (muo *MetricsUpdateOne) SetTimingMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetTimingMetricsID(id) return muo } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. -func (muo *MetricsUpdateOne) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *MetricsUpdateOne { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableTimingMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetTimingMetricsID(*id) } - return muo.AddDynamicExecutionMetricIDs(ids...) -} - -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (muo *MetricsUpdateOne) AddBuildGraphMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.AddBuildGraphMetricIDs(ids...) return muo } -// AddBuildGraphMetrics adds the "build_graph_metrics" edges to the BuildGraphMetrics entity. -func (muo *MetricsUpdateOne) AddBuildGraphMetrics(b ...*BuildGraphMetrics) *MetricsUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return muo.AddBuildGraphMetricIDs(ids...) +// SetTimingMetrics sets the "timing_metrics" edge to the TimingMetrics entity. +func (muo *MetricsUpdateOne) SetTimingMetrics(t *TimingMetrics) *MetricsUpdateOne { + return muo.SetTimingMetricsID(t.ID) } -// Mutation returns the MetricsMutation object of the builder. -func (muo *MetricsUpdateOne) Mutation() *MetricsMutation { - return muo.mutation +// SetCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID. +func (muo *MetricsUpdateOne) SetCumulativeMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetCumulativeMetricsID(id) + return muo } -// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. -func (muo *MetricsUpdateOne) ClearBazelInvocation() *MetricsUpdateOne { - muo.mutation.ClearBazelInvocation() +// SetNillableCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by ID if the given value is not nil. 
+func (muo *MetricsUpdateOne) SetNillableCumulativeMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetCumulativeMetricsID(*id) + } return muo } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. -func (muo *MetricsUpdateOne) ClearActionSummary() *MetricsUpdateOne { - muo.mutation.ClearActionSummary() - return muo +// SetCumulativeMetrics sets the "cumulative_metrics" edge to the CumulativeMetrics entity. +func (muo *MetricsUpdateOne) SetCumulativeMetrics(c *CumulativeMetrics) *MetricsUpdateOne { + return muo.SetCumulativeMetricsID(c.ID) } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (muo *MetricsUpdateOne) RemoveActionSummaryIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveActionSummaryIDs(ids...) +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID. +func (muo *MetricsUpdateOne) SetArtifactMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetArtifactMetricsID(id) return muo } -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (muo *MetricsUpdateOne) RemoveActionSummary(a ...*ActionSummary) *MetricsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableArtifactMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetArtifactMetricsID(*id) } - return muo.RemoveActionSummaryIDs(ids...) + return muo } -// ClearMemoryMetrics clears all "memory_metrics" edges to the MemoryMetrics entity. -func (muo *MetricsUpdateOne) ClearMemoryMetrics() *MetricsUpdateOne { - muo.mutation.ClearMemoryMetrics() - return muo +// SetArtifactMetrics sets the "artifact_metrics" edge to the ArtifactMetrics entity. +func (muo *MetricsUpdateOne) SetArtifactMetrics(a *ArtifactMetrics) *MetricsUpdateOne { + return muo.SetArtifactMetricsID(a.ID) } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to MemoryMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveMemoryMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveMemoryMetricIDs(ids...) +// SetNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID. +func (muo *MetricsUpdateOne) SetNetworkMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetNetworkMetricsID(id) return muo } -// RemoveMemoryMetrics removes "memory_metrics" edges to MemoryMetrics entities. -func (muo *MetricsUpdateOne) RemoveMemoryMetrics(m ...*MemoryMetrics) *MetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableNetworkMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetNetworkMetricsID(*id) } - return muo.RemoveMemoryMetricIDs(ids...) + return muo } -// ClearTargetMetrics clears all "target_metrics" edges to the TargetMetrics entity. -func (muo *MetricsUpdateOne) ClearTargetMetrics() *MetricsUpdateOne { - muo.mutation.ClearTargetMetrics() - return muo +// SetNetworkMetrics sets the "network_metrics" edge to the NetworkMetrics entity. 
+func (muo *MetricsUpdateOne) SetNetworkMetrics(n *NetworkMetrics) *MetricsUpdateOne { + return muo.SetNetworkMetricsID(n.ID) } -// RemoveTargetMetricIDs removes the "target_metrics" edge to TargetMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveTargetMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveTargetMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (muo *MetricsUpdateOne) SetDynamicExecutionMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetDynamicExecutionMetricsID(id) return muo } -// RemoveTargetMetrics removes "target_metrics" edges to TargetMetrics entities. -func (muo *MetricsUpdateOne) RemoveTargetMetrics(t ...*TargetMetrics) *MetricsUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableDynamicExecutionMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetDynamicExecutionMetricsID(*id) } - return muo.RemoveTargetMetricIDs(ids...) + return muo } -// ClearPackageMetrics clears all "package_metrics" edges to the PackageMetrics entity. -func (muo *MetricsUpdateOne) ClearPackageMetrics() *MetricsUpdateOne { - muo.mutation.ClearPackageMetrics() - return muo +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (muo *MetricsUpdateOne) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *MetricsUpdateOne { + return muo.SetDynamicExecutionMetricsID(d.ID) } -// RemovePackageMetricIDs removes the "package_metrics" edge to PackageMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemovePackageMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemovePackageMetricIDs(ids...) +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID. +func (muo *MetricsUpdateOne) SetBuildGraphMetricsID(id int) *MetricsUpdateOne { + muo.mutation.SetBuildGraphMetricsID(id) return muo } -// RemovePackageMetrics removes "package_metrics" edges to PackageMetrics entities. -func (muo *MetricsUpdateOne) RemovePackageMetrics(p ...*PackageMetrics) *MetricsUpdateOne { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillableBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by ID if the given value is not nil. +func (muo *MetricsUpdateOne) SetNillableBuildGraphMetricsID(id *int) *MetricsUpdateOne { + if id != nil { + muo = muo.SetBuildGraphMetricsID(*id) } - return muo.RemovePackageMetricIDs(ids...) -} - -// ClearTimingMetrics clears all "timing_metrics" edges to the TimingMetrics entity. -func (muo *MetricsUpdateOne) ClearTimingMetrics() *MetricsUpdateOne { - muo.mutation.ClearTimingMetrics() return muo } -// RemoveTimingMetricIDs removes the "timing_metrics" edge to TimingMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveTimingMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveTimingMetricIDs(ids...) - return muo +// SetBuildGraphMetrics sets the "build_graph_metrics" edge to the BuildGraphMetrics entity. +func (muo *MetricsUpdateOne) SetBuildGraphMetrics(b *BuildGraphMetrics) *MetricsUpdateOne { + return muo.SetBuildGraphMetricsID(b.ID) } -// RemoveTimingMetrics removes "timing_metrics" edges to TimingMetrics entities. 
-func (muo *MetricsUpdateOne) RemoveTimingMetrics(t ...*TimingMetrics) *MetricsUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return muo.RemoveTimingMetricIDs(ids...) +// Mutation returns the MetricsMutation object of the builder. +func (muo *MetricsUpdateOne) Mutation() *MetricsMutation { + return muo.mutation } -// ClearCumulativeMetrics clears all "cumulative_metrics" edges to the CumulativeMetrics entity. -func (muo *MetricsUpdateOne) ClearCumulativeMetrics() *MetricsUpdateOne { - muo.mutation.ClearCumulativeMetrics() +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. +func (muo *MetricsUpdateOne) ClearBazelInvocation() *MetricsUpdateOne { + muo.mutation.ClearBazelInvocation() return muo } -// RemoveCumulativeMetricIDs removes the "cumulative_metrics" edge to CumulativeMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveCumulativeMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveCumulativeMetricIDs(ids...) +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. +func (muo *MetricsUpdateOne) ClearActionSummary() *MetricsUpdateOne { + muo.mutation.ClearActionSummary() return muo } -// RemoveCumulativeMetrics removes "cumulative_metrics" edges to CumulativeMetrics entities. -func (muo *MetricsUpdateOne) RemoveCumulativeMetrics(c ...*CumulativeMetrics) *MetricsUpdateOne { - ids := make([]int, len(c)) - for i := range c { - ids[i] = c[i].ID - } - return muo.RemoveCumulativeMetricIDs(ids...) +// ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. +func (muo *MetricsUpdateOne) ClearMemoryMetrics() *MetricsUpdateOne { + muo.mutation.ClearMemoryMetrics() + return muo } -// ClearArtifactMetrics clears all "artifact_metrics" edges to the ArtifactMetrics entity. -func (muo *MetricsUpdateOne) ClearArtifactMetrics() *MetricsUpdateOne { - muo.mutation.ClearArtifactMetrics() +// ClearTargetMetrics clears the "target_metrics" edge to the TargetMetrics entity. +func (muo *MetricsUpdateOne) ClearTargetMetrics() *MetricsUpdateOne { + muo.mutation.ClearTargetMetrics() return muo } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to ArtifactMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveArtifactMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveArtifactMetricIDs(ids...) +// ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. +func (muo *MetricsUpdateOne) ClearPackageMetrics() *MetricsUpdateOne { + muo.mutation.ClearPackageMetrics() return muo } -// RemoveArtifactMetrics removes "artifact_metrics" edges to ArtifactMetrics entities. -func (muo *MetricsUpdateOne) RemoveArtifactMetrics(a ...*ArtifactMetrics) *MetricsUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return muo.RemoveArtifactMetricIDs(ids...) +// ClearTimingMetrics clears the "timing_metrics" edge to the TimingMetrics entity. +func (muo *MetricsUpdateOne) ClearTimingMetrics() *MetricsUpdateOne { + muo.mutation.ClearTimingMetrics() + return muo } -// ClearNetworkMetrics clears all "network_metrics" edges to the NetworkMetrics entity. -func (muo *MetricsUpdateOne) ClearNetworkMetrics() *MetricsUpdateOne { - muo.mutation.ClearNetworkMetrics() +// ClearCumulativeMetrics clears the "cumulative_metrics" edge to the CumulativeMetrics entity. 
+func (muo *MetricsUpdateOne) ClearCumulativeMetrics() *MetricsUpdateOne { + muo.mutation.ClearCumulativeMetrics() return muo } -// RemoveNetworkMetricIDs removes the "network_metrics" edge to NetworkMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveNetworkMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveNetworkMetricIDs(ids...) +// ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. +func (muo *MetricsUpdateOne) ClearArtifactMetrics() *MetricsUpdateOne { + muo.mutation.ClearArtifactMetrics() return muo } -// RemoveNetworkMetrics removes "network_metrics" edges to NetworkMetrics entities. -func (muo *MetricsUpdateOne) RemoveNetworkMetrics(n ...*NetworkMetrics) *MetricsUpdateOne { - ids := make([]int, len(n)) - for i := range n { - ids[i] = n[i].ID - } - return muo.RemoveNetworkMetricIDs(ids...) +// ClearNetworkMetrics clears the "network_metrics" edge to the NetworkMetrics entity. +func (muo *MetricsUpdateOne) ClearNetworkMetrics() *MetricsUpdateOne { + muo.mutation.ClearNetworkMetrics() + return muo } -// ClearDynamicExecutionMetrics clears all "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. +// ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. func (muo *MetricsUpdateOne) ClearDynamicExecutionMetrics() *MetricsUpdateOne { muo.mutation.ClearDynamicExecutionMetrics() return muo } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to DynamicExecutionMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveDynamicExecutionMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveDynamicExecutionMetricIDs(ids...) - return muo -} - -// RemoveDynamicExecutionMetrics removes "dynamic_execution_metrics" edges to DynamicExecutionMetrics entities. -func (muo *MetricsUpdateOne) RemoveDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *MetricsUpdateOne { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID - } - return muo.RemoveDynamicExecutionMetricIDs(ids...) -} - -// ClearBuildGraphMetrics clears all "build_graph_metrics" edges to the BuildGraphMetrics entity. +// ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. func (muo *MetricsUpdateOne) ClearBuildGraphMetrics() *MetricsUpdateOne { muo.mutation.ClearBuildGraphMetrics() return muo } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to BuildGraphMetrics entities by IDs. -func (muo *MetricsUpdateOne) RemoveBuildGraphMetricIDs(ids ...int) *MetricsUpdateOne { - muo.mutation.RemoveBuildGraphMetricIDs(ids...) - return muo -} - -// RemoveBuildGraphMetrics removes "build_graph_metrics" edges to BuildGraphMetrics entities. -func (muo *MetricsUpdateOne) RemoveBuildGraphMetrics(b ...*BuildGraphMetrics) *MetricsUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return muo.RemoveBuildGraphMetricIDs(ids...) -} - // Where appends a list predicates to the MetricsUpdate builder. func (muo *MetricsUpdateOne) Where(ps ...predicate.Metrics) *MetricsUpdateOne { muo.mutation.Where(ps...) 
@@ -1450,20 +1070,7 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: metrics.ActionSummaryTable, - Columns: []string{metrics.ActionSummaryColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !muo.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ActionSummaryTable, Columns: []string{metrics.ActionSummaryColumn}, @@ -1472,14 +1079,11 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ActionSummaryTable, Columns: []string{metrics.ActionSummaryColumn}, @@ -1495,39 +1099,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.MemoryMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedMemoryMetricsIDs(); len(nodes) > 0 && !muo.mutation.MemoryMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, + Columns: []string{metrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.MemoryMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.MemoryMetricsTable, - Columns: metrics.MemoryMetricsPrimaryKey, + Columns: []string{metrics.MemoryMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(memorymetrics.FieldID, field.TypeInt), @@ -1540,39 +1128,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.TargetMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedTargetMetricsIDs(); len(nodes) > 0 && !muo.mutation.TargetMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, + Columns: 
[]string{metrics.TargetMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.TargetMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TargetMetricsTable, - Columns: metrics.TargetMetricsPrimaryKey, + Columns: []string{metrics.TargetMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(targetmetrics.FieldID, field.TypeInt), @@ -1585,10 +1157,10 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.PackageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, + Columns: []string{metrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -1596,28 +1168,12 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := muo.mutation.RemovedPackageMetricsIDs(); len(nodes) > 0 && !muo.mutation.PackageMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := muo.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.PackageMetricsTable, - Columns: metrics.PackageMetricsPrimaryKey, + Columns: []string{metrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -1630,10 +1186,10 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.TimingMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, + Columns: []string{metrics.TimingMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), @@ -1641,28 +1197,12 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := muo.mutation.RemovedTimingMetricsIDs(); len(nodes) > 0 && !muo.mutation.TimingMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := muo.mutation.TimingMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, 
Inverse: false, Table: metrics.TimingMetricsTable, - Columns: metrics.TimingMetricsPrimaryKey, + Columns: []string{metrics.TimingMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingmetrics.FieldID, field.TypeInt), @@ -1675,10 +1215,10 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.CumulativeMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, + Columns: []string{metrics.CumulativeMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), @@ -1686,28 +1226,12 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := muo.mutation.RemovedCumulativeMetricsIDs(); len(nodes) > 0 && !muo.mutation.CumulativeMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := muo.mutation.CumulativeMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.CumulativeMetricsTable, - Columns: metrics.CumulativeMetricsPrimaryKey, + Columns: []string{metrics.CumulativeMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(cumulativemetrics.FieldID, field.TypeInt), @@ -1720,39 +1244,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.ArtifactMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedArtifactMetricsIDs(); len(nodes) > 0 && !muo.mutation.ArtifactMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, + Columns: []string{metrics.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.ArtifactMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.ArtifactMetricsTable, - Columns: metrics.ArtifactMetricsPrimaryKey, + Columns: []string{metrics.ArtifactMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(artifactmetrics.FieldID, field.TypeInt), @@ -1765,39 +1273,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.NetworkMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - 
Inverse: false, - Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedNetworkMetricsIDs(); len(nodes) > 0 && !muo.mutation.NetworkMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, + Columns: []string{metrics.NetworkMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.NetworkMetricsTable, - Columns: metrics.NetworkMetricsPrimaryKey, + Columns: []string{metrics.NetworkMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(networkmetrics.FieldID, field.TypeInt), @@ -1810,39 +1302,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.DynamicExecutionMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedDynamicExecutionMetricsIDs(); len(nodes) > 0 && !muo.mutation.DynamicExecutionMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{metrics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.DynamicExecutionMetricsTable, - Columns: metrics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{metrics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), @@ -1855,39 +1331,23 @@ func (muo *MetricsUpdateOne) sqlSave(ctx context.Context) (_node *Metrics, err e } if muo.mutation.BuildGraphMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: false, - Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := muo.mutation.RemovedBuildGraphMetricsIDs(); len(nodes) > 0 && !muo.mutation.BuildGraphMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: 
sqlgraph.O2O, Inverse: false, Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, + Columns: []string{metrics.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := muo.mutation.BuildGraphMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: false, Table: metrics.BuildGraphMetricsTable, - Columns: metrics.BuildGraphMetricsPrimaryKey, + Columns: []string{metrics.BuildGraphMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(buildgraphmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/migrate/schema.go b/ent/gen/ent/migrate/schema.go index c052075..40d0fd4 100644 --- a/ent/gen/ent/migrate/schema.go +++ b/ent/gen/ent/migrate/schema.go @@ -16,12 +16,21 @@ var ( {Name: "load_time_in_ms", Type: field.TypeInt64, Nullable: true}, {Name: "hits", Type: field.TypeInt32, Nullable: true}, {Name: "misses", Type: field.TypeInt32, Nullable: true}, + {Name: "action_summary_action_cache_statistics", Type: field.TypeInt, Unique: true, Nullable: true}, } // ActionCacheStatisticsTable holds the schema information for the "action_cache_statistics" table. ActionCacheStatisticsTable = &schema.Table{ Name: "action_cache_statistics", Columns: ActionCacheStatisticsColumns, PrimaryKey: []*schema.Column{ActionCacheStatisticsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "action_cache_statistics_action_summaries_action_cache_statistics", + Columns: []*schema.Column{ActionCacheStatisticsColumns[6]}, + RefColumns: []*schema.Column{ActionSummariesColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // ActionDataColumns holds the columns for the "action_data" table. ActionDataColumns = []*schema.Column{ @@ -33,12 +42,21 @@ var ( {Name: "last_ended_ms", Type: field.TypeInt64, Nullable: true}, {Name: "system_time", Type: field.TypeInt64, Nullable: true}, {Name: "user_time", Type: field.TypeInt64, Nullable: true}, + {Name: "action_summary_action_data", Type: field.TypeInt, Nullable: true}, } // ActionDataTable holds the schema information for the "action_data" table. ActionDataTable = &schema.Table{ Name: "action_data", Columns: ActionDataColumns, PrimaryKey: []*schema.Column{ActionDataColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "action_data_action_summaries_action_data", + Columns: []*schema.Column{ActionDataColumns[8]}, + RefColumns: []*schema.Column{ActionSummariesColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // ActionSummariesColumns holds the columns for the "action_summaries" table. ActionSummariesColumns = []*schema.Column{ @@ -47,7 +65,7 @@ var ( {Name: "actions_created_not_including_aspects", Type: field.TypeInt64, Nullable: true}, {Name: "actions_executed", Type: field.TypeInt64, Nullable: true}, {Name: "remote_cache_hits", Type: field.TypeInt64, Nullable: true}, - {Name: "metrics_action_summary", Type: field.TypeInt, Nullable: true}, + {Name: "metrics_action_summary", Type: field.TypeInt, Unique: true, Nullable: true}, } // ActionSummariesTable holds the schema information for the "action_summaries" table. ActionSummariesTable = &schema.Table{ @@ -66,12 +84,42 @@ var ( // ArtifactMetricsColumns holds the columns for the "artifact_metrics" table. 
ArtifactMetricsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "artifact_metrics_source_artifacts_read", Type: field.TypeInt, Nullable: true}, + {Name: "artifact_metrics_output_artifacts_seen", Type: field.TypeInt, Nullable: true}, + {Name: "artifact_metrics_output_artifacts_from_action_cache", Type: field.TypeInt, Nullable: true}, + {Name: "metrics_artifact_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // ArtifactMetricsTable holds the schema information for the "artifact_metrics" table. ArtifactMetricsTable = &schema.Table{ Name: "artifact_metrics", Columns: ArtifactMetricsColumns, PrimaryKey: []*schema.Column{ArtifactMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "artifact_metrics_files_metrics_source_artifacts_read", + Columns: []*schema.Column{ArtifactMetricsColumns[1]}, + RefColumns: []*schema.Column{FilesMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "artifact_metrics_files_metrics_output_artifacts_seen", + Columns: []*schema.Column{ArtifactMetricsColumns[2]}, + RefColumns: []*schema.Column{FilesMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "artifact_metrics_files_metrics_output_artifacts_from_action_cache", + Columns: []*schema.Column{ArtifactMetricsColumns[3]}, + RefColumns: []*schema.Column{FilesMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "artifact_metrics_metrics_artifact_metrics", + Columns: []*schema.Column{ArtifactMetricsColumns[4]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // BazelInvocationsColumns holds the columns for the "bazel_invocations" table. BazelInvocationsColumns = []*schema.Column{ @@ -192,44 +240,96 @@ var ( {Name: "other_configured_target_count", Type: field.TypeInt32, Nullable: true}, {Name: "output_artifact_count", Type: field.TypeInt32, Nullable: true}, {Name: "post_invocation_skyframe_node_count", Type: field.TypeInt32, Nullable: true}, + {Name: "build_graph_metrics_dirtied_values", Type: field.TypeInt, Nullable: true}, + {Name: "build_graph_metrics_changed_values", Type: field.TypeInt, Nullable: true}, + {Name: "build_graph_metrics_built_values", Type: field.TypeInt, Nullable: true}, + {Name: "build_graph_metrics_cleaned_values", Type: field.TypeInt, Nullable: true}, + {Name: "metrics_build_graph_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // BuildGraphMetricsTable holds the schema information for the "build_graph_metrics" table. 
BuildGraphMetricsTable = &schema.Table{ Name: "build_graph_metrics", Columns: BuildGraphMetricsColumns, PrimaryKey: []*schema.Column{BuildGraphMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "build_graph_metrics_evaluation_stats_dirtied_values", + Columns: []*schema.Column{BuildGraphMetricsColumns[10]}, + RefColumns: []*schema.Column{EvaluationStatsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "build_graph_metrics_evaluation_stats_changed_values", + Columns: []*schema.Column{BuildGraphMetricsColumns[11]}, + RefColumns: []*schema.Column{EvaluationStatsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "build_graph_metrics_evaluation_stats_built_values", + Columns: []*schema.Column{BuildGraphMetricsColumns[12]}, + RefColumns: []*schema.Column{EvaluationStatsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "build_graph_metrics_evaluation_stats_cleaned_values", + Columns: []*schema.Column{BuildGraphMetricsColumns[13]}, + RefColumns: []*schema.Column{EvaluationStatsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "build_graph_metrics_metrics_build_graph_metrics", + Columns: []*schema.Column{BuildGraphMetricsColumns[14]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // CumulativeMetricsColumns holds the columns for the "cumulative_metrics" table. CumulativeMetricsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "num_analyses", Type: field.TypeInt32, Nullable: true}, {Name: "num_builds", Type: field.TypeInt32, Nullable: true}, + {Name: "metrics_cumulative_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // CumulativeMetricsTable holds the schema information for the "cumulative_metrics" table. CumulativeMetricsTable = &schema.Table{ Name: "cumulative_metrics", Columns: CumulativeMetricsColumns, PrimaryKey: []*schema.Column{CumulativeMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "cumulative_metrics_metrics_cumulative_metrics", + Columns: []*schema.Column{CumulativeMetricsColumns[3]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // DynamicExecutionMetricsColumns holds the columns for the "dynamic_execution_metrics" table. DynamicExecutionMetricsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "metrics_dynamic_execution_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // DynamicExecutionMetricsTable holds the schema information for the "dynamic_execution_metrics" table. DynamicExecutionMetricsTable = &schema.Table{ Name: "dynamic_execution_metrics", Columns: DynamicExecutionMetricsColumns, PrimaryKey: []*schema.Column{DynamicExecutionMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "dynamic_execution_metrics_metrics_dynamic_execution_metrics", + Columns: []*schema.Column{DynamicExecutionMetricsColumns[1]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // EvaluationStatsColumns holds the columns for the "evaluation_stats" table. 
EvaluationStatsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "skyfunction_name", Type: field.TypeString, Nullable: true}, {Name: "count", Type: field.TypeInt64, Nullable: true}, - {Name: "build_graph_metrics_dirtied_values", Type: field.TypeInt, Nullable: true}, - {Name: "build_graph_metrics_changed_values", Type: field.TypeInt, Nullable: true}, - {Name: "build_graph_metrics_built_values", Type: field.TypeInt, Nullable: true}, - {Name: "build_graph_metrics_cleaned_values", Type: field.TypeInt, Nullable: true}, + {Name: "build_graph_metrics_evaluated_values", Type: field.TypeInt, Unique: true, Nullable: true}, } // EvaluationStatsTable holds the schema information for the "evaluation_stats" table. EvaluationStatsTable = &schema.Table{ @@ -238,29 +338,11 @@ var ( PrimaryKey: []*schema.Column{EvaluationStatsColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "evaluation_stats_build_graph_metrics_dirtied_values", + Symbol: "evaluation_stats_build_graph_metrics_evaluated_values", Columns: []*schema.Column{EvaluationStatsColumns[3]}, RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, OnDelete: schema.SetNull, }, - { - Symbol: "evaluation_stats_build_graph_metrics_changed_values", - Columns: []*schema.Column{EvaluationStatsColumns[4]}, - RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, - OnDelete: schema.SetNull, - }, - { - Symbol: "evaluation_stats_build_graph_metrics_built_values", - Columns: []*schema.Column{EvaluationStatsColumns[5]}, - RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, - OnDelete: schema.SetNull, - }, - { - Symbol: "evaluation_stats_build_graph_metrics_cleaned_values", - Columns: []*schema.Column{EvaluationStatsColumns[6]}, - RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, - OnDelete: schema.SetNull, - }, }, } // EventFilesColumns holds the columns for the "event_files" table. @@ -294,7 +376,7 @@ var ( {Name: "cached_remotely", Type: field.TypeBool, Nullable: true}, {Name: "exit_code", Type: field.TypeInt32, Nullable: true}, {Name: "hostname", Type: field.TypeString, Nullable: true}, - {Name: "exection_info_timing_breakdown", Type: field.TypeInt, Nullable: true}, + {Name: "test_result_bes_execution_info", Type: field.TypeInt, Unique: true, Nullable: true}, } // ExectionInfosTable holds the schema information for the "exection_infos" table. ExectionInfosTable = &schema.Table{ @@ -303,9 +385,9 @@ var ( PrimaryKey: []*schema.Column{ExectionInfosColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "exection_infos_timing_breakdowns_timing_breakdown", + Symbol: "exection_infos_test_result_be_ss_execution_info", Columns: []*schema.Column{ExectionInfosColumns[6]}, - RefColumns: []*schema.Column{TimingBreakdownsColumns[0]}, + RefColumns: []*schema.Column{TestResultBeSsColumns[0]}, OnDelete: schema.SetNull, }, }, @@ -315,9 +397,7 @@ var ( {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "size_in_bytes", Type: field.TypeInt64, Nullable: true}, {Name: "count", Type: field.TypeInt32, Nullable: true}, - {Name: "artifact_metrics_source_artifacts_read", Type: field.TypeInt, Nullable: true}, - {Name: "artifact_metrics_output_artifacts_seen", Type: field.TypeInt, Nullable: true}, - {Name: "artifact_metrics_output_artifacts_from_action_cache", Type: field.TypeInt, Nullable: true}, + {Name: "artifact_metrics_top_level_artifacts", Type: field.TypeInt, Unique: true, Nullable: true}, } // FilesMetricsTable holds the schema information for the "files_metrics" table. 
FilesMetricsTable = &schema.Table{ @@ -326,23 +406,11 @@ var ( PrimaryKey: []*schema.Column{FilesMetricsColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "files_metrics_artifact_metrics_source_artifacts_read", + Symbol: "files_metrics_artifact_metrics_top_level_artifacts", Columns: []*schema.Column{FilesMetricsColumns[3]}, RefColumns: []*schema.Column{ArtifactMetricsColumns[0]}, OnDelete: schema.SetNull, }, - { - Symbol: "files_metrics_artifact_metrics_output_artifacts_seen", - Columns: []*schema.Column{FilesMetricsColumns[4]}, - RefColumns: []*schema.Column{ArtifactMetricsColumns[0]}, - OnDelete: schema.SetNull, - }, - { - Symbol: "files_metrics_artifact_metrics_output_artifacts_from_action_cache", - Columns: []*schema.Column{FilesMetricsColumns[5]}, - RefColumns: []*schema.Column{ArtifactMetricsColumns[0]}, - OnDelete: schema.SetNull, - }, }, } // GarbageMetricsColumns holds the columns for the "garbage_metrics" table. @@ -350,12 +418,21 @@ var ( {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "type", Type: field.TypeString, Nullable: true}, {Name: "garbage_collected", Type: field.TypeInt64, Nullable: true}, + {Name: "memory_metrics_garbage_metrics", Type: field.TypeInt, Nullable: true}, } // GarbageMetricsTable holds the schema information for the "garbage_metrics" table. GarbageMetricsTable = &schema.Table{ Name: "garbage_metrics", Columns: GarbageMetricsColumns, PrimaryKey: []*schema.Column{GarbageMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "garbage_metrics_memory_metrics_garbage_metrics", + Columns: []*schema.Column{GarbageMetricsColumns[3]}, + RefColumns: []*schema.Column{MemoryMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // MemoryMetricsColumns holds the columns for the "memory_metrics" table. MemoryMetricsColumns = []*schema.Column{ @@ -363,12 +440,21 @@ var ( {Name: "peak_post_gc_heap_size", Type: field.TypeInt64, Nullable: true}, {Name: "used_heap_size_post_build", Type: field.TypeInt64, Nullable: true}, {Name: "peak_post_gc_tenured_space_heap_size", Type: field.TypeInt64, Nullable: true}, + {Name: "metrics_memory_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // MemoryMetricsTable holds the schema information for the "memory_metrics" table. MemoryMetricsTable = &schema.Table{ Name: "memory_metrics", Columns: MemoryMetricsColumns, PrimaryKey: []*schema.Column{MemoryMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "memory_metrics_metrics_memory_metrics", + Columns: []*schema.Column{MemoryMetricsColumns[4]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // MetricsColumns holds the columns for the "metrics" table. MetricsColumns = []*schema.Column{ @@ -394,17 +480,27 @@ var ( {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "reason", Type: field.TypeEnum, Nullable: true, Enums: []string{"DIFFERENT_ACTION_KEY", "DIFFERENT_DEPS", "DIFFERENT_ENVIRONMENT", "DIFFERENT_FILES", "CORRUPTED_CACHE_ENTRY", "NOT_CACHED", "UNCONDITIONAL_EXECUTION", "UNKNOWN"}, Default: "UNKNOWN"}, {Name: "count", Type: field.TypeInt32, Nullable: true}, + {Name: "action_cache_statistics_miss_details", Type: field.TypeInt, Nullable: true}, } // MissDetailsTable holds the schema information for the "miss_details" table. 
MissDetailsTable = &schema.Table{ Name: "miss_details", Columns: MissDetailsColumns, PrimaryKey: []*schema.Column{MissDetailsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "miss_details_action_cache_statistics_miss_details", + Columns: []*schema.Column{MissDetailsColumns[3]}, + RefColumns: []*schema.Column{ActionCacheStatisticsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // NamedSetOfFilesColumns holds the columns for the "named_set_of_files" table. NamedSetOfFilesColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "named_set_of_files_file_sets", Type: field.TypeInt, Unique: true, Nullable: true}, + {Name: "output_group_file_sets", Type: field.TypeInt, Unique: true, Nullable: true}, } // NamedSetOfFilesTable holds the schema information for the "named_set_of_files" table. NamedSetOfFilesTable = &schema.Table{ @@ -418,24 +514,39 @@ var ( RefColumns: []*schema.Column{NamedSetOfFilesColumns[0]}, OnDelete: schema.SetNull, }, + { + Symbol: "named_set_of_files_output_groups_file_sets", + Columns: []*schema.Column{NamedSetOfFilesColumns[2]}, + RefColumns: []*schema.Column{OutputGroupsColumns[0]}, + OnDelete: schema.SetNull, + }, }, } // NetworkMetricsColumns holds the columns for the "network_metrics" table. NetworkMetricsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, + {Name: "metrics_network_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // NetworkMetricsTable holds the schema information for the "network_metrics" table. NetworkMetricsTable = &schema.Table{ Name: "network_metrics", Columns: NetworkMetricsColumns, PrimaryKey: []*schema.Column{NetworkMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "network_metrics_metrics_network_metrics", + Columns: []*schema.Column{NetworkMetricsColumns[1]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // OutputGroupsColumns holds the columns for the "output_groups" table. OutputGroupsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "incomplete", Type: field.TypeBool, Nullable: true}, - {Name: "output_group_file_sets", Type: field.TypeInt, Nullable: true}, + {Name: "target_complete_output_group", Type: field.TypeInt, Unique: true, Nullable: true}, } // OutputGroupsTable holds the schema information for the "output_groups" table. OutputGroupsTable = &schema.Table{ @@ -444,9 +555,9 @@ var ( PrimaryKey: []*schema.Column{OutputGroupsColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "output_groups_named_set_of_files_file_sets", + Symbol: "output_groups_target_completes_output_group", Columns: []*schema.Column{OutputGroupsColumns[3]}, - RefColumns: []*schema.Column{NamedSetOfFilesColumns[0]}, + RefColumns: []*schema.Column{TargetCompletesColumns[0]}, OnDelete: schema.SetNull, }, }, @@ -460,23 +571,41 @@ var ( {Name: "computation_steps", Type: field.TypeUint64, Nullable: true}, {Name: "num_transitive_loads", Type: field.TypeUint64, Nullable: true}, {Name: "package_overhead", Type: field.TypeUint64, Nullable: true}, + {Name: "package_metrics_package_load_metrics", Type: field.TypeInt, Nullable: true}, } // PackageLoadMetricsTable holds the schema information for the "package_load_metrics" table. 
PackageLoadMetricsTable = &schema.Table{ Name: "package_load_metrics", Columns: PackageLoadMetricsColumns, PrimaryKey: []*schema.Column{PackageLoadMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "package_load_metrics_package_metrics_package_load_metrics", + Columns: []*schema.Column{PackageLoadMetricsColumns[7]}, + RefColumns: []*schema.Column{PackageMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // PackageMetricsColumns holds the columns for the "package_metrics" table. PackageMetricsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "packages_loaded", Type: field.TypeInt64, Nullable: true}, + {Name: "metrics_package_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // PackageMetricsTable holds the schema information for the "package_metrics" table. PackageMetricsTable = &schema.Table{ Name: "package_metrics", Columns: PackageMetricsColumns, PrimaryKey: []*schema.Column{PackageMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "package_metrics_metrics_package_metrics", + Columns: []*schema.Column{PackageMetricsColumns[2]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // RaceStatisticsColumns holds the columns for the "race_statistics" table. RaceStatisticsColumns = []*schema.Column{ @@ -486,24 +615,42 @@ var ( {Name: "remote_runner", Type: field.TypeString, Nullable: true}, {Name: "local_wins", Type: field.TypeInt64, Nullable: true}, {Name: "renote_wins", Type: field.TypeInt64, Nullable: true}, + {Name: "dynamic_execution_metrics_race_statistics", Type: field.TypeInt, Nullable: true}, } // RaceStatisticsTable holds the schema information for the "race_statistics" table. RaceStatisticsTable = &schema.Table{ Name: "race_statistics", Columns: RaceStatisticsColumns, PrimaryKey: []*schema.Column{RaceStatisticsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "race_statistics_dynamic_execution_metrics_race_statistics", + Columns: []*schema.Column{RaceStatisticsColumns[6]}, + RefColumns: []*schema.Column{DynamicExecutionMetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // ResourceUsagesColumns holds the columns for the "resource_usages" table. ResourceUsagesColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "value", Type: field.TypeString, Nullable: true}, + {Name: "exection_info_resource_usage", Type: field.TypeInt, Nullable: true}, } // ResourceUsagesTable holds the schema information for the "resource_usages" table. ResourceUsagesTable = &schema.Table{ Name: "resource_usages", Columns: ResourceUsagesColumns, PrimaryKey: []*schema.Column{ResourceUsagesColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "resource_usages_exection_infos_resource_usage", + Columns: []*schema.Column{ResourceUsagesColumns[3]}, + RefColumns: []*schema.Column{ExectionInfosColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // RunnerCountsColumns holds the columns for the "runner_counts" table. RunnerCountsColumns = []*schema.Column{ @@ -511,12 +658,21 @@ var ( {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "exec_kind", Type: field.TypeString, Nullable: true}, {Name: "actions_executed", Type: field.TypeInt64, Nullable: true}, + {Name: "action_summary_runner_count", Type: field.TypeInt, Nullable: true}, } // RunnerCountsTable holds the schema information for the "runner_counts" table. 
RunnerCountsTable = &schema.Table{ Name: "runner_counts", Columns: RunnerCountsColumns, PrimaryKey: []*schema.Column{RunnerCountsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "runner_counts_action_summaries_runner_count", + Columns: []*schema.Column{RunnerCountsColumns[4]}, + RefColumns: []*schema.Column{ActionSummariesColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // SystemNetworkStatsColumns holds the columns for the "system_network_stats" table. SystemNetworkStatsColumns = []*schema.Column{ @@ -529,7 +685,7 @@ var ( {Name: "peak_bytes_recv_per_sec", Type: field.TypeUint64, Nullable: true}, {Name: "peak_packets_sent_per_sec", Type: field.TypeUint64, Nullable: true}, {Name: "peak_packets_recv_per_sec", Type: field.TypeUint64, Nullable: true}, - {Name: "network_metrics_system_network_stats", Type: field.TypeInt, Nullable: true}, + {Name: "network_metrics_system_network_stats", Type: field.TypeInt, Unique: true, Nullable: true}, } // SystemNetworkStatsTable holds the schema information for the "system_network_stats" table. SystemNetworkStatsTable = &schema.Table{ @@ -555,7 +711,7 @@ var ( {Name: "test_timeout_seconds", Type: field.TypeInt64, Nullable: true}, {Name: "test_timeout", Type: field.TypeInt64, Nullable: true}, {Name: "test_size", Type: field.TypeEnum, Nullable: true, Enums: []string{"UNKNOWN", "SMALL", "MEDIUM", "LARGE", "ENORMOUS"}}, - {Name: "target_complete_output_group", Type: field.TypeInt, Nullable: true}, + {Name: "target_pair_completion", Type: field.TypeInt, Unique: true, Nullable: true}, } // TargetCompletesTable holds the schema information for the "target_completes" table. TargetCompletesTable = &schema.Table{ @@ -564,9 +720,9 @@ var ( PrimaryKey: []*schema.Column{TargetCompletesColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "target_completes_output_groups_output_group", + Symbol: "target_completes_target_pairs_completion", Columns: []*schema.Column{TargetCompletesColumns[8]}, - RefColumns: []*schema.Column{OutputGroupsColumns[0]}, + RefColumns: []*schema.Column{TargetPairsColumns[0]}, OnDelete: schema.SetNull, }, }, @@ -578,12 +734,21 @@ var ( {Name: "target_kind", Type: field.TypeString, Nullable: true}, {Name: "start_time_in_ms", Type: field.TypeInt64, Nullable: true}, {Name: "test_size", Type: field.TypeEnum, Nullable: true, Enums: []string{"UNKNOWN", "SMALL", "MEDIUM", "LARGE", "ENORMOUS"}}, + {Name: "target_pair_configuration", Type: field.TypeInt, Unique: true, Nullable: true}, } // TargetConfiguredsTable holds the schema information for the "target_configureds" table. TargetConfiguredsTable = &schema.Table{ Name: "target_configureds", Columns: TargetConfiguredsColumns, PrimaryKey: []*schema.Column{TargetConfiguredsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "target_configureds_target_pairs_configuration", + Columns: []*schema.Column{TargetConfiguredsColumns[5]}, + RefColumns: []*schema.Column{TargetPairsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // TargetMetricsColumns holds the columns for the "target_metrics" table. TargetMetricsColumns = []*schema.Column{ @@ -591,12 +756,21 @@ var ( {Name: "targets_loaded", Type: field.TypeInt64, Nullable: true}, {Name: "targets_configured", Type: field.TypeInt64, Nullable: true}, {Name: "targets_configured_not_including_aspects", Type: field.TypeInt64, Nullable: true}, + {Name: "metrics_target_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // TargetMetricsTable holds the schema information for the "target_metrics" table. 
TargetMetricsTable = &schema.Table{ Name: "target_metrics", Columns: TargetMetricsColumns, PrimaryKey: []*schema.Column{TargetMetricsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "target_metrics_metrics_target_metrics", + Columns: []*schema.Column{TargetMetricsColumns[4]}, + RefColumns: []*schema.Column{MetricsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // TargetPairsColumns holds the columns for the "target_pairs" table. TargetPairsColumns = []*schema.Column{ @@ -607,8 +781,7 @@ var ( {Name: "target_kind", Type: field.TypeString, Nullable: true}, {Name: "test_size", Type: field.TypeEnum, Nullable: true, Enums: []string{"UNKNOWN", "SMALL", "MEDIUM", "LARGE", "ENORMOUS"}, Default: "UNKNOWN"}, {Name: "abort_reason", Type: field.TypeEnum, Nullable: true, Enums: []string{"UNKNOWN", "USER_INTERRUPTED", "NO_ANALYZE", "NO_BUILD", "TIME_OUT", "REMOTE_ENVIRONMENT_FAILURE", "INTERNAL", "LOADING_FAILURE", "ANALYSIS_FAILURE", "SKIPPED", "INCOMPLETE", "OUT_OF_MEMORY"}}, - {Name: "target_pair_configuration", Type: field.TypeInt, Nullable: true}, - {Name: "target_pair_completion", Type: field.TypeInt, Nullable: true}, + {Name: "bazel_invocation_targets", Type: field.TypeInt, Nullable: true}, } // TargetPairsTable holds the schema information for the "target_pairs" table. TargetPairsTable = &schema.Table{ @@ -617,15 +790,9 @@ var ( PrimaryKey: []*schema.Column{TargetPairsColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "target_pairs_target_configureds_configuration", + Symbol: "target_pairs_bazel_invocations_targets", Columns: []*schema.Column{TargetPairsColumns[7]}, - RefColumns: []*schema.Column{TargetConfiguredsColumns[0]}, - OnDelete: schema.SetNull, - }, - { - Symbol: "target_pairs_target_completes_completion", - Columns: []*schema.Column{TargetPairsColumns[8]}, - RefColumns: []*schema.Column{TargetCompletesColumns[0]}, + RefColumns: []*schema.Column{BazelInvocationsColumns[0]}, OnDelete: schema.SetNull, }, }, @@ -638,8 +805,9 @@ var ( {Name: "strategy", Type: field.TypeString, Nullable: true}, {Name: "cached_locally", Type: field.TypeBool, Nullable: true}, {Name: "cached_remotely", Type: field.TypeBool, Nullable: true}, + {Name: "first_seen", Type: field.TypeTime, Nullable: true}, {Name: "duration_ms", Type: field.TypeInt64, Nullable: true}, - {Name: "test_collection_test_summary", Type: field.TypeInt, Nullable: true}, + {Name: "bazel_invocation_test_collection", Type: field.TypeInt, Nullable: true}, } // TestCollectionsTable holds the schema information for the "test_collections" table. 
TestCollectionsTable = &schema.Table{ @@ -648,9 +816,9 @@ var ( PrimaryKey: []*schema.Column{TestCollectionsColumns[0]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "test_collections_test_summaries_test_summary", - Columns: []*schema.Column{TestCollectionsColumns[7]}, - RefColumns: []*schema.Column{TestSummariesColumns[0]}, + Symbol: "test_collections_bazel_invocations_test_collection", + Columns: []*schema.Column{TestCollectionsColumns[8]}, + RefColumns: []*schema.Column{BazelInvocationsColumns[0]}, OnDelete: schema.SetNull, }, }, @@ -667,6 +835,7 @@ var ( {Name: "output_group_inline_files", Type: field.TypeInt, Nullable: true}, {Name: "target_complete_important_output", Type: field.TypeInt, Nullable: true}, {Name: "target_complete_directory_output", Type: field.TypeInt, Nullable: true}, + {Name: "test_result_bes_test_action_output", Type: field.TypeInt, Nullable: true}, {Name: "test_summary_passed", Type: field.TypeInt, Nullable: true}, {Name: "test_summary_failed", Type: field.TypeInt, Nullable: true}, } @@ -701,14 +870,20 @@ var ( OnDelete: schema.SetNull, }, { - Symbol: "test_files_test_summaries_passed", + Symbol: "test_files_test_result_be_ss_test_action_output", Columns: []*schema.Column{TestFilesColumns[10]}, + RefColumns: []*schema.Column{TestResultBeSsColumns[0]}, + OnDelete: schema.SetNull, + }, + { + Symbol: "test_files_test_summaries_passed", + Columns: []*schema.Column{TestFilesColumns[11]}, RefColumns: []*schema.Column{TestSummariesColumns[0]}, OnDelete: schema.SetNull, }, { Symbol: "test_files_test_summaries_failed", - Columns: []*schema.Column{TestFilesColumns[11]}, + Columns: []*schema.Column{TestFilesColumns[12]}, RefColumns: []*schema.Column{TestSummariesColumns[0]}, OnDelete: schema.SetNull, }, @@ -727,7 +902,6 @@ var ( {Name: "test_attempt_duration_millis", Type: field.TypeInt64, Nullable: true}, {Name: "test_attempt_duration", Type: field.TypeInt64, Nullable: true}, {Name: "test_collection_test_results", Type: field.TypeInt, Nullable: true}, - {Name: "test_result_bes_execution_info", Type: field.TypeInt, Nullable: true}, } // TestResultBeSsTable holds the schema information for the "test_result_be_ss" table. TestResultBeSsTable = &schema.Table{ @@ -741,12 +915,6 @@ var ( RefColumns: []*schema.Column{TestCollectionsColumns[0]}, OnDelete: schema.SetNull, }, - { - Symbol: "test_result_be_ss_exection_infos_execution_info", - Columns: []*schema.Column{TestResultBeSsColumns[11]}, - RefColumns: []*schema.Column{ExectionInfosColumns[0]}, - OnDelete: schema.SetNull, - }, }, } // TestSummariesColumns holds the columns for the "test_summaries" table. @@ -762,36 +930,63 @@ var ( {Name: "last_stop_time", Type: field.TypeInt64, Nullable: true}, {Name: "total_run_duration", Type: field.TypeInt64, Nullable: true}, {Name: "label", Type: field.TypeString, Nullable: true}, + {Name: "test_collection_test_summary", Type: field.TypeInt, Unique: true, Nullable: true}, } // TestSummariesTable holds the schema information for the "test_summaries" table. TestSummariesTable = &schema.Table{ Name: "test_summaries", Columns: TestSummariesColumns, PrimaryKey: []*schema.Column{TestSummariesColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "test_summaries_test_collections_test_summary", + Columns: []*schema.Column{TestSummariesColumns[11]}, + RefColumns: []*schema.Column{TestCollectionsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // TimingBreakdownsColumns holds the columns for the "timing_breakdowns" table. 
TimingBreakdownsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "time", Type: field.TypeString, Nullable: true}, + {Name: "exection_info_timing_breakdown", Type: field.TypeInt, Unique: true, Nullable: true}, } // TimingBreakdownsTable holds the schema information for the "timing_breakdowns" table. TimingBreakdownsTable = &schema.Table{ Name: "timing_breakdowns", Columns: TimingBreakdownsColumns, PrimaryKey: []*schema.Column{TimingBreakdownsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "timing_breakdowns_exection_infos_timing_breakdown", + Columns: []*schema.Column{TimingBreakdownsColumns[3]}, + RefColumns: []*schema.Column{ExectionInfosColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // TimingChildsColumns holds the columns for the "timing_childs" table. TimingChildsColumns = []*schema.Column{ {Name: "id", Type: field.TypeInt, Increment: true}, {Name: "name", Type: field.TypeString, Nullable: true}, {Name: "time", Type: field.TypeString, Nullable: true}, + {Name: "timing_breakdown_child", Type: field.TypeInt, Nullable: true}, } // TimingChildsTable holds the schema information for the "timing_childs" table. TimingChildsTable = &schema.Table{ Name: "timing_childs", Columns: TimingChildsColumns, PrimaryKey: []*schema.Column{TimingChildsColumns[0]}, + ForeignKeys: []*schema.ForeignKey{ + { + Symbol: "timing_childs_timing_breakdowns_child", + Columns: []*schema.Column{TimingChildsColumns[3]}, + RefColumns: []*schema.Column{TimingBreakdownsColumns[0]}, + OnDelete: schema.SetNull, + }, + }, } // TimingMetricsColumns holds the columns for the "timing_metrics" table. TimingMetricsColumns = []*schema.Column{ @@ -801,585 +996,19 @@ var ( {Name: "analysis_phase_time_in_ms", Type: field.TypeInt64, Nullable: true}, {Name: "execution_phase_time_in_ms", Type: field.TypeInt64, Nullable: true}, {Name: "actions_execution_start_in_ms", Type: field.TypeInt64, Nullable: true}, + {Name: "metrics_timing_metrics", Type: field.TypeInt, Unique: true, Nullable: true}, } // TimingMetricsTable holds the schema information for the "timing_metrics" table. TimingMetricsTable = &schema.Table{ Name: "timing_metrics", Columns: TimingMetricsColumns, PrimaryKey: []*schema.Column{TimingMetricsColumns[0]}, - } - // ActionCacheStatisticsMissDetailsColumns holds the columns for the "action_cache_statistics_miss_details" table. - ActionCacheStatisticsMissDetailsColumns = []*schema.Column{ - {Name: "action_cache_statistics_id", Type: field.TypeInt}, - {Name: "miss_detail_id", Type: field.TypeInt}, - } - // ActionCacheStatisticsMissDetailsTable holds the schema information for the "action_cache_statistics_miss_details" table. 
- ActionCacheStatisticsMissDetailsTable = &schema.Table{ - Name: "action_cache_statistics_miss_details", - Columns: ActionCacheStatisticsMissDetailsColumns, - PrimaryKey: []*schema.Column{ActionCacheStatisticsMissDetailsColumns[0], ActionCacheStatisticsMissDetailsColumns[1]}, ForeignKeys: []*schema.ForeignKey{ { - Symbol: "action_cache_statistics_miss_details_action_cache_statistics_id", - Columns: []*schema.Column{ActionCacheStatisticsMissDetailsColumns[0]}, - RefColumns: []*schema.Column{ActionCacheStatisticsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "action_cache_statistics_miss_details_miss_detail_id", - Columns: []*schema.Column{ActionCacheStatisticsMissDetailsColumns[1]}, - RefColumns: []*schema.Column{MissDetailsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // ActionSummaryActionDataColumns holds the columns for the "action_summary_action_data" table. - ActionSummaryActionDataColumns = []*schema.Column{ - {Name: "action_summary_id", Type: field.TypeInt}, - {Name: "action_data_id", Type: field.TypeInt}, - } - // ActionSummaryActionDataTable holds the schema information for the "action_summary_action_data" table. - ActionSummaryActionDataTable = &schema.Table{ - Name: "action_summary_action_data", - Columns: ActionSummaryActionDataColumns, - PrimaryKey: []*schema.Column{ActionSummaryActionDataColumns[0], ActionSummaryActionDataColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "action_summary_action_data_action_summary_id", - Columns: []*schema.Column{ActionSummaryActionDataColumns[0]}, - RefColumns: []*schema.Column{ActionSummariesColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "action_summary_action_data_action_data_id", - Columns: []*schema.Column{ActionSummaryActionDataColumns[1]}, - RefColumns: []*schema.Column{ActionDataColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // ActionSummaryRunnerCountColumns holds the columns for the "action_summary_runner_count" table. - ActionSummaryRunnerCountColumns = []*schema.Column{ - {Name: "action_summary_id", Type: field.TypeInt}, - {Name: "runner_count_id", Type: field.TypeInt}, - } - // ActionSummaryRunnerCountTable holds the schema information for the "action_summary_runner_count" table. - ActionSummaryRunnerCountTable = &schema.Table{ - Name: "action_summary_runner_count", - Columns: ActionSummaryRunnerCountColumns, - PrimaryKey: []*schema.Column{ActionSummaryRunnerCountColumns[0], ActionSummaryRunnerCountColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "action_summary_runner_count_action_summary_id", - Columns: []*schema.Column{ActionSummaryRunnerCountColumns[0]}, - RefColumns: []*schema.Column{ActionSummariesColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "action_summary_runner_count_runner_count_id", - Columns: []*schema.Column{ActionSummaryRunnerCountColumns[1]}, - RefColumns: []*schema.Column{RunnerCountsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // ActionSummaryActionCacheStatisticsColumns holds the columns for the "action_summary_action_cache_statistics" table. - ActionSummaryActionCacheStatisticsColumns = []*schema.Column{ - {Name: "action_summary_id", Type: field.TypeInt}, - {Name: "action_cache_statistics_id", Type: field.TypeInt}, - } - // ActionSummaryActionCacheStatisticsTable holds the schema information for the "action_summary_action_cache_statistics" table. 
- ActionSummaryActionCacheStatisticsTable = &schema.Table{ - Name: "action_summary_action_cache_statistics", - Columns: ActionSummaryActionCacheStatisticsColumns, - PrimaryKey: []*schema.Column{ActionSummaryActionCacheStatisticsColumns[0], ActionSummaryActionCacheStatisticsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "action_summary_action_cache_statistics_action_summary_id", - Columns: []*schema.Column{ActionSummaryActionCacheStatisticsColumns[0]}, - RefColumns: []*schema.Column{ActionSummariesColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "action_summary_action_cache_statistics_action_cache_statistics_id", - Columns: []*schema.Column{ActionSummaryActionCacheStatisticsColumns[1]}, - RefColumns: []*schema.Column{ActionCacheStatisticsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // ArtifactMetricsTopLevelArtifactsColumns holds the columns for the "artifact_metrics_top_level_artifacts" table. - ArtifactMetricsTopLevelArtifactsColumns = []*schema.Column{ - {Name: "artifact_metrics_id", Type: field.TypeInt}, - {Name: "files_metric_id", Type: field.TypeInt}, - } - // ArtifactMetricsTopLevelArtifactsTable holds the schema information for the "artifact_metrics_top_level_artifacts" table. - ArtifactMetricsTopLevelArtifactsTable = &schema.Table{ - Name: "artifact_metrics_top_level_artifacts", - Columns: ArtifactMetricsTopLevelArtifactsColumns, - PrimaryKey: []*schema.Column{ArtifactMetricsTopLevelArtifactsColumns[0], ArtifactMetricsTopLevelArtifactsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "artifact_metrics_top_level_artifacts_artifact_metrics_id", - Columns: []*schema.Column{ArtifactMetricsTopLevelArtifactsColumns[0]}, - RefColumns: []*schema.Column{ArtifactMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "artifact_metrics_top_level_artifacts_files_metric_id", - Columns: []*schema.Column{ArtifactMetricsTopLevelArtifactsColumns[1]}, - RefColumns: []*schema.Column{FilesMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // BazelInvocationTestCollectionColumns holds the columns for the "bazel_invocation_test_collection" table. - BazelInvocationTestCollectionColumns = []*schema.Column{ - {Name: "bazel_invocation_id", Type: field.TypeInt}, - {Name: "test_collection_id", Type: field.TypeInt}, - } - // BazelInvocationTestCollectionTable holds the schema information for the "bazel_invocation_test_collection" table. - BazelInvocationTestCollectionTable = &schema.Table{ - Name: "bazel_invocation_test_collection", - Columns: BazelInvocationTestCollectionColumns, - PrimaryKey: []*schema.Column{BazelInvocationTestCollectionColumns[0], BazelInvocationTestCollectionColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "bazel_invocation_test_collection_bazel_invocation_id", - Columns: []*schema.Column{BazelInvocationTestCollectionColumns[0]}, - RefColumns: []*schema.Column{BazelInvocationsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "bazel_invocation_test_collection_test_collection_id", - Columns: []*schema.Column{BazelInvocationTestCollectionColumns[1]}, - RefColumns: []*schema.Column{TestCollectionsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // BazelInvocationTargetsColumns holds the columns for the "bazel_invocation_targets" table. 
- BazelInvocationTargetsColumns = []*schema.Column{ - {Name: "bazel_invocation_id", Type: field.TypeInt}, - {Name: "target_pair_id", Type: field.TypeInt}, - } - // BazelInvocationTargetsTable holds the schema information for the "bazel_invocation_targets" table. - BazelInvocationTargetsTable = &schema.Table{ - Name: "bazel_invocation_targets", - Columns: BazelInvocationTargetsColumns, - PrimaryKey: []*schema.Column{BazelInvocationTargetsColumns[0], BazelInvocationTargetsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "bazel_invocation_targets_bazel_invocation_id", - Columns: []*schema.Column{BazelInvocationTargetsColumns[0]}, - RefColumns: []*schema.Column{BazelInvocationsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "bazel_invocation_targets_target_pair_id", - Columns: []*schema.Column{BazelInvocationTargetsColumns[1]}, - RefColumns: []*schema.Column{TargetPairsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // BuildGraphMetricsEvaluatedValuesColumns holds the columns for the "build_graph_metrics_evaluated_values" table. - BuildGraphMetricsEvaluatedValuesColumns = []*schema.Column{ - {Name: "build_graph_metrics_id", Type: field.TypeInt}, - {Name: "evaluation_stat_id", Type: field.TypeInt}, - } - // BuildGraphMetricsEvaluatedValuesTable holds the schema information for the "build_graph_metrics_evaluated_values" table. - BuildGraphMetricsEvaluatedValuesTable = &schema.Table{ - Name: "build_graph_metrics_evaluated_values", - Columns: BuildGraphMetricsEvaluatedValuesColumns, - PrimaryKey: []*schema.Column{BuildGraphMetricsEvaluatedValuesColumns[0], BuildGraphMetricsEvaluatedValuesColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "build_graph_metrics_evaluated_values_build_graph_metrics_id", - Columns: []*schema.Column{BuildGraphMetricsEvaluatedValuesColumns[0]}, - RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "build_graph_metrics_evaluated_values_evaluation_stat_id", - Columns: []*schema.Column{BuildGraphMetricsEvaluatedValuesColumns[1]}, - RefColumns: []*schema.Column{EvaluationStatsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // DynamicExecutionMetricsRaceStatisticsColumns holds the columns for the "dynamic_execution_metrics_race_statistics" table. - DynamicExecutionMetricsRaceStatisticsColumns = []*schema.Column{ - {Name: "dynamic_execution_metrics_id", Type: field.TypeInt}, - {Name: "race_statistics_id", Type: field.TypeInt}, - } - // DynamicExecutionMetricsRaceStatisticsTable holds the schema information for the "dynamic_execution_metrics_race_statistics" table. 
- DynamicExecutionMetricsRaceStatisticsTable = &schema.Table{ - Name: "dynamic_execution_metrics_race_statistics", - Columns: DynamicExecutionMetricsRaceStatisticsColumns, - PrimaryKey: []*schema.Column{DynamicExecutionMetricsRaceStatisticsColumns[0], DynamicExecutionMetricsRaceStatisticsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "dynamic_execution_metrics_race_statistics_dynamic_execution_metrics_id", - Columns: []*schema.Column{DynamicExecutionMetricsRaceStatisticsColumns[0]}, - RefColumns: []*schema.Column{DynamicExecutionMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "dynamic_execution_metrics_race_statistics_race_statistics_id", - Columns: []*schema.Column{DynamicExecutionMetricsRaceStatisticsColumns[1]}, - RefColumns: []*schema.Column{RaceStatisticsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // ExectionInfoResourceUsageColumns holds the columns for the "exection_info_resource_usage" table. - ExectionInfoResourceUsageColumns = []*schema.Column{ - {Name: "exection_info_id", Type: field.TypeInt}, - {Name: "resource_usage_id", Type: field.TypeInt}, - } - // ExectionInfoResourceUsageTable holds the schema information for the "exection_info_resource_usage" table. - ExectionInfoResourceUsageTable = &schema.Table{ - Name: "exection_info_resource_usage", - Columns: ExectionInfoResourceUsageColumns, - PrimaryKey: []*schema.Column{ExectionInfoResourceUsageColumns[0], ExectionInfoResourceUsageColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "exection_info_resource_usage_exection_info_id", - Columns: []*schema.Column{ExectionInfoResourceUsageColumns[0]}, - RefColumns: []*schema.Column{ExectionInfosColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "exection_info_resource_usage_resource_usage_id", - Columns: []*schema.Column{ExectionInfoResourceUsageColumns[1]}, - RefColumns: []*schema.Column{ResourceUsagesColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MemoryMetricsGarbageMetricsColumns holds the columns for the "memory_metrics_garbage_metrics" table. - MemoryMetricsGarbageMetricsColumns = []*schema.Column{ - {Name: "memory_metrics_id", Type: field.TypeInt}, - {Name: "garbage_metrics_id", Type: field.TypeInt}, - } - // MemoryMetricsGarbageMetricsTable holds the schema information for the "memory_metrics_garbage_metrics" table. - MemoryMetricsGarbageMetricsTable = &schema.Table{ - Name: "memory_metrics_garbage_metrics", - Columns: MemoryMetricsGarbageMetricsColumns, - PrimaryKey: []*schema.Column{MemoryMetricsGarbageMetricsColumns[0], MemoryMetricsGarbageMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "memory_metrics_garbage_metrics_memory_metrics_id", - Columns: []*schema.Column{MemoryMetricsGarbageMetricsColumns[0]}, - RefColumns: []*schema.Column{MemoryMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "memory_metrics_garbage_metrics_garbage_metrics_id", - Columns: []*schema.Column{MemoryMetricsGarbageMetricsColumns[1]}, - RefColumns: []*schema.Column{GarbageMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsMemoryMetricsColumns holds the columns for the "metrics_memory_metrics" table. - MetricsMemoryMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "memory_metrics_id", Type: field.TypeInt}, - } - // MetricsMemoryMetricsTable holds the schema information for the "metrics_memory_metrics" table. 
- MetricsMemoryMetricsTable = &schema.Table{ - Name: "metrics_memory_metrics", - Columns: MetricsMemoryMetricsColumns, - PrimaryKey: []*schema.Column{MetricsMemoryMetricsColumns[0], MetricsMemoryMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_memory_metrics_metrics_id", - Columns: []*schema.Column{MetricsMemoryMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_memory_metrics_memory_metrics_id", - Columns: []*schema.Column{MetricsMemoryMetricsColumns[1]}, - RefColumns: []*schema.Column{MemoryMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsTargetMetricsColumns holds the columns for the "metrics_target_metrics" table. - MetricsTargetMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "target_metrics_id", Type: field.TypeInt}, - } - // MetricsTargetMetricsTable holds the schema information for the "metrics_target_metrics" table. - MetricsTargetMetricsTable = &schema.Table{ - Name: "metrics_target_metrics", - Columns: MetricsTargetMetricsColumns, - PrimaryKey: []*schema.Column{MetricsTargetMetricsColumns[0], MetricsTargetMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_target_metrics_metrics_id", - Columns: []*schema.Column{MetricsTargetMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_target_metrics_target_metrics_id", - Columns: []*schema.Column{MetricsTargetMetricsColumns[1]}, - RefColumns: []*schema.Column{TargetMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsPackageMetricsColumns holds the columns for the "metrics_package_metrics" table. - MetricsPackageMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "package_metrics_id", Type: field.TypeInt}, - } - // MetricsPackageMetricsTable holds the schema information for the "metrics_package_metrics" table. - MetricsPackageMetricsTable = &schema.Table{ - Name: "metrics_package_metrics", - Columns: MetricsPackageMetricsColumns, - PrimaryKey: []*schema.Column{MetricsPackageMetricsColumns[0], MetricsPackageMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_package_metrics_metrics_id", - Columns: []*schema.Column{MetricsPackageMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_package_metrics_package_metrics_id", - Columns: []*schema.Column{MetricsPackageMetricsColumns[1]}, - RefColumns: []*schema.Column{PackageMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsTimingMetricsColumns holds the columns for the "metrics_timing_metrics" table. - MetricsTimingMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "timing_metrics_id", Type: field.TypeInt}, - } - // MetricsTimingMetricsTable holds the schema information for the "metrics_timing_metrics" table. 
- MetricsTimingMetricsTable = &schema.Table{ - Name: "metrics_timing_metrics", - Columns: MetricsTimingMetricsColumns, - PrimaryKey: []*schema.Column{MetricsTimingMetricsColumns[0], MetricsTimingMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_timing_metrics_metrics_id", - Columns: []*schema.Column{MetricsTimingMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_timing_metrics_timing_metrics_id", - Columns: []*schema.Column{MetricsTimingMetricsColumns[1]}, - RefColumns: []*schema.Column{TimingMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsCumulativeMetricsColumns holds the columns for the "metrics_cumulative_metrics" table. - MetricsCumulativeMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "cumulative_metrics_id", Type: field.TypeInt}, - } - // MetricsCumulativeMetricsTable holds the schema information for the "metrics_cumulative_metrics" table. - MetricsCumulativeMetricsTable = &schema.Table{ - Name: "metrics_cumulative_metrics", - Columns: MetricsCumulativeMetricsColumns, - PrimaryKey: []*schema.Column{MetricsCumulativeMetricsColumns[0], MetricsCumulativeMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_cumulative_metrics_metrics_id", - Columns: []*schema.Column{MetricsCumulativeMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_cumulative_metrics_cumulative_metrics_id", - Columns: []*schema.Column{MetricsCumulativeMetricsColumns[1]}, - RefColumns: []*schema.Column{CumulativeMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsArtifactMetricsColumns holds the columns for the "metrics_artifact_metrics" table. - MetricsArtifactMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "artifact_metrics_id", Type: field.TypeInt}, - } - // MetricsArtifactMetricsTable holds the schema information for the "metrics_artifact_metrics" table. - MetricsArtifactMetricsTable = &schema.Table{ - Name: "metrics_artifact_metrics", - Columns: MetricsArtifactMetricsColumns, - PrimaryKey: []*schema.Column{MetricsArtifactMetricsColumns[0], MetricsArtifactMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_artifact_metrics_metrics_id", - Columns: []*schema.Column{MetricsArtifactMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_artifact_metrics_artifact_metrics_id", - Columns: []*schema.Column{MetricsArtifactMetricsColumns[1]}, - RefColumns: []*schema.Column{ArtifactMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsNetworkMetricsColumns holds the columns for the "metrics_network_metrics" table. - MetricsNetworkMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "network_metrics_id", Type: field.TypeInt}, - } - // MetricsNetworkMetricsTable holds the schema information for the "metrics_network_metrics" table. 
- MetricsNetworkMetricsTable = &schema.Table{ - Name: "metrics_network_metrics", - Columns: MetricsNetworkMetricsColumns, - PrimaryKey: []*schema.Column{MetricsNetworkMetricsColumns[0], MetricsNetworkMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_network_metrics_metrics_id", - Columns: []*schema.Column{MetricsNetworkMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_network_metrics_network_metrics_id", - Columns: []*schema.Column{MetricsNetworkMetricsColumns[1]}, - RefColumns: []*schema.Column{NetworkMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsDynamicExecutionMetricsColumns holds the columns for the "metrics_dynamic_execution_metrics" table. - MetricsDynamicExecutionMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "dynamic_execution_metrics_id", Type: field.TypeInt}, - } - // MetricsDynamicExecutionMetricsTable holds the schema information for the "metrics_dynamic_execution_metrics" table. - MetricsDynamicExecutionMetricsTable = &schema.Table{ - Name: "metrics_dynamic_execution_metrics", - Columns: MetricsDynamicExecutionMetricsColumns, - PrimaryKey: []*schema.Column{MetricsDynamicExecutionMetricsColumns[0], MetricsDynamicExecutionMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_dynamic_execution_metrics_metrics_id", - Columns: []*schema.Column{MetricsDynamicExecutionMetricsColumns[0]}, + Symbol: "timing_metrics_metrics_timing_metrics", + Columns: []*schema.Column{TimingMetricsColumns[6]}, RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_dynamic_execution_metrics_dynamic_execution_metrics_id", - Columns: []*schema.Column{MetricsDynamicExecutionMetricsColumns[1]}, - RefColumns: []*schema.Column{DynamicExecutionMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // MetricsBuildGraphMetricsColumns holds the columns for the "metrics_build_graph_metrics" table. - MetricsBuildGraphMetricsColumns = []*schema.Column{ - {Name: "metrics_id", Type: field.TypeInt}, - {Name: "build_graph_metrics_id", Type: field.TypeInt}, - } - // MetricsBuildGraphMetricsTable holds the schema information for the "metrics_build_graph_metrics" table. - MetricsBuildGraphMetricsTable = &schema.Table{ - Name: "metrics_build_graph_metrics", - Columns: MetricsBuildGraphMetricsColumns, - PrimaryKey: []*schema.Column{MetricsBuildGraphMetricsColumns[0], MetricsBuildGraphMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "metrics_build_graph_metrics_metrics_id", - Columns: []*schema.Column{MetricsBuildGraphMetricsColumns[0]}, - RefColumns: []*schema.Column{MetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "metrics_build_graph_metrics_build_graph_metrics_id", - Columns: []*schema.Column{MetricsBuildGraphMetricsColumns[1]}, - RefColumns: []*schema.Column{BuildGraphMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // PackageMetricsPackageLoadMetricsColumns holds the columns for the "package_metrics_package_load_metrics" table. - PackageMetricsPackageLoadMetricsColumns = []*schema.Column{ - {Name: "package_metrics_id", Type: field.TypeInt}, - {Name: "package_load_metrics_id", Type: field.TypeInt}, - } - // PackageMetricsPackageLoadMetricsTable holds the schema information for the "package_metrics_package_load_metrics" table. 
- PackageMetricsPackageLoadMetricsTable = &schema.Table{ - Name: "package_metrics_package_load_metrics", - Columns: PackageMetricsPackageLoadMetricsColumns, - PrimaryKey: []*schema.Column{PackageMetricsPackageLoadMetricsColumns[0], PackageMetricsPackageLoadMetricsColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "package_metrics_package_load_metrics_package_metrics_id", - Columns: []*schema.Column{PackageMetricsPackageLoadMetricsColumns[0]}, - RefColumns: []*schema.Column{PackageMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "package_metrics_package_load_metrics_package_load_metrics_id", - Columns: []*schema.Column{PackageMetricsPackageLoadMetricsColumns[1]}, - RefColumns: []*schema.Column{PackageLoadMetricsColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // TestResultBesTestActionOutputColumns holds the columns for the "test_result_bes_test_action_output" table. - TestResultBesTestActionOutputColumns = []*schema.Column{ - {Name: "test_result_bes_id", Type: field.TypeInt}, - {Name: "test_file_id", Type: field.TypeInt}, - } - // TestResultBesTestActionOutputTable holds the schema information for the "test_result_bes_test_action_output" table. - TestResultBesTestActionOutputTable = &schema.Table{ - Name: "test_result_bes_test_action_output", - Columns: TestResultBesTestActionOutputColumns, - PrimaryKey: []*schema.Column{TestResultBesTestActionOutputColumns[0], TestResultBesTestActionOutputColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "test_result_bes_test_action_output_test_result_bes_id", - Columns: []*schema.Column{TestResultBesTestActionOutputColumns[0]}, - RefColumns: []*schema.Column{TestResultBeSsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "test_result_bes_test_action_output_test_file_id", - Columns: []*schema.Column{TestResultBesTestActionOutputColumns[1]}, - RefColumns: []*schema.Column{TestFilesColumns[0]}, - OnDelete: schema.Cascade, - }, - }, - } - // TimingBreakdownChildColumns holds the columns for the "timing_breakdown_child" table. - TimingBreakdownChildColumns = []*schema.Column{ - {Name: "timing_breakdown_id", Type: field.TypeInt}, - {Name: "timing_child_id", Type: field.TypeInt}, - } - // TimingBreakdownChildTable holds the schema information for the "timing_breakdown_child" table. 
- TimingBreakdownChildTable = &schema.Table{ - Name: "timing_breakdown_child", - Columns: TimingBreakdownChildColumns, - PrimaryKey: []*schema.Column{TimingBreakdownChildColumns[0], TimingBreakdownChildColumns[1]}, - ForeignKeys: []*schema.ForeignKey{ - { - Symbol: "timing_breakdown_child_timing_breakdown_id", - Columns: []*schema.Column{TimingBreakdownChildColumns[0]}, - RefColumns: []*schema.Column{TimingBreakdownsColumns[0]}, - OnDelete: schema.Cascade, - }, - { - Symbol: "timing_breakdown_child_timing_child_id", - Columns: []*schema.Column{TimingBreakdownChildColumns[1]}, - RefColumns: []*schema.Column{TimingChildsColumns[0]}, - OnDelete: schema.Cascade, + OnDelete: schema.SetNull, }, }, } @@ -1424,105 +1053,59 @@ var ( TimingBreakdownsTable, TimingChildsTable, TimingMetricsTable, - ActionCacheStatisticsMissDetailsTable, - ActionSummaryActionDataTable, - ActionSummaryRunnerCountTable, - ActionSummaryActionCacheStatisticsTable, - ArtifactMetricsTopLevelArtifactsTable, - BazelInvocationTestCollectionTable, - BazelInvocationTargetsTable, - BuildGraphMetricsEvaluatedValuesTable, - DynamicExecutionMetricsRaceStatisticsTable, - ExectionInfoResourceUsageTable, - MemoryMetricsGarbageMetricsTable, - MetricsMemoryMetricsTable, - MetricsTargetMetricsTable, - MetricsPackageMetricsTable, - MetricsTimingMetricsTable, - MetricsCumulativeMetricsTable, - MetricsArtifactMetricsTable, - MetricsNetworkMetricsTable, - MetricsDynamicExecutionMetricsTable, - MetricsBuildGraphMetricsTable, - PackageMetricsPackageLoadMetricsTable, - TestResultBesTestActionOutputTable, - TimingBreakdownChildTable, } ) func init() { + ActionCacheStatisticsTable.ForeignKeys[0].RefTable = ActionSummariesTable + ActionDataTable.ForeignKeys[0].RefTable = ActionSummariesTable ActionSummariesTable.ForeignKeys[0].RefTable = MetricsTable + ArtifactMetricsTable.ForeignKeys[0].RefTable = FilesMetricsTable + ArtifactMetricsTable.ForeignKeys[1].RefTable = FilesMetricsTable + ArtifactMetricsTable.ForeignKeys[2].RefTable = FilesMetricsTable + ArtifactMetricsTable.ForeignKeys[3].RefTable = MetricsTable BazelInvocationsTable.ForeignKeys[0].RefTable = BuildsTable BazelInvocationsTable.ForeignKeys[1].RefTable = EventFilesTable BazelInvocationProblemsTable.ForeignKeys[0].RefTable = BazelInvocationsTable + BuildGraphMetricsTable.ForeignKeys[0].RefTable = EvaluationStatsTable + BuildGraphMetricsTable.ForeignKeys[1].RefTable = EvaluationStatsTable + BuildGraphMetricsTable.ForeignKeys[2].RefTable = EvaluationStatsTable + BuildGraphMetricsTable.ForeignKeys[3].RefTable = EvaluationStatsTable + BuildGraphMetricsTable.ForeignKeys[4].RefTable = MetricsTable + CumulativeMetricsTable.ForeignKeys[0].RefTable = MetricsTable + DynamicExecutionMetricsTable.ForeignKeys[0].RefTable = MetricsTable EvaluationStatsTable.ForeignKeys[0].RefTable = BuildGraphMetricsTable - EvaluationStatsTable.ForeignKeys[1].RefTable = BuildGraphMetricsTable - EvaluationStatsTable.ForeignKeys[2].RefTable = BuildGraphMetricsTable - EvaluationStatsTable.ForeignKeys[3].RefTable = BuildGraphMetricsTable - ExectionInfosTable.ForeignKeys[0].RefTable = TimingBreakdownsTable + ExectionInfosTable.ForeignKeys[0].RefTable = TestResultBeSsTable FilesMetricsTable.ForeignKeys[0].RefTable = ArtifactMetricsTable - FilesMetricsTable.ForeignKeys[1].RefTable = ArtifactMetricsTable - FilesMetricsTable.ForeignKeys[2].RefTable = ArtifactMetricsTable + GarbageMetricsTable.ForeignKeys[0].RefTable = MemoryMetricsTable + MemoryMetricsTable.ForeignKeys[0].RefTable = MetricsTable 
MetricsTable.ForeignKeys[0].RefTable = BazelInvocationsTable + MissDetailsTable.ForeignKeys[0].RefTable = ActionCacheStatisticsTable NamedSetOfFilesTable.ForeignKeys[0].RefTable = NamedSetOfFilesTable - OutputGroupsTable.ForeignKeys[0].RefTable = NamedSetOfFilesTable + NamedSetOfFilesTable.ForeignKeys[1].RefTable = OutputGroupsTable + NetworkMetricsTable.ForeignKeys[0].RefTable = MetricsTable + OutputGroupsTable.ForeignKeys[0].RefTable = TargetCompletesTable + PackageLoadMetricsTable.ForeignKeys[0].RefTable = PackageMetricsTable + PackageMetricsTable.ForeignKeys[0].RefTable = MetricsTable + RaceStatisticsTable.ForeignKeys[0].RefTable = DynamicExecutionMetricsTable + ResourceUsagesTable.ForeignKeys[0].RefTable = ExectionInfosTable + RunnerCountsTable.ForeignKeys[0].RefTable = ActionSummariesTable SystemNetworkStatsTable.ForeignKeys[0].RefTable = NetworkMetricsTable - TargetCompletesTable.ForeignKeys[0].RefTable = OutputGroupsTable - TargetPairsTable.ForeignKeys[0].RefTable = TargetConfiguredsTable - TargetPairsTable.ForeignKeys[1].RefTable = TargetCompletesTable - TestCollectionsTable.ForeignKeys[0].RefTable = TestSummariesTable + TargetCompletesTable.ForeignKeys[0].RefTable = TargetPairsTable + TargetConfiguredsTable.ForeignKeys[0].RefTable = TargetPairsTable + TargetMetricsTable.ForeignKeys[0].RefTable = MetricsTable + TargetPairsTable.ForeignKeys[0].RefTable = BazelInvocationsTable + TestCollectionsTable.ForeignKeys[0].RefTable = BazelInvocationsTable TestFilesTable.ForeignKeys[0].RefTable = NamedSetOfFilesTable TestFilesTable.ForeignKeys[1].RefTable = OutputGroupsTable TestFilesTable.ForeignKeys[2].RefTable = TargetCompletesTable TestFilesTable.ForeignKeys[3].RefTable = TargetCompletesTable - TestFilesTable.ForeignKeys[4].RefTable = TestSummariesTable + TestFilesTable.ForeignKeys[4].RefTable = TestResultBeSsTable TestFilesTable.ForeignKeys[5].RefTable = TestSummariesTable + TestFilesTable.ForeignKeys[6].RefTable = TestSummariesTable TestResultBeSsTable.ForeignKeys[0].RefTable = TestCollectionsTable - TestResultBeSsTable.ForeignKeys[1].RefTable = ExectionInfosTable - ActionCacheStatisticsMissDetailsTable.ForeignKeys[0].RefTable = ActionCacheStatisticsTable - ActionCacheStatisticsMissDetailsTable.ForeignKeys[1].RefTable = MissDetailsTable - ActionSummaryActionDataTable.ForeignKeys[0].RefTable = ActionSummariesTable - ActionSummaryActionDataTable.ForeignKeys[1].RefTable = ActionDataTable - ActionSummaryRunnerCountTable.ForeignKeys[0].RefTable = ActionSummariesTable - ActionSummaryRunnerCountTable.ForeignKeys[1].RefTable = RunnerCountsTable - ActionSummaryActionCacheStatisticsTable.ForeignKeys[0].RefTable = ActionSummariesTable - ActionSummaryActionCacheStatisticsTable.ForeignKeys[1].RefTable = ActionCacheStatisticsTable - ArtifactMetricsTopLevelArtifactsTable.ForeignKeys[0].RefTable = ArtifactMetricsTable - ArtifactMetricsTopLevelArtifactsTable.ForeignKeys[1].RefTable = FilesMetricsTable - BazelInvocationTestCollectionTable.ForeignKeys[0].RefTable = BazelInvocationsTable - BazelInvocationTestCollectionTable.ForeignKeys[1].RefTable = TestCollectionsTable - BazelInvocationTargetsTable.ForeignKeys[0].RefTable = BazelInvocationsTable - BazelInvocationTargetsTable.ForeignKeys[1].RefTable = TargetPairsTable - BuildGraphMetricsEvaluatedValuesTable.ForeignKeys[0].RefTable = BuildGraphMetricsTable - BuildGraphMetricsEvaluatedValuesTable.ForeignKeys[1].RefTable = EvaluationStatsTable - DynamicExecutionMetricsRaceStatisticsTable.ForeignKeys[0].RefTable = DynamicExecutionMetricsTable - 
DynamicExecutionMetricsRaceStatisticsTable.ForeignKeys[1].RefTable = RaceStatisticsTable - ExectionInfoResourceUsageTable.ForeignKeys[0].RefTable = ExectionInfosTable - ExectionInfoResourceUsageTable.ForeignKeys[1].RefTable = ResourceUsagesTable - MemoryMetricsGarbageMetricsTable.ForeignKeys[0].RefTable = MemoryMetricsTable - MemoryMetricsGarbageMetricsTable.ForeignKeys[1].RefTable = GarbageMetricsTable - MetricsMemoryMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsMemoryMetricsTable.ForeignKeys[1].RefTable = MemoryMetricsTable - MetricsTargetMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsTargetMetricsTable.ForeignKeys[1].RefTable = TargetMetricsTable - MetricsPackageMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsPackageMetricsTable.ForeignKeys[1].RefTable = PackageMetricsTable - MetricsTimingMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsTimingMetricsTable.ForeignKeys[1].RefTable = TimingMetricsTable - MetricsCumulativeMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsCumulativeMetricsTable.ForeignKeys[1].RefTable = CumulativeMetricsTable - MetricsArtifactMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsArtifactMetricsTable.ForeignKeys[1].RefTable = ArtifactMetricsTable - MetricsNetworkMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsNetworkMetricsTable.ForeignKeys[1].RefTable = NetworkMetricsTable - MetricsDynamicExecutionMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsDynamicExecutionMetricsTable.ForeignKeys[1].RefTable = DynamicExecutionMetricsTable - MetricsBuildGraphMetricsTable.ForeignKeys[0].RefTable = MetricsTable - MetricsBuildGraphMetricsTable.ForeignKeys[1].RefTable = BuildGraphMetricsTable - PackageMetricsPackageLoadMetricsTable.ForeignKeys[0].RefTable = PackageMetricsTable - PackageMetricsPackageLoadMetricsTable.ForeignKeys[1].RefTable = PackageLoadMetricsTable - TestResultBesTestActionOutputTable.ForeignKeys[0].RefTable = TestResultBeSsTable - TestResultBesTestActionOutputTable.ForeignKeys[1].RefTable = TestFilesTable - TimingBreakdownChildTable.ForeignKeys[0].RefTable = TimingBreakdownsTable - TimingBreakdownChildTable.ForeignKeys[1].RefTable = TimingChildsTable + TestSummariesTable.ForeignKeys[0].RefTable = TestCollectionsTable + TimingBreakdownsTable.ForeignKeys[0].RefTable = ExectionInfosTable + TimingChildsTable.ForeignKeys[0].RefTable = TimingBreakdownsTable + TimingMetricsTable.ForeignKeys[0].RefTable = MetricsTable } diff --git a/ent/gen/ent/missdetail.go b/ent/gen/ent/missdetail.go index a58bfc9..5746645 100644 --- a/ent/gen/ent/missdetail.go +++ b/ent/gen/ent/missdetail.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/actioncachestatistics" "github.com/buildbarn/bb-portal/ent/gen/ent/missdetail" ) @@ -22,28 +23,29 @@ type MissDetail struct { Count int32 `json:"count,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the MissDetailQuery when eager-loading is set. - Edges MissDetailEdges `json:"edges"` - selectValues sql.SelectValues + Edges MissDetailEdges `json:"edges"` + action_cache_statistics_miss_details *int + selectValues sql.SelectValues } // MissDetailEdges holds the relations/edges for other nodes in the graph. type MissDetailEdges struct { // ActionCacheStatistics holds the value of the action_cache_statistics edge. 
- ActionCacheStatistics []*ActionCacheStatistics `json:"action_cache_statistics,omitempty"` + ActionCacheStatistics *ActionCacheStatistics `json:"action_cache_statistics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedActionCacheStatistics map[string][]*ActionCacheStatistics } // ActionCacheStatisticsOrErr returns the ActionCacheStatistics value or an error if the edge -// was not loaded in eager-loading. -func (e MissDetailEdges) ActionCacheStatisticsOrErr() ([]*ActionCacheStatistics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e MissDetailEdges) ActionCacheStatisticsOrErr() (*ActionCacheStatistics, error) { + if e.ActionCacheStatistics != nil { return e.ActionCacheStatistics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: actioncachestatistics.Label} } return nil, &NotLoadedError{edge: "action_cache_statistics"} } @@ -57,6 +59,8 @@ func (*MissDetail) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case missdetail.FieldReason: values[i] = new(sql.NullString) + case missdetail.ForeignKeys[0]: // action_cache_statistics_miss_details + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -90,6 +94,13 @@ func (md *MissDetail) assignValues(columns []string, values []any) error { } else if value.Valid { md.Count = int32(value.Int64) } + case missdetail.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field action_cache_statistics_miss_details", value) + } else if value.Valid { + md.action_cache_statistics_miss_details = new(int) + *md.action_cache_statistics_miss_details = int(value.Int64) + } default: md.selectValues.Set(columns[i], values[i]) } @@ -140,29 +151,5 @@ func (md *MissDetail) String() string { return builder.String() } -// NamedActionCacheStatistics returns the ActionCacheStatistics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (md *MissDetail) NamedActionCacheStatistics(name string) ([]*ActionCacheStatistics, error) { - if md.Edges.namedActionCacheStatistics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := md.Edges.namedActionCacheStatistics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (md *MissDetail) appendNamedActionCacheStatistics(name string, edges ...*ActionCacheStatistics) { - if md.Edges.namedActionCacheStatistics == nil { - md.Edges.namedActionCacheStatistics = make(map[string][]*ActionCacheStatistics) - } - if len(edges) == 0 { - md.Edges.namedActionCacheStatistics[name] = []*ActionCacheStatistics{} - } else { - md.Edges.namedActionCacheStatistics[name] = append(md.Edges.namedActionCacheStatistics[name], edges...) - } -} - // MissDetails is a parsable slice of MissDetail. type MissDetails []*MissDetail diff --git a/ent/gen/ent/missdetail/missdetail.go b/ent/gen/ent/missdetail/missdetail.go index 236b9fa..e634331 100644 --- a/ent/gen/ent/missdetail/missdetail.go +++ b/ent/gen/ent/missdetail/missdetail.go @@ -24,11 +24,13 @@ const ( EdgeActionCacheStatistics = "action_cache_statistics" // Table holds the table name of the missdetail in the database. Table = "miss_details" - // ActionCacheStatisticsTable is the table that holds the action_cache_statistics relation/edge. 
The primary key declared below. - ActionCacheStatisticsTable = "action_cache_statistics_miss_details" + // ActionCacheStatisticsTable is the table that holds the action_cache_statistics relation/edge. + ActionCacheStatisticsTable = "miss_details" // ActionCacheStatisticsInverseTable is the table name for the ActionCacheStatistics entity. // It exists in this package in order to avoid circular dependency with the "actioncachestatistics" package. ActionCacheStatisticsInverseTable = "action_cache_statistics" + // ActionCacheStatisticsColumn is the table column denoting the action_cache_statistics relation/edge. + ActionCacheStatisticsColumn = "action_cache_statistics_miss_details" ) // Columns holds all SQL columns for missdetail fields. @@ -38,11 +40,11 @@ var Columns = []string{ FieldCount, } -var ( - // ActionCacheStatisticsPrimaryKey and ActionCacheStatisticsColumn2 are the table columns denoting the - // primary key for the action_cache_statistics relation (M2M). - ActionCacheStatisticsPrimaryKey = []string{"action_cache_statistics_id", "miss_detail_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "miss_details" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "action_cache_statistics_miss_details", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -51,6 +53,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -104,24 +111,17 @@ func ByCount(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldCount, opts...).ToFunc() } -// ByActionCacheStatisticsCount orders the results by action_cache_statistics count. -func ByActionCacheStatisticsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionCacheStatisticsStep(), opts...) - } -} - -// ByActionCacheStatistics orders the results by action_cache_statistics terms. -func ByActionCacheStatistics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByActionCacheStatisticsField orders the results by action_cache_statistics field. +func ByActionCacheStatisticsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionCacheStatisticsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newActionCacheStatisticsStep(), sql.OrderByField(field, opts...)) } } func newActionCacheStatisticsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionCacheStatisticsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionCacheStatisticsTable, ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionCacheStatisticsTable, ActionCacheStatisticsColumn), ) } diff --git a/ent/gen/ent/missdetail/where.go b/ent/gen/ent/missdetail/where.go index ddbfcd9..087c661 100644 --- a/ent/gen/ent/missdetail/where.go +++ b/ent/gen/ent/missdetail/where.go @@ -143,7 +143,7 @@ func HasActionCacheStatistics() predicate.MissDetail { return predicate.MissDetail(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionCacheStatisticsTable, ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionCacheStatisticsTable, ActionCacheStatisticsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/missdetail_create.go b/ent/gen/ent/missdetail_create.go index 3343cb5..028f0fc 100644 --- a/ent/gen/ent/missdetail_create.go +++ b/ent/gen/ent/missdetail_create.go @@ -47,19 +47,23 @@ func (mdc *MissDetailCreate) SetNillableCount(i *int32) *MissDetailCreate { return mdc } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (mdc *MissDetailCreate) AddActionCacheStatisticIDs(ids ...int) *MissDetailCreate { - mdc.mutation.AddActionCacheStatisticIDs(ids...) +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (mdc *MissDetailCreate) SetActionCacheStatisticsID(id int) *MissDetailCreate { + mdc.mutation.SetActionCacheStatisticsID(id) return mdc } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. -func (mdc *MissDetailCreate) AddActionCacheStatistics(a ...*ActionCacheStatistics) *MissDetailCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (mdc *MissDetailCreate) SetNillableActionCacheStatisticsID(id *int) *MissDetailCreate { + if id != nil { + mdc = mdc.SetActionCacheStatisticsID(*id) } - return mdc.AddActionCacheStatisticIDs(ids...) + return mdc +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. +func (mdc *MissDetailCreate) SetActionCacheStatistics(a *ActionCacheStatistics) *MissDetailCreate { + return mdc.SetActionCacheStatisticsID(a.ID) } // Mutation returns the MissDetailMutation object of the builder. 
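For reference, after this change the MissDetail create builder exposes the unique-edge setters shown above instead of the old Add* variants. A minimal usage sketch, assuming a wired-up *ent.Client; client, ctx, and acs (an existing *ent.ActionCacheStatistics) are illustrative placeholders, not part of this patch:

	// Each MissDetail now points at a single ActionCacheStatistics parent
	// through the new action_cache_statistics_miss_details foreign key.
	md, err := client.MissDetail.Create().
		SetCount(7).                   // FieldCount is an int32; an untyped constant is fine
		SetActionCacheStatistics(acs). // replaces the old AddActionCacheStatistics(acs)
		Save(ctx)
	if err != nil {
		return err
	}
	_ = md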
@@ -146,10 +150,10 @@ func (mdc *MissDetailCreate) createSpec() (*MissDetail, *sqlgraph.CreateSpec) { } if nodes := mdc.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, + Columns: []string{missdetail.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), @@ -158,6 +162,7 @@ func (mdc *MissDetailCreate) createSpec() (*MissDetail, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.action_cache_statistics_miss_details = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/missdetail_query.go b/ent/gen/ent/missdetail_query.go index 72c0aa3..fca37cf 100644 --- a/ent/gen/ent/missdetail_query.go +++ b/ent/gen/ent/missdetail_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // MissDetailQuery is the builder for querying MissDetail entities. type MissDetailQuery struct { config - ctx *QueryContext - order []missdetail.OrderOption - inters []Interceptor - predicates []predicate.MissDetail - withActionCacheStatistics *ActionCacheStatisticsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*MissDetail) error - withNamedActionCacheStatistics map[string]*ActionCacheStatisticsQuery + ctx *QueryContext + order []missdetail.OrderOption + inters []Interceptor + predicates []predicate.MissDetail + withActionCacheStatistics *ActionCacheStatisticsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*MissDetail) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (mdq *MissDetailQuery) QueryActionCacheStatistics() *ActionCacheStatisticsQ step := sqlgraph.NewStep( sqlgraph.From(missdetail.Table, missdetail.FieldID, selector), sqlgraph.To(actioncachestatistics.Table, actioncachestatistics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, missdetail.ActionCacheStatisticsTable, missdetail.ActionCacheStatisticsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, missdetail.ActionCacheStatisticsTable, missdetail.ActionCacheStatisticsColumn), ) fromU = sqlgraph.SetNeighbors(mdq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (mdq *MissDetailQuery) prepareQuery(ctx context.Context) error { func (mdq *MissDetailQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*MissDetail, error) { var ( nodes = []*MissDetail{} + withFKs = mdq.withFKs _spec = mdq.querySpec() loadedTypes = [1]bool{ mdq.withActionCacheStatistics != nil, } ) + if mdq.withActionCacheStatistics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, missdetail.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*MissDetail).scanValues(nil, columns) } @@ -399,18 +405,8 @@ func (mdq *MissDetailQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* return nodes, nil } if query := mdq.withActionCacheStatistics; query != nil { - if err := mdq.loadActionCacheStatistics(ctx, query, nodes, - func(n *MissDetail) { n.Edges.ActionCacheStatistics = []*ActionCacheStatistics{} }, - func(n *MissDetail, e *ActionCacheStatistics) { - n.Edges.ActionCacheStatistics = append(n.Edges.ActionCacheStatistics, e) - }); err != nil { - return nil, err - } - } - for name, query := range mdq.withNamedActionCacheStatistics { - if err := mdq.loadActionCacheStatistics(ctx, query, nodes, - func(n *MissDetail) { n.appendNamedActionCacheStatistics(name) }, - func(n *MissDetail, e *ActionCacheStatistics) { n.appendNamedActionCacheStatistics(name, e) }); err != nil { + if err := mdq.loadActionCacheStatistics(ctx, query, nodes, nil, + func(n *MissDetail, e *ActionCacheStatistics) { n.Edges.ActionCacheStatistics = e }); err != nil { return nil, err } } @@ -423,62 +419,33 @@ func (mdq *MissDetailQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* } func (mdq *MissDetailQuery) loadActionCacheStatistics(ctx context.Context, query *ActionCacheStatisticsQuery, nodes []*MissDetail, init func(*MissDetail), assign func(*MissDetail, *ActionCacheStatistics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*MissDetail) - nids := make(map[int]map[*MissDetail]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*MissDetail) + for i := range nodes { + if nodes[i].action_cache_statistics_miss_details == nil { + continue } + fk := *nodes[i].action_cache_statistics_miss_details + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(missdetail.ActionCacheStatisticsTable) - s.Join(joinT).On(s.C(actioncachestatistics.FieldID), joinT.C(missdetail.ActionCacheStatisticsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(missdetail.ActionCacheStatisticsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(missdetail.ActionCacheStatisticsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*MissDetail]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionCacheStatistics](ctx, query, qr, query.inters) + query.Where(actioncachestatistics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "action_cache_statistics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "action_cache_statistics_miss_details" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -568,20 +535,6 @@ func (mdq *MissDetailQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedActionCacheStatistics tells the query-builder to eager-load the nodes that are connected to the "action_cache_statistics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (mdq *MissDetailQuery) WithNamedActionCacheStatistics(name string, opts ...func(*ActionCacheStatisticsQuery)) *MissDetailQuery { - query := (&ActionCacheStatisticsClient{config: mdq.config}).Query() - for _, opt := range opts { - opt(query) - } - if mdq.withNamedActionCacheStatistics == nil { - mdq.withNamedActionCacheStatistics = make(map[string]*ActionCacheStatisticsQuery) - } - mdq.withNamedActionCacheStatistics[name] = query - return mdq -} - // MissDetailGroupBy is the group-by builder for MissDetail entities. type MissDetailGroupBy struct { selector diff --git a/ent/gen/ent/missdetail_update.go b/ent/gen/ent/missdetail_update.go index 4412afe..8c5dc1d 100644 --- a/ent/gen/ent/missdetail_update.go +++ b/ent/gen/ent/missdetail_update.go @@ -75,19 +75,23 @@ func (mdu *MissDetailUpdate) ClearCount() *MissDetailUpdate { return mdu } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (mdu *MissDetailUpdate) AddActionCacheStatisticIDs(ids ...int) *MissDetailUpdate { - mdu.mutation.AddActionCacheStatisticIDs(ids...) +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (mdu *MissDetailUpdate) SetActionCacheStatisticsID(id int) *MissDetailUpdate { + mdu.mutation.SetActionCacheStatisticsID(id) return mdu } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. 
-func (mdu *MissDetailUpdate) AddActionCacheStatistics(a ...*ActionCacheStatistics) *MissDetailUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (mdu *MissDetailUpdate) SetNillableActionCacheStatisticsID(id *int) *MissDetailUpdate { + if id != nil { + mdu = mdu.SetActionCacheStatisticsID(*id) } - return mdu.AddActionCacheStatisticIDs(ids...) + return mdu +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. +func (mdu *MissDetailUpdate) SetActionCacheStatistics(a *ActionCacheStatistics) *MissDetailUpdate { + return mdu.SetActionCacheStatisticsID(a.ID) } // Mutation returns the MissDetailMutation object of the builder. @@ -95,27 +99,12 @@ func (mdu *MissDetailUpdate) Mutation() *MissDetailMutation { return mdu.mutation } -// ClearActionCacheStatistics clears all "action_cache_statistics" edges to the ActionCacheStatistics entity. +// ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. func (mdu *MissDetailUpdate) ClearActionCacheStatistics() *MissDetailUpdate { mdu.mutation.ClearActionCacheStatistics() return mdu } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to ActionCacheStatistics entities by IDs. -func (mdu *MissDetailUpdate) RemoveActionCacheStatisticIDs(ids ...int) *MissDetailUpdate { - mdu.mutation.RemoveActionCacheStatisticIDs(ids...) - return mdu -} - -// RemoveActionCacheStatistics removes "action_cache_statistics" edges to ActionCacheStatistics entities. -func (mdu *MissDetailUpdate) RemoveActionCacheStatistics(a ...*ActionCacheStatistics) *MissDetailUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return mdu.RemoveActionCacheStatisticIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (mdu *MissDetailUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, mdu.sqlSave, mdu.mutation, mdu.hooks) @@ -182,39 +171,23 @@ func (mdu *MissDetailUpdate) sqlSave(ctx context.Context) (n int, err error) { } if mdu.mutation.ActionCacheStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mdu.mutation.RemovedActionCacheStatisticsIDs(); len(nodes) > 0 && !mdu.mutation.ActionCacheStatisticsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, + Columns: []string{missdetail.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mdu.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, + Columns: []string{missdetail.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), @@ -292,19 +265,23 @@ func (mduo *MissDetailUpdateOne) ClearCount() *MissDetailUpdateOne { return mduo } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (mduo *MissDetailUpdateOne) AddActionCacheStatisticIDs(ids ...int) *MissDetailUpdateOne { - mduo.mutation.AddActionCacheStatisticIDs(ids...) +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID. +func (mduo *MissDetailUpdateOne) SetActionCacheStatisticsID(id int) *MissDetailUpdateOne { + mduo.mutation.SetActionCacheStatisticsID(id) return mduo } -// AddActionCacheStatistics adds the "action_cache_statistics" edges to the ActionCacheStatistics entity. -func (mduo *MissDetailUpdateOne) AddActionCacheStatistics(a ...*ActionCacheStatistics) *MissDetailUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by ID if the given value is not nil. +func (mduo *MissDetailUpdateOne) SetNillableActionCacheStatisticsID(id *int) *MissDetailUpdateOne { + if id != nil { + mduo = mduo.SetActionCacheStatisticsID(*id) } - return mduo.AddActionCacheStatisticIDs(ids...) + return mduo +} + +// SetActionCacheStatistics sets the "action_cache_statistics" edge to the ActionCacheStatistics entity. +func (mduo *MissDetailUpdateOne) SetActionCacheStatistics(a *ActionCacheStatistics) *MissDetailUpdateOne { + return mduo.SetActionCacheStatisticsID(a.ID) } // Mutation returns the MissDetailMutation object of the builder. 
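For reference, the query-side effect of the M2M-to-M2O flip above: eager-loading resolves the parent through the new foreign key rather than a join table, and Edges.ActionCacheStatistics is a single pointer rather than a slice. A sketch assuming the standard generated WithActionCacheStatistics helper (not shown in this hunk) and the usual log import; client and ctx are placeholders:

	mds, err := client.MissDetail.Query().
		WithActionCacheStatistics(). // populates the unique Edges.ActionCacheStatistics pointer
		All(ctx)
	if err != nil {
		return err
	}
	for _, md := range mds {
		// ActionCacheStatisticsOrErr now distinguishes "edge not loaded"
		// (NotLoadedError) from "loaded but absent" (NotFoundError).
		if acs, err := md.Edges.ActionCacheStatisticsOrErr(); err == nil {
			log.Printf("miss detail %d belongs to cache stats %d", md.ID, acs.ID)
		}
	}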
@@ -312,27 +289,12 @@ func (mduo *MissDetailUpdateOne) Mutation() *MissDetailMutation { return mduo.mutation } -// ClearActionCacheStatistics clears all "action_cache_statistics" edges to the ActionCacheStatistics entity. +// ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. func (mduo *MissDetailUpdateOne) ClearActionCacheStatistics() *MissDetailUpdateOne { mduo.mutation.ClearActionCacheStatistics() return mduo } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to ActionCacheStatistics entities by IDs. -func (mduo *MissDetailUpdateOne) RemoveActionCacheStatisticIDs(ids ...int) *MissDetailUpdateOne { - mduo.mutation.RemoveActionCacheStatisticIDs(ids...) - return mduo -} - -// RemoveActionCacheStatistics removes "action_cache_statistics" edges to ActionCacheStatistics entities. -func (mduo *MissDetailUpdateOne) RemoveActionCacheStatistics(a ...*ActionCacheStatistics) *MissDetailUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return mduo.RemoveActionCacheStatisticIDs(ids...) -} - // Where appends a list predicates to the MissDetailUpdate builder. func (mduo *MissDetailUpdateOne) Where(ps ...predicate.MissDetail) *MissDetailUpdateOne { mduo.mutation.Where(ps...) @@ -429,39 +391,23 @@ func (mduo *MissDetailUpdateOne) sqlSave(ctx context.Context) (_node *MissDetail } if mduo.mutation.ActionCacheStatisticsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := mduo.mutation.RemovedActionCacheStatisticsIDs(); len(nodes) > 0 && !mduo.mutation.ActionCacheStatisticsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, + Columns: []string{missdetail.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := mduo.mutation.ActionCacheStatisticsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: missdetail.ActionCacheStatisticsTable, - Columns: missdetail.ActionCacheStatisticsPrimaryKey, + Columns: []string{missdetail.ActionCacheStatisticsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actioncachestatistics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/mutation.go b/ent/gen/ent/mutation.go index 08da3e5..07f48b1 100644 --- a/ent/gen/ent/mutation.go +++ b/ent/gen/ent/mutation.go @@ -123,8 +123,7 @@ type ActionCacheStatisticsMutation struct { misses *int32 addmisses *int32 clearedFields map[string]struct{} - action_summary map[int]struct{} - removedaction_summary map[int]struct{} + action_summary *int clearedaction_summary bool miss_details map[int]struct{} removedmiss_details map[int]struct{} @@ -582,14 +581,9 @@ func (m *ActionCacheStatisticsMutation) ResetMisses() { delete(m.clearedFields, actioncachestatistics.FieldMisses) } -// AddActionSummaryIDs adds the 
"action_summary" edge to the ActionSummary entity by ids. -func (m *ActionCacheStatisticsMutation) AddActionSummaryIDs(ids ...int) { - if m.action_summary == nil { - m.action_summary = make(map[int]struct{}) - } - for i := range ids { - m.action_summary[ids[i]] = struct{}{} - } +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by id. +func (m *ActionCacheStatisticsMutation) SetActionSummaryID(id int) { + m.action_summary = &id } // ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. @@ -602,29 +596,20 @@ func (m *ActionCacheStatisticsMutation) ActionSummaryCleared() bool { return m.clearedaction_summary } -// RemoveActionSummaryIDs removes the "action_summary" edge to the ActionSummary entity by IDs. -func (m *ActionCacheStatisticsMutation) RemoveActionSummaryIDs(ids ...int) { - if m.removedaction_summary == nil { - m.removedaction_summary = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_summary, ids[i]) - m.removedaction_summary[ids[i]] = struct{}{} - } -} - -// RemovedActionSummary returns the removed IDs of the "action_summary" edge to the ActionSummary entity. -func (m *ActionCacheStatisticsMutation) RemovedActionSummaryIDs() (ids []int) { - for id := range m.removedaction_summary { - ids = append(ids, id) +// ActionSummaryID returns the "action_summary" edge ID in the mutation. +func (m *ActionCacheStatisticsMutation) ActionSummaryID() (id int, exists bool) { + if m.action_summary != nil { + return *m.action_summary, true } return } // ActionSummaryIDs returns the "action_summary" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionSummaryID instead. It exists only for internal usage by the builders. func (m *ActionCacheStatisticsMutation) ActionSummaryIDs() (ids []int) { - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + ids = append(ids, *id) } return } @@ -633,7 +618,6 @@ func (m *ActionCacheStatisticsMutation) ActionSummaryIDs() (ids []int) { func (m *ActionCacheStatisticsMutation) ResetActionSummary() { m.action_summary = nil m.clearedaction_summary = false - m.removedaction_summary = nil } // AddMissDetailIDs adds the "miss_details" edge to the MissDetail entity by ids. @@ -1002,11 +986,9 @@ func (m *ActionCacheStatisticsMutation) AddedEdges() []string { func (m *ActionCacheStatisticsMutation) AddedIDs(name string) []ent.Value { switch name { case actioncachestatistics.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.action_summary)) - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + return []ent.Value{*id} } - return ids case actioncachestatistics.EdgeMissDetails: ids := make([]ent.Value, 0, len(m.miss_details)) for id := range m.miss_details { @@ -1020,9 +1002,6 @@ func (m *ActionCacheStatisticsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *ActionCacheStatisticsMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedaction_summary != nil { - edges = append(edges, actioncachestatistics.EdgeActionSummary) - } if m.removedmiss_details != nil { edges = append(edges, actioncachestatistics.EdgeMissDetails) } @@ -1033,12 +1012,6 @@ func (m *ActionCacheStatisticsMutation) RemovedEdges() []string { // the given name in this mutation. 
func (m *ActionCacheStatisticsMutation) RemovedIDs(name string) []ent.Value { switch name { - case actioncachestatistics.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.removedaction_summary)) - for id := range m.removedaction_summary { - ids = append(ids, id) - } - return ids case actioncachestatistics.EdgeMissDetails: ids := make([]ent.Value, 0, len(m.removedmiss_details)) for id := range m.removedmiss_details { @@ -1077,6 +1050,9 @@ func (m *ActionCacheStatisticsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *ActionCacheStatisticsMutation) ClearEdge(name string) error { switch name { + case actioncachestatistics.EdgeActionSummary: + m.ClearActionSummary() + return nil } return fmt.Errorf("unknown ActionCacheStatistics unique edge %s", name) } @@ -1115,8 +1091,7 @@ type ActionDataMutation struct { user_time *int64 adduser_time *int64 clearedFields map[string]struct{} - action_summary map[int]struct{} - removedaction_summary map[int]struct{} + action_summary *int clearedaction_summary bool done bool oldValue func(context.Context) (*ActionData, error) @@ -1690,14 +1665,9 @@ func (m *ActionDataMutation) ResetUserTime() { delete(m.clearedFields, actiondata.FieldUserTime) } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by ids. -func (m *ActionDataMutation) AddActionSummaryIDs(ids ...int) { - if m.action_summary == nil { - m.action_summary = make(map[int]struct{}) - } - for i := range ids { - m.action_summary[ids[i]] = struct{}{} - } +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by id. +func (m *ActionDataMutation) SetActionSummaryID(id int) { + m.action_summary = &id } // ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. @@ -1710,29 +1680,20 @@ func (m *ActionDataMutation) ActionSummaryCleared() bool { return m.clearedaction_summary } -// RemoveActionSummaryIDs removes the "action_summary" edge to the ActionSummary entity by IDs. -func (m *ActionDataMutation) RemoveActionSummaryIDs(ids ...int) { - if m.removedaction_summary == nil { - m.removedaction_summary = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_summary, ids[i]) - m.removedaction_summary[ids[i]] = struct{}{} - } -} - -// RemovedActionSummary returns the removed IDs of the "action_summary" edge to the ActionSummary entity. -func (m *ActionDataMutation) RemovedActionSummaryIDs() (ids []int) { - for id := range m.removedaction_summary { - ids = append(ids, id) +// ActionSummaryID returns the "action_summary" edge ID in the mutation. +func (m *ActionDataMutation) ActionSummaryID() (id int, exists bool) { + if m.action_summary != nil { + return *m.action_summary, true } return } // ActionSummaryIDs returns the "action_summary" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionSummaryID instead. It exists only for internal usage by the builders. func (m *ActionDataMutation) ActionSummaryIDs() (ids []int) { - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + ids = append(ids, *id) } return } @@ -1741,7 +1702,6 @@ func (m *ActionDataMutation) ActionSummaryIDs() (ids []int) { func (m *ActionDataMutation) ResetActionSummary() { m.action_summary = nil m.clearedaction_summary = false - m.removedaction_summary = nil } // Where appends a list predicates to the ActionDataMutation builder. 
@@ -2111,11 +2071,9 @@ func (m *ActionDataMutation) AddedEdges() []string { func (m *ActionDataMutation) AddedIDs(name string) []ent.Value { switch name { case actiondata.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.action_summary)) - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -2123,23 +2081,12 @@ func (m *ActionDataMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *ActionDataMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedaction_summary != nil { - edges = append(edges, actiondata.EdgeActionSummary) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *ActionDataMutation) RemovedIDs(name string) []ent.Value { - switch name { - case actiondata.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.removedaction_summary)) - for id := range m.removedaction_summary { - ids = append(ids, id) - } - return ids - } return nil } @@ -2166,6 +2113,9 @@ func (m *ActionDataMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *ActionDataMutation) ClearEdge(name string) error { switch name { + case actiondata.EdgeActionSummary: + m.ClearActionSummary() + return nil } return fmt.Errorf("unknown ActionData unique edge %s", name) } @@ -2204,8 +2154,7 @@ type ActionSummaryMutation struct { runner_count map[int]struct{} removedrunner_count map[int]struct{} clearedrunner_count bool - action_cache_statistics map[int]struct{} - removedaction_cache_statistics map[int]struct{} + action_cache_statistics *int clearedaction_cache_statistics bool done bool oldValue func(context.Context) (*ActionSummary, error) @@ -2737,14 +2686,9 @@ func (m *ActionSummaryMutation) ResetRunnerCount() { m.removedrunner_count = nil } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by ids. -func (m *ActionSummaryMutation) AddActionCacheStatisticIDs(ids ...int) { - if m.action_cache_statistics == nil { - m.action_cache_statistics = make(map[int]struct{}) - } - for i := range ids { - m.action_cache_statistics[ids[i]] = struct{}{} - } +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by id. +func (m *ActionSummaryMutation) SetActionCacheStatisticsID(id int) { + m.action_cache_statistics = &id } // ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. @@ -2757,29 +2701,20 @@ func (m *ActionSummaryMutation) ActionCacheStatisticsCleared() bool { return m.clearedaction_cache_statistics } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (m *ActionSummaryMutation) RemoveActionCacheStatisticIDs(ids ...int) { - if m.removedaction_cache_statistics == nil { - m.removedaction_cache_statistics = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_cache_statistics, ids[i]) - m.removedaction_cache_statistics[ids[i]] = struct{}{} - } -} - -// RemovedActionCacheStatistics returns the removed IDs of the "action_cache_statistics" edge to the ActionCacheStatistics entity. 
-func (m *ActionSummaryMutation) RemovedActionCacheStatisticsIDs() (ids []int) { - for id := range m.removedaction_cache_statistics { - ids = append(ids, id) +// ActionCacheStatisticsID returns the "action_cache_statistics" edge ID in the mutation. +func (m *ActionSummaryMutation) ActionCacheStatisticsID() (id int, exists bool) { + if m.action_cache_statistics != nil { + return *m.action_cache_statistics, true } return } // ActionCacheStatisticsIDs returns the "action_cache_statistics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionCacheStatisticsID instead. It exists only for internal usage by the builders. func (m *ActionSummaryMutation) ActionCacheStatisticsIDs() (ids []int) { - for id := range m.action_cache_statistics { - ids = append(ids, id) + if id := m.action_cache_statistics; id != nil { + ids = append(ids, *id) } return } @@ -2788,7 +2723,6 @@ func (m *ActionSummaryMutation) ActionCacheStatisticsIDs() (ids []int) { func (m *ActionSummaryMutation) ResetActionCacheStatistics() { m.action_cache_statistics = nil m.clearedaction_cache_statistics = false - m.removedaction_cache_statistics = nil } // Where appends a list predicates to the ActionSummaryMutation builder. @@ -3090,11 +3024,9 @@ func (m *ActionSummaryMutation) AddedIDs(name string) []ent.Value { } return ids case actionsummary.EdgeActionCacheStatistics: - ids := make([]ent.Value, 0, len(m.action_cache_statistics)) - for id := range m.action_cache_statistics { - ids = append(ids, id) + if id := m.action_cache_statistics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -3108,9 +3040,6 @@ func (m *ActionSummaryMutation) RemovedEdges() []string { if m.removedrunner_count != nil { edges = append(edges, actionsummary.EdgeRunnerCount) } - if m.removedaction_cache_statistics != nil { - edges = append(edges, actionsummary.EdgeActionCacheStatistics) - } return edges } @@ -3130,12 +3059,6 @@ func (m *ActionSummaryMutation) RemovedIDs(name string) []ent.Value { ids = append(ids, id) } return ids - case actionsummary.EdgeActionCacheStatistics: - ids := make([]ent.Value, 0, len(m.removedaction_cache_statistics)) - for id := range m.removedaction_cache_statistics { - ids = append(ids, id) - } - return ids } return nil } @@ -3181,6 +3104,9 @@ func (m *ActionSummaryMutation) ClearEdge(name string) error { case actionsummary.EdgeMetrics: m.ClearMetrics() return nil + case actionsummary.EdgeActionCacheStatistics: + m.ClearActionCacheStatistics() + return nil } return fmt.Errorf("unknown ActionSummary unique edge %s", name) } @@ -3212,20 +3138,15 @@ type ArtifactMetricsMutation struct { typ string id *int clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool - source_artifacts_read map[int]struct{} - removedsource_artifacts_read map[int]struct{} + source_artifacts_read *int clearedsource_artifacts_read bool - output_artifacts_seen map[int]struct{} - removedoutput_artifacts_seen map[int]struct{} + output_artifacts_seen *int clearedoutput_artifacts_seen bool - output_artifacts_from_action_cache map[int]struct{} - removedoutput_artifacts_from_action_cache map[int]struct{} + output_artifacts_from_action_cache *int clearedoutput_artifacts_from_action_cache bool - top_level_artifacts map[int]struct{} - removedtop_level_artifacts map[int]struct{} + top_level_artifacts *int clearedtop_level_artifacts bool done bool oldValue func(context.Context) (*ArtifactMetrics, error) @@ 
-3330,14 +3251,9 @@ func (m *ArtifactMetricsMutation) IDs(ctx context.Context) ([]int, error) { } } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *ArtifactMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *ArtifactMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -3350,29 +3266,20 @@ func (m *ArtifactMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *ArtifactMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *ArtifactMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *ArtifactMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *ArtifactMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -3381,17 +3288,11 @@ func (m *ArtifactMetricsMutation) MetricsIDs() (ids []int) { func (m *ArtifactMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } -// AddSourceArtifactsReadIDs adds the "source_artifacts_read" edge to the FilesMetric entity by ids. -func (m *ArtifactMetricsMutation) AddSourceArtifactsReadIDs(ids ...int) { - if m.source_artifacts_read == nil { - m.source_artifacts_read = make(map[int]struct{}) - } - for i := range ids { - m.source_artifacts_read[ids[i]] = struct{}{} - } +// SetSourceArtifactsReadID sets the "source_artifacts_read" edge to the FilesMetric entity by id. +func (m *ArtifactMetricsMutation) SetSourceArtifactsReadID(id int) { + m.source_artifacts_read = &id } // ClearSourceArtifactsRead clears the "source_artifacts_read" edge to the FilesMetric entity. @@ -3404,29 +3305,20 @@ func (m *ArtifactMetricsMutation) SourceArtifactsReadCleared() bool { return m.clearedsource_artifacts_read } -// RemoveSourceArtifactsReadIDs removes the "source_artifacts_read" edge to the FilesMetric entity by IDs. -func (m *ArtifactMetricsMutation) RemoveSourceArtifactsReadIDs(ids ...int) { - if m.removedsource_artifacts_read == nil { - m.removedsource_artifacts_read = make(map[int]struct{}) - } - for i := range ids { - delete(m.source_artifacts_read, ids[i]) - m.removedsource_artifacts_read[ids[i]] = struct{}{} - } -} - -// RemovedSourceArtifactsRead returns the removed IDs of the "source_artifacts_read" edge to the FilesMetric entity. 
-func (m *ArtifactMetricsMutation) RemovedSourceArtifactsReadIDs() (ids []int) { - for id := range m.removedsource_artifacts_read { - ids = append(ids, id) +// SourceArtifactsReadID returns the "source_artifacts_read" edge ID in the mutation. +func (m *ArtifactMetricsMutation) SourceArtifactsReadID() (id int, exists bool) { + if m.source_artifacts_read != nil { + return *m.source_artifacts_read, true } return } // SourceArtifactsReadIDs returns the "source_artifacts_read" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// SourceArtifactsReadID instead. It exists only for internal usage by the builders. func (m *ArtifactMetricsMutation) SourceArtifactsReadIDs() (ids []int) { - for id := range m.source_artifacts_read { - ids = append(ids, id) + if id := m.source_artifacts_read; id != nil { + ids = append(ids, *id) } return } @@ -3435,17 +3327,11 @@ func (m *ArtifactMetricsMutation) SourceArtifactsReadIDs() (ids []int) { func (m *ArtifactMetricsMutation) ResetSourceArtifactsRead() { m.source_artifacts_read = nil m.clearedsource_artifacts_read = false - m.removedsource_artifacts_read = nil } -// AddOutputArtifactsSeenIDs adds the "output_artifacts_seen" edge to the FilesMetric entity by ids. -func (m *ArtifactMetricsMutation) AddOutputArtifactsSeenIDs(ids ...int) { - if m.output_artifacts_seen == nil { - m.output_artifacts_seen = make(map[int]struct{}) - } - for i := range ids { - m.output_artifacts_seen[ids[i]] = struct{}{} - } +// SetOutputArtifactsSeenID sets the "output_artifacts_seen" edge to the FilesMetric entity by id. +func (m *ArtifactMetricsMutation) SetOutputArtifactsSeenID(id int) { + m.output_artifacts_seen = &id } // ClearOutputArtifactsSeen clears the "output_artifacts_seen" edge to the FilesMetric entity. @@ -3458,29 +3344,20 @@ func (m *ArtifactMetricsMutation) OutputArtifactsSeenCleared() bool { return m.clearedoutput_artifacts_seen } -// RemoveOutputArtifactsSeenIDs removes the "output_artifacts_seen" edge to the FilesMetric entity by IDs. -func (m *ArtifactMetricsMutation) RemoveOutputArtifactsSeenIDs(ids ...int) { - if m.removedoutput_artifacts_seen == nil { - m.removedoutput_artifacts_seen = make(map[int]struct{}) - } - for i := range ids { - delete(m.output_artifacts_seen, ids[i]) - m.removedoutput_artifacts_seen[ids[i]] = struct{}{} - } -} - -// RemovedOutputArtifactsSeen returns the removed IDs of the "output_artifacts_seen" edge to the FilesMetric entity. -func (m *ArtifactMetricsMutation) RemovedOutputArtifactsSeenIDs() (ids []int) { - for id := range m.removedoutput_artifacts_seen { - ids = append(ids, id) +// OutputArtifactsSeenID returns the "output_artifacts_seen" edge ID in the mutation. +func (m *ArtifactMetricsMutation) OutputArtifactsSeenID() (id int, exists bool) { + if m.output_artifacts_seen != nil { + return *m.output_artifacts_seen, true } return } // OutputArtifactsSeenIDs returns the "output_artifacts_seen" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// OutputArtifactsSeenID instead. It exists only for internal usage by the builders. 
func (m *ArtifactMetricsMutation) OutputArtifactsSeenIDs() (ids []int) { - for id := range m.output_artifacts_seen { - ids = append(ids, id) + if id := m.output_artifacts_seen; id != nil { + ids = append(ids, *id) } return } @@ -3489,17 +3366,11 @@ func (m *ArtifactMetricsMutation) OutputArtifactsSeenIDs() (ids []int) { func (m *ArtifactMetricsMutation) ResetOutputArtifactsSeen() { m.output_artifacts_seen = nil m.clearedoutput_artifacts_seen = false - m.removedoutput_artifacts_seen = nil } -// AddOutputArtifactsFromActionCacheIDs adds the "output_artifacts_from_action_cache" edge to the FilesMetric entity by ids. -func (m *ArtifactMetricsMutation) AddOutputArtifactsFromActionCacheIDs(ids ...int) { - if m.output_artifacts_from_action_cache == nil { - m.output_artifacts_from_action_cache = make(map[int]struct{}) - } - for i := range ids { - m.output_artifacts_from_action_cache[ids[i]] = struct{}{} - } +// SetOutputArtifactsFromActionCacheID sets the "output_artifacts_from_action_cache" edge to the FilesMetric entity by id. +func (m *ArtifactMetricsMutation) SetOutputArtifactsFromActionCacheID(id int) { + m.output_artifacts_from_action_cache = &id } // ClearOutputArtifactsFromActionCache clears the "output_artifacts_from_action_cache" edge to the FilesMetric entity. @@ -3512,29 +3383,20 @@ func (m *ArtifactMetricsMutation) OutputArtifactsFromActionCacheCleared() bool { return m.clearedoutput_artifacts_from_action_cache } -// RemoveOutputArtifactsFromActionCacheIDs removes the "output_artifacts_from_action_cache" edge to the FilesMetric entity by IDs. -func (m *ArtifactMetricsMutation) RemoveOutputArtifactsFromActionCacheIDs(ids ...int) { - if m.removedoutput_artifacts_from_action_cache == nil { - m.removedoutput_artifacts_from_action_cache = make(map[int]struct{}) - } - for i := range ids { - delete(m.output_artifacts_from_action_cache, ids[i]) - m.removedoutput_artifacts_from_action_cache[ids[i]] = struct{}{} - } -} - -// RemovedOutputArtifactsFromActionCache returns the removed IDs of the "output_artifacts_from_action_cache" edge to the FilesMetric entity. -func (m *ArtifactMetricsMutation) RemovedOutputArtifactsFromActionCacheIDs() (ids []int) { - for id := range m.removedoutput_artifacts_from_action_cache { - ids = append(ids, id) +// OutputArtifactsFromActionCacheID returns the "output_artifacts_from_action_cache" edge ID in the mutation. +func (m *ArtifactMetricsMutation) OutputArtifactsFromActionCacheID() (id int, exists bool) { + if m.output_artifacts_from_action_cache != nil { + return *m.output_artifacts_from_action_cache, true } return } // OutputArtifactsFromActionCacheIDs returns the "output_artifacts_from_action_cache" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// OutputArtifactsFromActionCacheID instead. It exists only for internal usage by the builders. 
func (m *ArtifactMetricsMutation) OutputArtifactsFromActionCacheIDs() (ids []int) { - for id := range m.output_artifacts_from_action_cache { - ids = append(ids, id) + if id := m.output_artifacts_from_action_cache; id != nil { + ids = append(ids, *id) } return } @@ -3543,17 +3405,11 @@ func (m *ArtifactMetricsMutation) OutputArtifactsFromActionCacheIDs() (ids []int func (m *ArtifactMetricsMutation) ResetOutputArtifactsFromActionCache() { m.output_artifacts_from_action_cache = nil m.clearedoutput_artifacts_from_action_cache = false - m.removedoutput_artifacts_from_action_cache = nil } -// AddTopLevelArtifactIDs adds the "top_level_artifacts" edge to the FilesMetric entity by ids. -func (m *ArtifactMetricsMutation) AddTopLevelArtifactIDs(ids ...int) { - if m.top_level_artifacts == nil { - m.top_level_artifacts = make(map[int]struct{}) - } - for i := range ids { - m.top_level_artifacts[ids[i]] = struct{}{} - } +// SetTopLevelArtifactsID sets the "top_level_artifacts" edge to the FilesMetric entity by id. +func (m *ArtifactMetricsMutation) SetTopLevelArtifactsID(id int) { + m.top_level_artifacts = &id } // ClearTopLevelArtifacts clears the "top_level_artifacts" edge to the FilesMetric entity. @@ -3566,29 +3422,20 @@ func (m *ArtifactMetricsMutation) TopLevelArtifactsCleared() bool { return m.clearedtop_level_artifacts } -// RemoveTopLevelArtifactIDs removes the "top_level_artifacts" edge to the FilesMetric entity by IDs. -func (m *ArtifactMetricsMutation) RemoveTopLevelArtifactIDs(ids ...int) { - if m.removedtop_level_artifacts == nil { - m.removedtop_level_artifacts = make(map[int]struct{}) - } - for i := range ids { - delete(m.top_level_artifacts, ids[i]) - m.removedtop_level_artifacts[ids[i]] = struct{}{} - } -} - -// RemovedTopLevelArtifacts returns the removed IDs of the "top_level_artifacts" edge to the FilesMetric entity. -func (m *ArtifactMetricsMutation) RemovedTopLevelArtifactsIDs() (ids []int) { - for id := range m.removedtop_level_artifacts { - ids = append(ids, id) +// TopLevelArtifactsID returns the "top_level_artifacts" edge ID in the mutation. +func (m *ArtifactMetricsMutation) TopLevelArtifactsID() (id int, exists bool) { + if m.top_level_artifacts != nil { + return *m.top_level_artifacts, true } return } // TopLevelArtifactsIDs returns the "top_level_artifacts" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TopLevelArtifactsID instead. It exists only for internal usage by the builders. func (m *ArtifactMetricsMutation) TopLevelArtifactsIDs() (ids []int) { - for id := range m.top_level_artifacts { - ids = append(ids, id) + if id := m.top_level_artifacts; id != nil { + ids = append(ids, *id) } return } @@ -3597,7 +3444,6 @@ func (m *ArtifactMetricsMutation) TopLevelArtifactsIDs() (ids []int) { func (m *ArtifactMetricsMutation) ResetTopLevelArtifacts() { m.top_level_artifacts = nil m.clearedtop_level_artifacts = false - m.removedtop_level_artifacts = nil } // Where appends a list predicates to the ArtifactMetricsMutation builder. 
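For reference, the mutation now tracks each of these edges as a single *int, so hooks can read them through the new (id int, exists bool) accessors instead of iterating the old ID maps. A sketch; auditArtifactMetrics and the log calls are illustrative, and only the ArtifactMetricsMutation methods come from this patch:

	func auditArtifactMetrics(m *ent.ArtifactMetricsMutation) {
		if id, ok := m.MetricsID(); ok {
			log.Printf("artifact metrics will attach to metrics row %d", id)
		}
		if _, ok := m.SourceArtifactsReadID(); !ok && m.SourceArtifactsReadCleared() {
			log.Printf("source_artifacts_read edge is being cleared")
		}
	}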
@@ -3732,35 +3578,25 @@ func (m *ArtifactMetricsMutation) AddedEdges() []string { func (m *ArtifactMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case artifactmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case artifactmetrics.EdgeSourceArtifactsRead: - ids := make([]ent.Value, 0, len(m.source_artifacts_read)) - for id := range m.source_artifacts_read { - ids = append(ids, id) + if id := m.source_artifacts_read; id != nil { + return []ent.Value{*id} } - return ids case artifactmetrics.EdgeOutputArtifactsSeen: - ids := make([]ent.Value, 0, len(m.output_artifacts_seen)) - for id := range m.output_artifacts_seen { - ids = append(ids, id) + if id := m.output_artifacts_seen; id != nil { + return []ent.Value{*id} } - return ids case artifactmetrics.EdgeOutputArtifactsFromActionCache: - ids := make([]ent.Value, 0, len(m.output_artifacts_from_action_cache)) - for id := range m.output_artifacts_from_action_cache { - ids = append(ids, id) + if id := m.output_artifacts_from_action_cache; id != nil { + return []ent.Value{*id} } - return ids case artifactmetrics.EdgeTopLevelArtifacts: - ids := make([]ent.Value, 0, len(m.top_level_artifacts)) - for id := range m.top_level_artifacts { - ids = append(ids, id) + if id := m.top_level_artifacts; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -3768,59 +3604,12 @@ func (m *ArtifactMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *ArtifactMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 5) - if m.removedmetrics != nil { - edges = append(edges, artifactmetrics.EdgeMetrics) - } - if m.removedsource_artifacts_read != nil { - edges = append(edges, artifactmetrics.EdgeSourceArtifactsRead) - } - if m.removedoutput_artifacts_seen != nil { - edges = append(edges, artifactmetrics.EdgeOutputArtifactsSeen) - } - if m.removedoutput_artifacts_from_action_cache != nil { - edges = append(edges, artifactmetrics.EdgeOutputArtifactsFromActionCache) - } - if m.removedtop_level_artifacts != nil { - edges = append(edges, artifactmetrics.EdgeTopLevelArtifacts) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. 
func (m *ArtifactMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case artifactmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - case artifactmetrics.EdgeSourceArtifactsRead: - ids := make([]ent.Value, 0, len(m.removedsource_artifacts_read)) - for id := range m.removedsource_artifacts_read { - ids = append(ids, id) - } - return ids - case artifactmetrics.EdgeOutputArtifactsSeen: - ids := make([]ent.Value, 0, len(m.removedoutput_artifacts_seen)) - for id := range m.removedoutput_artifacts_seen { - ids = append(ids, id) - } - return ids - case artifactmetrics.EdgeOutputArtifactsFromActionCache: - ids := make([]ent.Value, 0, len(m.removedoutput_artifacts_from_action_cache)) - for id := range m.removedoutput_artifacts_from_action_cache { - ids = append(ids, id) - } - return ids - case artifactmetrics.EdgeTopLevelArtifacts: - ids := make([]ent.Value, 0, len(m.removedtop_level_artifacts)) - for id := range m.removedtop_level_artifacts { - ids = append(ids, id) - } - return ids - } return nil } @@ -3867,6 +3656,21 @@ func (m *ArtifactMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *ArtifactMetricsMutation) ClearEdge(name string) error { switch name { + case artifactmetrics.EdgeMetrics: + m.ClearMetrics() + return nil + case artifactmetrics.EdgeSourceArtifactsRead: + m.ClearSourceArtifactsRead() + return nil + case artifactmetrics.EdgeOutputArtifactsSeen: + m.ClearOutputArtifactsSeen() + return nil + case artifactmetrics.EdgeOutputArtifactsFromActionCache: + m.ClearOutputArtifactsFromActionCache() + return nil + case artifactmetrics.EdgeTopLevelArtifacts: + m.ClearTopLevelArtifacts() + return nil } return fmt.Errorf("unknown ArtifactMetrics unique edge %s", name) } @@ -7545,23 +7349,17 @@ type BuildGraphMetricsMutation struct { post_invocation_skyframe_node_count *int32 addpost_invocation_skyframe_node_count *int32 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool - dirtied_values map[int]struct{} - removeddirtied_values map[int]struct{} + dirtied_values *int cleareddirtied_values bool - changed_values map[int]struct{} - removedchanged_values map[int]struct{} + changed_values *int clearedchanged_values bool - built_values map[int]struct{} - removedbuilt_values map[int]struct{} + built_values *int clearedbuilt_values bool - cleaned_values map[int]struct{} - removedcleaned_values map[int]struct{} + cleaned_values *int clearedcleaned_values bool - evaluated_values map[int]struct{} - removedevaluated_values map[int]struct{} + evaluated_values *int clearedevaluated_values bool done bool oldValue func(context.Context) (*BuildGraphMetrics, error) @@ -8296,14 +8094,9 @@ func (m *BuildGraphMetricsMutation) ResetPostInvocationSkyframeNodeCount() { delete(m.clearedFields, buildgraphmetrics.FieldPostInvocationSkyframeNodeCount) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *BuildGraphMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *BuildGraphMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. 
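For reference, with these edges unique, piecewise removal no longer applies (RemovedEdges and RemovedIDs report nothing) and ClearEdge gains a case per edge name. A sketch of the generic clear path; the helper function is illustrative:

	func clearTopLevelArtifacts(m *ent.ArtifactMetricsMutation) error {
		// Valid after this patch because top_level_artifacts is now a unique edge;
		// previously this name fell through to the "unknown unique edge" error.
		return m.ClearEdge(artifactmetrics.EdgeTopLevelArtifacts)
	}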
@@ -8316,29 +8109,20 @@ func (m *BuildGraphMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *BuildGraphMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *BuildGraphMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *BuildGraphMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -8347,17 +8131,11 @@ func (m *BuildGraphMetricsMutation) MetricsIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } -// AddDirtiedValueIDs adds the "dirtied_values" edge to the EvaluationStat entity by ids. -func (m *BuildGraphMetricsMutation) AddDirtiedValueIDs(ids ...int) { - if m.dirtied_values == nil { - m.dirtied_values = make(map[int]struct{}) - } - for i := range ids { - m.dirtied_values[ids[i]] = struct{}{} - } +// SetDirtiedValuesID sets the "dirtied_values" edge to the EvaluationStat entity by id. +func (m *BuildGraphMetricsMutation) SetDirtiedValuesID(id int) { + m.dirtied_values = &id } // ClearDirtiedValues clears the "dirtied_values" edge to the EvaluationStat entity. @@ -8370,29 +8148,20 @@ func (m *BuildGraphMetricsMutation) DirtiedValuesCleared() bool { return m.cleareddirtied_values } -// RemoveDirtiedValueIDs removes the "dirtied_values" edge to the EvaluationStat entity by IDs. -func (m *BuildGraphMetricsMutation) RemoveDirtiedValueIDs(ids ...int) { - if m.removeddirtied_values == nil { - m.removeddirtied_values = make(map[int]struct{}) - } - for i := range ids { - delete(m.dirtied_values, ids[i]) - m.removeddirtied_values[ids[i]] = struct{}{} - } -} - -// RemovedDirtiedValues returns the removed IDs of the "dirtied_values" edge to the EvaluationStat entity. -func (m *BuildGraphMetricsMutation) RemovedDirtiedValuesIDs() (ids []int) { - for id := range m.removeddirtied_values { - ids = append(ids, id) +// DirtiedValuesID returns the "dirtied_values" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) DirtiedValuesID() (id int, exists bool) { + if m.dirtied_values != nil { + return *m.dirtied_values, true } return } // DirtiedValuesIDs returns the "dirtied_values" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// DirtiedValuesID instead. It exists only for internal usage by the builders. 
func (m *BuildGraphMetricsMutation) DirtiedValuesIDs() (ids []int) { - for id := range m.dirtied_values { - ids = append(ids, id) + if id := m.dirtied_values; id != nil { + ids = append(ids, *id) } return } @@ -8401,17 +8170,11 @@ func (m *BuildGraphMetricsMutation) DirtiedValuesIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetDirtiedValues() { m.dirtied_values = nil m.cleareddirtied_values = false - m.removeddirtied_values = nil } -// AddChangedValueIDs adds the "changed_values" edge to the EvaluationStat entity by ids. -func (m *BuildGraphMetricsMutation) AddChangedValueIDs(ids ...int) { - if m.changed_values == nil { - m.changed_values = make(map[int]struct{}) - } - for i := range ids { - m.changed_values[ids[i]] = struct{}{} - } +// SetChangedValuesID sets the "changed_values" edge to the EvaluationStat entity by id. +func (m *BuildGraphMetricsMutation) SetChangedValuesID(id int) { + m.changed_values = &id } // ClearChangedValues clears the "changed_values" edge to the EvaluationStat entity. @@ -8424,29 +8187,20 @@ func (m *BuildGraphMetricsMutation) ChangedValuesCleared() bool { return m.clearedchanged_values } -// RemoveChangedValueIDs removes the "changed_values" edge to the EvaluationStat entity by IDs. -func (m *BuildGraphMetricsMutation) RemoveChangedValueIDs(ids ...int) { - if m.removedchanged_values == nil { - m.removedchanged_values = make(map[int]struct{}) - } - for i := range ids { - delete(m.changed_values, ids[i]) - m.removedchanged_values[ids[i]] = struct{}{} - } -} - -// RemovedChangedValues returns the removed IDs of the "changed_values" edge to the EvaluationStat entity. -func (m *BuildGraphMetricsMutation) RemovedChangedValuesIDs() (ids []int) { - for id := range m.removedchanged_values { - ids = append(ids, id) +// ChangedValuesID returns the "changed_values" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) ChangedValuesID() (id int, exists bool) { + if m.changed_values != nil { + return *m.changed_values, true } return } // ChangedValuesIDs returns the "changed_values" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ChangedValuesID instead. It exists only for internal usage by the builders. func (m *BuildGraphMetricsMutation) ChangedValuesIDs() (ids []int) { - for id := range m.changed_values { - ids = append(ids, id) + if id := m.changed_values; id != nil { + ids = append(ids, *id) } return } @@ -8455,17 +8209,11 @@ func (m *BuildGraphMetricsMutation) ChangedValuesIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetChangedValues() { m.changed_values = nil m.clearedchanged_values = false - m.removedchanged_values = nil } -// AddBuiltValueIDs adds the "built_values" edge to the EvaluationStat entity by ids. -func (m *BuildGraphMetricsMutation) AddBuiltValueIDs(ids ...int) { - if m.built_values == nil { - m.built_values = make(map[int]struct{}) - } - for i := range ids { - m.built_values[ids[i]] = struct{}{} - } +// SetBuiltValuesID sets the "built_values" edge to the EvaluationStat entity by id. +func (m *BuildGraphMetricsMutation) SetBuiltValuesID(id int) { + m.built_values = &id } // ClearBuiltValues clears the "built_values" edge to the EvaluationStat entity. @@ -8478,29 +8226,20 @@ func (m *BuildGraphMetricsMutation) BuiltValuesCleared() bool { return m.clearedbuilt_values } -// RemoveBuiltValueIDs removes the "built_values" edge to the EvaluationStat entity by IDs. 
-func (m *BuildGraphMetricsMutation) RemoveBuiltValueIDs(ids ...int) { - if m.removedbuilt_values == nil { - m.removedbuilt_values = make(map[int]struct{}) - } - for i := range ids { - delete(m.built_values, ids[i]) - m.removedbuilt_values[ids[i]] = struct{}{} +// BuiltValuesID returns the "built_values" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) BuiltValuesID() (id int, exists bool) { + if m.built_values != nil { + return *m.built_values, true } -} - -// RemovedBuiltValues returns the removed IDs of the "built_values" edge to the EvaluationStat entity. -func (m *BuildGraphMetricsMutation) RemovedBuiltValuesIDs() (ids []int) { - for id := range m.removedbuilt_values { - ids = append(ids, id) - } - return + return } // BuiltValuesIDs returns the "built_values" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// BuiltValuesID instead. It exists only for internal usage by the builders. func (m *BuildGraphMetricsMutation) BuiltValuesIDs() (ids []int) { - for id := range m.built_values { - ids = append(ids, id) + if id := m.built_values; id != nil { + ids = append(ids, *id) } return } @@ -8509,17 +8248,11 @@ func (m *BuildGraphMetricsMutation) BuiltValuesIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetBuiltValues() { m.built_values = nil m.clearedbuilt_values = false - m.removedbuilt_values = nil } -// AddCleanedValueIDs adds the "cleaned_values" edge to the EvaluationStat entity by ids. -func (m *BuildGraphMetricsMutation) AddCleanedValueIDs(ids ...int) { - if m.cleaned_values == nil { - m.cleaned_values = make(map[int]struct{}) - } - for i := range ids { - m.cleaned_values[ids[i]] = struct{}{} - } +// SetCleanedValuesID sets the "cleaned_values" edge to the EvaluationStat entity by id. +func (m *BuildGraphMetricsMutation) SetCleanedValuesID(id int) { + m.cleaned_values = &id } // ClearCleanedValues clears the "cleaned_values" edge to the EvaluationStat entity. @@ -8532,29 +8265,20 @@ func (m *BuildGraphMetricsMutation) CleanedValuesCleared() bool { return m.clearedcleaned_values } -// RemoveCleanedValueIDs removes the "cleaned_values" edge to the EvaluationStat entity by IDs. -func (m *BuildGraphMetricsMutation) RemoveCleanedValueIDs(ids ...int) { - if m.removedcleaned_values == nil { - m.removedcleaned_values = make(map[int]struct{}) - } - for i := range ids { - delete(m.cleaned_values, ids[i]) - m.removedcleaned_values[ids[i]] = struct{}{} - } -} - -// RemovedCleanedValues returns the removed IDs of the "cleaned_values" edge to the EvaluationStat entity. -func (m *BuildGraphMetricsMutation) RemovedCleanedValuesIDs() (ids []int) { - for id := range m.removedcleaned_values { - ids = append(ids, id) +// CleanedValuesID returns the "cleaned_values" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) CleanedValuesID() (id int, exists bool) { + if m.cleaned_values != nil { + return *m.cleaned_values, true } return } // CleanedValuesIDs returns the "cleaned_values" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// CleanedValuesID instead. It exists only for internal usage by the builders. 
func (m *BuildGraphMetricsMutation) CleanedValuesIDs() (ids []int) { - for id := range m.cleaned_values { - ids = append(ids, id) + if id := m.cleaned_values; id != nil { + ids = append(ids, *id) } return } @@ -8563,17 +8287,11 @@ func (m *BuildGraphMetricsMutation) CleanedValuesIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetCleanedValues() { m.cleaned_values = nil m.clearedcleaned_values = false - m.removedcleaned_values = nil } -// AddEvaluatedValueIDs adds the "evaluated_values" edge to the EvaluationStat entity by ids. -func (m *BuildGraphMetricsMutation) AddEvaluatedValueIDs(ids ...int) { - if m.evaluated_values == nil { - m.evaluated_values = make(map[int]struct{}) - } - for i := range ids { - m.evaluated_values[ids[i]] = struct{}{} - } +// SetEvaluatedValuesID sets the "evaluated_values" edge to the EvaluationStat entity by id. +func (m *BuildGraphMetricsMutation) SetEvaluatedValuesID(id int) { + m.evaluated_values = &id } // ClearEvaluatedValues clears the "evaluated_values" edge to the EvaluationStat entity. @@ -8586,29 +8304,20 @@ func (m *BuildGraphMetricsMutation) EvaluatedValuesCleared() bool { return m.clearedevaluated_values } -// RemoveEvaluatedValueIDs removes the "evaluated_values" edge to the EvaluationStat entity by IDs. -func (m *BuildGraphMetricsMutation) RemoveEvaluatedValueIDs(ids ...int) { - if m.removedevaluated_values == nil { - m.removedevaluated_values = make(map[int]struct{}) - } - for i := range ids { - delete(m.evaluated_values, ids[i]) - m.removedevaluated_values[ids[i]] = struct{}{} - } -} - -// RemovedEvaluatedValues returns the removed IDs of the "evaluated_values" edge to the EvaluationStat entity. -func (m *BuildGraphMetricsMutation) RemovedEvaluatedValuesIDs() (ids []int) { - for id := range m.removedevaluated_values { - ids = append(ids, id) +// EvaluatedValuesID returns the "evaluated_values" edge ID in the mutation. +func (m *BuildGraphMetricsMutation) EvaluatedValuesID() (id int, exists bool) { + if m.evaluated_values != nil { + return *m.evaluated_values, true } return } // EvaluatedValuesIDs returns the "evaluated_values" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// EvaluatedValuesID instead. It exists only for internal usage by the builders. func (m *BuildGraphMetricsMutation) EvaluatedValuesIDs() (ids []int) { - for id := range m.evaluated_values { - ids = append(ids, id) + if id := m.evaluated_values; id != nil { + ids = append(ids, *id) } return } @@ -8617,7 +8326,6 @@ func (m *BuildGraphMetricsMutation) EvaluatedValuesIDs() (ids []int) { func (m *BuildGraphMetricsMutation) ResetEvaluatedValues() { m.evaluated_values = nil m.clearedevaluated_values = false - m.removedevaluated_values = nil } // Where appends a list predicates to the BuildGraphMetricsMutation builder. 
@@ -9084,41 +8792,29 @@ func (m *BuildGraphMetricsMutation) AddedEdges() []string { func (m *BuildGraphMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case buildgraphmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case buildgraphmetrics.EdgeDirtiedValues: - ids := make([]ent.Value, 0, len(m.dirtied_values)) - for id := range m.dirtied_values { - ids = append(ids, id) + if id := m.dirtied_values; id != nil { + return []ent.Value{*id} } - return ids case buildgraphmetrics.EdgeChangedValues: - ids := make([]ent.Value, 0, len(m.changed_values)) - for id := range m.changed_values { - ids = append(ids, id) + if id := m.changed_values; id != nil { + return []ent.Value{*id} } - return ids case buildgraphmetrics.EdgeBuiltValues: - ids := make([]ent.Value, 0, len(m.built_values)) - for id := range m.built_values { - ids = append(ids, id) + if id := m.built_values; id != nil { + return []ent.Value{*id} } - return ids case buildgraphmetrics.EdgeCleanedValues: - ids := make([]ent.Value, 0, len(m.cleaned_values)) - for id := range m.cleaned_values { - ids = append(ids, id) + if id := m.cleaned_values; id != nil { + return []ent.Value{*id} } - return ids case buildgraphmetrics.EdgeEvaluatedValues: - ids := make([]ent.Value, 0, len(m.evaluated_values)) - for id := range m.evaluated_values { - ids = append(ids, id) + if id := m.evaluated_values; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -9126,68 +8822,12 @@ func (m *BuildGraphMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *BuildGraphMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 6) - if m.removedmetrics != nil { - edges = append(edges, buildgraphmetrics.EdgeMetrics) - } - if m.removeddirtied_values != nil { - edges = append(edges, buildgraphmetrics.EdgeDirtiedValues) - } - if m.removedchanged_values != nil { - edges = append(edges, buildgraphmetrics.EdgeChangedValues) - } - if m.removedbuilt_values != nil { - edges = append(edges, buildgraphmetrics.EdgeBuiltValues) - } - if m.removedcleaned_values != nil { - edges = append(edges, buildgraphmetrics.EdgeCleanedValues) - } - if m.removedevaluated_values != nil { - edges = append(edges, buildgraphmetrics.EdgeEvaluatedValues) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. 
func (m *BuildGraphMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case buildgraphmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - case buildgraphmetrics.EdgeDirtiedValues: - ids := make([]ent.Value, 0, len(m.removeddirtied_values)) - for id := range m.removeddirtied_values { - ids = append(ids, id) - } - return ids - case buildgraphmetrics.EdgeChangedValues: - ids := make([]ent.Value, 0, len(m.removedchanged_values)) - for id := range m.removedchanged_values { - ids = append(ids, id) - } - return ids - case buildgraphmetrics.EdgeBuiltValues: - ids := make([]ent.Value, 0, len(m.removedbuilt_values)) - for id := range m.removedbuilt_values { - ids = append(ids, id) - } - return ids - case buildgraphmetrics.EdgeCleanedValues: - ids := make([]ent.Value, 0, len(m.removedcleaned_values)) - for id := range m.removedcleaned_values { - ids = append(ids, id) - } - return ids - case buildgraphmetrics.EdgeEvaluatedValues: - ids := make([]ent.Value, 0, len(m.removedevaluated_values)) - for id := range m.removedevaluated_values { - ids = append(ids, id) - } - return ids - } return nil } @@ -9239,6 +8879,24 @@ func (m *BuildGraphMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *BuildGraphMetricsMutation) ClearEdge(name string) error { switch name { + case buildgraphmetrics.EdgeMetrics: + m.ClearMetrics() + return nil + case buildgraphmetrics.EdgeDirtiedValues: + m.ClearDirtiedValues() + return nil + case buildgraphmetrics.EdgeChangedValues: + m.ClearChangedValues() + return nil + case buildgraphmetrics.EdgeBuiltValues: + m.ClearBuiltValues() + return nil + case buildgraphmetrics.EdgeCleanedValues: + m.ClearCleanedValues() + return nil + case buildgraphmetrics.EdgeEvaluatedValues: + m.ClearEvaluatedValues() + return nil } return fmt.Errorf("unknown BuildGraphMetrics unique edge %s", name) } @@ -9280,8 +8938,7 @@ type CumulativeMetricsMutation struct { num_builds *int32 addnum_builds *int32 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool done bool oldValue func(context.Context) (*CumulativeMetrics, error) @@ -9526,14 +9183,9 @@ func (m *CumulativeMetricsMutation) ResetNumBuilds() { delete(m.clearedFields, cumulativemetrics.FieldNumBuilds) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *CumulativeMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *CumulativeMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -9546,29 +9198,20 @@ func (m *CumulativeMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *CumulativeMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. 
-func (m *CumulativeMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *CumulativeMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *CumulativeMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -9577,7 +9220,6 @@ func (m *CumulativeMetricsMutation) MetricsIDs() (ids []int) { func (m *CumulativeMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // Where appends a list predicates to the CumulativeMetricsMutation builder. @@ -9784,11 +9426,9 @@ func (m *CumulativeMetricsMutation) AddedEdges() []string { func (m *CumulativeMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case cumulativemetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -9796,23 +9436,12 @@ func (m *CumulativeMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *CumulativeMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedmetrics != nil { - edges = append(edges, cumulativemetrics.EdgeMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *CumulativeMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case cumulativemetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -9839,6 +9468,9 @@ func (m *CumulativeMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *CumulativeMetricsMutation) ClearEdge(name string) error { switch name { + case cumulativemetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown CumulativeMetrics unique edge %s", name) } @@ -9861,8 +9493,7 @@ type DynamicExecutionMetricsMutation struct { typ string id *int clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool race_statistics map[int]struct{} removedrace_statistics map[int]struct{} @@ -9970,14 +9601,9 @@ func (m *DynamicExecutionMetricsMutation) IDs(ctx context.Context) ([]int, error } } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *DynamicExecutionMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *DynamicExecutionMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. 
@@ -9990,29 +9616,20 @@ func (m *DynamicExecutionMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *DynamicExecutionMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *DynamicExecutionMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *DynamicExecutionMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *DynamicExecutionMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -10021,7 +9638,6 @@ func (m *DynamicExecutionMetricsMutation) MetricsIDs() (ids []int) { func (m *DynamicExecutionMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // AddRaceStatisticIDs adds the "race_statistics" edge to the RaceStatistics entity by ids. @@ -10201,11 +9817,9 @@ func (m *DynamicExecutionMetricsMutation) AddedEdges() []string { func (m *DynamicExecutionMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case dynamicexecutionmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case dynamicexecutionmetrics.EdgeRaceStatistics: ids := make([]ent.Value, 0, len(m.race_statistics)) for id := range m.race_statistics { @@ -10219,9 +9833,6 @@ func (m *DynamicExecutionMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *DynamicExecutionMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedmetrics != nil { - edges = append(edges, dynamicexecutionmetrics.EdgeMetrics) - } if m.removedrace_statistics != nil { edges = append(edges, dynamicexecutionmetrics.EdgeRaceStatistics) } @@ -10232,12 +9843,6 @@ func (m *DynamicExecutionMetricsMutation) RemovedEdges() []string { // the given name in this mutation. func (m *DynamicExecutionMetricsMutation) RemovedIDs(name string) []ent.Value { switch name { - case dynamicexecutionmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids case dynamicexecutionmetrics.EdgeRaceStatistics: ids := make([]ent.Value, 0, len(m.removedrace_statistics)) for id := range m.removedrace_statistics { @@ -10276,6 +9881,9 @@ func (m *DynamicExecutionMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *DynamicExecutionMetricsMutation) ClearEdge(name string) error { switch name { + case dynamicexecutionmetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown DynamicExecutionMetrics unique edge %s", name) } @@ -10304,8 +9912,7 @@ type EvaluationStatMutation struct { count *int64 addcount *int64 clearedFields map[string]struct{} - build_graph_metrics map[int]struct{} - removedbuild_graph_metrics map[int]struct{} + build_graph_metrics *int clearedbuild_graph_metrics bool done bool oldValue func(context.Context) (*EvaluationStat, error) @@ -10529,14 +10136,9 @@ func (m *EvaluationStatMutation) ResetCount() { delete(m.clearedFields, evaluationstat.FieldCount) } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by ids. -func (m *EvaluationStatMutation) AddBuildGraphMetricIDs(ids ...int) { - if m.build_graph_metrics == nil { - m.build_graph_metrics = make(map[int]struct{}) - } - for i := range ids { - m.build_graph_metrics[ids[i]] = struct{}{} - } +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by id. +func (m *EvaluationStatMutation) SetBuildGraphMetricsID(id int) { + m.build_graph_metrics = &id } // ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. @@ -10549,29 +10151,20 @@ func (m *EvaluationStatMutation) BuildGraphMetricsCleared() bool { return m.clearedbuild_graph_metrics } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (m *EvaluationStatMutation) RemoveBuildGraphMetricIDs(ids ...int) { - if m.removedbuild_graph_metrics == nil { - m.removedbuild_graph_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.build_graph_metrics, ids[i]) - m.removedbuild_graph_metrics[ids[i]] = struct{}{} - } -} - -// RemovedBuildGraphMetrics returns the removed IDs of the "build_graph_metrics" edge to the BuildGraphMetrics entity. -func (m *EvaluationStatMutation) RemovedBuildGraphMetricsIDs() (ids []int) { - for id := range m.removedbuild_graph_metrics { - ids = append(ids, id) +// BuildGraphMetricsID returns the "build_graph_metrics" edge ID in the mutation. +func (m *EvaluationStatMutation) BuildGraphMetricsID() (id int, exists bool) { + if m.build_graph_metrics != nil { + return *m.build_graph_metrics, true } return } // BuildGraphMetricsIDs returns the "build_graph_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// BuildGraphMetricsID instead. It exists only for internal usage by the builders. func (m *EvaluationStatMutation) BuildGraphMetricsIDs() (ids []int) { - for id := range m.build_graph_metrics { - ids = append(ids, id) + if id := m.build_graph_metrics; id != nil { + ids = append(ids, *id) } return } @@ -10580,7 +10173,6 @@ func (m *EvaluationStatMutation) BuildGraphMetricsIDs() (ids []int) { func (m *EvaluationStatMutation) ResetBuildGraphMetrics() { m.build_graph_metrics = nil m.clearedbuild_graph_metrics = false - m.removedbuild_graph_metrics = nil } // Where appends a list predicates to the EvaluationStatMutation builder. 
@@ -10775,11 +10367,9 @@ func (m *EvaluationStatMutation) AddedEdges() []string { func (m *EvaluationStatMutation) AddedIDs(name string) []ent.Value { switch name { case evaluationstat.EdgeBuildGraphMetrics: - ids := make([]ent.Value, 0, len(m.build_graph_metrics)) - for id := range m.build_graph_metrics { - ids = append(ids, id) + if id := m.build_graph_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -10787,23 +10377,12 @@ func (m *EvaluationStatMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *EvaluationStatMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedbuild_graph_metrics != nil { - edges = append(edges, evaluationstat.EdgeBuildGraphMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *EvaluationStatMutation) RemovedIDs(name string) []ent.Value { - switch name { - case evaluationstat.EdgeBuildGraphMetrics: - ids := make([]ent.Value, 0, len(m.removedbuild_graph_metrics)) - for id := range m.removedbuild_graph_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -10830,6 +10409,9 @@ func (m *EvaluationStatMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *EvaluationStatMutation) ClearEdge(name string) error { switch name { + case evaluationstat.EdgeBuildGraphMetrics: + m.ClearBuildGraphMetrics() + return nil } return fmt.Errorf("unknown EvaluationStat unique edge %s", name) } @@ -11544,8 +11126,7 @@ type ExectionInfoMutation struct { addexit_code *int32 hostname *string clearedFields map[string]struct{} - test_result map[int]struct{} - removedtest_result map[int]struct{} + test_result *int clearedtest_result bool timing_breakdown *int clearedtiming_breakdown bool @@ -11942,14 +11523,9 @@ func (m *ExectionInfoMutation) ResetHostname() { delete(m.clearedFields, exectioninfo.FieldHostname) } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by ids. -func (m *ExectionInfoMutation) AddTestResultIDs(ids ...int) { - if m.test_result == nil { - m.test_result = make(map[int]struct{}) - } - for i := range ids { - m.test_result[ids[i]] = struct{}{} - } +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by id. +func (m *ExectionInfoMutation) SetTestResultID(id int) { + m.test_result = &id } // ClearTestResult clears the "test_result" edge to the TestResultBES entity. @@ -11962,29 +11538,20 @@ func (m *ExectionInfoMutation) TestResultCleared() bool { return m.clearedtest_result } -// RemoveTestResultIDs removes the "test_result" edge to the TestResultBES entity by IDs. -func (m *ExectionInfoMutation) RemoveTestResultIDs(ids ...int) { - if m.removedtest_result == nil { - m.removedtest_result = make(map[int]struct{}) - } - for i := range ids { - delete(m.test_result, ids[i]) - m.removedtest_result[ids[i]] = struct{}{} - } -} - -// RemovedTestResult returns the removed IDs of the "test_result" edge to the TestResultBES entity. -func (m *ExectionInfoMutation) RemovedTestResultIDs() (ids []int) { - for id := range m.removedtest_result { - ids = append(ids, id) +// TestResultID returns the "test_result" edge ID in the mutation. +func (m *ExectionInfoMutation) TestResultID() (id int, exists bool) { + if m.test_result != nil { + return *m.test_result, true } return } // TestResultIDs returns the "test_result" edge IDs in the mutation. 
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TestResultID instead. It exists only for internal usage by the builders. func (m *ExectionInfoMutation) TestResultIDs() (ids []int) { - for id := range m.test_result { - ids = append(ids, id) + if id := m.test_result; id != nil { + ids = append(ids, *id) } return } @@ -11993,7 +11560,6 @@ func (m *ExectionInfoMutation) TestResultIDs() (ids []int) { func (m *ExectionInfoMutation) ResetTestResult() { m.test_result = nil m.clearedtest_result = false - m.removedtest_result = nil } // SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by id. @@ -12368,11 +11934,9 @@ func (m *ExectionInfoMutation) AddedEdges() []string { func (m *ExectionInfoMutation) AddedIDs(name string) []ent.Value { switch name { case exectioninfo.EdgeTestResult: - ids := make([]ent.Value, 0, len(m.test_result)) - for id := range m.test_result { - ids = append(ids, id) + if id := m.test_result; id != nil { + return []ent.Value{*id} } - return ids case exectioninfo.EdgeTimingBreakdown: if id := m.timing_breakdown; id != nil { return []ent.Value{*id} @@ -12390,9 +11954,6 @@ func (m *ExectionInfoMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *ExectionInfoMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedtest_result != nil { - edges = append(edges, exectioninfo.EdgeTestResult) - } if m.removedresource_usage != nil { edges = append(edges, exectioninfo.EdgeResourceUsage) } @@ -12403,12 +11964,6 @@ func (m *ExectionInfoMutation) RemovedEdges() []string { // the given name in this mutation. func (m *ExectionInfoMutation) RemovedIDs(name string) []ent.Value { switch name { - case exectioninfo.EdgeTestResult: - ids := make([]ent.Value, 0, len(m.removedtest_result)) - for id := range m.removedtest_result { - ids = append(ids, id) - } - return ids case exectioninfo.EdgeResourceUsage: ids := make([]ent.Value, 0, len(m.removedresource_usage)) for id := range m.removedresource_usage { @@ -12452,6 +12007,9 @@ func (m *ExectionInfoMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *ExectionInfoMutation) ClearEdge(name string) error { switch name { + case exectioninfo.EdgeTestResult: + m.ClearTestResult() + return nil case exectioninfo.EdgeTimingBreakdown: m.ClearTimingBreakdown() return nil @@ -12487,8 +12045,7 @@ type FilesMetricMutation struct { count *int32 addcount *int32 clearedFields map[string]struct{} - artifact_metrics map[int]struct{} - removedartifact_metrics map[int]struct{} + artifact_metrics *int clearedartifact_metrics bool done bool oldValue func(context.Context) (*FilesMetric, error) @@ -12733,14 +12290,9 @@ func (m *FilesMetricMutation) ResetCount() { delete(m.clearedFields, filesmetric.FieldCount) } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by ids. -func (m *FilesMetricMutation) AddArtifactMetricIDs(ids ...int) { - if m.artifact_metrics == nil { - m.artifact_metrics = make(map[int]struct{}) - } - for i := range ids { - m.artifact_metrics[ids[i]] = struct{}{} - } +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by id. +func (m *FilesMetricMutation) SetArtifactMetricsID(id int) { + m.artifact_metrics = &id } // ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. 
@@ -12753,29 +12305,20 @@ func (m *FilesMetricMutation) ArtifactMetricsCleared() bool { return m.clearedartifact_metrics } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (m *FilesMetricMutation) RemoveArtifactMetricIDs(ids ...int) { - if m.removedartifact_metrics == nil { - m.removedartifact_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.artifact_metrics, ids[i]) - m.removedartifact_metrics[ids[i]] = struct{}{} - } -} - -// RemovedArtifactMetrics returns the removed IDs of the "artifact_metrics" edge to the ArtifactMetrics entity. -func (m *FilesMetricMutation) RemovedArtifactMetricsIDs() (ids []int) { - for id := range m.removedartifact_metrics { - ids = append(ids, id) +// ArtifactMetricsID returns the "artifact_metrics" edge ID in the mutation. +func (m *FilesMetricMutation) ArtifactMetricsID() (id int, exists bool) { + if m.artifact_metrics != nil { + return *m.artifact_metrics, true } return } // ArtifactMetricsIDs returns the "artifact_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ArtifactMetricsID instead. It exists only for internal usage by the builders. func (m *FilesMetricMutation) ArtifactMetricsIDs() (ids []int) { - for id := range m.artifact_metrics { - ids = append(ids, id) + if id := m.artifact_metrics; id != nil { + ids = append(ids, *id) } return } @@ -12784,7 +12327,6 @@ func (m *FilesMetricMutation) ArtifactMetricsIDs() (ids []int) { func (m *FilesMetricMutation) ResetArtifactMetrics() { m.artifact_metrics = nil m.clearedartifact_metrics = false - m.removedartifact_metrics = nil } // Where appends a list predicates to the FilesMetricMutation builder. @@ -12991,11 +12533,9 @@ func (m *FilesMetricMutation) AddedEdges() []string { func (m *FilesMetricMutation) AddedIDs(name string) []ent.Value { switch name { case filesmetric.EdgeArtifactMetrics: - ids := make([]ent.Value, 0, len(m.artifact_metrics)) - for id := range m.artifact_metrics { - ids = append(ids, id) + if id := m.artifact_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -13003,23 +12543,12 @@ func (m *FilesMetricMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *FilesMetricMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedartifact_metrics != nil { - edges = append(edges, filesmetric.EdgeArtifactMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *FilesMetricMutation) RemovedIDs(name string) []ent.Value { - switch name { - case filesmetric.EdgeArtifactMetrics: - ids := make([]ent.Value, 0, len(m.removedartifact_metrics)) - for id := range m.removedartifact_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -13046,6 +12575,9 @@ func (m *FilesMetricMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *FilesMetricMutation) ClearEdge(name string) error { switch name { + case filesmetric.EdgeArtifactMetrics: + m.ClearArtifactMetrics() + return nil } return fmt.Errorf("unknown FilesMetric unique edge %s", name) } @@ -13071,8 +12603,7 @@ type GarbageMetricsMutation struct { garbage_collected *int64 addgarbage_collected *int64 clearedFields map[string]struct{} - memory_metrics map[int]struct{} - removedmemory_metrics map[int]struct{} + memory_metrics *int clearedmemory_metrics bool done bool oldValue func(context.Context) (*GarbageMetrics, error) @@ -13296,14 +12827,9 @@ func (m *GarbageMetricsMutation) ResetGarbageCollected() { delete(m.clearedFields, garbagemetrics.FieldGarbageCollected) } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by ids. -func (m *GarbageMetricsMutation) AddMemoryMetricIDs(ids ...int) { - if m.memory_metrics == nil { - m.memory_metrics = make(map[int]struct{}) - } - for i := range ids { - m.memory_metrics[ids[i]] = struct{}{} - } +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by id. +func (m *GarbageMetricsMutation) SetMemoryMetricsID(id int) { + m.memory_metrics = &id } // ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. @@ -13316,29 +12842,20 @@ func (m *GarbageMetricsMutation) MemoryMetricsCleared() bool { return m.clearedmemory_metrics } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (m *GarbageMetricsMutation) RemoveMemoryMetricIDs(ids ...int) { - if m.removedmemory_metrics == nil { - m.removedmemory_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.memory_metrics, ids[i]) - m.removedmemory_metrics[ids[i]] = struct{}{} +// MemoryMetricsID returns the "memory_metrics" edge ID in the mutation. +func (m *GarbageMetricsMutation) MemoryMetricsID() (id int, exists bool) { + if m.memory_metrics != nil { + return *m.memory_metrics, true } -} - -// RemovedMemoryMetrics returns the removed IDs of the "memory_metrics" edge to the MemoryMetrics entity. -func (m *GarbageMetricsMutation) RemovedMemoryMetricsIDs() (ids []int) { - for id := range m.removedmemory_metrics { - ids = append(ids, id) - } - return + return } // MemoryMetricsIDs returns the "memory_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MemoryMetricsID instead. It exists only for internal usage by the builders. func (m *GarbageMetricsMutation) MemoryMetricsIDs() (ids []int) { - for id := range m.memory_metrics { - ids = append(ids, id) + if id := m.memory_metrics; id != nil { + ids = append(ids, *id) } return } @@ -13347,7 +12864,6 @@ func (m *GarbageMetricsMutation) MemoryMetricsIDs() (ids []int) { func (m *GarbageMetricsMutation) ResetMemoryMetrics() { m.memory_metrics = nil m.clearedmemory_metrics = false - m.removedmemory_metrics = nil } // Where appends a list predicates to the GarbageMetricsMutation builder. 
@@ -13542,11 +13058,9 @@ func (m *GarbageMetricsMutation) AddedEdges() []string { func (m *GarbageMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case garbagemetrics.EdgeMemoryMetrics: - ids := make([]ent.Value, 0, len(m.memory_metrics)) - for id := range m.memory_metrics { - ids = append(ids, id) + if id := m.memory_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -13554,23 +13068,12 @@ func (m *GarbageMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *GarbageMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedmemory_metrics != nil { - edges = append(edges, garbagemetrics.EdgeMemoryMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *GarbageMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case garbagemetrics.EdgeMemoryMetrics: - ids := make([]ent.Value, 0, len(m.removedmemory_metrics)) - for id := range m.removedmemory_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -13597,6 +13100,9 @@ func (m *GarbageMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *GarbageMetricsMutation) ClearEdge(name string) error { switch name { + case garbagemetrics.EdgeMemoryMetrics: + m.ClearMemoryMetrics() + return nil } return fmt.Errorf("unknown GarbageMetrics unique edge %s", name) } @@ -13625,8 +13131,7 @@ type MemoryMetricsMutation struct { peak_post_gc_tenured_space_heap_size *int64 addpeak_post_gc_tenured_space_heap_size *int64 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool garbage_metrics map[int]struct{} removedgarbage_metrics map[int]struct{} @@ -13944,14 +13449,9 @@ func (m *MemoryMetricsMutation) ResetPeakPostGcTenuredSpaceHeapSize() { delete(m.clearedFields, memorymetrics.FieldPeakPostGcTenuredSpaceHeapSize) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *MemoryMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *MemoryMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -13964,29 +13464,20 @@ func (m *MemoryMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *MemoryMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *MemoryMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *MemoryMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. 
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *MemoryMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -13995,7 +13486,6 @@ func (m *MemoryMetricsMutation) MetricsIDs() (ids []int) { func (m *MemoryMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // AddGarbageMetricIDs adds the "garbage_metrics" edge to the GarbageMetrics entity by ids. @@ -14294,11 +13784,9 @@ func (m *MemoryMetricsMutation) AddedEdges() []string { func (m *MemoryMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case memorymetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case memorymetrics.EdgeGarbageMetrics: ids := make([]ent.Value, 0, len(m.garbage_metrics)) for id := range m.garbage_metrics { @@ -14312,9 +13800,6 @@ func (m *MemoryMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *MemoryMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedmetrics != nil { - edges = append(edges, memorymetrics.EdgeMetrics) - } if m.removedgarbage_metrics != nil { edges = append(edges, memorymetrics.EdgeGarbageMetrics) } @@ -14325,12 +13810,6 @@ func (m *MemoryMetricsMutation) RemovedEdges() []string { // the given name in this mutation. func (m *MemoryMetricsMutation) RemovedIDs(name string) []ent.Value { switch name { - case memorymetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids case memorymetrics.EdgeGarbageMetrics: ids := make([]ent.Value, 0, len(m.removedgarbage_metrics)) for id := range m.removedgarbage_metrics { @@ -14369,6 +13848,9 @@ func (m *MemoryMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *MemoryMetricsMutation) ClearEdge(name string) error { switch name { + case memorymetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown MemoryMetrics unique edge %s", name) } @@ -14396,35 +13878,25 @@ type MetricsMutation struct { clearedFields map[string]struct{} bazel_invocation *int clearedbazel_invocation bool - action_summary map[int]struct{} - removedaction_summary map[int]struct{} + action_summary *int clearedaction_summary bool - memory_metrics map[int]struct{} - removedmemory_metrics map[int]struct{} + memory_metrics *int clearedmemory_metrics bool - target_metrics map[int]struct{} - removedtarget_metrics map[int]struct{} + target_metrics *int clearedtarget_metrics bool - package_metrics map[int]struct{} - removedpackage_metrics map[int]struct{} + package_metrics *int clearedpackage_metrics bool - timing_metrics map[int]struct{} - removedtiming_metrics map[int]struct{} + timing_metrics *int clearedtiming_metrics bool - cumulative_metrics map[int]struct{} - removedcumulative_metrics map[int]struct{} + cumulative_metrics *int clearedcumulative_metrics bool - artifact_metrics map[int]struct{} - removedartifact_metrics map[int]struct{} + artifact_metrics *int clearedartifact_metrics bool - network_metrics map[int]struct{} - removednetwork_metrics map[int]struct{} + network_metrics *int clearednetwork_metrics bool - dynamic_execution_metrics map[int]struct{} - removeddynamic_execution_metrics map[int]struct{} + dynamic_execution_metrics *int cleareddynamic_execution_metrics bool - build_graph_metrics map[int]struct{} - removedbuild_graph_metrics map[int]struct{} + build_graph_metrics *int clearedbuild_graph_metrics bool done bool oldValue func(context.Context) (*Metrics, error) @@ -14568,14 +14040,9 @@ func (m *MetricsMutation) ResetBazelInvocation() { m.clearedbazel_invocation = false } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by ids. -func (m *MetricsMutation) AddActionSummaryIDs(ids ...int) { - if m.action_summary == nil { - m.action_summary = make(map[int]struct{}) - } - for i := range ids { - m.action_summary[ids[i]] = struct{}{} - } +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by id. +func (m *MetricsMutation) SetActionSummaryID(id int) { + m.action_summary = &id } // ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. @@ -14588,29 +14055,20 @@ func (m *MetricsMutation) ActionSummaryCleared() bool { return m.clearedaction_summary } -// RemoveActionSummaryIDs removes the "action_summary" edge to the ActionSummary entity by IDs. -func (m *MetricsMutation) RemoveActionSummaryIDs(ids ...int) { - if m.removedaction_summary == nil { - m.removedaction_summary = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_summary, ids[i]) - m.removedaction_summary[ids[i]] = struct{}{} - } -} - -// RemovedActionSummary returns the removed IDs of the "action_summary" edge to the ActionSummary entity. -func (m *MetricsMutation) RemovedActionSummaryIDs() (ids []int) { - for id := range m.removedaction_summary { - ids = append(ids, id) +// ActionSummaryID returns the "action_summary" edge ID in the mutation. +func (m *MetricsMutation) ActionSummaryID() (id int, exists bool) { + if m.action_summary != nil { + return *m.action_summary, true } return } // ActionSummaryIDs returns the "action_summary" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionSummaryID instead. 
It exists only for internal usage by the builders. func (m *MetricsMutation) ActionSummaryIDs() (ids []int) { - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + ids = append(ids, *id) } return } @@ -14619,17 +14077,11 @@ func (m *MetricsMutation) ActionSummaryIDs() (ids []int) { func (m *MetricsMutation) ResetActionSummary() { m.action_summary = nil m.clearedaction_summary = false - m.removedaction_summary = nil } -// AddMemoryMetricIDs adds the "memory_metrics" edge to the MemoryMetrics entity by ids. -func (m *MetricsMutation) AddMemoryMetricIDs(ids ...int) { - if m.memory_metrics == nil { - m.memory_metrics = make(map[int]struct{}) - } - for i := range ids { - m.memory_metrics[ids[i]] = struct{}{} - } +// SetMemoryMetricsID sets the "memory_metrics" edge to the MemoryMetrics entity by id. +func (m *MetricsMutation) SetMemoryMetricsID(id int) { + m.memory_metrics = &id } // ClearMemoryMetrics clears the "memory_metrics" edge to the MemoryMetrics entity. @@ -14642,29 +14094,20 @@ func (m *MetricsMutation) MemoryMetricsCleared() bool { return m.clearedmemory_metrics } -// RemoveMemoryMetricIDs removes the "memory_metrics" edge to the MemoryMetrics entity by IDs. -func (m *MetricsMutation) RemoveMemoryMetricIDs(ids ...int) { - if m.removedmemory_metrics == nil { - m.removedmemory_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.memory_metrics, ids[i]) - m.removedmemory_metrics[ids[i]] = struct{}{} - } -} - -// RemovedMemoryMetrics returns the removed IDs of the "memory_metrics" edge to the MemoryMetrics entity. -func (m *MetricsMutation) RemovedMemoryMetricsIDs() (ids []int) { - for id := range m.removedmemory_metrics { - ids = append(ids, id) +// MemoryMetricsID returns the "memory_metrics" edge ID in the mutation. +func (m *MetricsMutation) MemoryMetricsID() (id int, exists bool) { + if m.memory_metrics != nil { + return *m.memory_metrics, true } return } // MemoryMetricsIDs returns the "memory_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MemoryMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) MemoryMetricsIDs() (ids []int) { - for id := range m.memory_metrics { - ids = append(ids, id) + if id := m.memory_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14673,17 +14116,11 @@ func (m *MetricsMutation) MemoryMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetMemoryMetrics() { m.memory_metrics = nil m.clearedmemory_metrics = false - m.removedmemory_metrics = nil } -// AddTargetMetricIDs adds the "target_metrics" edge to the TargetMetrics entity by ids. -func (m *MetricsMutation) AddTargetMetricIDs(ids ...int) { - if m.target_metrics == nil { - m.target_metrics = make(map[int]struct{}) - } - for i := range ids { - m.target_metrics[ids[i]] = struct{}{} - } +// SetTargetMetricsID sets the "target_metrics" edge to the TargetMetrics entity by id. +func (m *MetricsMutation) SetTargetMetricsID(id int) { + m.target_metrics = &id } // ClearTargetMetrics clears the "target_metrics" edge to the TargetMetrics entity. @@ -14696,29 +14133,20 @@ func (m *MetricsMutation) TargetMetricsCleared() bool { return m.clearedtarget_metrics } -// RemoveTargetMetricIDs removes the "target_metrics" edge to the TargetMetrics entity by IDs. 
-func (m *MetricsMutation) RemoveTargetMetricIDs(ids ...int) { - if m.removedtarget_metrics == nil { - m.removedtarget_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.target_metrics, ids[i]) - m.removedtarget_metrics[ids[i]] = struct{}{} - } -} - -// RemovedTargetMetrics returns the removed IDs of the "target_metrics" edge to the TargetMetrics entity. -func (m *MetricsMutation) RemovedTargetMetricsIDs() (ids []int) { - for id := range m.removedtarget_metrics { - ids = append(ids, id) +// TargetMetricsID returns the "target_metrics" edge ID in the mutation. +func (m *MetricsMutation) TargetMetricsID() (id int, exists bool) { + if m.target_metrics != nil { + return *m.target_metrics, true } return } // TargetMetricsIDs returns the "target_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TargetMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) TargetMetricsIDs() (ids []int) { - for id := range m.target_metrics { - ids = append(ids, id) + if id := m.target_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14727,17 +14155,11 @@ func (m *MetricsMutation) TargetMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetTargetMetrics() { m.target_metrics = nil m.clearedtarget_metrics = false - m.removedtarget_metrics = nil } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by ids. -func (m *MetricsMutation) AddPackageMetricIDs(ids ...int) { - if m.package_metrics == nil { - m.package_metrics = make(map[int]struct{}) - } - for i := range ids { - m.package_metrics[ids[i]] = struct{}{} - } +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by id. +func (m *MetricsMutation) SetPackageMetricsID(id int) { + m.package_metrics = &id } // ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. @@ -14750,29 +14172,20 @@ func (m *MetricsMutation) PackageMetricsCleared() bool { return m.clearedpackage_metrics } -// RemovePackageMetricIDs removes the "package_metrics" edge to the PackageMetrics entity by IDs. -func (m *MetricsMutation) RemovePackageMetricIDs(ids ...int) { - if m.removedpackage_metrics == nil { - m.removedpackage_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.package_metrics, ids[i]) - m.removedpackage_metrics[ids[i]] = struct{}{} - } -} - -// RemovedPackageMetrics returns the removed IDs of the "package_metrics" edge to the PackageMetrics entity. -func (m *MetricsMutation) RemovedPackageMetricsIDs() (ids []int) { - for id := range m.removedpackage_metrics { - ids = append(ids, id) +// PackageMetricsID returns the "package_metrics" edge ID in the mutation. +func (m *MetricsMutation) PackageMetricsID() (id int, exists bool) { + if m.package_metrics != nil { + return *m.package_metrics, true } return } // PackageMetricsIDs returns the "package_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// PackageMetricsID instead. It exists only for internal usage by the builders. 
func (m *MetricsMutation) PackageMetricsIDs() (ids []int) { - for id := range m.package_metrics { - ids = append(ids, id) + if id := m.package_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14781,17 +14194,11 @@ func (m *MetricsMutation) PackageMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetPackageMetrics() { m.package_metrics = nil m.clearedpackage_metrics = false - m.removedpackage_metrics = nil } -// AddTimingMetricIDs adds the "timing_metrics" edge to the TimingMetrics entity by ids. -func (m *MetricsMutation) AddTimingMetricIDs(ids ...int) { - if m.timing_metrics == nil { - m.timing_metrics = make(map[int]struct{}) - } - for i := range ids { - m.timing_metrics[ids[i]] = struct{}{} - } +// SetTimingMetricsID sets the "timing_metrics" edge to the TimingMetrics entity by id. +func (m *MetricsMutation) SetTimingMetricsID(id int) { + m.timing_metrics = &id } // ClearTimingMetrics clears the "timing_metrics" edge to the TimingMetrics entity. @@ -14804,29 +14211,20 @@ func (m *MetricsMutation) TimingMetricsCleared() bool { return m.clearedtiming_metrics } -// RemoveTimingMetricIDs removes the "timing_metrics" edge to the TimingMetrics entity by IDs. -func (m *MetricsMutation) RemoveTimingMetricIDs(ids ...int) { - if m.removedtiming_metrics == nil { - m.removedtiming_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.timing_metrics, ids[i]) - m.removedtiming_metrics[ids[i]] = struct{}{} - } -} - -// RemovedTimingMetrics returns the removed IDs of the "timing_metrics" edge to the TimingMetrics entity. -func (m *MetricsMutation) RemovedTimingMetricsIDs() (ids []int) { - for id := range m.removedtiming_metrics { - ids = append(ids, id) +// TimingMetricsID returns the "timing_metrics" edge ID in the mutation. +func (m *MetricsMutation) TimingMetricsID() (id int, exists bool) { + if m.timing_metrics != nil { + return *m.timing_metrics, true } return } // TimingMetricsIDs returns the "timing_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TimingMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) TimingMetricsIDs() (ids []int) { - for id := range m.timing_metrics { - ids = append(ids, id) + if id := m.timing_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14835,17 +14233,11 @@ func (m *MetricsMutation) TimingMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetTimingMetrics() { m.timing_metrics = nil m.clearedtiming_metrics = false - m.removedtiming_metrics = nil } -// AddCumulativeMetricIDs adds the "cumulative_metrics" edge to the CumulativeMetrics entity by ids. -func (m *MetricsMutation) AddCumulativeMetricIDs(ids ...int) { - if m.cumulative_metrics == nil { - m.cumulative_metrics = make(map[int]struct{}) - } - for i := range ids { - m.cumulative_metrics[ids[i]] = struct{}{} - } +// SetCumulativeMetricsID sets the "cumulative_metrics" edge to the CumulativeMetrics entity by id. +func (m *MetricsMutation) SetCumulativeMetricsID(id int) { + m.cumulative_metrics = &id } // ClearCumulativeMetrics clears the "cumulative_metrics" edge to the CumulativeMetrics entity. @@ -14858,29 +14250,20 @@ func (m *MetricsMutation) CumulativeMetricsCleared() bool { return m.clearedcumulative_metrics } -// RemoveCumulativeMetricIDs removes the "cumulative_metrics" edge to the CumulativeMetrics entity by IDs. 
-func (m *MetricsMutation) RemoveCumulativeMetricIDs(ids ...int) { - if m.removedcumulative_metrics == nil { - m.removedcumulative_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.cumulative_metrics, ids[i]) - m.removedcumulative_metrics[ids[i]] = struct{}{} - } -} - -// RemovedCumulativeMetrics returns the removed IDs of the "cumulative_metrics" edge to the CumulativeMetrics entity. -func (m *MetricsMutation) RemovedCumulativeMetricsIDs() (ids []int) { - for id := range m.removedcumulative_metrics { - ids = append(ids, id) +// CumulativeMetricsID returns the "cumulative_metrics" edge ID in the mutation. +func (m *MetricsMutation) CumulativeMetricsID() (id int, exists bool) { + if m.cumulative_metrics != nil { + return *m.cumulative_metrics, true } return } // CumulativeMetricsIDs returns the "cumulative_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// CumulativeMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) CumulativeMetricsIDs() (ids []int) { - for id := range m.cumulative_metrics { - ids = append(ids, id) + if id := m.cumulative_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14889,17 +14272,11 @@ func (m *MetricsMutation) CumulativeMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetCumulativeMetrics() { m.cumulative_metrics = nil m.clearedcumulative_metrics = false - m.removedcumulative_metrics = nil } -// AddArtifactMetricIDs adds the "artifact_metrics" edge to the ArtifactMetrics entity by ids. -func (m *MetricsMutation) AddArtifactMetricIDs(ids ...int) { - if m.artifact_metrics == nil { - m.artifact_metrics = make(map[int]struct{}) - } - for i := range ids { - m.artifact_metrics[ids[i]] = struct{}{} - } +// SetArtifactMetricsID sets the "artifact_metrics" edge to the ArtifactMetrics entity by id. +func (m *MetricsMutation) SetArtifactMetricsID(id int) { + m.artifact_metrics = &id } // ClearArtifactMetrics clears the "artifact_metrics" edge to the ArtifactMetrics entity. @@ -14912,29 +14289,20 @@ func (m *MetricsMutation) ArtifactMetricsCleared() bool { return m.clearedartifact_metrics } -// RemoveArtifactMetricIDs removes the "artifact_metrics" edge to the ArtifactMetrics entity by IDs. -func (m *MetricsMutation) RemoveArtifactMetricIDs(ids ...int) { - if m.removedartifact_metrics == nil { - m.removedartifact_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.artifact_metrics, ids[i]) - m.removedartifact_metrics[ids[i]] = struct{}{} - } -} - -// RemovedArtifactMetrics returns the removed IDs of the "artifact_metrics" edge to the ArtifactMetrics entity. -func (m *MetricsMutation) RemovedArtifactMetricsIDs() (ids []int) { - for id := range m.removedartifact_metrics { - ids = append(ids, id) +// ArtifactMetricsID returns the "artifact_metrics" edge ID in the mutation. +func (m *MetricsMutation) ArtifactMetricsID() (id int, exists bool) { + if m.artifact_metrics != nil { + return *m.artifact_metrics, true } return } // ArtifactMetricsIDs returns the "artifact_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ArtifactMetricsID instead. It exists only for internal usage by the builders. 
func (m *MetricsMutation) ArtifactMetricsIDs() (ids []int) { - for id := range m.artifact_metrics { - ids = append(ids, id) + if id := m.artifact_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14943,17 +14311,11 @@ func (m *MetricsMutation) ArtifactMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetArtifactMetrics() { m.artifact_metrics = nil m.clearedartifact_metrics = false - m.removedartifact_metrics = nil } -// AddNetworkMetricIDs adds the "network_metrics" edge to the NetworkMetrics entity by ids. -func (m *MetricsMutation) AddNetworkMetricIDs(ids ...int) { - if m.network_metrics == nil { - m.network_metrics = make(map[int]struct{}) - } - for i := range ids { - m.network_metrics[ids[i]] = struct{}{} - } +// SetNetworkMetricsID sets the "network_metrics" edge to the NetworkMetrics entity by id. +func (m *MetricsMutation) SetNetworkMetricsID(id int) { + m.network_metrics = &id } // ClearNetworkMetrics clears the "network_metrics" edge to the NetworkMetrics entity. @@ -14966,29 +14328,20 @@ func (m *MetricsMutation) NetworkMetricsCleared() bool { return m.clearednetwork_metrics } -// RemoveNetworkMetricIDs removes the "network_metrics" edge to the NetworkMetrics entity by IDs. -func (m *MetricsMutation) RemoveNetworkMetricIDs(ids ...int) { - if m.removednetwork_metrics == nil { - m.removednetwork_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.network_metrics, ids[i]) - m.removednetwork_metrics[ids[i]] = struct{}{} - } -} - -// RemovedNetworkMetrics returns the removed IDs of the "network_metrics" edge to the NetworkMetrics entity. -func (m *MetricsMutation) RemovedNetworkMetricsIDs() (ids []int) { - for id := range m.removednetwork_metrics { - ids = append(ids, id) +// NetworkMetricsID returns the "network_metrics" edge ID in the mutation. +func (m *MetricsMutation) NetworkMetricsID() (id int, exists bool) { + if m.network_metrics != nil { + return *m.network_metrics, true } return } // NetworkMetricsIDs returns the "network_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// NetworkMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) NetworkMetricsIDs() (ids []int) { - for id := range m.network_metrics { - ids = append(ids, id) + if id := m.network_metrics; id != nil { + ids = append(ids, *id) } return } @@ -14997,17 +14350,11 @@ func (m *MetricsMutation) NetworkMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetNetworkMetrics() { m.network_metrics = nil m.clearednetwork_metrics = false - m.removednetwork_metrics = nil } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ids. -func (m *MetricsMutation) AddDynamicExecutionMetricIDs(ids ...int) { - if m.dynamic_execution_metrics == nil { - m.dynamic_execution_metrics = make(map[int]struct{}) - } - for i := range ids { - m.dynamic_execution_metrics[ids[i]] = struct{}{} - } +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by id. +func (m *MetricsMutation) SetDynamicExecutionMetricsID(id int) { + m.dynamic_execution_metrics = &id } // ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. 
@@ -15020,29 +14367,20 @@ func (m *MetricsMutation) DynamicExecutionMetricsCleared() bool { return m.cleareddynamic_execution_metrics } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (m *MetricsMutation) RemoveDynamicExecutionMetricIDs(ids ...int) { - if m.removeddynamic_execution_metrics == nil { - m.removeddynamic_execution_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.dynamic_execution_metrics, ids[i]) - m.removeddynamic_execution_metrics[ids[i]] = struct{}{} - } -} - -// RemovedDynamicExecutionMetrics returns the removed IDs of the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. -func (m *MetricsMutation) RemovedDynamicExecutionMetricsIDs() (ids []int) { - for id := range m.removeddynamic_execution_metrics { - ids = append(ids, id) +// DynamicExecutionMetricsID returns the "dynamic_execution_metrics" edge ID in the mutation. +func (m *MetricsMutation) DynamicExecutionMetricsID() (id int, exists bool) { + if m.dynamic_execution_metrics != nil { + return *m.dynamic_execution_metrics, true } return } // DynamicExecutionMetricsIDs returns the "dynamic_execution_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// DynamicExecutionMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) DynamicExecutionMetricsIDs() (ids []int) { - for id := range m.dynamic_execution_metrics { - ids = append(ids, id) + if id := m.dynamic_execution_metrics; id != nil { + ids = append(ids, *id) } return } @@ -15051,17 +14389,11 @@ func (m *MetricsMutation) DynamicExecutionMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetDynamicExecutionMetrics() { m.dynamic_execution_metrics = nil m.cleareddynamic_execution_metrics = false - m.removeddynamic_execution_metrics = nil } -// AddBuildGraphMetricIDs adds the "build_graph_metrics" edge to the BuildGraphMetrics entity by ids. -func (m *MetricsMutation) AddBuildGraphMetricIDs(ids ...int) { - if m.build_graph_metrics == nil { - m.build_graph_metrics = make(map[int]struct{}) - } - for i := range ids { - m.build_graph_metrics[ids[i]] = struct{}{} - } +// SetBuildGraphMetricsID sets the "build_graph_metrics" edge to the BuildGraphMetrics entity by id. +func (m *MetricsMutation) SetBuildGraphMetricsID(id int) { + m.build_graph_metrics = &id } // ClearBuildGraphMetrics clears the "build_graph_metrics" edge to the BuildGraphMetrics entity. @@ -15074,29 +14406,20 @@ func (m *MetricsMutation) BuildGraphMetricsCleared() bool { return m.clearedbuild_graph_metrics } -// RemoveBuildGraphMetricIDs removes the "build_graph_metrics" edge to the BuildGraphMetrics entity by IDs. -func (m *MetricsMutation) RemoveBuildGraphMetricIDs(ids ...int) { - if m.removedbuild_graph_metrics == nil { - m.removedbuild_graph_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.build_graph_metrics, ids[i]) - m.removedbuild_graph_metrics[ids[i]] = struct{}{} - } -} - -// RemovedBuildGraphMetrics returns the removed IDs of the "build_graph_metrics" edge to the BuildGraphMetrics entity. -func (m *MetricsMutation) RemovedBuildGraphMetricsIDs() (ids []int) { - for id := range m.removedbuild_graph_metrics { - ids = append(ids, id) +// BuildGraphMetricsID returns the "build_graph_metrics" edge ID in the mutation. 
+func (m *MetricsMutation) BuildGraphMetricsID() (id int, exists bool) { + if m.build_graph_metrics != nil { + return *m.build_graph_metrics, true } return } // BuildGraphMetricsIDs returns the "build_graph_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// BuildGraphMetricsID instead. It exists only for internal usage by the builders. func (m *MetricsMutation) BuildGraphMetricsIDs() (ids []int) { - for id := range m.build_graph_metrics { - ids = append(ids, id) + if id := m.build_graph_metrics; id != nil { + ids = append(ids, *id) } return } @@ -15105,7 +14428,6 @@ func (m *MetricsMutation) BuildGraphMetricsIDs() (ids []int) { func (m *MetricsMutation) ResetBuildGraphMetrics() { m.build_graph_metrics = nil m.clearedbuild_graph_metrics = false - m.removedbuild_graph_metrics = nil } // Where appends a list predicates to the MetricsMutation builder. @@ -15262,65 +14584,45 @@ func (m *MetricsMutation) AddedIDs(name string) []ent.Value { return []ent.Value{*id} } case metrics.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.action_summary)) - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeMemoryMetrics: - ids := make([]ent.Value, 0, len(m.memory_metrics)) - for id := range m.memory_metrics { - ids = append(ids, id) + if id := m.memory_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeTargetMetrics: - ids := make([]ent.Value, 0, len(m.target_metrics)) - for id := range m.target_metrics { - ids = append(ids, id) + if id := m.target_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgePackageMetrics: - ids := make([]ent.Value, 0, len(m.package_metrics)) - for id := range m.package_metrics { - ids = append(ids, id) + if id := m.package_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeTimingMetrics: - ids := make([]ent.Value, 0, len(m.timing_metrics)) - for id := range m.timing_metrics { - ids = append(ids, id) + if id := m.timing_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeCumulativeMetrics: - ids := make([]ent.Value, 0, len(m.cumulative_metrics)) - for id := range m.cumulative_metrics { - ids = append(ids, id) + if id := m.cumulative_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeArtifactMetrics: - ids := make([]ent.Value, 0, len(m.artifact_metrics)) - for id := range m.artifact_metrics { - ids = append(ids, id) + if id := m.artifact_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeNetworkMetrics: - ids := make([]ent.Value, 0, len(m.network_metrics)) - for id := range m.network_metrics { - ids = append(ids, id) + if id := m.network_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeDynamicExecutionMetrics: - ids := make([]ent.Value, 0, len(m.dynamic_execution_metrics)) - for id := range m.dynamic_execution_metrics { - ids = append(ids, id) + if id := m.dynamic_execution_metrics; id != nil { + return []ent.Value{*id} } - return ids case metrics.EdgeBuildGraphMetrics: - ids := make([]ent.Value, 0, len(m.build_graph_metrics)) - for id := range m.build_graph_metrics { - ids = append(ids, id) + if id := m.build_graph_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -15328,104 +14630,12 @@ func (m *MetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all 
edge names that were removed in this mutation. func (m *MetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 11) - if m.removedaction_summary != nil { - edges = append(edges, metrics.EdgeActionSummary) - } - if m.removedmemory_metrics != nil { - edges = append(edges, metrics.EdgeMemoryMetrics) - } - if m.removedtarget_metrics != nil { - edges = append(edges, metrics.EdgeTargetMetrics) - } - if m.removedpackage_metrics != nil { - edges = append(edges, metrics.EdgePackageMetrics) - } - if m.removedtiming_metrics != nil { - edges = append(edges, metrics.EdgeTimingMetrics) - } - if m.removedcumulative_metrics != nil { - edges = append(edges, metrics.EdgeCumulativeMetrics) - } - if m.removedartifact_metrics != nil { - edges = append(edges, metrics.EdgeArtifactMetrics) - } - if m.removednetwork_metrics != nil { - edges = append(edges, metrics.EdgeNetworkMetrics) - } - if m.removeddynamic_execution_metrics != nil { - edges = append(edges, metrics.EdgeDynamicExecutionMetrics) - } - if m.removedbuild_graph_metrics != nil { - edges = append(edges, metrics.EdgeBuildGraphMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *MetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case metrics.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.removedaction_summary)) - for id := range m.removedaction_summary { - ids = append(ids, id) - } - return ids - case metrics.EdgeMemoryMetrics: - ids := make([]ent.Value, 0, len(m.removedmemory_metrics)) - for id := range m.removedmemory_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeTargetMetrics: - ids := make([]ent.Value, 0, len(m.removedtarget_metrics)) - for id := range m.removedtarget_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgePackageMetrics: - ids := make([]ent.Value, 0, len(m.removedpackage_metrics)) - for id := range m.removedpackage_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeTimingMetrics: - ids := make([]ent.Value, 0, len(m.removedtiming_metrics)) - for id := range m.removedtiming_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeCumulativeMetrics: - ids := make([]ent.Value, 0, len(m.removedcumulative_metrics)) - for id := range m.removedcumulative_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeArtifactMetrics: - ids := make([]ent.Value, 0, len(m.removedartifact_metrics)) - for id := range m.removedartifact_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeNetworkMetrics: - ids := make([]ent.Value, 0, len(m.removednetwork_metrics)) - for id := range m.removednetwork_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeDynamicExecutionMetrics: - ids := make([]ent.Value, 0, len(m.removeddynamic_execution_metrics)) - for id := range m.removeddynamic_execution_metrics { - ids = append(ids, id) - } - return ids - case metrics.EdgeBuildGraphMetrics: - ids := make([]ent.Value, 0, len(m.removedbuild_graph_metrics)) - for id := range m.removedbuild_graph_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -15505,6 +14715,36 @@ func (m *MetricsMutation) ClearEdge(name string) error { case metrics.EdgeBazelInvocation: m.ClearBazelInvocation() return nil + case metrics.EdgeActionSummary: + m.ClearActionSummary() + return nil + case metrics.EdgeMemoryMetrics: + m.ClearMemoryMetrics() + return nil + case metrics.EdgeTargetMetrics: + m.ClearTargetMetrics() + 
return nil + case metrics.EdgePackageMetrics: + m.ClearPackageMetrics() + return nil + case metrics.EdgeTimingMetrics: + m.ClearTimingMetrics() + return nil + case metrics.EdgeCumulativeMetrics: + m.ClearCumulativeMetrics() + return nil + case metrics.EdgeArtifactMetrics: + m.ClearArtifactMetrics() + return nil + case metrics.EdgeNetworkMetrics: + m.ClearNetworkMetrics() + return nil + case metrics.EdgeDynamicExecutionMetrics: + m.ClearDynamicExecutionMetrics() + return nil + case metrics.EdgeBuildGraphMetrics: + m.ClearBuildGraphMetrics() + return nil } return fmt.Errorf("unknown Metrics unique edge %s", name) } @@ -15560,8 +14800,7 @@ type MissDetailMutation struct { count *int32 addcount *int32 clearedFields map[string]struct{} - action_cache_statistics map[int]struct{} - removedaction_cache_statistics map[int]struct{} + action_cache_statistics *int clearedaction_cache_statistics bool done bool oldValue func(context.Context) (*MissDetail, error) @@ -15785,14 +15024,9 @@ func (m *MissDetailMutation) ResetCount() { delete(m.clearedFields, missdetail.FieldCount) } -// AddActionCacheStatisticIDs adds the "action_cache_statistics" edge to the ActionCacheStatistics entity by ids. -func (m *MissDetailMutation) AddActionCacheStatisticIDs(ids ...int) { - if m.action_cache_statistics == nil { - m.action_cache_statistics = make(map[int]struct{}) - } - for i := range ids { - m.action_cache_statistics[ids[i]] = struct{}{} - } +// SetActionCacheStatisticsID sets the "action_cache_statistics" edge to the ActionCacheStatistics entity by id. +func (m *MissDetailMutation) SetActionCacheStatisticsID(id int) { + m.action_cache_statistics = &id } // ClearActionCacheStatistics clears the "action_cache_statistics" edge to the ActionCacheStatistics entity. @@ -15805,29 +15039,20 @@ func (m *MissDetailMutation) ActionCacheStatisticsCleared() bool { return m.clearedaction_cache_statistics } -// RemoveActionCacheStatisticIDs removes the "action_cache_statistics" edge to the ActionCacheStatistics entity by IDs. -func (m *MissDetailMutation) RemoveActionCacheStatisticIDs(ids ...int) { - if m.removedaction_cache_statistics == nil { - m.removedaction_cache_statistics = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_cache_statistics, ids[i]) - m.removedaction_cache_statistics[ids[i]] = struct{}{} - } -} - -// RemovedActionCacheStatistics returns the removed IDs of the "action_cache_statistics" edge to the ActionCacheStatistics entity. -func (m *MissDetailMutation) RemovedActionCacheStatisticsIDs() (ids []int) { - for id := range m.removedaction_cache_statistics { - ids = append(ids, id) +// ActionCacheStatisticsID returns the "action_cache_statistics" edge ID in the mutation. +func (m *MissDetailMutation) ActionCacheStatisticsID() (id int, exists bool) { + if m.action_cache_statistics != nil { + return *m.action_cache_statistics, true } return } // ActionCacheStatisticsIDs returns the "action_cache_statistics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionCacheStatisticsID instead. It exists only for internal usage by the builders. 
func (m *MissDetailMutation) ActionCacheStatisticsIDs() (ids []int) { - for id := range m.action_cache_statistics { - ids = append(ids, id) + if id := m.action_cache_statistics; id != nil { + ids = append(ids, *id) } return } @@ -15836,7 +15061,6 @@ func (m *MissDetailMutation) ActionCacheStatisticsIDs() (ids []int) { func (m *MissDetailMutation) ResetActionCacheStatistics() { m.action_cache_statistics = nil m.clearedaction_cache_statistics = false - m.removedaction_cache_statistics = nil } // Where appends a list predicates to the MissDetailMutation builder. @@ -16031,11 +15255,9 @@ func (m *MissDetailMutation) AddedEdges() []string { func (m *MissDetailMutation) AddedIDs(name string) []ent.Value { switch name { case missdetail.EdgeActionCacheStatistics: - ids := make([]ent.Value, 0, len(m.action_cache_statistics)) - for id := range m.action_cache_statistics { - ids = append(ids, id) + if id := m.action_cache_statistics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -16043,23 +15265,12 @@ func (m *MissDetailMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *MissDetailMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedaction_cache_statistics != nil { - edges = append(edges, missdetail.EdgeActionCacheStatistics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *MissDetailMutation) RemovedIDs(name string) []ent.Value { - switch name { - case missdetail.EdgeActionCacheStatistics: - ids := make([]ent.Value, 0, len(m.removedaction_cache_statistics)) - for id := range m.removedaction_cache_statistics { - ids = append(ids, id) - } - return ids - } return nil } @@ -16086,6 +15297,9 @@ func (m *MissDetailMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *MissDetailMutation) ClearEdge(name string) error { switch name { + case missdetail.EdgeActionCacheStatistics: + m.ClearActionCacheStatistics() + return nil } return fmt.Errorf("unknown MissDetail unique edge %s", name) } @@ -16108,8 +15322,7 @@ type NamedSetOfFilesMutation struct { typ string id *int clearedFields map[string]struct{} - output_group map[int]struct{} - removedoutput_group map[int]struct{} + output_group *int clearedoutput_group bool files map[int]struct{} removedfiles map[int]struct{} @@ -16219,14 +15432,9 @@ func (m *NamedSetOfFilesMutation) IDs(ctx context.Context) ([]int, error) { } } -// AddOutputGroupIDs adds the "output_group" edge to the OutputGroup entity by ids. -func (m *NamedSetOfFilesMutation) AddOutputGroupIDs(ids ...int) { - if m.output_group == nil { - m.output_group = make(map[int]struct{}) - } - for i := range ids { - m.output_group[ids[i]] = struct{}{} - } +// SetOutputGroupID sets the "output_group" edge to the OutputGroup entity by id. +func (m *NamedSetOfFilesMutation) SetOutputGroupID(id int) { + m.output_group = &id } // ClearOutputGroup clears the "output_group" edge to the OutputGroup entity. @@ -16239,29 +15447,20 @@ func (m *NamedSetOfFilesMutation) OutputGroupCleared() bool { return m.clearedoutput_group } -// RemoveOutputGroupIDs removes the "output_group" edge to the OutputGroup entity by IDs. 
-func (m *NamedSetOfFilesMutation) RemoveOutputGroupIDs(ids ...int) { - if m.removedoutput_group == nil { - m.removedoutput_group = make(map[int]struct{}) - } - for i := range ids { - delete(m.output_group, ids[i]) - m.removedoutput_group[ids[i]] = struct{}{} - } -} - -// RemovedOutputGroup returns the removed IDs of the "output_group" edge to the OutputGroup entity. -func (m *NamedSetOfFilesMutation) RemovedOutputGroupIDs() (ids []int) { - for id := range m.removedoutput_group { - ids = append(ids, id) +// OutputGroupID returns the "output_group" edge ID in the mutation. +func (m *NamedSetOfFilesMutation) OutputGroupID() (id int, exists bool) { + if m.output_group != nil { + return *m.output_group, true } return } // OutputGroupIDs returns the "output_group" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// OutputGroupID instead. It exists only for internal usage by the builders. func (m *NamedSetOfFilesMutation) OutputGroupIDs() (ids []int) { - for id := range m.output_group { - ids = append(ids, id) + if id := m.output_group; id != nil { + ids = append(ids, *id) } return } @@ -16270,7 +15469,6 @@ func (m *NamedSetOfFilesMutation) OutputGroupIDs() (ids []int) { func (m *NamedSetOfFilesMutation) ResetOutputGroup() { m.output_group = nil m.clearedoutput_group = false - m.removedoutput_group = nil } // AddFileIDs adds the "files" edge to the TestFile entity by ids. @@ -16492,11 +15690,9 @@ func (m *NamedSetOfFilesMutation) AddedEdges() []string { func (m *NamedSetOfFilesMutation) AddedIDs(name string) []ent.Value { switch name { case namedsetoffiles.EdgeOutputGroup: - ids := make([]ent.Value, 0, len(m.output_group)) - for id := range m.output_group { - ids = append(ids, id) + if id := m.output_group; id != nil { + return []ent.Value{*id} } - return ids case namedsetoffiles.EdgeFiles: ids := make([]ent.Value, 0, len(m.files)) for id := range m.files { @@ -16514,9 +15710,6 @@ func (m *NamedSetOfFilesMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *NamedSetOfFilesMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedoutput_group != nil { - edges = append(edges, namedsetoffiles.EdgeOutputGroup) - } if m.removedfiles != nil { edges = append(edges, namedsetoffiles.EdgeFiles) } @@ -16527,12 +15720,6 @@ func (m *NamedSetOfFilesMutation) RemovedEdges() []string { // the given name in this mutation. func (m *NamedSetOfFilesMutation) RemovedIDs(name string) []ent.Value { switch name { - case namedsetoffiles.EdgeOutputGroup: - ids := make([]ent.Value, 0, len(m.removedoutput_group)) - for id := range m.removedoutput_group { - ids = append(ids, id) - } - return ids case namedsetoffiles.EdgeFiles: ids := make([]ent.Value, 0, len(m.removedfiles)) for id := range m.removedfiles { @@ -16576,6 +15763,9 @@ func (m *NamedSetOfFilesMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *NamedSetOfFilesMutation) ClearEdge(name string) error { switch name { + case namedsetoffiles.EdgeOutputGroup: + m.ClearOutputGroup() + return nil case namedsetoffiles.EdgeFileSets: m.ClearFileSets() return nil @@ -16607,11 +15797,9 @@ type NetworkMetricsMutation struct { typ string id *int clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool - system_network_stats map[int]struct{} - removedsystem_network_stats map[int]struct{} + system_network_stats *int clearedsystem_network_stats bool done bool oldValue func(context.Context) (*NetworkMetrics, error) @@ -16716,14 +15904,9 @@ func (m *NetworkMetricsMutation) IDs(ctx context.Context) ([]int, error) { } } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *NetworkMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *NetworkMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -16736,29 +15919,20 @@ func (m *NetworkMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *NetworkMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *NetworkMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *NetworkMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *NetworkMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -16767,17 +15941,11 @@ func (m *NetworkMetricsMutation) MetricsIDs() (ids []int) { func (m *NetworkMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } -// AddSystemNetworkStatIDs adds the "system_network_stats" edge to the SystemNetworkStats entity by ids. -func (m *NetworkMetricsMutation) AddSystemNetworkStatIDs(ids ...int) { - if m.system_network_stats == nil { - m.system_network_stats = make(map[int]struct{}) - } - for i := range ids { - m.system_network_stats[ids[i]] = struct{}{} - } +// SetSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by id. +func (m *NetworkMetricsMutation) SetSystemNetworkStatsID(id int) { + m.system_network_stats = &id } // ClearSystemNetworkStats clears the "system_network_stats" edge to the SystemNetworkStats entity. 
@@ -16790,29 +15958,20 @@ func (m *NetworkMetricsMutation) SystemNetworkStatsCleared() bool { return m.clearedsystem_network_stats } -// RemoveSystemNetworkStatIDs removes the "system_network_stats" edge to the SystemNetworkStats entity by IDs. -func (m *NetworkMetricsMutation) RemoveSystemNetworkStatIDs(ids ...int) { - if m.removedsystem_network_stats == nil { - m.removedsystem_network_stats = make(map[int]struct{}) - } - for i := range ids { - delete(m.system_network_stats, ids[i]) - m.removedsystem_network_stats[ids[i]] = struct{}{} - } -} - -// RemovedSystemNetworkStats returns the removed IDs of the "system_network_stats" edge to the SystemNetworkStats entity. -func (m *NetworkMetricsMutation) RemovedSystemNetworkStatsIDs() (ids []int) { - for id := range m.removedsystem_network_stats { - ids = append(ids, id) +// SystemNetworkStatsID returns the "system_network_stats" edge ID in the mutation. +func (m *NetworkMetricsMutation) SystemNetworkStatsID() (id int, exists bool) { + if m.system_network_stats != nil { + return *m.system_network_stats, true } return } // SystemNetworkStatsIDs returns the "system_network_stats" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// SystemNetworkStatsID instead. It exists only for internal usage by the builders. func (m *NetworkMetricsMutation) SystemNetworkStatsIDs() (ids []int) { - for id := range m.system_network_stats { - ids = append(ids, id) + if id := m.system_network_stats; id != nil { + ids = append(ids, *id) } return } @@ -16821,7 +15980,6 @@ func (m *NetworkMetricsMutation) SystemNetworkStatsIDs() (ids []int) { func (m *NetworkMetricsMutation) ResetSystemNetworkStats() { m.system_network_stats = nil m.clearedsystem_network_stats = false - m.removedsystem_network_stats = nil } // Where appends a list predicates to the NetworkMetricsMutation builder. @@ -16947,17 +16105,13 @@ func (m *NetworkMetricsMutation) AddedEdges() []string { func (m *NetworkMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case networkmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case networkmetrics.EdgeSystemNetworkStats: - ids := make([]ent.Value, 0, len(m.system_network_stats)) - for id := range m.system_network_stats { - ids = append(ids, id) + if id := m.system_network_stats; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -16965,32 +16119,12 @@ func (m *NetworkMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *NetworkMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedmetrics != nil { - edges = append(edges, networkmetrics.EdgeMetrics) - } - if m.removedsystem_network_stats != nil { - edges = append(edges, networkmetrics.EdgeSystemNetworkStats) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. 
func (m *NetworkMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case networkmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - case networkmetrics.EdgeSystemNetworkStats: - ids := make([]ent.Value, 0, len(m.removedsystem_network_stats)) - for id := range m.removedsystem_network_stats { - ids = append(ids, id) - } - return ids - } return nil } @@ -17022,6 +16156,12 @@ func (m *NetworkMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *NetworkMetricsMutation) ClearEdge(name string) error { switch name { + case networkmetrics.EdgeMetrics: + m.ClearMetrics() + return nil + case networkmetrics.EdgeSystemNetworkStats: + m.ClearSystemNetworkStats() + return nil } return fmt.Errorf("unknown NetworkMetrics unique edge %s", name) } @@ -17049,8 +16189,7 @@ type OutputGroupMutation struct { name *string incomplete *bool clearedFields map[string]struct{} - target_complete map[int]struct{} - removedtarget_complete map[int]struct{} + target_complete *int clearedtarget_complete bool inline_files map[int]struct{} removedinline_files map[int]struct{} @@ -17258,14 +16397,9 @@ func (m *OutputGroupMutation) ResetIncomplete() { delete(m.clearedFields, outputgroup.FieldIncomplete) } -// AddTargetCompleteIDs adds the "target_complete" edge to the TargetComplete entity by ids. -func (m *OutputGroupMutation) AddTargetCompleteIDs(ids ...int) { - if m.target_complete == nil { - m.target_complete = make(map[int]struct{}) - } - for i := range ids { - m.target_complete[ids[i]] = struct{}{} - } +// SetTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by id. +func (m *OutputGroupMutation) SetTargetCompleteID(id int) { + m.target_complete = &id } // ClearTargetComplete clears the "target_complete" edge to the TargetComplete entity. @@ -17278,29 +16412,20 @@ func (m *OutputGroupMutation) TargetCompleteCleared() bool { return m.clearedtarget_complete } -// RemoveTargetCompleteIDs removes the "target_complete" edge to the TargetComplete entity by IDs. -func (m *OutputGroupMutation) RemoveTargetCompleteIDs(ids ...int) { - if m.removedtarget_complete == nil { - m.removedtarget_complete = make(map[int]struct{}) - } - for i := range ids { - delete(m.target_complete, ids[i]) - m.removedtarget_complete[ids[i]] = struct{}{} - } -} - -// RemovedTargetComplete returns the removed IDs of the "target_complete" edge to the TargetComplete entity. -func (m *OutputGroupMutation) RemovedTargetCompleteIDs() (ids []int) { - for id := range m.removedtarget_complete { - ids = append(ids, id) +// TargetCompleteID returns the "target_complete" edge ID in the mutation. +func (m *OutputGroupMutation) TargetCompleteID() (id int, exists bool) { + if m.target_complete != nil { + return *m.target_complete, true } return } // TargetCompleteIDs returns the "target_complete" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TargetCompleteID instead. It exists only for internal usage by the builders. 
func (m *OutputGroupMutation) TargetCompleteIDs() (ids []int) { - for id := range m.target_complete { - ids = append(ids, id) + if id := m.target_complete; id != nil { + ids = append(ids, *id) } return } @@ -17309,7 +16434,6 @@ func (m *OutputGroupMutation) TargetCompleteIDs() (ids []int) { func (m *OutputGroupMutation) ResetTargetComplete() { m.target_complete = nil m.clearedtarget_complete = false - m.removedtarget_complete = nil } // AddInlineFileIDs adds the "inline_files" edge to the TestFile entity by ids. @@ -17588,11 +16712,9 @@ func (m *OutputGroupMutation) AddedEdges() []string { func (m *OutputGroupMutation) AddedIDs(name string) []ent.Value { switch name { case outputgroup.EdgeTargetComplete: - ids := make([]ent.Value, 0, len(m.target_complete)) - for id := range m.target_complete { - ids = append(ids, id) + if id := m.target_complete; id != nil { + return []ent.Value{*id} } - return ids case outputgroup.EdgeInlineFiles: ids := make([]ent.Value, 0, len(m.inline_files)) for id := range m.inline_files { @@ -17610,9 +16732,6 @@ func (m *OutputGroupMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *OutputGroupMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedtarget_complete != nil { - edges = append(edges, outputgroup.EdgeTargetComplete) - } if m.removedinline_files != nil { edges = append(edges, outputgroup.EdgeInlineFiles) } @@ -17623,12 +16742,6 @@ func (m *OutputGroupMutation) RemovedEdges() []string { // the given name in this mutation. func (m *OutputGroupMutation) RemovedIDs(name string) []ent.Value { switch name { - case outputgroup.EdgeTargetComplete: - ids := make([]ent.Value, 0, len(m.removedtarget_complete)) - for id := range m.removedtarget_complete { - ids = append(ids, id) - } - return ids case outputgroup.EdgeInlineFiles: ids := make([]ent.Value, 0, len(m.removedinline_files)) for id := range m.removedinline_files { @@ -17672,6 +16785,9 @@ func (m *OutputGroupMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *OutputGroupMutation) ClearEdge(name string) error { switch name { + case outputgroup.EdgeTargetComplete: + m.ClearTargetComplete() + return nil case outputgroup.EdgeFileSets: m.ClearFileSets() return nil @@ -17714,8 +16830,7 @@ type PackageLoadMetricsMutation struct { package_overhead *uint64 addpackage_overhead *int64 clearedFields map[string]struct{} - package_metrics map[int]struct{} - removedpackage_metrics map[int]struct{} + package_metrics *int clearedpackage_metrics bool done bool oldValue func(context.Context) (*PackageLoadMetrics, error) @@ -18219,14 +17334,9 @@ func (m *PackageLoadMetricsMutation) ResetPackageOverhead() { delete(m.clearedFields, packageloadmetrics.FieldPackageOverhead) } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by ids. -func (m *PackageLoadMetricsMutation) AddPackageMetricIDs(ids ...int) { - if m.package_metrics == nil { - m.package_metrics = make(map[int]struct{}) - } - for i := range ids { - m.package_metrics[ids[i]] = struct{}{} - } +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by id. +func (m *PackageLoadMetricsMutation) SetPackageMetricsID(id int) { + m.package_metrics = &id } // ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. 
@@ -18239,29 +17349,20 @@ func (m *PackageLoadMetricsMutation) PackageMetricsCleared() bool { return m.clearedpackage_metrics } -// RemovePackageMetricIDs removes the "package_metrics" edge to the PackageMetrics entity by IDs. -func (m *PackageLoadMetricsMutation) RemovePackageMetricIDs(ids ...int) { - if m.removedpackage_metrics == nil { - m.removedpackage_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.package_metrics, ids[i]) - m.removedpackage_metrics[ids[i]] = struct{}{} - } -} - -// RemovedPackageMetrics returns the removed IDs of the "package_metrics" edge to the PackageMetrics entity. -func (m *PackageLoadMetricsMutation) RemovedPackageMetricsIDs() (ids []int) { - for id := range m.removedpackage_metrics { - ids = append(ids, id) +// PackageMetricsID returns the "package_metrics" edge ID in the mutation. +func (m *PackageLoadMetricsMutation) PackageMetricsID() (id int, exists bool) { + if m.package_metrics != nil { + return *m.package_metrics, true } return } // PackageMetricsIDs returns the "package_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// PackageMetricsID instead. It exists only for internal usage by the builders. func (m *PackageLoadMetricsMutation) PackageMetricsIDs() (ids []int) { - for id := range m.package_metrics { - ids = append(ids, id) + if id := m.package_metrics; id != nil { + ids = append(ids, *id) } return } @@ -18270,7 +17371,6 @@ func (m *PackageLoadMetricsMutation) PackageMetricsIDs() (ids []int) { func (m *PackageLoadMetricsMutation) ResetPackageMetrics() { m.package_metrics = nil m.clearedpackage_metrics = false - m.removedpackage_metrics = nil } // Where appends a list predicates to the PackageLoadMetricsMutation builder. @@ -18605,11 +17705,9 @@ func (m *PackageLoadMetricsMutation) AddedEdges() []string { func (m *PackageLoadMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case packageloadmetrics.EdgePackageMetrics: - ids := make([]ent.Value, 0, len(m.package_metrics)) - for id := range m.package_metrics { - ids = append(ids, id) + if id := m.package_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -18617,23 +17715,12 @@ func (m *PackageLoadMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *PackageLoadMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedpackage_metrics != nil { - edges = append(edges, packageloadmetrics.EdgePackageMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *PackageLoadMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case packageloadmetrics.EdgePackageMetrics: - ids := make([]ent.Value, 0, len(m.removedpackage_metrics)) - for id := range m.removedpackage_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -18660,6 +17747,9 @@ func (m *PackageLoadMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *PackageLoadMetricsMutation) ClearEdge(name string) error { switch name { + case packageloadmetrics.EdgePackageMetrics: + m.ClearPackageMetrics() + return nil } return fmt.Errorf("unknown PackageLoadMetrics unique edge %s", name) } @@ -18684,8 +17774,7 @@ type PackageMetricsMutation struct { packages_loaded *int64 addpackages_loaded *int64 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool package_load_metrics map[int]struct{} removedpackage_load_metrics map[int]struct{} @@ -18863,14 +17952,9 @@ func (m *PackageMetricsMutation) ResetPackagesLoaded() { delete(m.clearedFields, packagemetrics.FieldPackagesLoaded) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *PackageMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *PackageMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -18883,29 +17967,20 @@ func (m *PackageMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *PackageMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *PackageMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *PackageMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *PackageMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -18914,7 +17989,6 @@ func (m *PackageMetricsMutation) MetricsIDs() (ids []int) { func (m *PackageMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // AddPackageLoadMetricIDs adds the "package_load_metrics" edge to the PackageLoadMetrics entity by ids. @@ -19143,11 +18217,9 @@ func (m *PackageMetricsMutation) AddedEdges() []string { func (m *PackageMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case packagemetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids case packagemetrics.EdgePackageLoadMetrics: ids := make([]ent.Value, 0, len(m.package_load_metrics)) for id := range m.package_load_metrics { @@ -19161,9 +18233,6 @@ func (m *PackageMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. 
func (m *PackageMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedmetrics != nil { - edges = append(edges, packagemetrics.EdgeMetrics) - } if m.removedpackage_load_metrics != nil { edges = append(edges, packagemetrics.EdgePackageLoadMetrics) } @@ -19174,12 +18243,6 @@ func (m *PackageMetricsMutation) RemovedEdges() []string { // the given name in this mutation. func (m *PackageMetricsMutation) RemovedIDs(name string) []ent.Value { switch name { - case packagemetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids case packagemetrics.EdgePackageLoadMetrics: ids := make([]ent.Value, 0, len(m.removedpackage_load_metrics)) for id := range m.removedpackage_load_metrics { @@ -19218,6 +18281,9 @@ func (m *PackageMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *PackageMetricsMutation) ClearEdge(name string) error { switch name { + case packagemetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown PackageMetrics unique edge %s", name) } @@ -19250,8 +18316,7 @@ type RaceStatisticsMutation struct { renote_wins *int64 addrenote_wins *int64 clearedFields map[string]struct{} - dynamic_execution_metrics map[int]struct{} - removeddynamic_execution_metrics map[int]struct{} + dynamic_execution_metrics *int cleareddynamic_execution_metrics bool done bool oldValue func(context.Context) (*RaceStatistics, error) @@ -19643,14 +18708,9 @@ func (m *RaceStatisticsMutation) ResetRenoteWins() { delete(m.clearedFields, racestatistics.FieldRenoteWins) } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ids. -func (m *RaceStatisticsMutation) AddDynamicExecutionMetricIDs(ids ...int) { - if m.dynamic_execution_metrics == nil { - m.dynamic_execution_metrics = make(map[int]struct{}) - } - for i := range ids { - m.dynamic_execution_metrics[ids[i]] = struct{}{} - } +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by id. +func (m *RaceStatisticsMutation) SetDynamicExecutionMetricsID(id int) { + m.dynamic_execution_metrics = &id } // ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. @@ -19663,29 +18723,20 @@ func (m *RaceStatisticsMutation) DynamicExecutionMetricsCleared() bool { return m.cleareddynamic_execution_metrics } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (m *RaceStatisticsMutation) RemoveDynamicExecutionMetricIDs(ids ...int) { - if m.removeddynamic_execution_metrics == nil { - m.removeddynamic_execution_metrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.dynamic_execution_metrics, ids[i]) - m.removeddynamic_execution_metrics[ids[i]] = struct{}{} - } -} - -// RemovedDynamicExecutionMetrics returns the removed IDs of the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. -func (m *RaceStatisticsMutation) RemovedDynamicExecutionMetricsIDs() (ids []int) { - for id := range m.removeddynamic_execution_metrics { - ids = append(ids, id) +// DynamicExecutionMetricsID returns the "dynamic_execution_metrics" edge ID in the mutation. 
+func (m *RaceStatisticsMutation) DynamicExecutionMetricsID() (id int, exists bool) { + if m.dynamic_execution_metrics != nil { + return *m.dynamic_execution_metrics, true } return } // DynamicExecutionMetricsIDs returns the "dynamic_execution_metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// DynamicExecutionMetricsID instead. It exists only for internal usage by the builders. func (m *RaceStatisticsMutation) DynamicExecutionMetricsIDs() (ids []int) { - for id := range m.dynamic_execution_metrics { - ids = append(ids, id) + if id := m.dynamic_execution_metrics; id != nil { + ids = append(ids, *id) } return } @@ -19694,7 +18745,6 @@ func (m *RaceStatisticsMutation) DynamicExecutionMetricsIDs() (ids []int) { func (m *RaceStatisticsMutation) ResetDynamicExecutionMetrics() { m.dynamic_execution_metrics = nil m.cleareddynamic_execution_metrics = false - m.removeddynamic_execution_metrics = nil } // Where appends a list predicates to the RaceStatisticsMutation builder. @@ -19970,11 +19020,9 @@ func (m *RaceStatisticsMutation) AddedEdges() []string { func (m *RaceStatisticsMutation) AddedIDs(name string) []ent.Value { switch name { case racestatistics.EdgeDynamicExecutionMetrics: - ids := make([]ent.Value, 0, len(m.dynamic_execution_metrics)) - for id := range m.dynamic_execution_metrics { - ids = append(ids, id) + if id := m.dynamic_execution_metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -19982,23 +19030,12 @@ func (m *RaceStatisticsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *RaceStatisticsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removeddynamic_execution_metrics != nil { - edges = append(edges, racestatistics.EdgeDynamicExecutionMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *RaceStatisticsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case racestatistics.EdgeDynamicExecutionMetrics: - ids := make([]ent.Value, 0, len(m.removeddynamic_execution_metrics)) - for id := range m.removeddynamic_execution_metrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -20025,6 +19062,9 @@ func (m *RaceStatisticsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *RaceStatisticsMutation) ClearEdge(name string) error { switch name { + case racestatistics.EdgeDynamicExecutionMetrics: + m.ClearDynamicExecutionMetrics() + return nil } return fmt.Errorf("unknown RaceStatistics unique edge %s", name) } @@ -20049,8 +19089,7 @@ type ResourceUsageMutation struct { name *string value *string clearedFields map[string]struct{} - execution_info map[int]struct{} - removedexecution_info map[int]struct{} + execution_info *int clearedexecution_info bool done bool oldValue func(context.Context) (*ResourceUsage, error) @@ -20253,14 +19292,9 @@ func (m *ResourceUsageMutation) ResetValue() { delete(m.clearedFields, resourceusage.FieldValue) } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by ids. -func (m *ResourceUsageMutation) AddExecutionInfoIDs(ids ...int) { - if m.execution_info == nil { - m.execution_info = make(map[int]struct{}) - } - for i := range ids { - m.execution_info[ids[i]] = struct{}{} - } +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by id. 
+func (m *ResourceUsageMutation) SetExecutionInfoID(id int) { + m.execution_info = &id } // ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. @@ -20273,29 +19307,20 @@ func (m *ResourceUsageMutation) ExecutionInfoCleared() bool { return m.clearedexecution_info } -// RemoveExecutionInfoIDs removes the "execution_info" edge to the ExectionInfo entity by IDs. -func (m *ResourceUsageMutation) RemoveExecutionInfoIDs(ids ...int) { - if m.removedexecution_info == nil { - m.removedexecution_info = make(map[int]struct{}) - } - for i := range ids { - delete(m.execution_info, ids[i]) - m.removedexecution_info[ids[i]] = struct{}{} - } -} - -// RemovedExecutionInfo returns the removed IDs of the "execution_info" edge to the ExectionInfo entity. -func (m *ResourceUsageMutation) RemovedExecutionInfoIDs() (ids []int) { - for id := range m.removedexecution_info { - ids = append(ids, id) +// ExecutionInfoID returns the "execution_info" edge ID in the mutation. +func (m *ResourceUsageMutation) ExecutionInfoID() (id int, exists bool) { + if m.execution_info != nil { + return *m.execution_info, true } return } // ExecutionInfoIDs returns the "execution_info" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ExecutionInfoID instead. It exists only for internal usage by the builders. func (m *ResourceUsageMutation) ExecutionInfoIDs() (ids []int) { - for id := range m.execution_info { - ids = append(ids, id) + if id := m.execution_info; id != nil { + ids = append(ids, *id) } return } @@ -20304,7 +19329,6 @@ func (m *ResourceUsageMutation) ExecutionInfoIDs() (ids []int) { func (m *ResourceUsageMutation) ResetExecutionInfo() { m.execution_info = nil m.clearedexecution_info = false - m.removedexecution_info = nil } // Where appends a list predicates to the ResourceUsageMutation builder. @@ -20484,11 +19508,9 @@ func (m *ResourceUsageMutation) AddedEdges() []string { func (m *ResourceUsageMutation) AddedIDs(name string) []ent.Value { switch name { case resourceusage.EdgeExecutionInfo: - ids := make([]ent.Value, 0, len(m.execution_info)) - for id := range m.execution_info { - ids = append(ids, id) + if id := m.execution_info; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -20496,23 +19518,12 @@ func (m *ResourceUsageMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *ResourceUsageMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedexecution_info != nil { - edges = append(edges, resourceusage.EdgeExecutionInfo) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *ResourceUsageMutation) RemovedIDs(name string) []ent.Value { - switch name { - case resourceusage.EdgeExecutionInfo: - ids := make([]ent.Value, 0, len(m.removedexecution_info)) - for id := range m.removedexecution_info { - ids = append(ids, id) - } - return ids - } return nil } @@ -20539,6 +19550,9 @@ func (m *ResourceUsageMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *ResourceUsageMutation) ClearEdge(name string) error { switch name { + case resourceusage.EdgeExecutionInfo: + m.ClearExecutionInfo() + return nil } return fmt.Errorf("unknown ResourceUsage unique edge %s", name) } @@ -20565,8 +19579,7 @@ type RunnerCountMutation struct { actions_executed *int64 addactions_executed *int64 clearedFields map[string]struct{} - action_summary map[int]struct{} - removedaction_summary map[int]struct{} + action_summary *int clearedaction_summary bool done bool oldValue func(context.Context) (*RunnerCount, error) @@ -20839,14 +19852,9 @@ func (m *RunnerCountMutation) ResetActionsExecuted() { delete(m.clearedFields, runnercount.FieldActionsExecuted) } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by ids. -func (m *RunnerCountMutation) AddActionSummaryIDs(ids ...int) { - if m.action_summary == nil { - m.action_summary = make(map[int]struct{}) - } - for i := range ids { - m.action_summary[ids[i]] = struct{}{} - } +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by id. +func (m *RunnerCountMutation) SetActionSummaryID(id int) { + m.action_summary = &id } // ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. @@ -20859,29 +19867,20 @@ func (m *RunnerCountMutation) ActionSummaryCleared() bool { return m.clearedaction_summary } -// RemoveActionSummaryIDs removes the "action_summary" edge to the ActionSummary entity by IDs. -func (m *RunnerCountMutation) RemoveActionSummaryIDs(ids ...int) { - if m.removedaction_summary == nil { - m.removedaction_summary = make(map[int]struct{}) - } - for i := range ids { - delete(m.action_summary, ids[i]) - m.removedaction_summary[ids[i]] = struct{}{} - } -} - -// RemovedActionSummary returns the removed IDs of the "action_summary" edge to the ActionSummary entity. -func (m *RunnerCountMutation) RemovedActionSummaryIDs() (ids []int) { - for id := range m.removedaction_summary { - ids = append(ids, id) +// ActionSummaryID returns the "action_summary" edge ID in the mutation. +func (m *RunnerCountMutation) ActionSummaryID() (id int, exists bool) { + if m.action_summary != nil { + return *m.action_summary, true } return } // ActionSummaryIDs returns the "action_summary" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ActionSummaryID instead. It exists only for internal usage by the builders. func (m *RunnerCountMutation) ActionSummaryIDs() (ids []int) { - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + ids = append(ids, *id) } return } @@ -20890,7 +19889,6 @@ func (m *RunnerCountMutation) ActionSummaryIDs() (ids []int) { func (m *RunnerCountMutation) ResetActionSummary() { m.action_summary = nil m.clearedaction_summary = false - m.removedaction_summary = nil } // Where appends a list predicates to the RunnerCountMutation builder. @@ -21108,11 +20106,9 @@ func (m *RunnerCountMutation) AddedEdges() []string { func (m *RunnerCountMutation) AddedIDs(name string) []ent.Value { switch name { case runnercount.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.action_summary)) - for id := range m.action_summary { - ids = append(ids, id) + if id := m.action_summary; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -21120,23 +20116,12 @@ func (m *RunnerCountMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. 
func (m *RunnerCountMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedaction_summary != nil { - edges = append(edges, runnercount.EdgeActionSummary) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *RunnerCountMutation) RemovedIDs(name string) []ent.Value { - switch name { - case runnercount.EdgeActionSummary: - ids := make([]ent.Value, 0, len(m.removedaction_summary)) - for id := range m.removedaction_summary { - ids = append(ids, id) - } - return ids - } return nil } @@ -21163,6 +20148,9 @@ func (m *RunnerCountMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *RunnerCountMutation) ClearEdge(name string) error { switch name { + case runnercount.EdgeActionSummary: + m.ClearActionSummary() + return nil } return fmt.Errorf("unknown RunnerCount unique edge %s", name) } @@ -22397,8 +21385,7 @@ type TargetCompleteMutation struct { addtest_timeout *int64 test_size *targetcomplete.TestSize clearedFields map[string]struct{} - target_pair map[int]struct{} - removedtarget_pair map[int]struct{} + target_pair *int clearedtarget_pair bool important_output map[int]struct{} removedimportant_output map[int]struct{} @@ -22933,14 +21920,9 @@ func (m *TargetCompleteMutation) ResetTestSize() { delete(m.clearedFields, targetcomplete.FieldTestSize) } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by ids. -func (m *TargetCompleteMutation) AddTargetPairIDs(ids ...int) { - if m.target_pair == nil { - m.target_pair = make(map[int]struct{}) - } - for i := range ids { - m.target_pair[ids[i]] = struct{}{} - } +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by id. +func (m *TargetCompleteMutation) SetTargetPairID(id int) { + m.target_pair = &id } // ClearTargetPair clears the "target_pair" edge to the TargetPair entity. @@ -22953,29 +21935,20 @@ func (m *TargetCompleteMutation) TargetPairCleared() bool { return m.clearedtarget_pair } -// RemoveTargetPairIDs removes the "target_pair" edge to the TargetPair entity by IDs. -func (m *TargetCompleteMutation) RemoveTargetPairIDs(ids ...int) { - if m.removedtarget_pair == nil { - m.removedtarget_pair = make(map[int]struct{}) - } - for i := range ids { - delete(m.target_pair, ids[i]) - m.removedtarget_pair[ids[i]] = struct{}{} - } -} - -// RemovedTargetPair returns the removed IDs of the "target_pair" edge to the TargetPair entity. -func (m *TargetCompleteMutation) RemovedTargetPairIDs() (ids []int) { - for id := range m.removedtarget_pair { - ids = append(ids, id) +// TargetPairID returns the "target_pair" edge ID in the mutation. +func (m *TargetCompleteMutation) TargetPairID() (id int, exists bool) { + if m.target_pair != nil { + return *m.target_pair, true } return } // TargetPairIDs returns the "target_pair" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TargetPairID instead. It exists only for internal usage by the builders. 
func (m *TargetCompleteMutation) TargetPairIDs() (ids []int) { - for id := range m.target_pair { - ids = append(ids, id) + if id := m.target_pair; id != nil { + ids = append(ids, *id) } return } @@ -22984,7 +21957,6 @@ func (m *TargetCompleteMutation) TargetPairIDs() (ids []int) { func (m *TargetCompleteMutation) ResetTargetPair() { m.target_pair = nil m.clearedtarget_pair = false - m.removedtarget_pair = nil } // AddImportantOutputIDs adds the "important_output" edge to the TestFile entity by ids. @@ -23474,11 +22446,9 @@ func (m *TargetCompleteMutation) AddedEdges() []string { func (m *TargetCompleteMutation) AddedIDs(name string) []ent.Value { switch name { case targetcomplete.EdgeTargetPair: - ids := make([]ent.Value, 0, len(m.target_pair)) - for id := range m.target_pair { - ids = append(ids, id) + if id := m.target_pair; id != nil { + return []ent.Value{*id} } - return ids case targetcomplete.EdgeImportantOutput: ids := make([]ent.Value, 0, len(m.important_output)) for id := range m.important_output { @@ -23502,9 +22472,6 @@ func (m *TargetCompleteMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TargetCompleteMutation) RemovedEdges() []string { edges := make([]string, 0, 4) - if m.removedtarget_pair != nil { - edges = append(edges, targetcomplete.EdgeTargetPair) - } if m.removedimportant_output != nil { edges = append(edges, targetcomplete.EdgeImportantOutput) } @@ -23518,12 +22485,6 @@ func (m *TargetCompleteMutation) RemovedEdges() []string { // the given name in this mutation. func (m *TargetCompleteMutation) RemovedIDs(name string) []ent.Value { switch name { - case targetcomplete.EdgeTargetPair: - ids := make([]ent.Value, 0, len(m.removedtarget_pair)) - for id := range m.removedtarget_pair { - ids = append(ids, id) - } - return ids case targetcomplete.EdgeImportantOutput: ids := make([]ent.Value, 0, len(m.removedimportant_output)) for id := range m.removedimportant_output { @@ -23578,6 +22539,9 @@ func (m *TargetCompleteMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TargetCompleteMutation) ClearEdge(name string) error { switch name { + case targetcomplete.EdgeTargetPair: + m.ClearTargetPair() + return nil case targetcomplete.EdgeOutputGroup: m.ClearOutputGroup() return nil @@ -23618,8 +22582,7 @@ type TargetConfiguredMutation struct { addstart_time_in_ms *int64 test_size *targetconfigured.TestSize clearedFields map[string]struct{} - target_pair map[int]struct{} - removedtarget_pair map[int]struct{} + target_pair *int clearedtarget_pair bool done bool oldValue func(context.Context) (*TargetConfigured, error) @@ -23957,14 +22920,9 @@ func (m *TargetConfiguredMutation) ResetTestSize() { delete(m.clearedFields, targetconfigured.FieldTestSize) } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by ids. -func (m *TargetConfiguredMutation) AddTargetPairIDs(ids ...int) { - if m.target_pair == nil { - m.target_pair = make(map[int]struct{}) - } - for i := range ids { - m.target_pair[ids[i]] = struct{}{} - } +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by id. +func (m *TargetConfiguredMutation) SetTargetPairID(id int) { + m.target_pair = &id } // ClearTargetPair clears the "target_pair" edge to the TargetPair entity. 
@@ -23977,29 +22935,20 @@ func (m *TargetConfiguredMutation) TargetPairCleared() bool { return m.clearedtarget_pair } -// RemoveTargetPairIDs removes the "target_pair" edge to the TargetPair entity by IDs. -func (m *TargetConfiguredMutation) RemoveTargetPairIDs(ids ...int) { - if m.removedtarget_pair == nil { - m.removedtarget_pair = make(map[int]struct{}) - } - for i := range ids { - delete(m.target_pair, ids[i]) - m.removedtarget_pair[ids[i]] = struct{}{} - } -} - -// RemovedTargetPair returns the removed IDs of the "target_pair" edge to the TargetPair entity. -func (m *TargetConfiguredMutation) RemovedTargetPairIDs() (ids []int) { - for id := range m.removedtarget_pair { - ids = append(ids, id) +// TargetPairID returns the "target_pair" edge ID in the mutation. +func (m *TargetConfiguredMutation) TargetPairID() (id int, exists bool) { + if m.target_pair != nil { + return *m.target_pair, true } return } // TargetPairIDs returns the "target_pair" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TargetPairID instead. It exists only for internal usage by the builders. func (m *TargetConfiguredMutation) TargetPairIDs() (ids []int) { - for id := range m.target_pair { - ids = append(ids, id) + if id := m.target_pair; id != nil { + ids = append(ids, *id) } return } @@ -24008,7 +22957,6 @@ func (m *TargetConfiguredMutation) TargetPairIDs() (ids []int) { func (m *TargetConfiguredMutation) ResetTargetPair() { m.target_pair = nil m.clearedtarget_pair = false - m.removedtarget_pair = nil } // Where appends a list predicates to the TargetConfiguredMutation builder. @@ -24249,11 +23197,9 @@ func (m *TargetConfiguredMutation) AddedEdges() []string { func (m *TargetConfiguredMutation) AddedIDs(name string) []ent.Value { switch name { case targetconfigured.EdgeTargetPair: - ids := make([]ent.Value, 0, len(m.target_pair)) - for id := range m.target_pair { - ids = append(ids, id) + if id := m.target_pair; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -24261,23 +23207,12 @@ func (m *TargetConfiguredMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TargetConfiguredMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedtarget_pair != nil { - edges = append(edges, targetconfigured.EdgeTargetPair) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *TargetConfiguredMutation) RemovedIDs(name string) []ent.Value { - switch name { - case targetconfigured.EdgeTargetPair: - ids := make([]ent.Value, 0, len(m.removedtarget_pair)) - for id := range m.removedtarget_pair { - ids = append(ids, id) - } - return ids - } return nil } @@ -24304,6 +23239,9 @@ func (m *TargetConfiguredMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *TargetConfiguredMutation) ClearEdge(name string) error { switch name { + case targetconfigured.EdgeTargetPair: + m.ClearTargetPair() + return nil } return fmt.Errorf("unknown TargetConfigured unique edge %s", name) } @@ -24332,8 +23270,7 @@ type TargetMetricsMutation struct { targets_configured_not_including_aspects *int64 addtargets_configured_not_including_aspects *int64 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool done bool oldValue func(context.Context) (*TargetMetrics, error) @@ -24648,14 +23585,9 @@ func (m *TargetMetricsMutation) ResetTargetsConfiguredNotIncludingAspects() { delete(m.clearedFields, targetmetrics.FieldTargetsConfiguredNotIncludingAspects) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *TargetMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. +func (m *TargetMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -24668,29 +23600,20 @@ func (m *TargetMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *TargetMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *TargetMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *TargetMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *TargetMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -24699,7 +23622,6 @@ func (m *TargetMetricsMutation) MetricsIDs() (ids []int) { func (m *TargetMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // Where appends a list predicates to the TargetMetricsMutation builder. @@ -24941,11 +23863,9 @@ func (m *TargetMetricsMutation) AddedEdges() []string { func (m *TargetMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case targetmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -24953,23 +23873,12 @@ func (m *TargetMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. 
func (m *TargetMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedmetrics != nil { - edges = append(edges, targetmetrics.EdgeMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *TargetMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case targetmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -24996,6 +23905,9 @@ func (m *TargetMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TargetMetricsMutation) ClearEdge(name string) error { switch name { + case targetmetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown TargetMetrics unique edge %s", name) } @@ -25025,8 +23937,7 @@ type TargetPairMutation struct { test_size *targetpair.TestSize abort_reason *targetpair.AbortReason clearedFields map[string]struct{} - bazel_invocation map[int]struct{} - removedbazel_invocation map[int]struct{} + bazel_invocation *int clearedbazel_invocation bool configuration *int clearedconfiguration bool @@ -25450,14 +24361,9 @@ func (m *TargetPairMutation) ResetAbortReason() { delete(m.clearedFields, targetpair.FieldAbortReason) } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by ids. -func (m *TargetPairMutation) AddBazelInvocationIDs(ids ...int) { - if m.bazel_invocation == nil { - m.bazel_invocation = make(map[int]struct{}) - } - for i := range ids { - m.bazel_invocation[ids[i]] = struct{}{} - } +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by id. +func (m *TargetPairMutation) SetBazelInvocationID(id int) { + m.bazel_invocation = &id } // ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. @@ -25470,29 +24376,20 @@ func (m *TargetPairMutation) BazelInvocationCleared() bool { return m.clearedbazel_invocation } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (m *TargetPairMutation) RemoveBazelInvocationIDs(ids ...int) { - if m.removedbazel_invocation == nil { - m.removedbazel_invocation = make(map[int]struct{}) - } - for i := range ids { - delete(m.bazel_invocation, ids[i]) - m.removedbazel_invocation[ids[i]] = struct{}{} - } -} - -// RemovedBazelInvocation returns the removed IDs of the "bazel_invocation" edge to the BazelInvocation entity. -func (m *TargetPairMutation) RemovedBazelInvocationIDs() (ids []int) { - for id := range m.removedbazel_invocation { - ids = append(ids, id) +// BazelInvocationID returns the "bazel_invocation" edge ID in the mutation. +func (m *TargetPairMutation) BazelInvocationID() (id int, exists bool) { + if m.bazel_invocation != nil { + return *m.bazel_invocation, true } return } // BazelInvocationIDs returns the "bazel_invocation" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// BazelInvocationID instead. It exists only for internal usage by the builders. 
func (m *TargetPairMutation) BazelInvocationIDs() (ids []int) { - for id := range m.bazel_invocation { - ids = append(ids, id) + if id := m.bazel_invocation; id != nil { + ids = append(ids, *id) } return } @@ -25501,7 +24398,6 @@ func (m *TargetPairMutation) BazelInvocationIDs() (ids []int) { func (m *TargetPairMutation) ResetBazelInvocation() { m.bazel_invocation = nil m.clearedbazel_invocation = false - m.removedbazel_invocation = nil } // SetConfigurationID sets the "configuration" edge to the TargetConfigured entity by id. @@ -25872,11 +24768,9 @@ func (m *TargetPairMutation) AddedEdges() []string { func (m *TargetPairMutation) AddedIDs(name string) []ent.Value { switch name { case targetpair.EdgeBazelInvocation: - ids := make([]ent.Value, 0, len(m.bazel_invocation)) - for id := range m.bazel_invocation { - ids = append(ids, id) + if id := m.bazel_invocation; id != nil { + return []ent.Value{*id} } - return ids case targetpair.EdgeConfiguration: if id := m.configuration; id != nil { return []ent.Value{*id} @@ -25892,23 +24786,12 @@ func (m *TargetPairMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TargetPairMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedbazel_invocation != nil { - edges = append(edges, targetpair.EdgeBazelInvocation) - } return edges } - -// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with -// the given name in this mutation. -func (m *TargetPairMutation) RemovedIDs(name string) []ent.Value { - switch name { - case targetpair.EdgeBazelInvocation: - ids := make([]ent.Value, 0, len(m.removedbazel_invocation)) - for id := range m.removedbazel_invocation { - ids = append(ids, id) - } - return ids - } + +// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with +// the given name in this mutation. +func (m *TargetPairMutation) RemovedIDs(name string) []ent.Value { return nil } @@ -25945,6 +24828,9 @@ func (m *TargetPairMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TargetPairMutation) ClearEdge(name string) error { switch name { + case targetpair.EdgeBazelInvocation: + m.ClearBazelInvocation() + return nil case targetpair.EdgeConfiguration: m.ClearConfiguration() return nil @@ -25983,11 +24869,11 @@ type TestCollectionMutation struct { strategy *string cached_locally *bool cached_remotely *bool + first_seen *time.Time duration_ms *int64 addduration_ms *int64 clearedFields map[string]struct{} - bazel_invocation map[int]struct{} - removedbazel_invocation map[int]struct{} + bazel_invocation *int clearedbazel_invocation bool test_summary *int clearedtest_summary bool @@ -26342,6 +25228,55 @@ func (m *TestCollectionMutation) ResetCachedRemotely() { delete(m.clearedFields, testcollection.FieldCachedRemotely) } +// SetFirstSeen sets the "first_seen" field. +func (m *TestCollectionMutation) SetFirstSeen(t time.Time) { + m.first_seen = &t +} + +// FirstSeen returns the value of the "first_seen" field in the mutation. +func (m *TestCollectionMutation) FirstSeen() (r time.Time, exists bool) { + v := m.first_seen + if v == nil { + return + } + return *v, true +} + +// OldFirstSeen returns the old "first_seen" field's value of the TestCollection entity. +// If the TestCollection object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *TestCollectionMutation) OldFirstSeen(ctx context.Context) (v *time.Time, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldFirstSeen is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldFirstSeen requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldFirstSeen: %w", err) + } + return oldValue.FirstSeen, nil +} + +// ClearFirstSeen clears the value of the "first_seen" field. +func (m *TestCollectionMutation) ClearFirstSeen() { + m.first_seen = nil + m.clearedFields[testcollection.FieldFirstSeen] = struct{}{} +} + +// FirstSeenCleared returns if the "first_seen" field was cleared in this mutation. +func (m *TestCollectionMutation) FirstSeenCleared() bool { + _, ok := m.clearedFields[testcollection.FieldFirstSeen] + return ok +} + +// ResetFirstSeen resets all changes to the "first_seen" field. +func (m *TestCollectionMutation) ResetFirstSeen() { + m.first_seen = nil + delete(m.clearedFields, testcollection.FieldFirstSeen) +} + // SetDurationMs sets the "duration_ms" field. func (m *TestCollectionMutation) SetDurationMs(i int64) { m.duration_ms = &i @@ -26412,14 +25347,9 @@ func (m *TestCollectionMutation) ResetDurationMs() { delete(m.clearedFields, testcollection.FieldDurationMs) } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by ids. -func (m *TestCollectionMutation) AddBazelInvocationIDs(ids ...int) { - if m.bazel_invocation == nil { - m.bazel_invocation = make(map[int]struct{}) - } - for i := range ids { - m.bazel_invocation[ids[i]] = struct{}{} - } +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by id. +func (m *TestCollectionMutation) SetBazelInvocationID(id int) { + m.bazel_invocation = &id } // ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. @@ -26432,29 +25362,20 @@ func (m *TestCollectionMutation) BazelInvocationCleared() bool { return m.clearedbazel_invocation } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (m *TestCollectionMutation) RemoveBazelInvocationIDs(ids ...int) { - if m.removedbazel_invocation == nil { - m.removedbazel_invocation = make(map[int]struct{}) - } - for i := range ids { - delete(m.bazel_invocation, ids[i]) - m.removedbazel_invocation[ids[i]] = struct{}{} - } -} - -// RemovedBazelInvocation returns the removed IDs of the "bazel_invocation" edge to the BazelInvocation entity. -func (m *TestCollectionMutation) RemovedBazelInvocationIDs() (ids []int) { - for id := range m.removedbazel_invocation { - ids = append(ids, id) +// BazelInvocationID returns the "bazel_invocation" edge ID in the mutation. +func (m *TestCollectionMutation) BazelInvocationID() (id int, exists bool) { + if m.bazel_invocation != nil { + return *m.bazel_invocation, true } return } // BazelInvocationIDs returns the "bazel_invocation" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// BazelInvocationID instead. It exists only for internal usage by the builders. 
func (m *TestCollectionMutation) BazelInvocationIDs() (ids []int) { - for id := range m.bazel_invocation { - ids = append(ids, id) + if id := m.bazel_invocation; id != nil { + ids = append(ids, *id) } return } @@ -26463,7 +25384,6 @@ func (m *TestCollectionMutation) BazelInvocationIDs() (ids []int) { func (m *TestCollectionMutation) ResetBazelInvocation() { m.bazel_invocation = nil m.clearedbazel_invocation = false - m.removedbazel_invocation = nil } // SetTestSummaryID sets the "test_summary" edge to the TestSummary entity by id. @@ -26593,7 +25513,7 @@ func (m *TestCollectionMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *TestCollectionMutation) Fields() []string { - fields := make([]string, 0, 6) + fields := make([]string, 0, 7) if m.label != nil { fields = append(fields, testcollection.FieldLabel) } @@ -26609,6 +25529,9 @@ func (m *TestCollectionMutation) Fields() []string { if m.cached_remotely != nil { fields = append(fields, testcollection.FieldCachedRemotely) } + if m.first_seen != nil { + fields = append(fields, testcollection.FieldFirstSeen) + } if m.duration_ms != nil { fields = append(fields, testcollection.FieldDurationMs) } @@ -26630,6 +25553,8 @@ func (m *TestCollectionMutation) Field(name string) (ent.Value, bool) { return m.CachedLocally() case testcollection.FieldCachedRemotely: return m.CachedRemotely() + case testcollection.FieldFirstSeen: + return m.FirstSeen() case testcollection.FieldDurationMs: return m.DurationMs() } @@ -26651,6 +25576,8 @@ func (m *TestCollectionMutation) OldField(ctx context.Context, name string) (ent return m.OldCachedLocally(ctx) case testcollection.FieldCachedRemotely: return m.OldCachedRemotely(ctx) + case testcollection.FieldFirstSeen: + return m.OldFirstSeen(ctx) case testcollection.FieldDurationMs: return m.OldDurationMs(ctx) } @@ -26697,6 +25624,13 @@ func (m *TestCollectionMutation) SetField(name string, value ent.Value) error { } m.SetCachedRemotely(v) return nil + case testcollection.FieldFirstSeen: + v, ok := value.(time.Time) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetFirstSeen(v) + return nil case testcollection.FieldDurationMs: v, ok := value.(int64) if !ok { @@ -26764,6 +25698,9 @@ func (m *TestCollectionMutation) ClearedFields() []string { if m.FieldCleared(testcollection.FieldCachedRemotely) { fields = append(fields, testcollection.FieldCachedRemotely) } + if m.FieldCleared(testcollection.FieldFirstSeen) { + fields = append(fields, testcollection.FieldFirstSeen) + } if m.FieldCleared(testcollection.FieldDurationMs) { fields = append(fields, testcollection.FieldDurationMs) } @@ -26796,6 +25733,9 @@ func (m *TestCollectionMutation) ClearField(name string) error { case testcollection.FieldCachedRemotely: m.ClearCachedRemotely() return nil + case testcollection.FieldFirstSeen: + m.ClearFirstSeen() + return nil case testcollection.FieldDurationMs: m.ClearDurationMs() return nil @@ -26822,6 +25762,9 @@ func (m *TestCollectionMutation) ResetField(name string) error { case testcollection.FieldCachedRemotely: m.ResetCachedRemotely() return nil + case testcollection.FieldFirstSeen: + m.ResetFirstSeen() + return nil case testcollection.FieldDurationMs: m.ResetDurationMs() return nil @@ -26849,11 +25792,9 @@ func (m *TestCollectionMutation) AddedEdges() []string { func (m *TestCollectionMutation) AddedIDs(name string) []ent.Value { switch name { case testcollection.EdgeBazelInvocation: - ids := make([]ent.Value, 0, 
len(m.bazel_invocation)) - for id := range m.bazel_invocation { - ids = append(ids, id) + if id := m.bazel_invocation; id != nil { + return []ent.Value{*id} } - return ids case testcollection.EdgeTestSummary: if id := m.test_summary; id != nil { return []ent.Value{*id} @@ -26871,9 +25812,6 @@ func (m *TestCollectionMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TestCollectionMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedbazel_invocation != nil { - edges = append(edges, testcollection.EdgeBazelInvocation) - } if m.removedtest_results != nil { edges = append(edges, testcollection.EdgeTestResults) } @@ -26884,12 +25822,6 @@ func (m *TestCollectionMutation) RemovedEdges() []string { // the given name in this mutation. func (m *TestCollectionMutation) RemovedIDs(name string) []ent.Value { switch name { - case testcollection.EdgeBazelInvocation: - ids := make([]ent.Value, 0, len(m.removedbazel_invocation)) - for id := range m.removedbazel_invocation { - ids = append(ids, id) - } - return ids case testcollection.EdgeTestResults: ids := make([]ent.Value, 0, len(m.removedtest_results)) for id := range m.removedtest_results { @@ -26933,6 +25865,9 @@ func (m *TestCollectionMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TestCollectionMutation) ClearEdge(name string) error { switch name { + case testcollection.EdgeBazelInvocation: + m.ClearBazelInvocation() + return nil case testcollection.EdgeTestSummary: m.ClearTestSummary() return nil @@ -26971,8 +25906,7 @@ type TestFileMutation struct { prefix *[]string appendprefix []string clearedFields map[string]struct{} - test_result map[int]struct{} - removedtest_result map[int]struct{} + test_result *int clearedtest_result bool done bool oldValue func(context.Context) (*TestFile, error) @@ -27359,14 +26293,9 @@ func (m *TestFileMutation) ResetPrefix() { delete(m.clearedFields, testfile.FieldPrefix) } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by ids. -func (m *TestFileMutation) AddTestResultIDs(ids ...int) { - if m.test_result == nil { - m.test_result = make(map[int]struct{}) - } - for i := range ids { - m.test_result[ids[i]] = struct{}{} - } +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by id. +func (m *TestFileMutation) SetTestResultID(id int) { + m.test_result = &id } // ClearTestResult clears the "test_result" edge to the TestResultBES entity. @@ -27379,29 +26308,20 @@ func (m *TestFileMutation) TestResultCleared() bool { return m.clearedtest_result } -// RemoveTestResultIDs removes the "test_result" edge to the TestResultBES entity by IDs. -func (m *TestFileMutation) RemoveTestResultIDs(ids ...int) { - if m.removedtest_result == nil { - m.removedtest_result = make(map[int]struct{}) - } - for i := range ids { - delete(m.test_result, ids[i]) - m.removedtest_result[ids[i]] = struct{}{} - } -} - -// RemovedTestResult returns the removed IDs of the "test_result" edge to the TestResultBES entity. -func (m *TestFileMutation) RemovedTestResultIDs() (ids []int) { - for id := range m.removedtest_result { - ids = append(ids, id) +// TestResultID returns the "test_result" edge ID in the mutation. +func (m *TestFileMutation) TestResultID() (id int, exists bool) { + if m.test_result != nil { + return *m.test_result, true } return } // TestResultIDs returns the "test_result" edge IDs in the mutation. 
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TestResultID instead. It exists only for internal usage by the builders. func (m *TestFileMutation) TestResultIDs() (ids []int) { - for id := range m.test_result { - ids = append(ids, id) + if id := m.test_result; id != nil { + ids = append(ids, *id) } return } @@ -27410,7 +26330,6 @@ func (m *TestFileMutation) TestResultIDs() (ids []int) { func (m *TestFileMutation) ResetTestResult() { m.test_result = nil m.clearedtest_result = false - m.removedtest_result = nil } // Where appends a list predicates to the TestFileMutation builder. @@ -27674,11 +26593,9 @@ func (m *TestFileMutation) AddedEdges() []string { func (m *TestFileMutation) AddedIDs(name string) []ent.Value { switch name { case testfile.EdgeTestResult: - ids := make([]ent.Value, 0, len(m.test_result)) - for id := range m.test_result { - ids = append(ids, id) + if id := m.test_result; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -27686,23 +26603,12 @@ func (m *TestFileMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TestFileMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedtest_result != nil { - edges = append(edges, testfile.EdgeTestResult) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *TestFileMutation) RemovedIDs(name string) []ent.Value { - switch name { - case testfile.EdgeTestResult: - ids := make([]ent.Value, 0, len(m.removedtest_result)) - for id := range m.removedtest_result { - ids = append(ids, id) - } - return ids - } return nil } @@ -27729,6 +26635,9 @@ func (m *TestFileMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TestFileMutation) ClearEdge(name string) error { switch name { + case testfile.EdgeTestResult: + m.ClearTestResult() + return nil } return fmt.Errorf("unknown TestFile unique edge %s", name) } @@ -29034,8 +27943,7 @@ type TestSummaryMutation struct { addtotal_run_duration *int64 label *string clearedFields map[string]struct{} - test_collection map[int]struct{} - removedtest_collection map[int]struct{} + test_collection *int clearedtest_collection bool passed map[int]struct{} removedpassed map[int]struct{} @@ -29804,14 +28712,9 @@ func (m *TestSummaryMutation) ResetLabel() { delete(m.clearedFields, testsummary.FieldLabel) } -// AddTestCollectionIDs adds the "test_collection" edge to the TestCollection entity by ids. -func (m *TestSummaryMutation) AddTestCollectionIDs(ids ...int) { - if m.test_collection == nil { - m.test_collection = make(map[int]struct{}) - } - for i := range ids { - m.test_collection[ids[i]] = struct{}{} - } +// SetTestCollectionID sets the "test_collection" edge to the TestCollection entity by id. +func (m *TestSummaryMutation) SetTestCollectionID(id int) { + m.test_collection = &id } // ClearTestCollection clears the "test_collection" edge to the TestCollection entity. @@ -29824,29 +28727,20 @@ func (m *TestSummaryMutation) TestCollectionCleared() bool { return m.clearedtest_collection } -// RemoveTestCollectionIDs removes the "test_collection" edge to the TestCollection entity by IDs. 
-func (m *TestSummaryMutation) RemoveTestCollectionIDs(ids ...int) { - if m.removedtest_collection == nil { - m.removedtest_collection = make(map[int]struct{}) - } - for i := range ids { - delete(m.test_collection, ids[i]) - m.removedtest_collection[ids[i]] = struct{}{} - } -} - -// RemovedTestCollection returns the removed IDs of the "test_collection" edge to the TestCollection entity. -func (m *TestSummaryMutation) RemovedTestCollectionIDs() (ids []int) { - for id := range m.removedtest_collection { - ids = append(ids, id) +// TestCollectionID returns the "test_collection" edge ID in the mutation. +func (m *TestSummaryMutation) TestCollectionID() (id int, exists bool) { + if m.test_collection != nil { + return *m.test_collection, true } return } // TestCollectionIDs returns the "test_collection" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TestCollectionID instead. It exists only for internal usage by the builders. func (m *TestSummaryMutation) TestCollectionIDs() (ids []int) { - for id := range m.test_collection { - ids = append(ids, id) + if id := m.test_collection; id != nil { + ids = append(ids, *id) } return } @@ -29855,7 +28749,6 @@ func (m *TestSummaryMutation) TestCollectionIDs() (ids []int) { func (m *TestSummaryMutation) ResetTestCollection() { m.test_collection = nil m.clearedtest_collection = false - m.removedtest_collection = nil } // AddPassedIDs adds the "passed" edge to the TestFile entity by ids. @@ -30432,11 +29325,9 @@ func (m *TestSummaryMutation) AddedEdges() []string { func (m *TestSummaryMutation) AddedIDs(name string) []ent.Value { switch name { case testsummary.EdgeTestCollection: - ids := make([]ent.Value, 0, len(m.test_collection)) - for id := range m.test_collection { - ids = append(ids, id) + if id := m.test_collection; id != nil { + return []ent.Value{*id} } - return ids case testsummary.EdgePassed: ids := make([]ent.Value, 0, len(m.passed)) for id := range m.passed { @@ -30456,9 +29347,6 @@ func (m *TestSummaryMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TestSummaryMutation) RemovedEdges() []string { edges := make([]string, 0, 3) - if m.removedtest_collection != nil { - edges = append(edges, testsummary.EdgeTestCollection) - } if m.removedpassed != nil { edges = append(edges, testsummary.EdgePassed) } @@ -30472,12 +29360,6 @@ func (m *TestSummaryMutation) RemovedEdges() []string { // the given name in this mutation. func (m *TestSummaryMutation) RemovedIDs(name string) []ent.Value { switch name { - case testsummary.EdgeTestCollection: - ids := make([]ent.Value, 0, len(m.removedtest_collection)) - for id := range m.removedtest_collection { - ids = append(ids, id) - } - return ids case testsummary.EdgePassed: ids := make([]ent.Value, 0, len(m.removedpassed)) for id := range m.removedpassed { @@ -30527,6 +29409,9 @@ func (m *TestSummaryMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *TestSummaryMutation) ClearEdge(name string) error { switch name { + case testsummary.EdgeTestCollection: + m.ClearTestCollection() + return nil } return fmt.Errorf("unknown TestSummary unique edge %s", name) } @@ -30557,8 +29442,7 @@ type TimingBreakdownMutation struct { name *string time *string clearedFields map[string]struct{} - execution_info map[int]struct{} - removedexecution_info map[int]struct{} + execution_info *int clearedexecution_info bool child map[int]struct{} removedchild map[int]struct{} @@ -30764,14 +29648,9 @@ func (m *TimingBreakdownMutation) ResetTime() { delete(m.clearedFields, timingbreakdown.FieldTime) } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by ids. -func (m *TimingBreakdownMutation) AddExecutionInfoIDs(ids ...int) { - if m.execution_info == nil { - m.execution_info = make(map[int]struct{}) - } - for i := range ids { - m.execution_info[ids[i]] = struct{}{} - } +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by id. +func (m *TimingBreakdownMutation) SetExecutionInfoID(id int) { + m.execution_info = &id } // ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. @@ -30784,29 +29663,20 @@ func (m *TimingBreakdownMutation) ExecutionInfoCleared() bool { return m.clearedexecution_info } -// RemoveExecutionInfoIDs removes the "execution_info" edge to the ExectionInfo entity by IDs. -func (m *TimingBreakdownMutation) RemoveExecutionInfoIDs(ids ...int) { - if m.removedexecution_info == nil { - m.removedexecution_info = make(map[int]struct{}) - } - for i := range ids { - delete(m.execution_info, ids[i]) - m.removedexecution_info[ids[i]] = struct{}{} - } -} - -// RemovedExecutionInfo returns the removed IDs of the "execution_info" edge to the ExectionInfo entity. -func (m *TimingBreakdownMutation) RemovedExecutionInfoIDs() (ids []int) { - for id := range m.removedexecution_info { - ids = append(ids, id) +// ExecutionInfoID returns the "execution_info" edge ID in the mutation. +func (m *TimingBreakdownMutation) ExecutionInfoID() (id int, exists bool) { + if m.execution_info != nil { + return *m.execution_info, true } return } // ExecutionInfoIDs returns the "execution_info" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// ExecutionInfoID instead. It exists only for internal usage by the builders. func (m *TimingBreakdownMutation) ExecutionInfoIDs() (ids []int) { - for id := range m.execution_info { - ids = append(ids, id) + if id := m.execution_info; id != nil { + ids = append(ids, *id) } return } @@ -30815,7 +29685,6 @@ func (m *TimingBreakdownMutation) ExecutionInfoIDs() (ids []int) { func (m *TimingBreakdownMutation) ResetExecutionInfo() { m.execution_info = nil m.clearedexecution_info = false - m.removedexecution_info = nil } // AddChildIDs adds the "child" edge to the TimingChild entity by ids. 
@@ -31052,11 +29921,9 @@ func (m *TimingBreakdownMutation) AddedEdges() []string { func (m *TimingBreakdownMutation) AddedIDs(name string) []ent.Value { switch name { case timingbreakdown.EdgeExecutionInfo: - ids := make([]ent.Value, 0, len(m.execution_info)) - for id := range m.execution_info { - ids = append(ids, id) + if id := m.execution_info; id != nil { + return []ent.Value{*id} } - return ids case timingbreakdown.EdgeChild: ids := make([]ent.Value, 0, len(m.child)) for id := range m.child { @@ -31070,9 +29937,6 @@ func (m *TimingBreakdownMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TimingBreakdownMutation) RemovedEdges() []string { edges := make([]string, 0, 2) - if m.removedexecution_info != nil { - edges = append(edges, timingbreakdown.EdgeExecutionInfo) - } if m.removedchild != nil { edges = append(edges, timingbreakdown.EdgeChild) } @@ -31083,12 +29947,6 @@ func (m *TimingBreakdownMutation) RemovedEdges() []string { // the given name in this mutation. func (m *TimingBreakdownMutation) RemovedIDs(name string) []ent.Value { switch name { - case timingbreakdown.EdgeExecutionInfo: - ids := make([]ent.Value, 0, len(m.removedexecution_info)) - for id := range m.removedexecution_info { - ids = append(ids, id) - } - return ids case timingbreakdown.EdgeChild: ids := make([]ent.Value, 0, len(m.removedchild)) for id := range m.removedchild { @@ -31127,6 +29985,9 @@ func (m *TimingBreakdownMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TimingBreakdownMutation) ClearEdge(name string) error { switch name { + case timingbreakdown.EdgeExecutionInfo: + m.ClearExecutionInfo() + return nil } return fmt.Errorf("unknown TimingBreakdown unique edge %s", name) } @@ -31154,8 +30015,7 @@ type TimingChildMutation struct { name *string time *string clearedFields map[string]struct{} - timing_breakdown map[int]struct{} - removedtiming_breakdown map[int]struct{} + timing_breakdown *int clearedtiming_breakdown bool done bool oldValue func(context.Context) (*TimingChild, error) @@ -31358,14 +30218,9 @@ func (m *TimingChildMutation) ResetTime() { delete(m.clearedFields, timingchild.FieldTime) } -// AddTimingBreakdownIDs adds the "timing_breakdown" edge to the TimingBreakdown entity by ids. -func (m *TimingChildMutation) AddTimingBreakdownIDs(ids ...int) { - if m.timing_breakdown == nil { - m.timing_breakdown = make(map[int]struct{}) - } - for i := range ids { - m.timing_breakdown[ids[i]] = struct{}{} - } +// SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by id. +func (m *TimingChildMutation) SetTimingBreakdownID(id int) { + m.timing_breakdown = &id } // ClearTimingBreakdown clears the "timing_breakdown" edge to the TimingBreakdown entity. @@ -31378,29 +30233,20 @@ func (m *TimingChildMutation) TimingBreakdownCleared() bool { return m.clearedtiming_breakdown } -// RemoveTimingBreakdownIDs removes the "timing_breakdown" edge to the TimingBreakdown entity by IDs. -func (m *TimingChildMutation) RemoveTimingBreakdownIDs(ids ...int) { - if m.removedtiming_breakdown == nil { - m.removedtiming_breakdown = make(map[int]struct{}) - } - for i := range ids { - delete(m.timing_breakdown, ids[i]) - m.removedtiming_breakdown[ids[i]] = struct{}{} - } -} - -// RemovedTimingBreakdown returns the removed IDs of the "timing_breakdown" edge to the TimingBreakdown entity. 
-func (m *TimingChildMutation) RemovedTimingBreakdownIDs() (ids []int) { - for id := range m.removedtiming_breakdown { - ids = append(ids, id) +// TimingBreakdownID returns the "timing_breakdown" edge ID in the mutation. +func (m *TimingChildMutation) TimingBreakdownID() (id int, exists bool) { + if m.timing_breakdown != nil { + return *m.timing_breakdown, true } return } // TimingBreakdownIDs returns the "timing_breakdown" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// TimingBreakdownID instead. It exists only for internal usage by the builders. func (m *TimingChildMutation) TimingBreakdownIDs() (ids []int) { - for id := range m.timing_breakdown { - ids = append(ids, id) + if id := m.timing_breakdown; id != nil { + ids = append(ids, *id) } return } @@ -31409,7 +30255,6 @@ func (m *TimingChildMutation) TimingBreakdownIDs() (ids []int) { func (m *TimingChildMutation) ResetTimingBreakdown() { m.timing_breakdown = nil m.clearedtiming_breakdown = false - m.removedtiming_breakdown = nil } // Where appends a list predicates to the TimingChildMutation builder. @@ -31589,11 +30434,9 @@ func (m *TimingChildMutation) AddedEdges() []string { func (m *TimingChildMutation) AddedIDs(name string) []ent.Value { switch name { case timingchild.EdgeTimingBreakdown: - ids := make([]ent.Value, 0, len(m.timing_breakdown)) - for id := range m.timing_breakdown { - ids = append(ids, id) + if id := m.timing_breakdown; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -31601,23 +30444,12 @@ func (m *TimingChildMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TimingChildMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedtiming_breakdown != nil { - edges = append(edges, timingchild.EdgeTimingBreakdown) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *TimingChildMutation) RemovedIDs(name string) []ent.Value { - switch name { - case timingchild.EdgeTimingBreakdown: - ids := make([]ent.Value, 0, len(m.removedtiming_breakdown)) - for id := range m.removedtiming_breakdown { - ids = append(ids, id) - } - return ids - } return nil } @@ -31644,6 +30476,9 @@ func (m *TimingChildMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. func (m *TimingChildMutation) ClearEdge(name string) error { switch name { + case timingchild.EdgeTimingBreakdown: + m.ClearTimingBreakdown() + return nil } return fmt.Errorf("unknown TimingChild unique edge %s", name) } @@ -31676,8 +30511,7 @@ type TimingMetricsMutation struct { actions_execution_start_in_ms *int64 addactions_execution_start_in_ms *int64 clearedFields map[string]struct{} - metrics map[int]struct{} - removedmetrics map[int]struct{} + metrics *int clearedmetrics bool done bool oldValue func(context.Context) (*TimingMetrics, error) @@ -32132,14 +30966,9 @@ func (m *TimingMetricsMutation) ResetActionsExecutionStartInMs() { delete(m.clearedFields, timingmetrics.FieldActionsExecutionStartInMs) } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by ids. -func (m *TimingMetricsMutation) AddMetricIDs(ids ...int) { - if m.metrics == nil { - m.metrics = make(map[int]struct{}) - } - for i := range ids { - m.metrics[ids[i]] = struct{}{} - } +// SetMetricsID sets the "metrics" edge to the Metrics entity by id. 
+func (m *TimingMetricsMutation) SetMetricsID(id int) { + m.metrics = &id } // ClearMetrics clears the "metrics" edge to the Metrics entity. @@ -32152,29 +30981,20 @@ func (m *TimingMetricsMutation) MetricsCleared() bool { return m.clearedmetrics } -// RemoveMetricIDs removes the "metrics" edge to the Metrics entity by IDs. -func (m *TimingMetricsMutation) RemoveMetricIDs(ids ...int) { - if m.removedmetrics == nil { - m.removedmetrics = make(map[int]struct{}) - } - for i := range ids { - delete(m.metrics, ids[i]) - m.removedmetrics[ids[i]] = struct{}{} - } -} - -// RemovedMetrics returns the removed IDs of the "metrics" edge to the Metrics entity. -func (m *TimingMetricsMutation) RemovedMetricsIDs() (ids []int) { - for id := range m.removedmetrics { - ids = append(ids, id) +// MetricsID returns the "metrics" edge ID in the mutation. +func (m *TimingMetricsMutation) MetricsID() (id int, exists bool) { + if m.metrics != nil { + return *m.metrics, true } return } // MetricsIDs returns the "metrics" edge IDs in the mutation. +// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use +// MetricsID instead. It exists only for internal usage by the builders. func (m *TimingMetricsMutation) MetricsIDs() (ids []int) { - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + ids = append(ids, *id) } return } @@ -32183,7 +31003,6 @@ func (m *TimingMetricsMutation) MetricsIDs() (ids []int) { func (m *TimingMetricsMutation) ResetMetrics() { m.metrics = nil m.clearedmetrics = false - m.removedmetrics = nil } // Where appends a list predicates to the TimingMetricsMutation builder. @@ -32495,11 +31314,9 @@ func (m *TimingMetricsMutation) AddedEdges() []string { func (m *TimingMetricsMutation) AddedIDs(name string) []ent.Value { switch name { case timingmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.metrics)) - for id := range m.metrics { - ids = append(ids, id) + if id := m.metrics; id != nil { + return []ent.Value{*id} } - return ids } return nil } @@ -32507,23 +31324,12 @@ func (m *TimingMetricsMutation) AddedIDs(name string) []ent.Value { // RemovedEdges returns all edge names that were removed in this mutation. func (m *TimingMetricsMutation) RemovedEdges() []string { edges := make([]string, 0, 1) - if m.removedmetrics != nil { - edges = append(edges, timingmetrics.EdgeMetrics) - } return edges } // RemovedIDs returns all IDs (to other nodes) that were removed for the edge with // the given name in this mutation. func (m *TimingMetricsMutation) RemovedIDs(name string) []ent.Value { - switch name { - case timingmetrics.EdgeMetrics: - ids := make([]ent.Value, 0, len(m.removedmetrics)) - for id := range m.removedmetrics { - ids = append(ids, id) - } - return ids - } return nil } @@ -32550,6 +31356,9 @@ func (m *TimingMetricsMutation) EdgeCleared(name string) bool { // if that edge is not defined in the schema. 
func (m *TimingMetricsMutation) ClearEdge(name string) error { switch name { + case timingmetrics.EdgeMetrics: + m.ClearMetrics() + return nil } return fmt.Errorf("unknown TimingMetrics unique edge %s", name) } diff --git a/ent/gen/ent/namedsetoffiles.go b/ent/gen/ent/namedsetoffiles.go index adbfbcb..63ae28b 100644 --- a/ent/gen/ent/namedsetoffiles.go +++ b/ent/gen/ent/namedsetoffiles.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/namedsetoffiles" + "github.com/buildbarn/bb-portal/ent/gen/ent/outputgroup" ) // NamedSetOfFiles is the model entity for the NamedSetOfFiles schema. @@ -20,13 +21,14 @@ type NamedSetOfFiles struct { // The values are being populated by the NamedSetOfFilesQuery when eager-loading is set. Edges NamedSetOfFilesEdges `json:"edges"` named_set_of_files_file_sets *int + output_group_file_sets *int selectValues sql.SelectValues } // NamedSetOfFilesEdges holds the relations/edges for other nodes in the graph. type NamedSetOfFilesEdges struct { // OutputGroup holds the value of the output_group edge. - OutputGroup []*OutputGroup `json:"output_group,omitempty"` + OutputGroup *OutputGroup `json:"output_group,omitempty"` // Files holds the value of the files edge. Files []*TestFile `json:"files,omitempty"` // FileSets holds the value of the file_sets edge. @@ -37,15 +39,16 @@ type NamedSetOfFilesEdges struct { // totalCount holds the count of the edges above. totalCount [3]map[string]int - namedOutputGroup map[string][]*OutputGroup - namedFiles map[string][]*TestFile + namedFiles map[string][]*TestFile } // OutputGroupOrErr returns the OutputGroup value or an error if the edge -// was not loaded in eager-loading. -func (e NamedSetOfFilesEdges) OutputGroupOrErr() ([]*OutputGroup, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e NamedSetOfFilesEdges) OutputGroupOrErr() (*OutputGroup, error) { + if e.OutputGroup != nil { return e.OutputGroup, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: outputgroup.Label} } return nil, &NotLoadedError{edge: "output_group"} } @@ -79,6 +82,8 @@ func (*NamedSetOfFiles) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case namedsetoffiles.ForeignKeys[0]: // named_set_of_files_file_sets values[i] = new(sql.NullInt64) + case namedsetoffiles.ForeignKeys[1]: // output_group_file_sets + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -107,6 +112,13 @@ func (nsof *NamedSetOfFiles) assignValues(columns []string, values []any) error nsof.named_set_of_files_file_sets = new(int) *nsof.named_set_of_files_file_sets = int(value.Int64) } + case namedsetoffiles.ForeignKeys[1]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field output_group_file_sets", value) + } else if value.Valid { + nsof.output_group_file_sets = new(int) + *nsof.output_group_file_sets = int(value.Int64) + } default: nsof.selectValues.Set(columns[i], values[i]) } @@ -162,30 +174,6 @@ func (nsof *NamedSetOfFiles) String() string { return builder.String() } -// NamedOutputGroup returns the OutputGroup named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (nsof *NamedSetOfFiles) NamedOutputGroup(name string) ([]*OutputGroup, error) { - if nsof.Edges.namedOutputGroup == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := nsof.Edges.namedOutputGroup[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (nsof *NamedSetOfFiles) appendNamedOutputGroup(name string, edges ...*OutputGroup) { - if nsof.Edges.namedOutputGroup == nil { - nsof.Edges.namedOutputGroup = make(map[string][]*OutputGroup) - } - if len(edges) == 0 { - nsof.Edges.namedOutputGroup[name] = []*OutputGroup{} - } else { - nsof.Edges.namedOutputGroup[name] = append(nsof.Edges.namedOutputGroup[name], edges...) - } -} - // NamedFiles returns the Files named value or an error if the edge was not // loaded in eager-loading with this name. func (nsof *NamedSetOfFiles) NamedFiles(name string) ([]*TestFile, error) { diff --git a/ent/gen/ent/namedsetoffiles/namedsetoffiles.go b/ent/gen/ent/namedsetoffiles/namedsetoffiles.go index 2f668ee..f162380 100644 --- a/ent/gen/ent/namedsetoffiles/namedsetoffiles.go +++ b/ent/gen/ent/namedsetoffiles/namedsetoffiles.go @@ -21,7 +21,7 @@ const ( // Table holds the table name of the namedsetoffiles in the database. Table = "named_set_of_files" // OutputGroupTable is the table that holds the output_group relation/edge. - OutputGroupTable = "output_groups" + OutputGroupTable = "named_set_of_files" // OutputGroupInverseTable is the table name for the OutputGroup entity. // It exists in this package in order to avoid circular dependency with the "outputgroup" package. OutputGroupInverseTable = "output_groups" @@ -49,6 +49,7 @@ var Columns = []string{ // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ "named_set_of_files_file_sets", + "output_group_file_sets", } // ValidColumn reports if the column name is valid (part of the table columns). @@ -74,17 +75,10 @@ func ByID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldID, opts...).ToFunc() } -// ByOutputGroupCount orders the results by output_group count. -func ByOutputGroupCount(opts ...sql.OrderTermOption) OrderOption { +// ByOutputGroupField orders the results by output_group field. +func ByOutputGroupField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newOutputGroupStep(), opts...) - } -} - -// ByOutputGroup orders the results by output_group terms. -func ByOutputGroup(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newOutputGroupStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newOutputGroupStep(), sql.OrderByField(field, opts...)) } } @@ -112,7 +106,7 @@ func newOutputGroupStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(OutputGroupInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, OutputGroupTable, OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, true, OutputGroupTable, OutputGroupColumn), ) } func newFilesStep() *sqlgraph.Step { diff --git a/ent/gen/ent/namedsetoffiles/where.go b/ent/gen/ent/namedsetoffiles/where.go index c83617c..deea37c 100644 --- a/ent/gen/ent/namedsetoffiles/where.go +++ b/ent/gen/ent/namedsetoffiles/where.go @@ -58,7 +58,7 @@ func HasOutputGroup() predicate.NamedSetOfFiles { return predicate.NamedSetOfFiles(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, OutputGroupTable, OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, true, OutputGroupTable, OutputGroupColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/namedsetoffiles_create.go b/ent/gen/ent/namedsetoffiles_create.go index 053cc0c..777d210 100644 --- a/ent/gen/ent/namedsetoffiles_create.go +++ b/ent/gen/ent/namedsetoffiles_create.go @@ -20,19 +20,23 @@ type NamedSetOfFilesCreate struct { hooks []Hook } -// AddOutputGroupIDs adds the "output_group" edge to the OutputGroup entity by IDs. -func (nsofc *NamedSetOfFilesCreate) AddOutputGroupIDs(ids ...int) *NamedSetOfFilesCreate { - nsofc.mutation.AddOutputGroupIDs(ids...) +// SetOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID. +func (nsofc *NamedSetOfFilesCreate) SetOutputGroupID(id int) *NamedSetOfFilesCreate { + nsofc.mutation.SetOutputGroupID(id) return nsofc } -// AddOutputGroup adds the "output_group" edges to the OutputGroup entity. -func (nsofc *NamedSetOfFilesCreate) AddOutputGroup(o ...*OutputGroup) *NamedSetOfFilesCreate { - ids := make([]int, len(o)) - for i := range o { - ids[i] = o[i].ID +// SetNillableOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID if the given value is not nil. +func (nsofc *NamedSetOfFilesCreate) SetNillableOutputGroupID(id *int) *NamedSetOfFilesCreate { + if id != nil { + nsofc = nsofc.SetOutputGroupID(*id) } - return nsofc.AddOutputGroupIDs(ids...) + return nsofc +} + +// SetOutputGroup sets the "output_group" edge to the OutputGroup entity. +func (nsofc *NamedSetOfFilesCreate) SetOutputGroup(o *OutputGroup) *NamedSetOfFilesCreate { + return nsofc.SetOutputGroupID(o.ID) } // AddFileIDs adds the "files" edge to the TestFile entity by IDs. 
@@ -131,7 +135,7 @@ func (nsofc *NamedSetOfFilesCreate) createSpec() (*NamedSetOfFiles, *sqlgraph.Cr ) if nodes := nsofc.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: namedsetoffiles.OutputGroupTable, Columns: []string{namedsetoffiles.OutputGroupColumn}, @@ -143,6 +147,7 @@ func (nsofc *NamedSetOfFilesCreate) createSpec() (*NamedSetOfFiles, *sqlgraph.Cr for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.output_group_file_sets = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := nsofc.mutation.FilesIDs(); len(nodes) > 0 { diff --git a/ent/gen/ent/namedsetoffiles_query.go b/ent/gen/ent/namedsetoffiles_query.go index f7813df..0989b3a 100644 --- a/ent/gen/ent/namedsetoffiles_query.go +++ b/ent/gen/ent/namedsetoffiles_query.go @@ -20,18 +20,17 @@ import ( // NamedSetOfFilesQuery is the builder for querying NamedSetOfFiles entities. type NamedSetOfFilesQuery struct { config - ctx *QueryContext - order []namedsetoffiles.OrderOption - inters []Interceptor - predicates []predicate.NamedSetOfFiles - withOutputGroup *OutputGroupQuery - withFiles *TestFileQuery - withFileSets *NamedSetOfFilesQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*NamedSetOfFiles) error - withNamedOutputGroup map[string]*OutputGroupQuery - withNamedFiles map[string]*TestFileQuery + ctx *QueryContext + order []namedsetoffiles.OrderOption + inters []Interceptor + predicates []predicate.NamedSetOfFiles + withOutputGroup *OutputGroupQuery + withFiles *TestFileQuery + withFileSets *NamedSetOfFilesQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*NamedSetOfFiles) error + withNamedFiles map[string]*TestFileQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -82,7 +81,7 @@ func (nsofq *NamedSetOfFilesQuery) QueryOutputGroup() *OutputGroupQuery { step := sqlgraph.NewStep( sqlgraph.From(namedsetoffiles.Table, namedsetoffiles.FieldID, selector), sqlgraph.To(outputgroup.Table, outputgroup.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, namedsetoffiles.OutputGroupTable, namedsetoffiles.OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, true, namedsetoffiles.OutputGroupTable, namedsetoffiles.OutputGroupColumn), ) fromU = sqlgraph.SetNeighbors(nsofq.driver.Dialect(), step) return fromU, nil @@ -431,7 +430,7 @@ func (nsofq *NamedSetOfFilesQuery) sqlAll(ctx context.Context, hooks ...queryHoo nsofq.withFileSets != nil, } ) - if nsofq.withFileSets != nil { + if nsofq.withOutputGroup != nil || nsofq.withFileSets != nil { withFKs = true } if withFKs { @@ -459,9 +458,8 @@ func (nsofq *NamedSetOfFilesQuery) sqlAll(ctx context.Context, hooks ...queryHoo return nodes, nil } if query := nsofq.withOutputGroup; query != nil { - if err := nsofq.loadOutputGroup(ctx, query, nodes, - func(n *NamedSetOfFiles) { n.Edges.OutputGroup = []*OutputGroup{} }, - func(n *NamedSetOfFiles, e *OutputGroup) { n.Edges.OutputGroup = append(n.Edges.OutputGroup, e) }); err != nil { + if err := nsofq.loadOutputGroup(ctx, query, nodes, nil, + func(n *NamedSetOfFiles, e *OutputGroup) { n.Edges.OutputGroup = e }); err != nil { return nil, err } } @@ -478,13 +476,6 @@ func (nsofq *NamedSetOfFilesQuery) sqlAll(ctx context.Context, hooks ...queryHoo return nil, err } } - for name, query := range nsofq.withNamedOutputGroup { - if err := nsofq.loadOutputGroup(ctx, query, nodes, - func(n *NamedSetOfFiles) { n.appendNamedOutputGroup(name) }, - func(n *NamedSetOfFiles, e *OutputGroup) { n.appendNamedOutputGroup(name, e) }); err != nil { - return nil, err - } - } for name, query := range nsofq.withNamedFiles { if err := nsofq.loadFiles(ctx, query, nodes, func(n *NamedSetOfFiles) { n.appendNamedFiles(name) }, @@ -501,33 +492,34 @@ func (nsofq *NamedSetOfFilesQuery) sqlAll(ctx context.Context, hooks ...queryHoo } func (nsofq *NamedSetOfFilesQuery) loadOutputGroup(ctx context.Context, query *OutputGroupQuery, nodes []*NamedSetOfFiles, init func(*NamedSetOfFiles), assign func(*NamedSetOfFiles, *OutputGroup)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*NamedSetOfFiles) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*NamedSetOfFiles) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].output_group_file_sets == nil { + continue + } + fk := *nodes[i].output_group_file_sets + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.OutputGroup(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(namedsetoffiles.OutputGroupColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(outputgroup.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.output_group_file_sets - if fk == nil { - return fmt.Errorf(`foreign-key "output_group_file_sets" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "output_group_file_sets" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "output_group_file_sets" returned %v`, 
n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } @@ -679,20 +671,6 @@ func (nsofq *NamedSetOfFilesQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedOutputGroup tells the query-builder to eager-load the nodes that are connected to the "output_group" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (nsofq *NamedSetOfFilesQuery) WithNamedOutputGroup(name string, opts ...func(*OutputGroupQuery)) *NamedSetOfFilesQuery { - query := (&OutputGroupClient{config: nsofq.config}).Query() - for _, opt := range opts { - opt(query) - } - if nsofq.withNamedOutputGroup == nil { - nsofq.withNamedOutputGroup = make(map[string]*OutputGroupQuery) - } - nsofq.withNamedOutputGroup[name] = query - return nsofq -} - // WithNamedFiles tells the query-builder to eager-load the nodes that are connected to the "files" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (nsofq *NamedSetOfFilesQuery) WithNamedFiles(name string, opts ...func(*TestFileQuery)) *NamedSetOfFilesQuery { diff --git a/ent/gen/ent/namedsetoffiles_update.go b/ent/gen/ent/namedsetoffiles_update.go index b15c254..f870f76 100644 --- a/ent/gen/ent/namedsetoffiles_update.go +++ b/ent/gen/ent/namedsetoffiles_update.go @@ -29,19 +29,23 @@ func (nsofu *NamedSetOfFilesUpdate) Where(ps ...predicate.NamedSetOfFiles) *Name return nsofu } -// AddOutputGroupIDs adds the "output_group" edge to the OutputGroup entity by IDs. -func (nsofu *NamedSetOfFilesUpdate) AddOutputGroupIDs(ids ...int) *NamedSetOfFilesUpdate { - nsofu.mutation.AddOutputGroupIDs(ids...) +// SetOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID. +func (nsofu *NamedSetOfFilesUpdate) SetOutputGroupID(id int) *NamedSetOfFilesUpdate { + nsofu.mutation.SetOutputGroupID(id) return nsofu } -// AddOutputGroup adds the "output_group" edges to the OutputGroup entity. -func (nsofu *NamedSetOfFilesUpdate) AddOutputGroup(o ...*OutputGroup) *NamedSetOfFilesUpdate { - ids := make([]int, len(o)) - for i := range o { - ids[i] = o[i].ID +// SetNillableOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID if the given value is not nil. +func (nsofu *NamedSetOfFilesUpdate) SetNillableOutputGroupID(id *int) *NamedSetOfFilesUpdate { + if id != nil { + nsofu = nsofu.SetOutputGroupID(*id) } - return nsofu.AddOutputGroupIDs(ids...) + return nsofu +} + +// SetOutputGroup sets the "output_group" edge to the OutputGroup entity. +func (nsofu *NamedSetOfFilesUpdate) SetOutputGroup(o *OutputGroup) *NamedSetOfFilesUpdate { + return nsofu.SetOutputGroupID(o.ID) } // AddFileIDs adds the "files" edge to the TestFile entity by IDs. @@ -83,27 +87,12 @@ func (nsofu *NamedSetOfFilesUpdate) Mutation() *NamedSetOfFilesMutation { return nsofu.mutation } -// ClearOutputGroup clears all "output_group" edges to the OutputGroup entity. +// ClearOutputGroup clears the "output_group" edge to the OutputGroup entity. func (nsofu *NamedSetOfFilesUpdate) ClearOutputGroup() *NamedSetOfFilesUpdate { nsofu.mutation.ClearOutputGroup() return nsofu } -// RemoveOutputGroupIDs removes the "output_group" edge to OutputGroup entities by IDs. -func (nsofu *NamedSetOfFilesUpdate) RemoveOutputGroupIDs(ids ...int) *NamedSetOfFilesUpdate { - nsofu.mutation.RemoveOutputGroupIDs(ids...) - return nsofu -} - -// RemoveOutputGroup removes "output_group" edges to OutputGroup entities. 
-func (nsofu *NamedSetOfFilesUpdate) RemoveOutputGroup(o ...*OutputGroup) *NamedSetOfFilesUpdate { - ids := make([]int, len(o)) - for i := range o { - ids[i] = o[i].ID - } - return nsofu.RemoveOutputGroupIDs(ids...) -} - // ClearFiles clears all "files" edges to the TestFile entity. func (nsofu *NamedSetOfFilesUpdate) ClearFiles() *NamedSetOfFilesUpdate { nsofu.mutation.ClearFiles() @@ -169,20 +158,7 @@ func (nsofu *NamedSetOfFilesUpdate) sqlSave(ctx context.Context) (n int, err err } if nsofu.mutation.OutputGroupCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: namedsetoffiles.OutputGroupTable, - Columns: []string{namedsetoffiles.OutputGroupColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(outputgroup.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := nsofu.mutation.RemovedOutputGroupIDs(); len(nodes) > 0 && !nsofu.mutation.OutputGroupCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: namedsetoffiles.OutputGroupTable, Columns: []string{namedsetoffiles.OutputGroupColumn}, @@ -191,14 +167,11 @@ func (nsofu *NamedSetOfFilesUpdate) sqlSave(ctx context.Context) (n int, err err IDSpec: sqlgraph.NewFieldSpec(outputgroup.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := nsofu.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: namedsetoffiles.OutputGroupTable, Columns: []string{namedsetoffiles.OutputGroupColumn}, @@ -306,19 +279,23 @@ type NamedSetOfFilesUpdateOne struct { mutation *NamedSetOfFilesMutation } -// AddOutputGroupIDs adds the "output_group" edge to the OutputGroup entity by IDs. -func (nsofuo *NamedSetOfFilesUpdateOne) AddOutputGroupIDs(ids ...int) *NamedSetOfFilesUpdateOne { - nsofuo.mutation.AddOutputGroupIDs(ids...) +// SetOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID. +func (nsofuo *NamedSetOfFilesUpdateOne) SetOutputGroupID(id int) *NamedSetOfFilesUpdateOne { + nsofuo.mutation.SetOutputGroupID(id) return nsofuo } -// AddOutputGroup adds the "output_group" edges to the OutputGroup entity. -func (nsofuo *NamedSetOfFilesUpdateOne) AddOutputGroup(o ...*OutputGroup) *NamedSetOfFilesUpdateOne { - ids := make([]int, len(o)) - for i := range o { - ids[i] = o[i].ID +// SetNillableOutputGroupID sets the "output_group" edge to the OutputGroup entity by ID if the given value is not nil. +func (nsofuo *NamedSetOfFilesUpdateOne) SetNillableOutputGroupID(id *int) *NamedSetOfFilesUpdateOne { + if id != nil { + nsofuo = nsofuo.SetOutputGroupID(*id) } - return nsofuo.AddOutputGroupIDs(ids...) + return nsofuo +} + +// SetOutputGroup sets the "output_group" edge to the OutputGroup entity. +func (nsofuo *NamedSetOfFilesUpdateOne) SetOutputGroup(o *OutputGroup) *NamedSetOfFilesUpdateOne { + return nsofuo.SetOutputGroupID(o.ID) } // AddFileIDs adds the "files" edge to the TestFile entity by IDs. @@ -360,27 +337,12 @@ func (nsofuo *NamedSetOfFilesUpdateOne) Mutation() *NamedSetOfFilesMutation { return nsofuo.mutation } -// ClearOutputGroup clears all "output_group" edges to the OutputGroup entity. +// ClearOutputGroup clears the "output_group" edge to the OutputGroup entity. 
func (nsofuo *NamedSetOfFilesUpdateOne) ClearOutputGroup() *NamedSetOfFilesUpdateOne { nsofuo.mutation.ClearOutputGroup() return nsofuo } -// RemoveOutputGroupIDs removes the "output_group" edge to OutputGroup entities by IDs. -func (nsofuo *NamedSetOfFilesUpdateOne) RemoveOutputGroupIDs(ids ...int) *NamedSetOfFilesUpdateOne { - nsofuo.mutation.RemoveOutputGroupIDs(ids...) - return nsofuo -} - -// RemoveOutputGroup removes "output_group" edges to OutputGroup entities. -func (nsofuo *NamedSetOfFilesUpdateOne) RemoveOutputGroup(o ...*OutputGroup) *NamedSetOfFilesUpdateOne { - ids := make([]int, len(o)) - for i := range o { - ids[i] = o[i].ID - } - return nsofuo.RemoveOutputGroupIDs(ids...) -} - // ClearFiles clears all "files" edges to the TestFile entity. func (nsofuo *NamedSetOfFilesUpdateOne) ClearFiles() *NamedSetOfFilesUpdateOne { nsofuo.mutation.ClearFiles() @@ -476,20 +438,7 @@ func (nsofuo *NamedSetOfFilesUpdateOne) sqlSave(ctx context.Context) (_node *Nam } if nsofuo.mutation.OutputGroupCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: namedsetoffiles.OutputGroupTable, - Columns: []string{namedsetoffiles.OutputGroupColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(outputgroup.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := nsofuo.mutation.RemovedOutputGroupIDs(); len(nodes) > 0 && !nsofuo.mutation.OutputGroupCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: namedsetoffiles.OutputGroupTable, Columns: []string{namedsetoffiles.OutputGroupColumn}, @@ -498,14 +447,11 @@ func (nsofuo *NamedSetOfFilesUpdateOne) sqlSave(ctx context.Context) (_node *Nam IDSpec: sqlgraph.NewFieldSpec(outputgroup.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := nsofuo.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: namedsetoffiles.OutputGroupTable, Columns: []string{namedsetoffiles.OutputGroupColumn}, diff --git a/ent/gen/ent/networkmetrics.go b/ent/gen/ent/networkmetrics.go index 11f8e74..2958fd7 100644 --- a/ent/gen/ent/networkmetrics.go +++ b/ent/gen/ent/networkmetrics.go @@ -8,7 +8,9 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" "github.com/buildbarn/bb-portal/ent/gen/ent/networkmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/systemnetworkstats" ) // NetworkMetrics is the model entity for the NetworkMetrics schema. @@ -18,40 +20,42 @@ type NetworkMetrics struct { ID int `json:"id,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the NetworkMetricsQuery when eager-loading is set. - Edges NetworkMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges NetworkMetricsEdges `json:"edges"` + metrics_network_metrics *int + selectValues sql.SelectValues } // NetworkMetricsEdges holds the relations/edges for other nodes in the graph. type NetworkMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // SystemNetworkStats holds the value of the system_network_stats edge. 
- SystemNetworkStats []*SystemNetworkStats `json:"system_network_stats,omitempty"` + SystemNetworkStats *SystemNetworkStats `json:"system_network_stats,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [2]bool // totalCount holds the count of the edges above. totalCount [2]map[string]int - - namedMetrics map[string][]*Metrics - namedSystemNetworkStats map[string][]*SystemNetworkStats } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e NetworkMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e NetworkMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } // SystemNetworkStatsOrErr returns the SystemNetworkStats value or an error if the edge -// was not loaded in eager-loading. -func (e NetworkMetricsEdges) SystemNetworkStatsOrErr() ([]*SystemNetworkStats, error) { - if e.loadedTypes[1] { +// was not loaded in eager-loading, or loaded but was not found. +func (e NetworkMetricsEdges) SystemNetworkStatsOrErr() (*SystemNetworkStats, error) { + if e.SystemNetworkStats != nil { return e.SystemNetworkStats, nil + } else if e.loadedTypes[1] { + return nil, &NotFoundError{label: systemnetworkstats.Label} } return nil, &NotLoadedError{edge: "system_network_stats"} } @@ -63,6 +67,8 @@ func (*NetworkMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case networkmetrics.FieldID: values[i] = new(sql.NullInt64) + case networkmetrics.ForeignKeys[0]: // metrics_network_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -84,6 +90,13 @@ func (nm *NetworkMetrics) assignValues(columns []string, values []any) error { return fmt.Errorf("unexpected type %T for field id", value) } nm.ID = int(value.Int64) + case networkmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_network_metrics", value) + } else if value.Valid { + nm.metrics_network_metrics = new(int) + *nm.metrics_network_metrics = int(value.Int64) + } default: nm.selectValues.Set(columns[i], values[i]) } @@ -134,53 +147,5 @@ func (nm *NetworkMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (nm *NetworkMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if nm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := nm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (nm *NetworkMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if nm.Edges.namedMetrics == nil { - nm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - nm.Edges.namedMetrics[name] = []*Metrics{} - } else { - nm.Edges.namedMetrics[name] = append(nm.Edges.namedMetrics[name], edges...) - } -} - -// NamedSystemNetworkStats returns the SystemNetworkStats named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (nm *NetworkMetrics) NamedSystemNetworkStats(name string) ([]*SystemNetworkStats, error) { - if nm.Edges.namedSystemNetworkStats == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := nm.Edges.namedSystemNetworkStats[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (nm *NetworkMetrics) appendNamedSystemNetworkStats(name string, edges ...*SystemNetworkStats) { - if nm.Edges.namedSystemNetworkStats == nil { - nm.Edges.namedSystemNetworkStats = make(map[string][]*SystemNetworkStats) - } - if len(edges) == 0 { - nm.Edges.namedSystemNetworkStats[name] = []*SystemNetworkStats{} - } else { - nm.Edges.namedSystemNetworkStats[name] = append(nm.Edges.namedSystemNetworkStats[name], edges...) - } -} - // NetworkMetricsSlice is a parsable slice of NetworkMetrics. type NetworkMetricsSlice []*NetworkMetrics diff --git a/ent/gen/ent/networkmetrics/networkmetrics.go b/ent/gen/ent/networkmetrics/networkmetrics.go index f0f7d90..912d47f 100644 --- a/ent/gen/ent/networkmetrics/networkmetrics.go +++ b/ent/gen/ent/networkmetrics/networkmetrics.go @@ -18,11 +18,13 @@ const ( EdgeSystemNetworkStats = "system_network_stats" // Table holds the table name of the networkmetrics in the database. Table = "network_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_network_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "network_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_network_metrics" // SystemNetworkStatsTable is the table that holds the system_network_stats relation/edge. SystemNetworkStatsTable = "system_network_stats" // SystemNetworkStatsInverseTable is the table name for the SystemNetworkStats entity. @@ -37,11 +39,11 @@ var Columns = []string{ FieldID, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "network_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "network_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_network_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -50,6 +52,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -61,44 +68,30 @@ func ByID(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldID, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) - } -} - -// BySystemNetworkStatsCount orders the results by system_network_stats count. 
-func BySystemNetworkStatsCount(opts ...sql.OrderTermOption) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newSystemNetworkStatsStep(), opts...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } -// BySystemNetworkStats orders the results by system_network_stats terms. -func BySystemNetworkStats(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// BySystemNetworkStatsField orders the results by system_network_stats field. +func BySystemNetworkStatsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newSystemNetworkStatsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newSystemNetworkStatsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newSystemNetworkStatsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(SystemNetworkStatsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, SystemNetworkStatsTable, SystemNetworkStatsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, SystemNetworkStatsTable, SystemNetworkStatsColumn), ) } diff --git a/ent/gen/ent/networkmetrics/where.go b/ent/gen/ent/networkmetrics/where.go index 803fb7f..1c3b72c 100644 --- a/ent/gen/ent/networkmetrics/where.go +++ b/ent/gen/ent/networkmetrics/where.go @@ -58,7 +58,7 @@ func HasMetrics() predicate.NetworkMetrics { return predicate.NetworkMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -81,7 +81,7 @@ func HasSystemNetworkStats() predicate.NetworkMetrics { return predicate.NetworkMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, SystemNetworkStatsTable, SystemNetworkStatsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, SystemNetworkStatsTable, SystemNetworkStatsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/networkmetrics_create.go b/ent/gen/ent/networkmetrics_create.go index d216587..c3c61c0 100644 --- a/ent/gen/ent/networkmetrics_create.go +++ b/ent/gen/ent/networkmetrics_create.go @@ -20,34 +20,42 @@ type NetworkMetricsCreate struct { hooks []Hook } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (nmc *NetworkMetricsCreate) AddMetricIDs(ids ...int) *NetworkMetricsCreate { - nmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (nmc *NetworkMetricsCreate) SetMetricsID(id int) *NetworkMetricsCreate { + nmc.mutation.SetMetricsID(id) return nmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (nmc *NetworkMetricsCreate) AddMetrics(m ...*Metrics) *NetworkMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. 
+func (nmc *NetworkMetricsCreate) SetNillableMetricsID(id *int) *NetworkMetricsCreate { + if id != nil { + nmc = nmc.SetMetricsID(*id) } - return nmc.AddMetricIDs(ids...) + return nmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (nmc *NetworkMetricsCreate) SetMetrics(m *Metrics) *NetworkMetricsCreate { + return nmc.SetMetricsID(m.ID) } -// AddSystemNetworkStatIDs adds the "system_network_stats" edge to the SystemNetworkStats entity by IDs. -func (nmc *NetworkMetricsCreate) AddSystemNetworkStatIDs(ids ...int) *NetworkMetricsCreate { - nmc.mutation.AddSystemNetworkStatIDs(ids...) +// SetSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID. +func (nmc *NetworkMetricsCreate) SetSystemNetworkStatsID(id int) *NetworkMetricsCreate { + nmc.mutation.SetSystemNetworkStatsID(id) return nmc } -// AddSystemNetworkStats adds the "system_network_stats" edges to the SystemNetworkStats entity. -func (nmc *NetworkMetricsCreate) AddSystemNetworkStats(s ...*SystemNetworkStats) *NetworkMetricsCreate { - ids := make([]int, len(s)) - for i := range s { - ids[i] = s[i].ID +// SetNillableSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID if the given value is not nil. +func (nmc *NetworkMetricsCreate) SetNillableSystemNetworkStatsID(id *int) *NetworkMetricsCreate { + if id != nil { + nmc = nmc.SetSystemNetworkStatsID(*id) } - return nmc.AddSystemNetworkStatIDs(ids...) + return nmc +} + +// SetSystemNetworkStats sets the "system_network_stats" edge to the SystemNetworkStats entity. +func (nmc *NetworkMetricsCreate) SetSystemNetworkStats(s *SystemNetworkStats) *NetworkMetricsCreate { + return nmc.SetSystemNetworkStatsID(s.ID) } // Mutation returns the NetworkMetricsMutation object of the builder. @@ -112,10 +120,10 @@ func (nmc *NetworkMetricsCreate) createSpec() (*NetworkMetrics, *sqlgraph.Create ) if nodes := nmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, + Columns: []string{networkmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -124,11 +132,12 @@ func (nmc *NetworkMetricsCreate) createSpec() (*NetworkMetrics, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_network_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := nmc.mutation.SystemNetworkStatsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: networkmetrics.SystemNetworkStatsTable, Columns: []string{networkmetrics.SystemNetworkStatsColumn}, diff --git a/ent/gen/ent/networkmetrics_query.go b/ent/gen/ent/networkmetrics_query.go index 9bf0745..507a0b6 100644 --- a/ent/gen/ent/networkmetrics_query.go +++ b/ent/gen/ent/networkmetrics_query.go @@ -20,16 +20,15 @@ import ( // NetworkMetricsQuery is the builder for querying NetworkMetrics entities. 
type NetworkMetricsQuery struct { config - ctx *QueryContext - order []networkmetrics.OrderOption - inters []Interceptor - predicates []predicate.NetworkMetrics - withMetrics *MetricsQuery - withSystemNetworkStats *SystemNetworkStatsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*NetworkMetrics) error - withNamedMetrics map[string]*MetricsQuery - withNamedSystemNetworkStats map[string]*SystemNetworkStatsQuery + ctx *QueryContext + order []networkmetrics.OrderOption + inters []Interceptor + predicates []predicate.NetworkMetrics + withMetrics *MetricsQuery + withSystemNetworkStats *SystemNetworkStatsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*NetworkMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -80,7 +79,7 @@ func (nmq *NetworkMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(networkmetrics.Table, networkmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, networkmetrics.MetricsTable, networkmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, networkmetrics.MetricsTable, networkmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(nmq.driver.Dialect(), step) return fromU, nil @@ -102,7 +101,7 @@ func (nmq *NetworkMetricsQuery) QuerySystemNetworkStats() *SystemNetworkStatsQue step := sqlgraph.NewStep( sqlgraph.From(networkmetrics.Table, networkmetrics.FieldID, selector), sqlgraph.To(systemnetworkstats.Table, systemnetworkstats.FieldID), - sqlgraph.Edge(sqlgraph.O2M, false, networkmetrics.SystemNetworkStatsTable, networkmetrics.SystemNetworkStatsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, networkmetrics.SystemNetworkStatsTable, networkmetrics.SystemNetworkStatsColumn), ) fromU = sqlgraph.SetNeighbors(nmq.driver.Dialect(), step) return fromU, nil @@ -387,12 +386,19 @@ func (nmq *NetworkMetricsQuery) prepareQuery(ctx context.Context) error { func (nmq *NetworkMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*NetworkMetrics, error) { var ( nodes = []*NetworkMetrics{} + withFKs = nmq.withFKs _spec = nmq.querySpec() loadedTypes = [2]bool{ nmq.withMetrics != nil, nmq.withSystemNetworkStats != nil, } ) + if nmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, networkmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*NetworkMetrics).scanValues(nil, columns) } @@ -415,32 +421,14 @@ func (nmq *NetworkMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := nmq.withMetrics; query != nil { - if err := nmq.loadMetrics(ctx, query, nodes, - func(n *NetworkMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *NetworkMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := nmq.loadMetrics(ctx, query, nodes, nil, + func(n *NetworkMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } if query := nmq.withSystemNetworkStats; query != nil { - if err := nmq.loadSystemNetworkStats(ctx, query, nodes, - func(n *NetworkMetrics) { n.Edges.SystemNetworkStats = []*SystemNetworkStats{} }, - func(n *NetworkMetrics, e *SystemNetworkStats) { - n.Edges.SystemNetworkStats = append(n.Edges.SystemNetworkStats, e) - }); err != nil { - return nil, err - } - } - for name, query := range nmq.withNamedMetrics { - if err := nmq.loadMetrics(ctx, query, nodes, - func(n *NetworkMetrics) { n.appendNamedMetrics(name) }, - func(n *NetworkMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } - for name, query := range nmq.withNamedSystemNetworkStats { - if err := nmq.loadSystemNetworkStats(ctx, query, nodes, - func(n *NetworkMetrics) { n.appendNamedSystemNetworkStats(name) }, - func(n *NetworkMetrics, e *SystemNetworkStats) { n.appendNamedSystemNetworkStats(name, e) }); err != nil { + if err := nmq.loadSystemNetworkStats(ctx, query, nodes, nil, + func(n *NetworkMetrics, e *SystemNetworkStats) { n.Edges.SystemNetworkStats = e }); err != nil { return nil, err } } @@ -453,62 +441,33 @@ func (nmq *NetworkMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (nmq *NetworkMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*NetworkMetrics, init func(*NetworkMetrics), assign func(*NetworkMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*NetworkMetrics) - nids := make(map[int]map[*NetworkMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*NetworkMetrics) + for i := range nodes { + if nodes[i].metrics_network_metrics == nil { + continue } + fk := *nodes[i].metrics_network_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(networkmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(networkmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(networkmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(networkmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*NetworkMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_network_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -519,9 +478,6 @@ func (nmq *NetworkMetricsQuery) loadSystemNetworkStats(ctx context.Context, quer for i := range nodes { fks = append(fks, nodes[i].ID) nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) - } } query.withFKs = true query.Where(predicate.SystemNetworkStats(func(s *sql.Selector) { @@ -629,34 +585,6 @@ func (nmq *NetworkMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (nmq *NetworkMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *NetworkMetricsQuery { - query := (&MetricsClient{config: nmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if nmq.withNamedMetrics == nil { - nmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - nmq.withNamedMetrics[name] = query - return nmq -} - -// WithNamedSystemNetworkStats tells the query-builder to eager-load the nodes that are connected to the "system_network_stats" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (nmq *NetworkMetricsQuery) WithNamedSystemNetworkStats(name string, opts ...func(*SystemNetworkStatsQuery)) *NetworkMetricsQuery { - query := (&SystemNetworkStatsClient{config: nmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if nmq.withNamedSystemNetworkStats == nil { - nmq.withNamedSystemNetworkStats = make(map[string]*SystemNetworkStatsQuery) - } - nmq.withNamedSystemNetworkStats[name] = query - return nmq -} - // NetworkMetricsGroupBy is the group-by builder for NetworkMetrics entities. 
type NetworkMetricsGroupBy struct { selector diff --git a/ent/gen/ent/networkmetrics_update.go b/ent/gen/ent/networkmetrics_update.go index 84e4768..f9c23b8 100644 --- a/ent/gen/ent/networkmetrics_update.go +++ b/ent/gen/ent/networkmetrics_update.go @@ -29,34 +29,42 @@ func (nmu *NetworkMetricsUpdate) Where(ps ...predicate.NetworkMetrics) *NetworkM return nmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (nmu *NetworkMetricsUpdate) AddMetricIDs(ids ...int) *NetworkMetricsUpdate { - nmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (nmu *NetworkMetricsUpdate) SetMetricsID(id int) *NetworkMetricsUpdate { + nmu.mutation.SetMetricsID(id) return nmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (nmu *NetworkMetricsUpdate) AddMetrics(m ...*Metrics) *NetworkMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (nmu *NetworkMetricsUpdate) SetNillableMetricsID(id *int) *NetworkMetricsUpdate { + if id != nil { + nmu = nmu.SetMetricsID(*id) } - return nmu.AddMetricIDs(ids...) + return nmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (nmu *NetworkMetricsUpdate) SetMetrics(m *Metrics) *NetworkMetricsUpdate { + return nmu.SetMetricsID(m.ID) } -// AddSystemNetworkStatIDs adds the "system_network_stats" edge to the SystemNetworkStats entity by IDs. -func (nmu *NetworkMetricsUpdate) AddSystemNetworkStatIDs(ids ...int) *NetworkMetricsUpdate { - nmu.mutation.AddSystemNetworkStatIDs(ids...) +// SetSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID. +func (nmu *NetworkMetricsUpdate) SetSystemNetworkStatsID(id int) *NetworkMetricsUpdate { + nmu.mutation.SetSystemNetworkStatsID(id) return nmu } -// AddSystemNetworkStats adds the "system_network_stats" edges to the SystemNetworkStats entity. -func (nmu *NetworkMetricsUpdate) AddSystemNetworkStats(s ...*SystemNetworkStats) *NetworkMetricsUpdate { - ids := make([]int, len(s)) - for i := range s { - ids[i] = s[i].ID +// SetNillableSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID if the given value is not nil. +func (nmu *NetworkMetricsUpdate) SetNillableSystemNetworkStatsID(id *int) *NetworkMetricsUpdate { + if id != nil { + nmu = nmu.SetSystemNetworkStatsID(*id) } - return nmu.AddSystemNetworkStatIDs(ids...) + return nmu +} + +// SetSystemNetworkStats sets the "system_network_stats" edge to the SystemNetworkStats entity. +func (nmu *NetworkMetricsUpdate) SetSystemNetworkStats(s *SystemNetworkStats) *NetworkMetricsUpdate { + return nmu.SetSystemNetworkStatsID(s.ID) } // Mutation returns the NetworkMetricsMutation object of the builder. @@ -64,48 +72,18 @@ func (nmu *NetworkMetricsUpdate) Mutation() *NetworkMetricsMutation { return nmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (nmu *NetworkMetricsUpdate) ClearMetrics() *NetworkMetricsUpdate { nmu.mutation.ClearMetrics() return nmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (nmu *NetworkMetricsUpdate) RemoveMetricIDs(ids ...int) *NetworkMetricsUpdate { - nmu.mutation.RemoveMetricIDs(ids...) - return nmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. 
-func (nmu *NetworkMetricsUpdate) RemoveMetrics(m ...*Metrics) *NetworkMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return nmu.RemoveMetricIDs(ids...) -} - -// ClearSystemNetworkStats clears all "system_network_stats" edges to the SystemNetworkStats entity. +// ClearSystemNetworkStats clears the "system_network_stats" edge to the SystemNetworkStats entity. func (nmu *NetworkMetricsUpdate) ClearSystemNetworkStats() *NetworkMetricsUpdate { nmu.mutation.ClearSystemNetworkStats() return nmu } -// RemoveSystemNetworkStatIDs removes the "system_network_stats" edge to SystemNetworkStats entities by IDs. -func (nmu *NetworkMetricsUpdate) RemoveSystemNetworkStatIDs(ids ...int) *NetworkMetricsUpdate { - nmu.mutation.RemoveSystemNetworkStatIDs(ids...) - return nmu -} - -// RemoveSystemNetworkStats removes "system_network_stats" edges to SystemNetworkStats entities. -func (nmu *NetworkMetricsUpdate) RemoveSystemNetworkStats(s ...*SystemNetworkStats) *NetworkMetricsUpdate { - ids := make([]int, len(s)) - for i := range s { - ids[i] = s[i].ID - } - return nmu.RemoveSystemNetworkStatIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (nmu *NetworkMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, nmu.sqlSave, nmu.mutation, nmu.hooks) @@ -144,10 +122,10 @@ func (nmu *NetworkMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, + Columns: []string{networkmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -155,28 +133,12 @@ func (nmu *NetworkMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := nmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !nmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := nmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, + Columns: []string{networkmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -189,20 +151,7 @@ func (nmu *NetworkMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nmu.mutation.SystemNetworkStatsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: networkmetrics.SystemNetworkStatsTable, - Columns: []string{networkmetrics.SystemNetworkStatsColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(systemnetworkstats.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := nmu.mutation.RemovedSystemNetworkStatsIDs(); len(nodes) > 0 && !nmu.mutation.SystemNetworkStatsCleared() { - edge := 
&sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: networkmetrics.SystemNetworkStatsTable, Columns: []string{networkmetrics.SystemNetworkStatsColumn}, @@ -211,14 +160,11 @@ func (nmu *NetworkMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) IDSpec: sqlgraph.NewFieldSpec(systemnetworkstats.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := nmu.mutation.SystemNetworkStatsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: networkmetrics.SystemNetworkStatsTable, Columns: []string{networkmetrics.SystemNetworkStatsColumn}, @@ -252,34 +198,42 @@ type NetworkMetricsUpdateOne struct { mutation *NetworkMetricsMutation } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (nmuo *NetworkMetricsUpdateOne) AddMetricIDs(ids ...int) *NetworkMetricsUpdateOne { - nmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (nmuo *NetworkMetricsUpdateOne) SetMetricsID(id int) *NetworkMetricsUpdateOne { + nmuo.mutation.SetMetricsID(id) return nmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (nmuo *NetworkMetricsUpdateOne) AddMetrics(m ...*Metrics) *NetworkMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (nmuo *NetworkMetricsUpdateOne) SetNillableMetricsID(id *int) *NetworkMetricsUpdateOne { + if id != nil { + nmuo = nmuo.SetMetricsID(*id) } - return nmuo.AddMetricIDs(ids...) + return nmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (nmuo *NetworkMetricsUpdateOne) SetMetrics(m *Metrics) *NetworkMetricsUpdateOne { + return nmuo.SetMetricsID(m.ID) } -// AddSystemNetworkStatIDs adds the "system_network_stats" edge to the SystemNetworkStats entity by IDs. -func (nmuo *NetworkMetricsUpdateOne) AddSystemNetworkStatIDs(ids ...int) *NetworkMetricsUpdateOne { - nmuo.mutation.AddSystemNetworkStatIDs(ids...) +// SetSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID. +func (nmuo *NetworkMetricsUpdateOne) SetSystemNetworkStatsID(id int) *NetworkMetricsUpdateOne { + nmuo.mutation.SetSystemNetworkStatsID(id) return nmuo } -// AddSystemNetworkStats adds the "system_network_stats" edges to the SystemNetworkStats entity. -func (nmuo *NetworkMetricsUpdateOne) AddSystemNetworkStats(s ...*SystemNetworkStats) *NetworkMetricsUpdateOne { - ids := make([]int, len(s)) - for i := range s { - ids[i] = s[i].ID +// SetNillableSystemNetworkStatsID sets the "system_network_stats" edge to the SystemNetworkStats entity by ID if the given value is not nil. +func (nmuo *NetworkMetricsUpdateOne) SetNillableSystemNetworkStatsID(id *int) *NetworkMetricsUpdateOne { + if id != nil { + nmuo = nmuo.SetSystemNetworkStatsID(*id) } - return nmuo.AddSystemNetworkStatIDs(ids...) + return nmuo +} + +// SetSystemNetworkStats sets the "system_network_stats" edge to the SystemNetworkStats entity. +func (nmuo *NetworkMetricsUpdateOne) SetSystemNetworkStats(s *SystemNetworkStats) *NetworkMetricsUpdateOne { + return nmuo.SetSystemNetworkStatsID(s.ID) } // Mutation returns the NetworkMetricsMutation object of the builder. 
@@ -287,48 +241,18 @@ func (nmuo *NetworkMetricsUpdateOne) Mutation() *NetworkMetricsMutation { return nmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (nmuo *NetworkMetricsUpdateOne) ClearMetrics() *NetworkMetricsUpdateOne { nmuo.mutation.ClearMetrics() return nmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (nmuo *NetworkMetricsUpdateOne) RemoveMetricIDs(ids ...int) *NetworkMetricsUpdateOne { - nmuo.mutation.RemoveMetricIDs(ids...) - return nmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (nmuo *NetworkMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *NetworkMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return nmuo.RemoveMetricIDs(ids...) -} - -// ClearSystemNetworkStats clears all "system_network_stats" edges to the SystemNetworkStats entity. +// ClearSystemNetworkStats clears the "system_network_stats" edge to the SystemNetworkStats entity. func (nmuo *NetworkMetricsUpdateOne) ClearSystemNetworkStats() *NetworkMetricsUpdateOne { nmuo.mutation.ClearSystemNetworkStats() return nmuo } -// RemoveSystemNetworkStatIDs removes the "system_network_stats" edge to SystemNetworkStats entities by IDs. -func (nmuo *NetworkMetricsUpdateOne) RemoveSystemNetworkStatIDs(ids ...int) *NetworkMetricsUpdateOne { - nmuo.mutation.RemoveSystemNetworkStatIDs(ids...) - return nmuo -} - -// RemoveSystemNetworkStats removes "system_network_stats" edges to SystemNetworkStats entities. -func (nmuo *NetworkMetricsUpdateOne) RemoveSystemNetworkStats(s ...*SystemNetworkStats) *NetworkMetricsUpdateOne { - ids := make([]int, len(s)) - for i := range s { - ids[i] = s[i].ID - } - return nmuo.RemoveSystemNetworkStatIDs(ids...) -} - // Where appends a list predicates to the NetworkMetricsUpdate builder. func (nmuo *NetworkMetricsUpdateOne) Where(ps ...predicate.NetworkMetrics) *NetworkMetricsUpdateOne { nmuo.mutation.Where(ps...) 
@@ -397,10 +321,10 @@ func (nmuo *NetworkMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Networ } if nmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, + Columns: []string{networkmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -408,28 +332,12 @@ func (nmuo *NetworkMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Networ } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := nmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !nmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := nmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: networkmetrics.MetricsTable, - Columns: networkmetrics.MetricsPrimaryKey, + Columns: []string{networkmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -442,20 +350,7 @@ func (nmuo *NetworkMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Networ } if nmuo.mutation.SystemNetworkStatsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: false, - Table: networkmetrics.SystemNetworkStatsTable, - Columns: []string{networkmetrics.SystemNetworkStatsColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(systemnetworkstats.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := nmuo.mutation.RemovedSystemNetworkStatsIDs(); len(nodes) > 0 && !nmuo.mutation.SystemNetworkStatsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: networkmetrics.SystemNetworkStatsTable, Columns: []string{networkmetrics.SystemNetworkStatsColumn}, @@ -464,14 +359,11 @@ func (nmuo *NetworkMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Networ IDSpec: sqlgraph.NewFieldSpec(systemnetworkstats.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := nmuo.mutation.SystemNetworkStatsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: false, Table: networkmetrics.SystemNetworkStatsTable, Columns: []string{networkmetrics.SystemNetworkStatsColumn}, diff --git a/ent/gen/ent/outputgroup.go b/ent/gen/ent/outputgroup.go index 56e853b..871e101 100644 --- a/ent/gen/ent/outputgroup.go +++ b/ent/gen/ent/outputgroup.go @@ -10,6 +10,7 @@ import ( "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/namedsetoffiles" "github.com/buildbarn/bb-portal/ent/gen/ent/outputgroup" + "github.com/buildbarn/bb-portal/ent/gen/ent/targetcomplete" ) // OutputGroup is the model entity for the OutputGroup schema. @@ -23,15 +24,15 @@ type OutputGroup struct { Incomplete bool `json:"incomplete,omitempty"` // Edges holds the relations/edges for other nodes in the graph. 
// The values are being populated by the OutputGroupQuery when eager-loading is set. - Edges OutputGroupEdges `json:"edges"` - output_group_file_sets *int - selectValues sql.SelectValues + Edges OutputGroupEdges `json:"edges"` + target_complete_output_group *int + selectValues sql.SelectValues } // OutputGroupEdges holds the relations/edges for other nodes in the graph. type OutputGroupEdges struct { // TargetComplete holds the value of the target_complete edge. - TargetComplete []*TargetComplete `json:"target_complete,omitempty"` + TargetComplete *TargetComplete `json:"target_complete,omitempty"` // InlineFiles holds the value of the inline_files edge. InlineFiles []*TestFile `json:"inline_files,omitempty"` // FileSets holds the value of the file_sets edge. @@ -42,15 +43,16 @@ type OutputGroupEdges struct { // totalCount holds the count of the edges above. totalCount [3]map[string]int - namedTargetComplete map[string][]*TargetComplete - namedInlineFiles map[string][]*TestFile + namedInlineFiles map[string][]*TestFile } // TargetCompleteOrErr returns the TargetComplete value or an error if the edge -// was not loaded in eager-loading. -func (e OutputGroupEdges) TargetCompleteOrErr() ([]*TargetComplete, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e OutputGroupEdges) TargetCompleteOrErr() (*TargetComplete, error) { + if e.TargetComplete != nil { return e.TargetComplete, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: targetcomplete.Label} } return nil, &NotLoadedError{edge: "target_complete"} } @@ -86,7 +88,7 @@ func (*OutputGroup) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case outputgroup.FieldName: values[i] = new(sql.NullString) - case outputgroup.ForeignKeys[0]: // output_group_file_sets + case outputgroup.ForeignKeys[0]: // target_complete_output_group values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -123,10 +125,10 @@ func (og *OutputGroup) assignValues(columns []string, values []any) error { } case outputgroup.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field output_group_file_sets", value) + return fmt.Errorf("unexpected type %T for edge-field target_complete_output_group", value) } else if value.Valid { - og.output_group_file_sets = new(int) - *og.output_group_file_sets = int(value.Int64) + og.target_complete_output_group = new(int) + *og.target_complete_output_group = int(value.Int64) } default: og.selectValues.Set(columns[i], values[i]) @@ -188,30 +190,6 @@ func (og *OutputGroup) String() string { return builder.String() } -// NamedTargetComplete returns the TargetComplete named value or an error if the edge was not -// loaded in eager-loading with this name. -func (og *OutputGroup) NamedTargetComplete(name string) ([]*TargetComplete, error) { - if og.Edges.namedTargetComplete == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := og.Edges.namedTargetComplete[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (og *OutputGroup) appendNamedTargetComplete(name string, edges ...*TargetComplete) { - if og.Edges.namedTargetComplete == nil { - og.Edges.namedTargetComplete = make(map[string][]*TargetComplete) - } - if len(edges) == 0 { - og.Edges.namedTargetComplete[name] = []*TargetComplete{} - } else { - og.Edges.namedTargetComplete[name] = append(og.Edges.namedTargetComplete[name], edges...) 
- } -} - // NamedInlineFiles returns the InlineFiles named value or an error if the edge was not // loaded in eager-loading with this name. func (og *OutputGroup) NamedInlineFiles(name string) ([]*TestFile, error) { diff --git a/ent/gen/ent/outputgroup/outputgroup.go b/ent/gen/ent/outputgroup/outputgroup.go index cc75e96..f96e9eb 100644 --- a/ent/gen/ent/outputgroup/outputgroup.go +++ b/ent/gen/ent/outputgroup/outputgroup.go @@ -25,7 +25,7 @@ const ( // Table holds the table name of the outputgroup in the database. Table = "output_groups" // TargetCompleteTable is the table that holds the target_complete relation/edge. - TargetCompleteTable = "target_completes" + TargetCompleteTable = "output_groups" // TargetCompleteInverseTable is the table name for the TargetComplete entity. // It exists in this package in order to avoid circular dependency with the "targetcomplete" package. TargetCompleteInverseTable = "target_completes" @@ -39,7 +39,7 @@ const ( // InlineFilesColumn is the table column denoting the inline_files relation/edge. InlineFilesColumn = "output_group_inline_files" // FileSetsTable is the table that holds the file_sets relation/edge. - FileSetsTable = "output_groups" + FileSetsTable = "named_set_of_files" // FileSetsInverseTable is the table name for the NamedSetOfFiles entity. // It exists in this package in order to avoid circular dependency with the "namedsetoffiles" package. FileSetsInverseTable = "named_set_of_files" @@ -57,7 +57,7 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "output_groups" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "output_group_file_sets", + "target_complete_output_group", } // ValidColumn reports if the column name is valid (part of the table columns). @@ -93,17 +93,10 @@ func ByIncomplete(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldIncomplete, opts...).ToFunc() } -// ByTargetCompleteCount orders the results by target_complete count. -func ByTargetCompleteCount(opts ...sql.OrderTermOption) OrderOption { +// ByTargetCompleteField orders the results by target_complete field. +func ByTargetCompleteField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTargetCompleteStep(), opts...) - } -} - -// ByTargetComplete orders the results by target_complete terms. -func ByTargetComplete(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTargetCompleteStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newTargetCompleteStep(), sql.OrderByField(field, opts...)) } } @@ -131,7 +124,7 @@ func newTargetCompleteStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TargetCompleteInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetCompleteTable, TargetCompleteColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetCompleteTable, TargetCompleteColumn), ) } func newInlineFilesStep() *sqlgraph.Step { @@ -145,6 +138,6 @@ func newFileSetsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(FileSetsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, FileSetsTable, FileSetsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, FileSetsTable, FileSetsColumn), ) } diff --git a/ent/gen/ent/outputgroup/where.go b/ent/gen/ent/outputgroup/where.go index 5a91477..054cdd1 100644 --- a/ent/gen/ent/outputgroup/where.go +++ b/ent/gen/ent/outputgroup/where.go @@ -163,7 +163,7 @@ func HasTargetComplete() predicate.OutputGroup { return predicate.OutputGroup(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetCompleteTable, TargetCompleteColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetCompleteTable, TargetCompleteColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -209,7 +209,7 @@ func HasFileSets() predicate.OutputGroup { return predicate.OutputGroup(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, FileSetsTable, FileSetsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, FileSetsTable, FileSetsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/outputgroup_create.go b/ent/gen/ent/outputgroup_create.go index ce7fc58..32f61f4 100644 --- a/ent/gen/ent/outputgroup_create.go +++ b/ent/gen/ent/outputgroup_create.go @@ -49,19 +49,23 @@ func (ogc *OutputGroupCreate) SetNillableIncomplete(b *bool) *OutputGroupCreate return ogc } -// AddTargetCompleteIDs adds the "target_complete" edge to the TargetComplete entity by IDs. -func (ogc *OutputGroupCreate) AddTargetCompleteIDs(ids ...int) *OutputGroupCreate { - ogc.mutation.AddTargetCompleteIDs(ids...) +// SetTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID. +func (ogc *OutputGroupCreate) SetTargetCompleteID(id int) *OutputGroupCreate { + ogc.mutation.SetTargetCompleteID(id) return ogc } -// AddTargetComplete adds the "target_complete" edges to the TargetComplete entity. -func (ogc *OutputGroupCreate) AddTargetComplete(t ...*TargetComplete) *OutputGroupCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID if the given value is not nil. +func (ogc *OutputGroupCreate) SetNillableTargetCompleteID(id *int) *OutputGroupCreate { + if id != nil { + ogc = ogc.SetTargetCompleteID(*id) } - return ogc.AddTargetCompleteIDs(ids...) + return ogc +} + +// SetTargetComplete sets the "target_complete" edge to the TargetComplete entity. +func (ogc *OutputGroupCreate) SetTargetComplete(t *TargetComplete) *OutputGroupCreate { + return ogc.SetTargetCompleteID(t.ID) } // AddInlineFileIDs adds the "inline_files" edge to the TestFile entity by IDs. 
@@ -168,7 +172,7 @@ func (ogc *OutputGroupCreate) createSpec() (*OutputGroup, *sqlgraph.CreateSpec) } if nodes := ogc.mutation.TargetCompleteIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: outputgroup.TargetCompleteTable, Columns: []string{outputgroup.TargetCompleteColumn}, @@ -180,6 +184,7 @@ func (ogc *OutputGroupCreate) createSpec() (*OutputGroup, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.target_complete_output_group = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := ogc.mutation.InlineFilesIDs(); len(nodes) > 0 { @@ -200,7 +205,7 @@ func (ogc *OutputGroupCreate) createSpec() (*OutputGroup, *sqlgraph.CreateSpec) } if nodes := ogc.mutation.FileSetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: outputgroup.FileSetsTable, Columns: []string{outputgroup.FileSetsColumn}, @@ -212,7 +217,6 @@ func (ogc *OutputGroupCreate) createSpec() (*OutputGroup, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.output_group_file_sets = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/outputgroup_query.go b/ent/gen/ent/outputgroup_query.go index 8456930..1438e8e 100644 --- a/ent/gen/ent/outputgroup_query.go +++ b/ent/gen/ent/outputgroup_query.go @@ -21,18 +21,17 @@ import ( // OutputGroupQuery is the builder for querying OutputGroup entities. type OutputGroupQuery struct { config - ctx *QueryContext - order []outputgroup.OrderOption - inters []Interceptor - predicates []predicate.OutputGroup - withTargetComplete *TargetCompleteQuery - withInlineFiles *TestFileQuery - withFileSets *NamedSetOfFilesQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*OutputGroup) error - withNamedTargetComplete map[string]*TargetCompleteQuery - withNamedInlineFiles map[string]*TestFileQuery + ctx *QueryContext + order []outputgroup.OrderOption + inters []Interceptor + predicates []predicate.OutputGroup + withTargetComplete *TargetCompleteQuery + withInlineFiles *TestFileQuery + withFileSets *NamedSetOfFilesQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*OutputGroup) error + withNamedInlineFiles map[string]*TestFileQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -83,7 +82,7 @@ func (ogq *OutputGroupQuery) QueryTargetComplete() *TargetCompleteQuery { step := sqlgraph.NewStep( sqlgraph.From(outputgroup.Table, outputgroup.FieldID, selector), sqlgraph.To(targetcomplete.Table, targetcomplete.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, outputgroup.TargetCompleteTable, outputgroup.TargetCompleteColumn), + sqlgraph.Edge(sqlgraph.O2O, true, outputgroup.TargetCompleteTable, outputgroup.TargetCompleteColumn), ) fromU = sqlgraph.SetNeighbors(ogq.driver.Dialect(), step) return fromU, nil @@ -127,7 +126,7 @@ func (ogq *OutputGroupQuery) QueryFileSets() *NamedSetOfFilesQuery { step := sqlgraph.NewStep( sqlgraph.From(outputgroup.Table, outputgroup.FieldID, selector), sqlgraph.To(namedsetoffiles.Table, namedsetoffiles.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, outputgroup.FileSetsTable, outputgroup.FileSetsColumn), + sqlgraph.Edge(sqlgraph.O2O, false, outputgroup.FileSetsTable, outputgroup.FileSetsColumn), ) fromU = sqlgraph.SetNeighbors(ogq.driver.Dialect(), step) return fromU, nil @@ -454,7 +453,7 @@ func (ogq *OutputGroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] ogq.withFileSets != nil, } ) - if ogq.withFileSets != nil { + if ogq.withTargetComplete != nil { withFKs = true } if withFKs { @@ -482,9 +481,8 @@ func (ogq *OutputGroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nodes, nil } if query := ogq.withTargetComplete; query != nil { - if err := ogq.loadTargetComplete(ctx, query, nodes, - func(n *OutputGroup) { n.Edges.TargetComplete = []*TargetComplete{} }, - func(n *OutputGroup, e *TargetComplete) { n.Edges.TargetComplete = append(n.Edges.TargetComplete, e) }); err != nil { + if err := ogq.loadTargetComplete(ctx, query, nodes, nil, + func(n *OutputGroup, e *TargetComplete) { n.Edges.TargetComplete = e }); err != nil { return nil, err } } @@ -501,13 +499,6 @@ func (ogq *OutputGroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nil, err } } - for name, query := range ogq.withNamedTargetComplete { - if err := ogq.loadTargetComplete(ctx, query, nodes, - func(n *OutputGroup) { n.appendNamedTargetComplete(name) }, - func(n *OutputGroup, e *TargetComplete) { n.appendNamedTargetComplete(name, e) }); err != nil { - return nil, err - } - } for name, query := range ogq.withNamedInlineFiles { if err := ogq.loadInlineFiles(ctx, query, nodes, func(n *OutputGroup) { n.appendNamedInlineFiles(name) }, @@ -524,33 +515,34 @@ func (ogq *OutputGroupQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] } func (ogq *OutputGroupQuery) loadTargetComplete(ctx context.Context, query *TargetCompleteQuery, nodes []*OutputGroup, init func(*OutputGroup), assign func(*OutputGroup, *TargetComplete)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*OutputGroup) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*OutputGroup) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].target_complete_output_group == nil { + continue } + fk := *nodes[i].target_complete_output_group + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.TargetComplete(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(outputgroup.TargetCompleteColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(targetcomplete.IDIn(ids...)) 
neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.target_complete_output_group - if fk == nil { - return fmt.Errorf(`foreign-key "target_complete_output_group" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "target_complete_output_group" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "target_complete_output_group" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } @@ -586,34 +578,30 @@ func (ogq *OutputGroupQuery) loadInlineFiles(ctx context.Context, query *TestFil return nil } func (ogq *OutputGroupQuery) loadFileSets(ctx context.Context, query *NamedSetOfFilesQuery, nodes []*OutputGroup, init func(*OutputGroup), assign func(*OutputGroup, *NamedSetOfFiles)) error { - ids := make([]int, 0, len(nodes)) - nodeids := make(map[int][]*OutputGroup) + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*OutputGroup) for i := range nodes { - if nodes[i].output_group_file_sets == nil { - continue - } - fk := *nodes[i].output_group_file_sets - if _, ok := nodeids[fk]; !ok { - ids = append(ids, fk) - } - nodeids[fk] = append(nodeids[fk], nodes[i]) - } - if len(ids) == 0 { - return nil + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - query.Where(namedsetoffiles.IDIn(ids...)) + query.withFKs = true + query.Where(predicate.NamedSetOfFiles(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(outputgroup.FileSetsColumn), fks...)) + })) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nodeids[n.ID] - if !ok { - return fmt.Errorf(`unexpected foreign-key "output_group_file_sets" returned %v`, n.ID) + fk := n.output_group_file_sets + if fk == nil { + return fmt.Errorf(`foreign-key "output_group_file_sets" is nil for node %v`, n.ID) } - for i := range nodes { - assign(nodes[i], n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "output_group_file_sets" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -702,20 +690,6 @@ func (ogq *OutputGroupQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTargetComplete tells the query-builder to eager-load the nodes that are connected to the "target_complete" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (ogq *OutputGroupQuery) WithNamedTargetComplete(name string, opts ...func(*TargetCompleteQuery)) *OutputGroupQuery { - query := (&TargetCompleteClient{config: ogq.config}).Query() - for _, opt := range opts { - opt(query) - } - if ogq.withNamedTargetComplete == nil { - ogq.withNamedTargetComplete = make(map[string]*TargetCompleteQuery) - } - ogq.withNamedTargetComplete[name] = query - return ogq -} - // WithNamedInlineFiles tells the query-builder to eager-load the nodes that are connected to the "inline_files" // edge with the given name. The optional arguments are used to configure the query builder of the edge. 
func (ogq *OutputGroupQuery) WithNamedInlineFiles(name string, opts ...func(*TestFileQuery)) *OutputGroupQuery { diff --git a/ent/gen/ent/outputgroup_update.go b/ent/gen/ent/outputgroup_update.go index 635cb24..41fb7f9 100644 --- a/ent/gen/ent/outputgroup_update.go +++ b/ent/gen/ent/outputgroup_update.go @@ -70,19 +70,23 @@ func (ogu *OutputGroupUpdate) ClearIncomplete() *OutputGroupUpdate { return ogu } -// AddTargetCompleteIDs adds the "target_complete" edge to the TargetComplete entity by IDs. -func (ogu *OutputGroupUpdate) AddTargetCompleteIDs(ids ...int) *OutputGroupUpdate { - ogu.mutation.AddTargetCompleteIDs(ids...) +// SetTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID. +func (ogu *OutputGroupUpdate) SetTargetCompleteID(id int) *OutputGroupUpdate { + ogu.mutation.SetTargetCompleteID(id) return ogu } -// AddTargetComplete adds the "target_complete" edges to the TargetComplete entity. -func (ogu *OutputGroupUpdate) AddTargetComplete(t ...*TargetComplete) *OutputGroupUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID if the given value is not nil. +func (ogu *OutputGroupUpdate) SetNillableTargetCompleteID(id *int) *OutputGroupUpdate { + if id != nil { + ogu = ogu.SetTargetCompleteID(*id) } - return ogu.AddTargetCompleteIDs(ids...) + return ogu +} + +// SetTargetComplete sets the "target_complete" edge to the TargetComplete entity. +func (ogu *OutputGroupUpdate) SetTargetComplete(t *TargetComplete) *OutputGroupUpdate { + return ogu.SetTargetCompleteID(t.ID) } // AddInlineFileIDs adds the "inline_files" edge to the TestFile entity by IDs. @@ -124,27 +128,12 @@ func (ogu *OutputGroupUpdate) Mutation() *OutputGroupMutation { return ogu.mutation } -// ClearTargetComplete clears all "target_complete" edges to the TargetComplete entity. +// ClearTargetComplete clears the "target_complete" edge to the TargetComplete entity. func (ogu *OutputGroupUpdate) ClearTargetComplete() *OutputGroupUpdate { ogu.mutation.ClearTargetComplete() return ogu } -// RemoveTargetCompleteIDs removes the "target_complete" edge to TargetComplete entities by IDs. -func (ogu *OutputGroupUpdate) RemoveTargetCompleteIDs(ids ...int) *OutputGroupUpdate { - ogu.mutation.RemoveTargetCompleteIDs(ids...) - return ogu -} - -// RemoveTargetComplete removes "target_complete" edges to TargetComplete entities. -func (ogu *OutputGroupUpdate) RemoveTargetComplete(t ...*TargetComplete) *OutputGroupUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return ogu.RemoveTargetCompleteIDs(ids...) -} - // ClearInlineFiles clears all "inline_files" edges to the TestFile entity. 
func (ogu *OutputGroupUpdate) ClearInlineFiles() *OutputGroupUpdate { ogu.mutation.ClearInlineFiles() @@ -222,7 +211,7 @@ func (ogu *OutputGroupUpdate) sqlSave(ctx context.Context) (n int, err error) { } if ogu.mutation.TargetCompleteCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: outputgroup.TargetCompleteTable, Columns: []string{outputgroup.TargetCompleteColumn}, @@ -233,25 +222,9 @@ func (ogu *OutputGroupUpdate) sqlSave(ctx context.Context) (n int, err error) { } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := ogu.mutation.RemovedTargetCompleteIDs(); len(nodes) > 0 && !ogu.mutation.TargetCompleteCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: outputgroup.TargetCompleteTable, - Columns: []string{outputgroup.TargetCompleteColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetcomplete.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := ogu.mutation.TargetCompleteIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: outputgroup.TargetCompleteTable, Columns: []string{outputgroup.TargetCompleteColumn}, @@ -312,7 +285,7 @@ func (ogu *OutputGroupUpdate) sqlSave(ctx context.Context) (n int, err error) { } if ogu.mutation.FileSetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: outputgroup.FileSetsTable, Columns: []string{outputgroup.FileSetsColumn}, @@ -325,7 +298,7 @@ func (ogu *OutputGroupUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := ogu.mutation.FileSetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: outputgroup.FileSetsTable, Columns: []string{outputgroup.FileSetsColumn}, @@ -399,19 +372,23 @@ func (oguo *OutputGroupUpdateOne) ClearIncomplete() *OutputGroupUpdateOne { return oguo } -// AddTargetCompleteIDs adds the "target_complete" edge to the TargetComplete entity by IDs. -func (oguo *OutputGroupUpdateOne) AddTargetCompleteIDs(ids ...int) *OutputGroupUpdateOne { - oguo.mutation.AddTargetCompleteIDs(ids...) +// SetTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID. +func (oguo *OutputGroupUpdateOne) SetTargetCompleteID(id int) *OutputGroupUpdateOne { + oguo.mutation.SetTargetCompleteID(id) return oguo } -// AddTargetComplete adds the "target_complete" edges to the TargetComplete entity. -func (oguo *OutputGroupUpdateOne) AddTargetComplete(t ...*TargetComplete) *OutputGroupUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetCompleteID sets the "target_complete" edge to the TargetComplete entity by ID if the given value is not nil. +func (oguo *OutputGroupUpdateOne) SetNillableTargetCompleteID(id *int) *OutputGroupUpdateOne { + if id != nil { + oguo = oguo.SetTargetCompleteID(*id) } - return oguo.AddTargetCompleteIDs(ids...) + return oguo +} + +// SetTargetComplete sets the "target_complete" edge to the TargetComplete entity. +func (oguo *OutputGroupUpdateOne) SetTargetComplete(t *TargetComplete) *OutputGroupUpdateOne { + return oguo.SetTargetCompleteID(t.ID) } // AddInlineFileIDs adds the "inline_files" edge to the TestFile entity by IDs. 
@@ -453,27 +430,12 @@ func (oguo *OutputGroupUpdateOne) Mutation() *OutputGroupMutation { return oguo.mutation } -// ClearTargetComplete clears all "target_complete" edges to the TargetComplete entity. +// ClearTargetComplete clears the "target_complete" edge to the TargetComplete entity. func (oguo *OutputGroupUpdateOne) ClearTargetComplete() *OutputGroupUpdateOne { oguo.mutation.ClearTargetComplete() return oguo } -// RemoveTargetCompleteIDs removes the "target_complete" edge to TargetComplete entities by IDs. -func (oguo *OutputGroupUpdateOne) RemoveTargetCompleteIDs(ids ...int) *OutputGroupUpdateOne { - oguo.mutation.RemoveTargetCompleteIDs(ids...) - return oguo -} - -// RemoveTargetComplete removes "target_complete" edges to TargetComplete entities. -func (oguo *OutputGroupUpdateOne) RemoveTargetComplete(t ...*TargetComplete) *OutputGroupUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return oguo.RemoveTargetCompleteIDs(ids...) -} - // ClearInlineFiles clears all "inline_files" edges to the TestFile entity. func (oguo *OutputGroupUpdateOne) ClearInlineFiles() *OutputGroupUpdateOne { oguo.mutation.ClearInlineFiles() @@ -581,7 +543,7 @@ func (oguo *OutputGroupUpdateOne) sqlSave(ctx context.Context) (_node *OutputGro } if oguo.mutation.TargetCompleteCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: outputgroup.TargetCompleteTable, Columns: []string{outputgroup.TargetCompleteColumn}, @@ -592,25 +554,9 @@ func (oguo *OutputGroupUpdateOne) sqlSave(ctx context.Context) (_node *OutputGro } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := oguo.mutation.RemovedTargetCompleteIDs(); len(nodes) > 0 && !oguo.mutation.TargetCompleteCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: outputgroup.TargetCompleteTable, - Columns: []string{outputgroup.TargetCompleteColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetcomplete.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := oguo.mutation.TargetCompleteIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: outputgroup.TargetCompleteTable, Columns: []string{outputgroup.TargetCompleteColumn}, @@ -671,7 +617,7 @@ func (oguo *OutputGroupUpdateOne) sqlSave(ctx context.Context) (_node *OutputGro } if oguo.mutation.FileSetsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: outputgroup.FileSetsTable, Columns: []string{outputgroup.FileSetsColumn}, @@ -684,7 +630,7 @@ func (oguo *OutputGroupUpdateOne) sqlSave(ctx context.Context) (_node *OutputGro } if nodes := oguo.mutation.FileSetsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: outputgroup.FileSetsTable, Columns: []string{outputgroup.FileSetsColumn}, diff --git a/ent/gen/ent/packageloadmetrics.go b/ent/gen/ent/packageloadmetrics.go index c20b77c..9386f5d 100644 --- a/ent/gen/ent/packageloadmetrics.go +++ b/ent/gen/ent/packageloadmetrics.go @@ -9,6 +9,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/packageloadmetrics" + "github.com/buildbarn/bb-portal/ent/gen/ent/packagemetrics" ) // PackageLoadMetrics is the model entity for the PackageLoadMetrics schema. 
@@ -30,28 +31,29 @@ type PackageLoadMetrics struct { PackageOverhead uint64 `json:"package_overhead,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the PackageLoadMetricsQuery when eager-loading is set. - Edges PackageLoadMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges PackageLoadMetricsEdges `json:"edges"` + package_metrics_package_load_metrics *int + selectValues sql.SelectValues } // PackageLoadMetricsEdges holds the relations/edges for other nodes in the graph. type PackageLoadMetricsEdges struct { // PackageMetrics holds the value of the package_metrics edge. - PackageMetrics []*PackageMetrics `json:"package_metrics,omitempty"` + PackageMetrics *PackageMetrics `json:"package_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedPackageMetrics map[string][]*PackageMetrics } // PackageMetricsOrErr returns the PackageMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e PackageLoadMetricsEdges) PackageMetricsOrErr() ([]*PackageMetrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e PackageLoadMetricsEdges) PackageMetricsOrErr() (*PackageMetrics, error) { + if e.PackageMetrics != nil { return e.PackageMetrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: packagemetrics.Label} } return nil, &NotLoadedError{edge: "package_metrics"} } @@ -65,6 +67,8 @@ func (*PackageLoadMetrics) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case packageloadmetrics.FieldName: values[i] = new(sql.NullString) + case packageloadmetrics.ForeignKeys[0]: // package_metrics_package_load_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -122,6 +126,13 @@ func (plm *PackageLoadMetrics) assignValues(columns []string, values []any) erro } else if value.Valid { plm.PackageOverhead = uint64(value.Int64) } + case packageloadmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field package_metrics_package_load_metrics", value) + } else if value.Valid { + plm.package_metrics_package_load_metrics = new(int) + *plm.package_metrics_package_load_metrics = int(value.Int64) + } default: plm.selectValues.Set(columns[i], values[i]) } @@ -184,29 +195,5 @@ func (plm *PackageLoadMetrics) String() string { return builder.String() } -// NamedPackageMetrics returns the PackageMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (plm *PackageLoadMetrics) NamedPackageMetrics(name string) ([]*PackageMetrics, error) { - if plm.Edges.namedPackageMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := plm.Edges.namedPackageMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (plm *PackageLoadMetrics) appendNamedPackageMetrics(name string, edges ...*PackageMetrics) { - if plm.Edges.namedPackageMetrics == nil { - plm.Edges.namedPackageMetrics = make(map[string][]*PackageMetrics) - } - if len(edges) == 0 { - plm.Edges.namedPackageMetrics[name] = []*PackageMetrics{} - } else { - plm.Edges.namedPackageMetrics[name] = append(plm.Edges.namedPackageMetrics[name], edges...) 
- } -} - // PackageLoadMetricsSlice is a parsable slice of PackageLoadMetrics. type PackageLoadMetricsSlice []*PackageLoadMetrics diff --git a/ent/gen/ent/packageloadmetrics/packageloadmetrics.go b/ent/gen/ent/packageloadmetrics/packageloadmetrics.go index e1daa5c..0efc8a5 100644 --- a/ent/gen/ent/packageloadmetrics/packageloadmetrics.go +++ b/ent/gen/ent/packageloadmetrics/packageloadmetrics.go @@ -28,11 +28,13 @@ const ( EdgePackageMetrics = "package_metrics" // Table holds the table name of the packageloadmetrics in the database. Table = "package_load_metrics" - // PackageMetricsTable is the table that holds the package_metrics relation/edge. The primary key declared below. - PackageMetricsTable = "package_metrics_package_load_metrics" + // PackageMetricsTable is the table that holds the package_metrics relation/edge. + PackageMetricsTable = "package_load_metrics" // PackageMetricsInverseTable is the table name for the PackageMetrics entity. // It exists in this package in order to avoid circular dependency with the "packagemetrics" package. PackageMetricsInverseTable = "package_metrics" + // PackageMetricsColumn is the table column denoting the package_metrics relation/edge. + PackageMetricsColumn = "package_metrics_package_load_metrics" ) // Columns holds all SQL columns for packageloadmetrics fields. @@ -46,11 +48,11 @@ var Columns = []string{ FieldPackageOverhead, } -var ( - // PackageMetricsPrimaryKey and PackageMetricsColumn2 are the table columns denoting the - // primary key for the package_metrics relation (M2M). - PackageMetricsPrimaryKey = []string{"package_metrics_id", "package_load_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "package_load_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "package_metrics_package_load_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -59,6 +61,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -100,23 +107,16 @@ func ByPackageOverhead(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldPackageOverhead, opts...).ToFunc() } -// ByPackageMetricsCount orders the results by package_metrics count. -func ByPackageMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newPackageMetricsStep(), opts...) - } -} - -// ByPackageMetrics orders the results by package_metrics terms. -func ByPackageMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByPackageMetricsField orders the results by package_metrics field. +func ByPackageMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newPackageMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newPackageMetricsStep(), sql.OrderByField(field, opts...)) } } func newPackageMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(PackageMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, PackageMetricsTable, PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, PackageMetricsTable, PackageMetricsColumn), ) } diff --git a/ent/gen/ent/packageloadmetrics/where.go b/ent/gen/ent/packageloadmetrics/where.go index 749987a..97ea694 100644 --- a/ent/gen/ent/packageloadmetrics/where.go +++ b/ent/gen/ent/packageloadmetrics/where.go @@ -413,7 +413,7 @@ func HasPackageMetrics() predicate.PackageLoadMetrics { return predicate.PackageLoadMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, PackageMetricsTable, PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, PackageMetricsTable, PackageMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/packageloadmetrics_create.go b/ent/gen/ent/packageloadmetrics_create.go index 49cef6d..f823e56 100644 --- a/ent/gen/ent/packageloadmetrics_create.go +++ b/ent/gen/ent/packageloadmetrics_create.go @@ -103,19 +103,23 @@ func (plmc *PackageLoadMetricsCreate) SetNillablePackageOverhead(u *uint64) *Pac return plmc } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (plmc *PackageLoadMetricsCreate) AddPackageMetricIDs(ids ...int) *PackageLoadMetricsCreate { - plmc.mutation.AddPackageMetricIDs(ids...) +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (plmc *PackageLoadMetricsCreate) SetPackageMetricsID(id int) *PackageLoadMetricsCreate { + plmc.mutation.SetPackageMetricsID(id) return plmc } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. -func (plmc *PackageLoadMetricsCreate) AddPackageMetrics(p ...*PackageMetrics) *PackageLoadMetricsCreate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. +func (plmc *PackageLoadMetricsCreate) SetNillablePackageMetricsID(id *int) *PackageLoadMetricsCreate { + if id != nil { + plmc = plmc.SetPackageMetricsID(*id) } - return plmc.AddPackageMetricIDs(ids...) + return plmc +} + +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (plmc *PackageLoadMetricsCreate) SetPackageMetrics(p *PackageMetrics) *PackageLoadMetricsCreate { + return plmc.SetPackageMetricsID(p.ID) } // Mutation returns the PackageLoadMetricsMutation object of the builder. 
@@ -204,10 +208,10 @@ func (plmc *PackageLoadMetricsCreate) createSpec() (*PackageLoadMetrics, *sqlgra } if nodes := plmc.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, + Columns: []string{packageloadmetrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -216,6 +220,7 @@ func (plmc *PackageLoadMetricsCreate) createSpec() (*PackageLoadMetrics, *sqlgra for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.package_metrics_package_load_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/packageloadmetrics_query.go b/ent/gen/ent/packageloadmetrics_query.go index abfc76e..50c18a3 100644 --- a/ent/gen/ent/packageloadmetrics_query.go +++ b/ent/gen/ent/packageloadmetrics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // PackageLoadMetricsQuery is the builder for querying PackageLoadMetrics entities. type PackageLoadMetricsQuery struct { config - ctx *QueryContext - order []packageloadmetrics.OrderOption - inters []Interceptor - predicates []predicate.PackageLoadMetrics - withPackageMetrics *PackageMetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*PackageLoadMetrics) error - withNamedPackageMetrics map[string]*PackageMetricsQuery + ctx *QueryContext + order []packageloadmetrics.OrderOption + inters []Interceptor + predicates []predicate.PackageLoadMetrics + withPackageMetrics *PackageMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*PackageLoadMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (plmq *PackageLoadMetricsQuery) QueryPackageMetrics() *PackageMetricsQuery step := sqlgraph.NewStep( sqlgraph.From(packageloadmetrics.Table, packageloadmetrics.FieldID, selector), sqlgraph.To(packagemetrics.Table, packagemetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, packageloadmetrics.PackageMetricsTable, packageloadmetrics.PackageMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, packageloadmetrics.PackageMetricsTable, packageloadmetrics.PackageMetricsColumn), ) fromU = sqlgraph.SetNeighbors(plmq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (plmq *PackageLoadMetricsQuery) prepareQuery(ctx context.Context) error { func (plmq *PackageLoadMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*PackageLoadMetrics, error) { var ( nodes = []*PackageLoadMetrics{} + withFKs = plmq.withFKs _spec = plmq.querySpec() loadedTypes = [1]bool{ plmq.withPackageMetrics != nil, } ) + if plmq.withPackageMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, packageloadmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*PackageLoadMetrics).scanValues(nil, columns) } @@ -399,18 +405,8 @@ func (plmq *PackageLoadMetricsQuery) sqlAll(ctx context.Context, hooks ...queryH return nodes, nil } if query := plmq.withPackageMetrics; query != nil { - if err := plmq.loadPackageMetrics(ctx, query, nodes, - func(n *PackageLoadMetrics) { n.Edges.PackageMetrics = []*PackageMetrics{} }, - func(n *PackageLoadMetrics, e *PackageMetrics) { - n.Edges.PackageMetrics = append(n.Edges.PackageMetrics, e) - }); err != nil { - return nil, err - } - } - for name, query := range plmq.withNamedPackageMetrics { - if err := plmq.loadPackageMetrics(ctx, query, nodes, - func(n *PackageLoadMetrics) { n.appendNamedPackageMetrics(name) }, - func(n *PackageLoadMetrics, e *PackageMetrics) { n.appendNamedPackageMetrics(name, e) }); err != nil { + if err := plmq.loadPackageMetrics(ctx, query, nodes, nil, + func(n *PackageLoadMetrics, e *PackageMetrics) { n.Edges.PackageMetrics = e }); err != nil { return nil, err } } @@ -423,62 +419,33 @@ func (plmq *PackageLoadMetricsQuery) sqlAll(ctx context.Context, hooks ...queryH } func (plmq *PackageLoadMetricsQuery) loadPackageMetrics(ctx context.Context, query *PackageMetricsQuery, nodes []*PackageLoadMetrics, init func(*PackageLoadMetrics), assign func(*PackageLoadMetrics, *PackageMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*PackageLoadMetrics) - nids := make(map[int]map[*PackageLoadMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*PackageLoadMetrics) + for i := range nodes { + if nodes[i].package_metrics_package_load_metrics == nil { + continue } + fk := *nodes[i].package_metrics_package_load_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(packageloadmetrics.PackageMetricsTable) - s.Join(joinT).On(s.C(packagemetrics.FieldID), joinT.C(packageloadmetrics.PackageMetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(packageloadmetrics.PackageMetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(packageloadmetrics.PackageMetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*PackageLoadMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*PackageMetrics](ctx, query, qr, query.inters) + query.Where(packagemetrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "package_metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "package_metrics_package_load_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -568,20 +535,6 @@ func (plmq *PackageLoadMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector return selector } -// WithNamedPackageMetrics tells the query-builder to eager-load the nodes that are connected to the "package_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (plmq *PackageLoadMetricsQuery) WithNamedPackageMetrics(name string, opts ...func(*PackageMetricsQuery)) *PackageLoadMetricsQuery { - query := (&PackageMetricsClient{config: plmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if plmq.withNamedPackageMetrics == nil { - plmq.withNamedPackageMetrics = make(map[string]*PackageMetricsQuery) - } - plmq.withNamedPackageMetrics[name] = query - return plmq -} - // PackageLoadMetricsGroupBy is the group-by builder for PackageLoadMetrics entities. type PackageLoadMetricsGroupBy struct { selector diff --git a/ent/gen/ent/packageloadmetrics_update.go b/ent/gen/ent/packageloadmetrics_update.go index c507e43..5fad20c 100644 --- a/ent/gen/ent/packageloadmetrics_update.go +++ b/ent/gen/ent/packageloadmetrics_update.go @@ -183,19 +183,23 @@ func (plmu *PackageLoadMetricsUpdate) ClearPackageOverhead() *PackageLoadMetrics return plmu } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (plmu *PackageLoadMetricsUpdate) AddPackageMetricIDs(ids ...int) *PackageLoadMetricsUpdate { - plmu.mutation.AddPackageMetricIDs(ids...) +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (plmu *PackageLoadMetricsUpdate) SetPackageMetricsID(id int) *PackageLoadMetricsUpdate { + plmu.mutation.SetPackageMetricsID(id) return plmu } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. 
-func (plmu *PackageLoadMetricsUpdate) AddPackageMetrics(p ...*PackageMetrics) *PackageLoadMetricsUpdate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. +func (plmu *PackageLoadMetricsUpdate) SetNillablePackageMetricsID(id *int) *PackageLoadMetricsUpdate { + if id != nil { + plmu = plmu.SetPackageMetricsID(*id) } - return plmu.AddPackageMetricIDs(ids...) + return plmu +} + +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (plmu *PackageLoadMetricsUpdate) SetPackageMetrics(p *PackageMetrics) *PackageLoadMetricsUpdate { + return plmu.SetPackageMetricsID(p.ID) } // Mutation returns the PackageLoadMetricsMutation object of the builder. @@ -203,27 +207,12 @@ func (plmu *PackageLoadMetricsUpdate) Mutation() *PackageLoadMetricsMutation { return plmu.mutation } -// ClearPackageMetrics clears all "package_metrics" edges to the PackageMetrics entity. +// ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. func (plmu *PackageLoadMetricsUpdate) ClearPackageMetrics() *PackageLoadMetricsUpdate { plmu.mutation.ClearPackageMetrics() return plmu } -// RemovePackageMetricIDs removes the "package_metrics" edge to PackageMetrics entities by IDs. -func (plmu *PackageLoadMetricsUpdate) RemovePackageMetricIDs(ids ...int) *PackageLoadMetricsUpdate { - plmu.mutation.RemovePackageMetricIDs(ids...) - return plmu -} - -// RemovePackageMetrics removes "package_metrics" edges to PackageMetrics entities. -func (plmu *PackageLoadMetricsUpdate) RemovePackageMetrics(p ...*PackageMetrics) *PackageLoadMetricsUpdate { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID - } - return plmu.RemovePackageMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (plmu *PackageLoadMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, plmu.sqlSave, plmu.mutation, plmu.hooks) @@ -313,39 +302,23 @@ func (plmu *PackageLoadMetricsUpdate) sqlSave(ctx context.Context) (n int, err e } if plmu.mutation.PackageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := plmu.mutation.RemovedPackageMetricsIDs(); len(nodes) > 0 && !plmu.mutation.PackageMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, + Columns: []string{packageloadmetrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := plmu.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, + Columns: []string{packageloadmetrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), @@ -531,19 +504,23 @@ func (plmuo *PackageLoadMetricsUpdateOne) ClearPackageOverhead() *PackageLoadMet return plmuo } -// AddPackageMetricIDs adds the "package_metrics" edge to the PackageMetrics entity by IDs. -func (plmuo *PackageLoadMetricsUpdateOne) AddPackageMetricIDs(ids ...int) *PackageLoadMetricsUpdateOne { - plmuo.mutation.AddPackageMetricIDs(ids...) +// SetPackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID. +func (plmuo *PackageLoadMetricsUpdateOne) SetPackageMetricsID(id int) *PackageLoadMetricsUpdateOne { + plmuo.mutation.SetPackageMetricsID(id) return plmuo } -// AddPackageMetrics adds the "package_metrics" edges to the PackageMetrics entity. -func (plmuo *PackageLoadMetricsUpdateOne) AddPackageMetrics(p ...*PackageMetrics) *PackageLoadMetricsUpdateOne { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID +// SetNillablePackageMetricsID sets the "package_metrics" edge to the PackageMetrics entity by ID if the given value is not nil. +func (plmuo *PackageLoadMetricsUpdateOne) SetNillablePackageMetricsID(id *int) *PackageLoadMetricsUpdateOne { + if id != nil { + plmuo = plmuo.SetPackageMetricsID(*id) } - return plmuo.AddPackageMetricIDs(ids...) + return plmuo +} + +// SetPackageMetrics sets the "package_metrics" edge to the PackageMetrics entity. +func (plmuo *PackageLoadMetricsUpdateOne) SetPackageMetrics(p *PackageMetrics) *PackageLoadMetricsUpdateOne { + return plmuo.SetPackageMetricsID(p.ID) } // Mutation returns the PackageLoadMetricsMutation object of the builder. @@ -551,27 +528,12 @@ func (plmuo *PackageLoadMetricsUpdateOne) Mutation() *PackageLoadMetricsMutation return plmuo.mutation } -// ClearPackageMetrics clears all "package_metrics" edges to the PackageMetrics entity. 
+// ClearPackageMetrics clears the "package_metrics" edge to the PackageMetrics entity. func (plmuo *PackageLoadMetricsUpdateOne) ClearPackageMetrics() *PackageLoadMetricsUpdateOne { plmuo.mutation.ClearPackageMetrics() return plmuo } -// RemovePackageMetricIDs removes the "package_metrics" edge to PackageMetrics entities by IDs. -func (plmuo *PackageLoadMetricsUpdateOne) RemovePackageMetricIDs(ids ...int) *PackageLoadMetricsUpdateOne { - plmuo.mutation.RemovePackageMetricIDs(ids...) - return plmuo -} - -// RemovePackageMetrics removes "package_metrics" edges to PackageMetrics entities. -func (plmuo *PackageLoadMetricsUpdateOne) RemovePackageMetrics(p ...*PackageMetrics) *PackageLoadMetricsUpdateOne { - ids := make([]int, len(p)) - for i := range p { - ids[i] = p[i].ID - } - return plmuo.RemovePackageMetricIDs(ids...) -} - // Where appends a list predicates to the PackageLoadMetricsUpdate builder. func (plmuo *PackageLoadMetricsUpdateOne) Where(ps ...predicate.PackageLoadMetrics) *PackageLoadMetricsUpdateOne { plmuo.mutation.Where(ps...) @@ -691,39 +653,23 @@ func (plmuo *PackageLoadMetricsUpdateOne) sqlSave(ctx context.Context) (_node *P } if plmuo.mutation.PackageMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := plmuo.mutation.RemovedPackageMetricsIDs(); len(nodes) > 0 && !plmuo.mutation.PackageMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, + Columns: []string{packageloadmetrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := plmuo.mutation.PackageMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: packageloadmetrics.PackageMetricsTable, - Columns: packageloadmetrics.PackageMetricsPrimaryKey, + Columns: []string{packageloadmetrics.PackageMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packagemetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/packagemetrics.go b/ent/gen/ent/packagemetrics.go index bc6dd4a..9fe194a 100644 --- a/ent/gen/ent/packagemetrics.go +++ b/ent/gen/ent/packagemetrics.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" "github.com/buildbarn/bb-portal/ent/gen/ent/packagemetrics" ) @@ -20,14 +21,15 @@ type PackageMetrics struct { PackagesLoaded int64 `json:"packages_loaded,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the PackageMetricsQuery when eager-loading is set. - Edges PackageMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges PackageMetricsEdges `json:"edges"` + metrics_package_metrics *int + selectValues sql.SelectValues } // PackageMetricsEdges holds the relations/edges for other nodes in the graph. 
type PackageMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // PackageLoadMetrics holds the value of the package_load_metrics edge. PackageLoadMetrics []*PackageLoadMetrics `json:"package_load_metrics,omitempty"` // loadedTypes holds the information for reporting if a @@ -36,15 +38,16 @@ type PackageMetricsEdges struct { // totalCount holds the count of the edges above. totalCount [2]map[string]int - namedMetrics map[string][]*Metrics namedPackageLoadMetrics map[string][]*PackageLoadMetrics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e PackageMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e PackageMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -65,6 +68,8 @@ func (*PackageMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case packagemetrics.FieldID, packagemetrics.FieldPackagesLoaded: values[i] = new(sql.NullInt64) + case packagemetrics.ForeignKeys[0]: // metrics_package_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -92,6 +97,13 @@ func (pm *PackageMetrics) assignValues(columns []string, values []any) error { } else if value.Valid { pm.PackagesLoaded = value.Int64 } + case packagemetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_package_metrics", value) + } else if value.Valid { + pm.metrics_package_metrics = new(int) + *pm.metrics_package_metrics = int(value.Int64) + } default: pm.selectValues.Set(columns[i], values[i]) } @@ -144,30 +156,6 @@ func (pm *PackageMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (pm *PackageMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if pm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := pm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (pm *PackageMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if pm.Edges.namedMetrics == nil { - pm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - pm.Edges.namedMetrics[name] = []*Metrics{} - } else { - pm.Edges.namedMetrics[name] = append(pm.Edges.namedMetrics[name], edges...) - } -} - // NamedPackageLoadMetrics returns the PackageLoadMetrics named value or an error if the edge was not // loaded in eager-loading with this name. func (pm *PackageMetrics) NamedPackageLoadMetrics(name string) ([]*PackageLoadMetrics, error) { diff --git a/ent/gen/ent/packagemetrics/packagemetrics.go b/ent/gen/ent/packagemetrics/packagemetrics.go index 9fc6f43..fab4a6f 100644 --- a/ent/gen/ent/packagemetrics/packagemetrics.go +++ b/ent/gen/ent/packagemetrics/packagemetrics.go @@ -20,16 +20,20 @@ const ( EdgePackageLoadMetrics = "package_load_metrics" // Table holds the table name of the packagemetrics in the database. Table = "package_metrics" - // MetricsTable is the table that holds the metrics relation/edge. 
The primary key declared below. - MetricsTable = "metrics_package_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "package_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" - // PackageLoadMetricsTable is the table that holds the package_load_metrics relation/edge. The primary key declared below. - PackageLoadMetricsTable = "package_metrics_package_load_metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_package_metrics" + // PackageLoadMetricsTable is the table that holds the package_load_metrics relation/edge. + PackageLoadMetricsTable = "package_load_metrics" // PackageLoadMetricsInverseTable is the table name for the PackageLoadMetrics entity. // It exists in this package in order to avoid circular dependency with the "packageloadmetrics" package. PackageLoadMetricsInverseTable = "package_load_metrics" + // PackageLoadMetricsColumn is the table column denoting the package_load_metrics relation/edge. + PackageLoadMetricsColumn = "package_metrics_package_load_metrics" ) // Columns holds all SQL columns for packagemetrics fields. @@ -38,14 +42,11 @@ var Columns = []string{ FieldPackagesLoaded, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). - MetricsPrimaryKey = []string{"metrics_id", "package_metrics_id"} - // PackageLoadMetricsPrimaryKey and PackageLoadMetricsColumn2 are the table columns denoting the - // primary key for the package_load_metrics relation (M2M). - PackageLoadMetricsPrimaryKey = []string{"package_metrics_id", "package_load_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "package_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_package_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -54,6 +55,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -70,17 +76,10 @@ func ByPackagesLoaded(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldPackagesLoaded, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } @@ -101,13 +100,13 @@ func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } func newPackageLoadMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(PackageLoadMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, PackageLoadMetricsTable, PackageLoadMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, PackageLoadMetricsTable, PackageLoadMetricsColumn), ) } diff --git a/ent/gen/ent/packagemetrics/where.go b/ent/gen/ent/packagemetrics/where.go index 4e3af0c..be61ecc 100644 --- a/ent/gen/ent/packagemetrics/where.go +++ b/ent/gen/ent/packagemetrics/where.go @@ -113,7 +113,7 @@ func HasMetrics() predicate.PackageMetrics { return predicate.PackageMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -136,7 +136,7 @@ func HasPackageLoadMetrics() predicate.PackageMetrics { return predicate.PackageMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, PackageLoadMetricsTable, PackageLoadMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, PackageLoadMetricsTable, PackageLoadMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/packagemetrics_create.go b/ent/gen/ent/packagemetrics_create.go index 1e81678..7198396 100644 --- a/ent/gen/ent/packagemetrics_create.go +++ b/ent/gen/ent/packagemetrics_create.go @@ -34,19 +34,23 @@ func (pmc *PackageMetricsCreate) SetNillablePackagesLoaded(i *int64) *PackageMet return pmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (pmc *PackageMetricsCreate) AddMetricIDs(ids ...int) *PackageMetricsCreate { - pmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (pmc *PackageMetricsCreate) SetMetricsID(id int) *PackageMetricsCreate { + pmc.mutation.SetMetricsID(id) return pmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (pmc *PackageMetricsCreate) AddMetrics(m ...*Metrics) *PackageMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (pmc *PackageMetricsCreate) SetNillableMetricsID(id *int) *PackageMetricsCreate { + if id != nil { + pmc = pmc.SetMetricsID(*id) } - return pmc.AddMetricIDs(ids...) + return pmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (pmc *PackageMetricsCreate) SetMetrics(m *Metrics) *PackageMetricsCreate { + return pmc.SetMetricsID(m.ID) } // AddPackageLoadMetricIDs adds the "package_load_metrics" edge to the PackageLoadMetrics entity by IDs. 
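Context for the builder change above: ent only emits SetMetricsID/SetMetrics (instead of AddMetricIDs/AddMetrics) when the schema marks this side of the edge as unique. The patch does not include the schema files, so the following is only a hedged sketch of edge definitions that would plausibly produce the O2O "metrics" back-reference and the O2M "package_load_metrics" edge seen in this diff; the type names Metrics and PackageLoadMetrics are assumed sibling schema types.

// ent/schema/packagemetrics.go -- illustrative sketch, not part of this patch.
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// PackageMetrics holds an assumed schema definition for the PackageMetrics entity.
type PackageMetrics struct {
	ent.Schema
}

// Edges sketches relations that would make ent generate the layout in this diff.
func (PackageMetrics) Edges() []ent.Edge {
	return []ent.Edge{
		// Unique back-reference: each PackageMetrics belongs to at most one Metrics,
		// so the generated edge becomes O2O and an FK column
		// (metrics_package_metrics) is added to the package_metrics table.
		edge.From("metrics", Metrics.Type).
			Ref("package_metrics").
			Unique(),
		// Plain To-edge without Unique(): generated as O2M towards PackageLoadMetrics,
		// with the FK living on the package_load_metrics table.
		edge.To("package_load_metrics", PackageLoadMetrics.Type),
	}
}

The Unique() call is what flips the generated Rel from sqlgraph.M2M to sqlgraph.O2O and replaces the old MetricsPrimaryKey join table with the MetricsColumn foreign key listed in ForeignKeys.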
@@ -130,10 +134,10 @@ func (pmc *PackageMetricsCreate) createSpec() (*PackageMetrics, *sqlgraph.Create } if nodes := pmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, + Columns: []string{packagemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -142,14 +146,15 @@ func (pmc *PackageMetricsCreate) createSpec() (*PackageMetrics, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_package_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := pmc.mutation.PackageLoadMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/packagemetrics_query.go b/ent/gen/ent/packagemetrics_query.go index 722de85..a37e53d 100644 --- a/ent/gen/ent/packagemetrics_query.go +++ b/ent/gen/ent/packagemetrics_query.go @@ -26,9 +26,9 @@ type PackageMetricsQuery struct { predicates []predicate.PackageMetrics withMetrics *MetricsQuery withPackageLoadMetrics *PackageLoadMetricsQuery + withFKs bool modifiers []func(*sql.Selector) loadTotal []func(context.Context, []*PackageMetrics) error - withNamedMetrics map[string]*MetricsQuery withNamedPackageLoadMetrics map[string]*PackageLoadMetricsQuery // intermediate query (i.e. traversal path). sql *sql.Selector @@ -80,7 +80,7 @@ func (pmq *PackageMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(packagemetrics.Table, packagemetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, packagemetrics.MetricsTable, packagemetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, packagemetrics.MetricsTable, packagemetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(pmq.driver.Dialect(), step) return fromU, nil @@ -102,7 +102,7 @@ func (pmq *PackageMetricsQuery) QueryPackageLoadMetrics() *PackageLoadMetricsQue step := sqlgraph.NewStep( sqlgraph.From(packagemetrics.Table, packagemetrics.FieldID, selector), sqlgraph.To(packageloadmetrics.Table, packageloadmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, packagemetrics.PackageLoadMetricsTable, packagemetrics.PackageLoadMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, packagemetrics.PackageLoadMetricsTable, packagemetrics.PackageLoadMetricsColumn), ) fromU = sqlgraph.SetNeighbors(pmq.driver.Dialect(), step) return fromU, nil @@ -409,12 +409,19 @@ func (pmq *PackageMetricsQuery) prepareQuery(ctx context.Context) error { func (pmq *PackageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*PackageMetrics, error) { var ( nodes = []*PackageMetrics{} + withFKs = pmq.withFKs _spec = pmq.querySpec() loadedTypes = [2]bool{ pmq.withMetrics != nil, pmq.withPackageLoadMetrics != nil, } ) + if pmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, packagemetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*PackageMetrics).scanValues(nil, columns) } @@ -437,9 +444,8 @@ func (pmq *PackageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := pmq.withMetrics; query != nil { - if err := pmq.loadMetrics(ctx, query, nodes, - func(n *PackageMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *PackageMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { + if err := pmq.loadMetrics(ctx, query, nodes, nil, + func(n *PackageMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -452,13 +458,6 @@ func (pmq *PackageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nil, err } } - for name, query := range pmq.withNamedMetrics { - if err := pmq.loadMetrics(ctx, query, nodes, - func(n *PackageMetrics) { n.appendNamedMetrics(name) }, - func(n *PackageMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { - return nil, err - } - } for name, query := range pmq.withNamedPackageLoadMetrics { if err := pmq.loadPackageLoadMetrics(ctx, query, nodes, func(n *PackageMetrics) { n.appendNamedPackageLoadMetrics(name) }, @@ -475,124 +474,65 @@ func (pmq *PackageMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (pmq *PackageMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*PackageMetrics, init func(*PackageMetrics), assign func(*PackageMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*PackageMetrics) - nids := make(map[int]map[*PackageMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*PackageMetrics) + for i := range nodes { + if nodes[i].metrics_package_metrics == nil { + continue + } + fk := *nodes[i].metrics_package_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(packagemetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(packagemetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(packagemetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(packagemetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*PackageMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_package_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil } func (pmq *PackageMetricsQuery) loadPackageLoadMetrics(ctx context.Context, query *PackageLoadMetricsQuery, nodes []*PackageMetrics, init func(*PackageMetrics), assign func(*PackageMetrics, *PackageLoadMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*PackageMetrics) - nids := make(map[int]map[*PackageMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*PackageMetrics) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(packagemetrics.PackageLoadMetricsTable) - s.Join(joinT).On(s.C(packageloadmetrics.FieldID), joinT.C(packagemetrics.PackageLoadMetricsPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(packagemetrics.PackageLoadMetricsPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(packagemetrics.PackageLoadMetricsPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*PackageMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*PackageLoadMetrics](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.PackageLoadMetrics(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(packagemetrics.PackageLoadMetricsColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "package_load_metrics" node returned %v`, n.ID) + fk := n.package_metrics_package_load_metrics + if fk == nil { + return fmt.Errorf(`foreign-key "package_metrics_package_load_metrics" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "package_metrics_package_load_metrics" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -681,20 +621,6 @@ func (pmq *PackageMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (pmq *PackageMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *PackageMetricsQuery { - query := (&MetricsClient{config: pmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if pmq.withNamedMetrics == nil { - pmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - pmq.withNamedMetrics[name] = query - return pmq -} - // WithNamedPackageLoadMetrics tells the query-builder to eager-load the nodes that are connected to the "package_load_metrics" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (pmq *PackageMetricsQuery) WithNamedPackageLoadMetrics(name string, opts ...func(*PackageLoadMetricsQuery)) *PackageMetricsQuery { diff --git a/ent/gen/ent/packagemetrics_update.go b/ent/gen/ent/packagemetrics_update.go index 92ea8d9..f38f3f0 100644 --- a/ent/gen/ent/packagemetrics_update.go +++ b/ent/gen/ent/packagemetrics_update.go @@ -56,19 +56,23 @@ func (pmu *PackageMetricsUpdate) ClearPackagesLoaded() *PackageMetricsUpdate { return pmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (pmu *PackageMetricsUpdate) AddMetricIDs(ids ...int) *PackageMetricsUpdate { - pmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. 
+func (pmu *PackageMetricsUpdate) SetMetricsID(id int) *PackageMetricsUpdate { + pmu.mutation.SetMetricsID(id) return pmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (pmu *PackageMetricsUpdate) AddMetrics(m ...*Metrics) *PackageMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (pmu *PackageMetricsUpdate) SetNillableMetricsID(id *int) *PackageMetricsUpdate { + if id != nil { + pmu = pmu.SetMetricsID(*id) } - return pmu.AddMetricIDs(ids...) + return pmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (pmu *PackageMetricsUpdate) SetMetrics(m *Metrics) *PackageMetricsUpdate { + return pmu.SetMetricsID(m.ID) } // AddPackageLoadMetricIDs adds the "package_load_metrics" edge to the PackageLoadMetrics entity by IDs. @@ -91,27 +95,12 @@ func (pmu *PackageMetricsUpdate) Mutation() *PackageMetricsMutation { return pmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (pmu *PackageMetricsUpdate) ClearMetrics() *PackageMetricsUpdate { pmu.mutation.ClearMetrics() return pmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (pmu *PackageMetricsUpdate) RemoveMetricIDs(ids ...int) *PackageMetricsUpdate { - pmu.mutation.RemoveMetricIDs(ids...) - return pmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (pmu *PackageMetricsUpdate) RemoveMetrics(m ...*Metrics) *PackageMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return pmu.RemoveMetricIDs(ids...) -} - // ClearPackageLoadMetrics clears all "package_load_metrics" edges to the PackageLoadMetrics entity. 
func (pmu *PackageMetricsUpdate) ClearPackageLoadMetrics() *PackageMetricsUpdate { pmu.mutation.ClearPackageLoadMetrics() @@ -180,39 +169,23 @@ func (pmu *PackageMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if pmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := pmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !pmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, + Columns: []string{packagemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := pmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, + Columns: []string{packagemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -225,10 +198,10 @@ func (pmu *PackageMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if pmu.mutation.PackageLoadMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), @@ -238,10 +211,10 @@ func (pmu *PackageMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := pmu.mutation.RemovedPackageLoadMetricsIDs(); len(nodes) > 0 && !pmu.mutation.PackageLoadMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), @@ -254,10 +227,10 @@ func (pmu *PackageMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := pmu.mutation.PackageLoadMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), @@ -315,19 +288,23 @@ func (pmuo *PackageMetricsUpdateOne) ClearPackagesLoaded() *PackageMetricsUpdate return pmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (pmuo *PackageMetricsUpdateOne) AddMetricIDs(ids ...int) *PackageMetricsUpdateOne { - pmuo.mutation.AddMetricIDs(ids...) 
+// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (pmuo *PackageMetricsUpdateOne) SetMetricsID(id int) *PackageMetricsUpdateOne { + pmuo.mutation.SetMetricsID(id) return pmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (pmuo *PackageMetricsUpdateOne) AddMetrics(m ...*Metrics) *PackageMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (pmuo *PackageMetricsUpdateOne) SetNillableMetricsID(id *int) *PackageMetricsUpdateOne { + if id != nil { + pmuo = pmuo.SetMetricsID(*id) } - return pmuo.AddMetricIDs(ids...) + return pmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (pmuo *PackageMetricsUpdateOne) SetMetrics(m *Metrics) *PackageMetricsUpdateOne { + return pmuo.SetMetricsID(m.ID) } // AddPackageLoadMetricIDs adds the "package_load_metrics" edge to the PackageLoadMetrics entity by IDs. @@ -350,27 +327,12 @@ func (pmuo *PackageMetricsUpdateOne) Mutation() *PackageMetricsMutation { return pmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (pmuo *PackageMetricsUpdateOne) ClearMetrics() *PackageMetricsUpdateOne { pmuo.mutation.ClearMetrics() return pmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (pmuo *PackageMetricsUpdateOne) RemoveMetricIDs(ids ...int) *PackageMetricsUpdateOne { - pmuo.mutation.RemoveMetricIDs(ids...) - return pmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (pmuo *PackageMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *PackageMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return pmuo.RemoveMetricIDs(ids...) -} - // ClearPackageLoadMetrics clears all "package_load_metrics" edges to the PackageLoadMetrics entity. 
func (pmuo *PackageMetricsUpdateOne) ClearPackageLoadMetrics() *PackageMetricsUpdateOne { pmuo.mutation.ClearPackageLoadMetrics() @@ -469,39 +431,23 @@ func (pmuo *PackageMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Packag } if pmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := pmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !pmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, + Columns: []string{packagemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := pmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: packagemetrics.MetricsTable, - Columns: packagemetrics.MetricsPrimaryKey, + Columns: []string{packagemetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -514,10 +460,10 @@ func (pmuo *PackageMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Packag } if pmuo.mutation.PackageLoadMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), @@ -527,10 +473,10 @@ func (pmuo *PackageMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Packag } if nodes := pmuo.mutation.RemovedPackageLoadMetricsIDs(); len(nodes) > 0 && !pmuo.mutation.PackageLoadMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), @@ -543,10 +489,10 @@ func (pmuo *PackageMetricsUpdateOne) sqlSave(ctx context.Context) (_node *Packag } if nodes := pmuo.mutation.PackageLoadMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: packagemetrics.PackageLoadMetricsTable, - Columns: packagemetrics.PackageLoadMetricsPrimaryKey, + Columns: []string{packagemetrics.PackageLoadMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(packageloadmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/racestatistics.go b/ent/gen/ent/racestatistics.go index df118ba..b253345 100644 --- a/ent/gen/ent/racestatistics.go +++ b/ent/gen/ent/racestatistics.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/dynamicexecutionmetrics" 
"github.com/buildbarn/bb-portal/ent/gen/ent/racestatistics" ) @@ -28,28 +29,29 @@ type RaceStatistics struct { RenoteWins int64 `json:"renote_wins,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the RaceStatisticsQuery when eager-loading is set. - Edges RaceStatisticsEdges `json:"edges"` - selectValues sql.SelectValues + Edges RaceStatisticsEdges `json:"edges"` + dynamic_execution_metrics_race_statistics *int + selectValues sql.SelectValues } // RaceStatisticsEdges holds the relations/edges for other nodes in the graph. type RaceStatisticsEdges struct { // DynamicExecutionMetrics holds the value of the dynamic_execution_metrics edge. - DynamicExecutionMetrics []*DynamicExecutionMetrics `json:"dynamic_execution_metrics,omitempty"` + DynamicExecutionMetrics *DynamicExecutionMetrics `json:"dynamic_execution_metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedDynamicExecutionMetrics map[string][]*DynamicExecutionMetrics } // DynamicExecutionMetricsOrErr returns the DynamicExecutionMetrics value or an error if the edge -// was not loaded in eager-loading. -func (e RaceStatisticsEdges) DynamicExecutionMetricsOrErr() ([]*DynamicExecutionMetrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e RaceStatisticsEdges) DynamicExecutionMetricsOrErr() (*DynamicExecutionMetrics, error) { + if e.DynamicExecutionMetrics != nil { return e.DynamicExecutionMetrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: dynamicexecutionmetrics.Label} } return nil, &NotLoadedError{edge: "dynamic_execution_metrics"} } @@ -63,6 +65,8 @@ func (*RaceStatistics) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case racestatistics.FieldMnemonic, racestatistics.FieldLocalRunner, racestatistics.FieldRemoteRunner: values[i] = new(sql.NullString) + case racestatistics.ForeignKeys[0]: // dynamic_execution_metrics_race_statistics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -114,6 +118,13 @@ func (rs *RaceStatistics) assignValues(columns []string, values []any) error { } else if value.Valid { rs.RenoteWins = value.Int64 } + case racestatistics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field dynamic_execution_metrics_race_statistics", value) + } else if value.Valid { + rs.dynamic_execution_metrics_race_statistics = new(int) + *rs.dynamic_execution_metrics_race_statistics = int(value.Int64) + } default: rs.selectValues.Set(columns[i], values[i]) } @@ -173,29 +184,5 @@ func (rs *RaceStatistics) String() string { return builder.String() } -// NamedDynamicExecutionMetrics returns the DynamicExecutionMetrics named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (rs *RaceStatistics) NamedDynamicExecutionMetrics(name string) ([]*DynamicExecutionMetrics, error) { - if rs.Edges.namedDynamicExecutionMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := rs.Edges.namedDynamicExecutionMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (rs *RaceStatistics) appendNamedDynamicExecutionMetrics(name string, edges ...*DynamicExecutionMetrics) { - if rs.Edges.namedDynamicExecutionMetrics == nil { - rs.Edges.namedDynamicExecutionMetrics = make(map[string][]*DynamicExecutionMetrics) - } - if len(edges) == 0 { - rs.Edges.namedDynamicExecutionMetrics[name] = []*DynamicExecutionMetrics{} - } else { - rs.Edges.namedDynamicExecutionMetrics[name] = append(rs.Edges.namedDynamicExecutionMetrics[name], edges...) - } -} - // RaceStatisticsSlice is a parsable slice of RaceStatistics. type RaceStatisticsSlice []*RaceStatistics diff --git a/ent/gen/ent/racestatistics/racestatistics.go b/ent/gen/ent/racestatistics/racestatistics.go index 9e4eba6..f8e7b8f 100644 --- a/ent/gen/ent/racestatistics/racestatistics.go +++ b/ent/gen/ent/racestatistics/racestatistics.go @@ -26,11 +26,13 @@ const ( EdgeDynamicExecutionMetrics = "dynamic_execution_metrics" // Table holds the table name of the racestatistics in the database. Table = "race_statistics" - // DynamicExecutionMetricsTable is the table that holds the dynamic_execution_metrics relation/edge. The primary key declared below. - DynamicExecutionMetricsTable = "dynamic_execution_metrics_race_statistics" + // DynamicExecutionMetricsTable is the table that holds the dynamic_execution_metrics relation/edge. + DynamicExecutionMetricsTable = "race_statistics" // DynamicExecutionMetricsInverseTable is the table name for the DynamicExecutionMetrics entity. // It exists in this package in order to avoid circular dependency with the "dynamicexecutionmetrics" package. DynamicExecutionMetricsInverseTable = "dynamic_execution_metrics" + // DynamicExecutionMetricsColumn is the table column denoting the dynamic_execution_metrics relation/edge. + DynamicExecutionMetricsColumn = "dynamic_execution_metrics_race_statistics" ) // Columns holds all SQL columns for racestatistics fields. @@ -43,11 +45,11 @@ var Columns = []string{ FieldRenoteWins, } -var ( - // DynamicExecutionMetricsPrimaryKey and DynamicExecutionMetricsColumn2 are the table columns denoting the - // primary key for the dynamic_execution_metrics relation (M2M). - DynamicExecutionMetricsPrimaryKey = []string{"dynamic_execution_metrics_id", "race_statistics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "race_statistics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "dynamic_execution_metrics_race_statistics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -56,6 +58,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -92,23 +99,16 @@ func ByRenoteWins(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldRenoteWins, opts...).ToFunc() } -// ByDynamicExecutionMetricsCount orders the results by dynamic_execution_metrics count. -func ByDynamicExecutionMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newDynamicExecutionMetricsStep(), opts...) 
- } -} - -// ByDynamicExecutionMetrics orders the results by dynamic_execution_metrics terms. -func ByDynamicExecutionMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByDynamicExecutionMetricsField orders the results by dynamic_execution_metrics field. +func ByDynamicExecutionMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newDynamicExecutionMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newDynamicExecutionMetricsStep(), sql.OrderByField(field, opts...)) } } func newDynamicExecutionMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(DynamicExecutionMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, DynamicExecutionMetricsTable, DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, DynamicExecutionMetricsTable, DynamicExecutionMetricsColumn), ) } diff --git a/ent/gen/ent/racestatistics/where.go b/ent/gen/ent/racestatistics/where.go index a889565..a701ccf 100644 --- a/ent/gen/ent/racestatistics/where.go +++ b/ent/gen/ent/racestatistics/where.go @@ -408,7 +408,7 @@ func HasDynamicExecutionMetrics() predicate.RaceStatistics { return predicate.RaceStatistics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, DynamicExecutionMetricsTable, DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, DynamicExecutionMetricsTable, DynamicExecutionMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/racestatistics_create.go b/ent/gen/ent/racestatistics_create.go index d6b5286..c390089 100644 --- a/ent/gen/ent/racestatistics_create.go +++ b/ent/gen/ent/racestatistics_create.go @@ -89,19 +89,23 @@ func (rsc *RaceStatisticsCreate) SetNillableRenoteWins(i *int64) *RaceStatistics return rsc } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (rsc *RaceStatisticsCreate) AddDynamicExecutionMetricIDs(ids ...int) *RaceStatisticsCreate { - rsc.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (rsc *RaceStatisticsCreate) SetDynamicExecutionMetricsID(id int) *RaceStatisticsCreate { + rsc.mutation.SetDynamicExecutionMetricsID(id) return rsc } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. -func (rsc *RaceStatisticsCreate) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *RaceStatisticsCreate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (rsc *RaceStatisticsCreate) SetNillableDynamicExecutionMetricsID(id *int) *RaceStatisticsCreate { + if id != nil { + rsc = rsc.SetDynamicExecutionMetricsID(*id) } - return rsc.AddDynamicExecutionMetricIDs(ids...) + return rsc +} + +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (rsc *RaceStatisticsCreate) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *RaceStatisticsCreate { + return rsc.SetDynamicExecutionMetricsID(d.ID) } // Mutation returns the RaceStatisticsMutation object of the builder. 
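With the edge now unique, callers attach a RaceStatistics row to its parent metrics with a single setter instead of AddDynamicExecutionMetrics. A hedged usage sketch against the regenerated builders follows; the client wiring, the ctx, and the dem value are assumptions, and SetMnemonic is the ordinary field setter implied by FieldMnemonic above.

// Illustrative sketch, not code from this patch.
// Assumes an *ent.Client and a previously loaded *ent.DynamicExecutionMetrics.
func attachRaceStatistics(ctx context.Context, client *ent.Client, dem *ent.DynamicExecutionMetrics) (*ent.RaceStatistics, error) {
	return client.RaceStatistics.Create().
		SetMnemonic("CppCompile").       // regular field setter (assumed present for the mnemonic field)
		SetDynamicExecutionMetrics(dem). // unique-edge setter introduced by this patch
		Save(ctx)
}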
@@ -186,10 +190,10 @@ func (rsc *RaceStatisticsCreate) createSpec() (*RaceStatistics, *sqlgraph.Create } if nodes := rsc.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{racestatistics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), @@ -198,6 +202,7 @@ func (rsc *RaceStatisticsCreate) createSpec() (*RaceStatistics, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.dynamic_execution_metrics_race_statistics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/racestatistics_query.go b/ent/gen/ent/racestatistics_query.go index b149e78..05dab9a 100644 --- a/ent/gen/ent/racestatistics_query.go +++ b/ent/gen/ent/racestatistics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // RaceStatisticsQuery is the builder for querying RaceStatistics entities. type RaceStatisticsQuery struct { config - ctx *QueryContext - order []racestatistics.OrderOption - inters []Interceptor - predicates []predicate.RaceStatistics - withDynamicExecutionMetrics *DynamicExecutionMetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*RaceStatistics) error - withNamedDynamicExecutionMetrics map[string]*DynamicExecutionMetricsQuery + ctx *QueryContext + order []racestatistics.OrderOption + inters []Interceptor + predicates []predicate.RaceStatistics + withDynamicExecutionMetrics *DynamicExecutionMetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*RaceStatistics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (rsq *RaceStatisticsQuery) QueryDynamicExecutionMetrics() *DynamicExecution step := sqlgraph.NewStep( sqlgraph.From(racestatistics.Table, racestatistics.FieldID, selector), sqlgraph.To(dynamicexecutionmetrics.Table, dynamicexecutionmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, racestatistics.DynamicExecutionMetricsTable, racestatistics.DynamicExecutionMetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, racestatistics.DynamicExecutionMetricsTable, racestatistics.DynamicExecutionMetricsColumn), ) fromU = sqlgraph.SetNeighbors(rsq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (rsq *RaceStatisticsQuery) prepareQuery(ctx context.Context) error { func (rsq *RaceStatisticsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*RaceStatistics, error) { var ( nodes = []*RaceStatistics{} + withFKs = rsq.withFKs _spec = rsq.querySpec() loadedTypes = [1]bool{ rsq.withDynamicExecutionMetrics != nil, } ) + if rsq.withDynamicExecutionMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, racestatistics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*RaceStatistics).scanValues(nil, columns) } @@ -399,18 +405,8 @@ func (rsq *RaceStatisticsQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := rsq.withDynamicExecutionMetrics; query != nil { - if err := rsq.loadDynamicExecutionMetrics(ctx, query, nodes, - func(n *RaceStatistics) { n.Edges.DynamicExecutionMetrics = []*DynamicExecutionMetrics{} }, - func(n *RaceStatistics, e *DynamicExecutionMetrics) { - n.Edges.DynamicExecutionMetrics = append(n.Edges.DynamicExecutionMetrics, e) - }); err != nil { - return nil, err - } - } - for name, query := range rsq.withNamedDynamicExecutionMetrics { - if err := rsq.loadDynamicExecutionMetrics(ctx, query, nodes, - func(n *RaceStatistics) { n.appendNamedDynamicExecutionMetrics(name) }, - func(n *RaceStatistics, e *DynamicExecutionMetrics) { n.appendNamedDynamicExecutionMetrics(name, e) }); err != nil { + if err := rsq.loadDynamicExecutionMetrics(ctx, query, nodes, nil, + func(n *RaceStatistics, e *DynamicExecutionMetrics) { n.Edges.DynamicExecutionMetrics = e }); err != nil { return nil, err } } @@ -423,62 +419,33 @@ func (rsq *RaceStatisticsQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (rsq *RaceStatisticsQuery) loadDynamicExecutionMetrics(ctx context.Context, query *DynamicExecutionMetricsQuery, nodes []*RaceStatistics, init func(*RaceStatistics), assign func(*RaceStatistics, *DynamicExecutionMetrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*RaceStatistics) - nids := make(map[int]map[*RaceStatistics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*RaceStatistics) + for i := range nodes { + if nodes[i].dynamic_execution_metrics_race_statistics == nil { + continue } + fk := *nodes[i].dynamic_execution_metrics_race_statistics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(racestatistics.DynamicExecutionMetricsTable) - s.Join(joinT).On(s.C(dynamicexecutionmetrics.FieldID), joinT.C(racestatistics.DynamicExecutionMetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(racestatistics.DynamicExecutionMetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(racestatistics.DynamicExecutionMetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*RaceStatistics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*DynamicExecutionMetrics](ctx, query, qr, query.inters) + query.Where(dynamicexecutionmetrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "dynamic_execution_metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "dynamic_execution_metrics_race_statistics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -568,20 +535,6 @@ func (rsq *RaceStatisticsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedDynamicExecutionMetrics tells the query-builder to eager-load the nodes that are connected to the "dynamic_execution_metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (rsq *RaceStatisticsQuery) WithNamedDynamicExecutionMetrics(name string, opts ...func(*DynamicExecutionMetricsQuery)) *RaceStatisticsQuery { - query := (&DynamicExecutionMetricsClient{config: rsq.config}).Query() - for _, opt := range opts { - opt(query) - } - if rsq.withNamedDynamicExecutionMetrics == nil { - rsq.withNamedDynamicExecutionMetrics = make(map[string]*DynamicExecutionMetricsQuery) - } - rsq.withNamedDynamicExecutionMetrics[name] = query - return rsq -} - // RaceStatisticsGroupBy is the group-by builder for RaceStatistics entities. type RaceStatisticsGroupBy struct { selector diff --git a/ent/gen/ent/racestatistics_update.go b/ent/gen/ent/racestatistics_update.go index 58ec350..6796d54 100644 --- a/ent/gen/ent/racestatistics_update.go +++ b/ent/gen/ent/racestatistics_update.go @@ -142,19 +142,23 @@ func (rsu *RaceStatisticsUpdate) ClearRenoteWins() *RaceStatisticsUpdate { return rsu } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (rsu *RaceStatisticsUpdate) AddDynamicExecutionMetricIDs(ids ...int) *RaceStatisticsUpdate { - rsu.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (rsu *RaceStatisticsUpdate) SetDynamicExecutionMetricsID(id int) *RaceStatisticsUpdate { + rsu.mutation.SetDynamicExecutionMetricsID(id) return rsu } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. 
-func (rsu *RaceStatisticsUpdate) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *RaceStatisticsUpdate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (rsu *RaceStatisticsUpdate) SetNillableDynamicExecutionMetricsID(id *int) *RaceStatisticsUpdate { + if id != nil { + rsu = rsu.SetDynamicExecutionMetricsID(*id) } - return rsu.AddDynamicExecutionMetricIDs(ids...) + return rsu +} + +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (rsu *RaceStatisticsUpdate) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *RaceStatisticsUpdate { + return rsu.SetDynamicExecutionMetricsID(d.ID) } // Mutation returns the RaceStatisticsMutation object of the builder. @@ -162,27 +166,12 @@ func (rsu *RaceStatisticsUpdate) Mutation() *RaceStatisticsMutation { return rsu.mutation } -// ClearDynamicExecutionMetrics clears all "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. +// ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. func (rsu *RaceStatisticsUpdate) ClearDynamicExecutionMetrics() *RaceStatisticsUpdate { rsu.mutation.ClearDynamicExecutionMetrics() return rsu } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to DynamicExecutionMetrics entities by IDs. -func (rsu *RaceStatisticsUpdate) RemoveDynamicExecutionMetricIDs(ids ...int) *RaceStatisticsUpdate { - rsu.mutation.RemoveDynamicExecutionMetricIDs(ids...) - return rsu -} - -// RemoveDynamicExecutionMetrics removes "dynamic_execution_metrics" edges to DynamicExecutionMetrics entities. -func (rsu *RaceStatisticsUpdate) RemoveDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *RaceStatisticsUpdate { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID - } - return rsu.RemoveDynamicExecutionMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. 
func (rsu *RaceStatisticsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, rsu.sqlSave, rsu.mutation, rsu.hooks) @@ -257,39 +246,23 @@ func (rsu *RaceStatisticsUpdate) sqlSave(ctx context.Context) (n int, err error) } if rsu.mutation.DynamicExecutionMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := rsu.mutation.RemovedDynamicExecutionMetricsIDs(); len(nodes) > 0 && !rsu.mutation.DynamicExecutionMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{racestatistics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := rsu.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{racestatistics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), @@ -434,19 +407,23 @@ func (rsuo *RaceStatisticsUpdateOne) ClearRenoteWins() *RaceStatisticsUpdateOne return rsuo } -// AddDynamicExecutionMetricIDs adds the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by IDs. -func (rsuo *RaceStatisticsUpdateOne) AddDynamicExecutionMetricIDs(ids ...int) *RaceStatisticsUpdateOne { - rsuo.mutation.AddDynamicExecutionMetricIDs(ids...) +// SetDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID. +func (rsuo *RaceStatisticsUpdateOne) SetDynamicExecutionMetricsID(id int) *RaceStatisticsUpdateOne { + rsuo.mutation.SetDynamicExecutionMetricsID(id) return rsuo } -// AddDynamicExecutionMetrics adds the "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. -func (rsuo *RaceStatisticsUpdateOne) AddDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *RaceStatisticsUpdateOne { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID +// SetNillableDynamicExecutionMetricsID sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity by ID if the given value is not nil. +func (rsuo *RaceStatisticsUpdateOne) SetNillableDynamicExecutionMetricsID(id *int) *RaceStatisticsUpdateOne { + if id != nil { + rsuo = rsuo.SetDynamicExecutionMetricsID(*id) } - return rsuo.AddDynamicExecutionMetricIDs(ids...) + return rsuo +} + +// SetDynamicExecutionMetrics sets the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. +func (rsuo *RaceStatisticsUpdateOne) SetDynamicExecutionMetrics(d *DynamicExecutionMetrics) *RaceStatisticsUpdateOne { + return rsuo.SetDynamicExecutionMetricsID(d.ID) } // Mutation returns the RaceStatisticsMutation object of the builder. 
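On the update side, ClearDynamicExecutionMetrics now detaches the single parent and the Remove* variants disappear, since a M2O edge is just a nullable FK column. A hedged sketch of re-pointing an existing row; the row ID and parent ID are placeholders, not values from this patch.

// Illustrative sketch: move a RaceStatistics row under a different
// DynamicExecutionMetrics parent, or detach it entirely.
func repointRaceStatistics(ctx context.Context, client *ent.Client, rowID, newParentID int) error {
	// Re-point to another parent via the new unique-edge setter.
	if _, err := client.RaceStatistics.UpdateOneID(rowID).
		SetDynamicExecutionMetricsID(newParentID).
		Save(ctx); err != nil {
		return err
	}
	// Or drop the association; clearing a M2O edge simply nulls the
	// dynamic_execution_metrics_race_statistics column.
	_, err := client.RaceStatistics.UpdateOneID(rowID).
		ClearDynamicExecutionMetrics().
		Save(ctx)
	return err
}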
@@ -454,27 +431,12 @@ func (rsuo *RaceStatisticsUpdateOne) Mutation() *RaceStatisticsMutation { return rsuo.mutation } -// ClearDynamicExecutionMetrics clears all "dynamic_execution_metrics" edges to the DynamicExecutionMetrics entity. +// ClearDynamicExecutionMetrics clears the "dynamic_execution_metrics" edge to the DynamicExecutionMetrics entity. func (rsuo *RaceStatisticsUpdateOne) ClearDynamicExecutionMetrics() *RaceStatisticsUpdateOne { rsuo.mutation.ClearDynamicExecutionMetrics() return rsuo } -// RemoveDynamicExecutionMetricIDs removes the "dynamic_execution_metrics" edge to DynamicExecutionMetrics entities by IDs. -func (rsuo *RaceStatisticsUpdateOne) RemoveDynamicExecutionMetricIDs(ids ...int) *RaceStatisticsUpdateOne { - rsuo.mutation.RemoveDynamicExecutionMetricIDs(ids...) - return rsuo -} - -// RemoveDynamicExecutionMetrics removes "dynamic_execution_metrics" edges to DynamicExecutionMetrics entities. -func (rsuo *RaceStatisticsUpdateOne) RemoveDynamicExecutionMetrics(d ...*DynamicExecutionMetrics) *RaceStatisticsUpdateOne { - ids := make([]int, len(d)) - for i := range d { - ids[i] = d[i].ID - } - return rsuo.RemoveDynamicExecutionMetricIDs(ids...) -} - // Where appends a list predicates to the RaceStatisticsUpdate builder. func (rsuo *RaceStatisticsUpdateOne) Where(ps ...predicate.RaceStatistics) *RaceStatisticsUpdateOne { rsuo.mutation.Where(ps...) @@ -579,39 +541,23 @@ func (rsuo *RaceStatisticsUpdateOne) sqlSave(ctx context.Context) (_node *RaceSt } if rsuo.mutation.DynamicExecutionMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := rsuo.mutation.RemovedDynamicExecutionMetricsIDs(); len(nodes) > 0 && !rsuo.mutation.DynamicExecutionMetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{racestatistics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := rsuo.mutation.DynamicExecutionMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: racestatistics.DynamicExecutionMetricsTable, - Columns: racestatistics.DynamicExecutionMetricsPrimaryKey, + Columns: []string{racestatistics.DynamicExecutionMetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(dynamicexecutionmetrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/resourceusage.go b/ent/gen/ent/resourceusage.go index cca640f..980a790 100644 --- a/ent/gen/ent/resourceusage.go +++ b/ent/gen/ent/resourceusage.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/exectioninfo" "github.com/buildbarn/bb-portal/ent/gen/ent/resourceusage" ) @@ -22,28 +23,29 @@ type ResourceUsage struct { Value string `json:"value,omitempty"` // Edges holds the relations/edges for other nodes in the 
graph. // The values are being populated by the ResourceUsageQuery when eager-loading is set. - Edges ResourceUsageEdges `json:"edges"` - selectValues sql.SelectValues + Edges ResourceUsageEdges `json:"edges"` + exection_info_resource_usage *int + selectValues sql.SelectValues } // ResourceUsageEdges holds the relations/edges for other nodes in the graph. type ResourceUsageEdges struct { // ExecutionInfo holds the value of the execution_info edge. - ExecutionInfo []*ExectionInfo `json:"execution_info,omitempty"` + ExecutionInfo *ExectionInfo `json:"execution_info,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedExecutionInfo map[string][]*ExectionInfo } // ExecutionInfoOrErr returns the ExecutionInfo value or an error if the edge -// was not loaded in eager-loading. -func (e ResourceUsageEdges) ExecutionInfoOrErr() ([]*ExectionInfo, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e ResourceUsageEdges) ExecutionInfoOrErr() (*ExectionInfo, error) { + if e.ExecutionInfo != nil { return e.ExecutionInfo, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: exectioninfo.Label} } return nil, &NotLoadedError{edge: "execution_info"} } @@ -57,6 +59,8 @@ func (*ResourceUsage) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case resourceusage.FieldName, resourceusage.FieldValue: values[i] = new(sql.NullString) + case resourceusage.ForeignKeys[0]: // exection_info_resource_usage + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -90,6 +94,13 @@ func (ru *ResourceUsage) assignValues(columns []string, values []any) error { } else if value.Valid { ru.Value = value.String } + case resourceusage.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field exection_info_resource_usage", value) + } else if value.Valid { + ru.exection_info_resource_usage = new(int) + *ru.exection_info_resource_usage = int(value.Int64) + } default: ru.selectValues.Set(columns[i], values[i]) } @@ -140,29 +151,5 @@ func (ru *ResourceUsage) String() string { return builder.String() } -// NamedExecutionInfo returns the ExecutionInfo named value or an error if the edge was not -// loaded in eager-loading with this name. -func (ru *ResourceUsage) NamedExecutionInfo(name string) ([]*ExectionInfo, error) { - if ru.Edges.namedExecutionInfo == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := ru.Edges.namedExecutionInfo[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (ru *ResourceUsage) appendNamedExecutionInfo(name string, edges ...*ExectionInfo) { - if ru.Edges.namedExecutionInfo == nil { - ru.Edges.namedExecutionInfo = make(map[string][]*ExectionInfo) - } - if len(edges) == 0 { - ru.Edges.namedExecutionInfo[name] = []*ExectionInfo{} - } else { - ru.Edges.namedExecutionInfo[name] = append(ru.Edges.namedExecutionInfo[name], edges...) - } -} - // ResourceUsages is a parsable slice of ResourceUsage. 
type ResourceUsages []*ResourceUsage diff --git a/ent/gen/ent/resourceusage/resourceusage.go b/ent/gen/ent/resourceusage/resourceusage.go index b79cd45..6e8eb7a 100644 --- a/ent/gen/ent/resourceusage/resourceusage.go +++ b/ent/gen/ent/resourceusage/resourceusage.go @@ -20,11 +20,13 @@ const ( EdgeExecutionInfo = "execution_info" // Table holds the table name of the resourceusage in the database. Table = "resource_usages" - // ExecutionInfoTable is the table that holds the execution_info relation/edge. The primary key declared below. - ExecutionInfoTable = "exection_info_resource_usage" + // ExecutionInfoTable is the table that holds the execution_info relation/edge. + ExecutionInfoTable = "resource_usages" // ExecutionInfoInverseTable is the table name for the ExectionInfo entity. // It exists in this package in order to avoid circular dependency with the "exectioninfo" package. ExecutionInfoInverseTable = "exection_infos" + // ExecutionInfoColumn is the table column denoting the execution_info relation/edge. + ExecutionInfoColumn = "exection_info_resource_usage" ) // Columns holds all SQL columns for resourceusage fields. @@ -34,11 +36,11 @@ var Columns = []string{ FieldValue, } -var ( - // ExecutionInfoPrimaryKey and ExecutionInfoColumn2 are the table columns denoting the - // primary key for the execution_info relation (M2M). - ExecutionInfoPrimaryKey = []string{"exection_info_id", "resource_usage_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "resource_usages" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "exection_info_resource_usage", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -47,6 +49,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -68,23 +75,16 @@ func ByValue(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldValue, opts...).ToFunc() } -// ByExecutionInfoCount orders the results by execution_info count. -func ByExecutionInfoCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newExecutionInfoStep(), opts...) - } -} - -// ByExecutionInfo orders the results by execution_info terms. -func ByExecutionInfo(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByExecutionInfoField orders the results by execution_info field. +func ByExecutionInfoField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newExecutionInfoStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newExecutionInfoStep(), sql.OrderByField(field, opts...)) } } func newExecutionInfoStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ExecutionInfoInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ExecutionInfoTable, ExecutionInfoPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ExecutionInfoTable, ExecutionInfoColumn), ) } diff --git a/ent/gen/ent/resourceusage/where.go b/ent/gen/ent/resourceusage/where.go index e565254..aa73f54 100644 --- a/ent/gen/ent/resourceusage/where.go +++ b/ent/gen/ent/resourceusage/where.go @@ -218,7 +218,7 @@ func HasExecutionInfo() predicate.ResourceUsage { return predicate.ResourceUsage(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ExecutionInfoTable, ExecutionInfoPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ExecutionInfoTable, ExecutionInfoColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/resourceusage_create.go b/ent/gen/ent/resourceusage_create.go index d783a8f..441ab81 100644 --- a/ent/gen/ent/resourceusage_create.go +++ b/ent/gen/ent/resourceusage_create.go @@ -47,19 +47,23 @@ func (ruc *ResourceUsageCreate) SetNillableValue(s *string) *ResourceUsageCreate return ruc } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (ruc *ResourceUsageCreate) AddExecutionInfoIDs(ids ...int) *ResourceUsageCreate { - ruc.mutation.AddExecutionInfoIDs(ids...) +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. +func (ruc *ResourceUsageCreate) SetExecutionInfoID(id int) *ResourceUsageCreate { + ruc.mutation.SetExecutionInfoID(id) return ruc } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. -func (ruc *ResourceUsageCreate) AddExecutionInfo(e ...*ExectionInfo) *ResourceUsageCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. +func (ruc *ResourceUsageCreate) SetNillableExecutionInfoID(id *int) *ResourceUsageCreate { + if id != nil { + ruc = ruc.SetExecutionInfoID(*id) } - return ruc.AddExecutionInfoIDs(ids...) + return ruc +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (ruc *ResourceUsageCreate) SetExecutionInfo(e *ExectionInfo) *ResourceUsageCreate { + return ruc.SetExecutionInfoID(e.ID) } // Mutation returns the ResourceUsageMutation object of the builder. 
@@ -132,10 +136,10 @@ func (ruc *ResourceUsageCreate) createSpec() (*ResourceUsage, *sqlgraph.CreateSp } if nodes := ruc.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, + Columns: []string{resourceusage.ExecutionInfoColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (ruc *ResourceUsageCreate) createSpec() (*ResourceUsage, *sqlgraph.CreateSp for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.exection_info_resource_usage = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/resourceusage_query.go b/ent/gen/ent/resourceusage_query.go index cec20fc..fb137b9 100644 --- a/ent/gen/ent/resourceusage_query.go +++ b/ent/gen/ent/resourceusage_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // ResourceUsageQuery is the builder for querying ResourceUsage entities. type ResourceUsageQuery struct { config - ctx *QueryContext - order []resourceusage.OrderOption - inters []Interceptor - predicates []predicate.ResourceUsage - withExecutionInfo *ExectionInfoQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*ResourceUsage) error - withNamedExecutionInfo map[string]*ExectionInfoQuery + ctx *QueryContext + order []resourceusage.OrderOption + inters []Interceptor + predicates []predicate.ResourceUsage + withExecutionInfo *ExectionInfoQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*ResourceUsage) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (ruq *ResourceUsageQuery) QueryExecutionInfo() *ExectionInfoQuery { step := sqlgraph.NewStep( sqlgraph.From(resourceusage.Table, resourceusage.FieldID, selector), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, resourceusage.ExecutionInfoTable, resourceusage.ExecutionInfoPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, resourceusage.ExecutionInfoTable, resourceusage.ExecutionInfoColumn), ) fromU = sqlgraph.SetNeighbors(ruq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (ruq *ResourceUsageQuery) prepareQuery(ctx context.Context) error { func (ruq *ResourceUsageQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*ResourceUsage, error) { var ( nodes = []*ResourceUsage{} + withFKs = ruq.withFKs _spec = ruq.querySpec() loadedTypes = [1]bool{ ruq.withExecutionInfo != nil, } ) + if ruq.withExecutionInfo != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, resourceusage.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*ResourceUsage).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (ruq *ResourceUsageQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nodes, nil } if query := ruq.withExecutionInfo; query != nil { - if err := ruq.loadExecutionInfo(ctx, query, nodes, - func(n *ResourceUsage) { n.Edges.ExecutionInfo = []*ExectionInfo{} }, - func(n *ResourceUsage, e *ExectionInfo) { n.Edges.ExecutionInfo = append(n.Edges.ExecutionInfo, e) }); err != nil { - return nil, err - } - } - for name, query := range ruq.withNamedExecutionInfo { - if err := ruq.loadExecutionInfo(ctx, query, nodes, - func(n *ResourceUsage) { n.appendNamedExecutionInfo(name) }, - func(n *ResourceUsage, e *ExectionInfo) { n.appendNamedExecutionInfo(name, e) }); err != nil { + if err := ruq.loadExecutionInfo(ctx, query, nodes, nil, + func(n *ResourceUsage, e *ExectionInfo) { n.Edges.ExecutionInfo = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (ruq *ResourceUsageQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( } func (ruq *ResourceUsageQuery) loadExecutionInfo(ctx context.Context, query *ExectionInfoQuery, nodes []*ResourceUsage, init func(*ResourceUsage), assign func(*ResourceUsage, *ExectionInfo)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*ResourceUsage) - nids := make(map[int]map[*ResourceUsage]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*ResourceUsage) + for i := range nodes { + if nodes[i].exection_info_resource_usage == nil { + continue } + fk := *nodes[i].exection_info_resource_usage + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(resourceusage.ExecutionInfoTable) - s.Join(joinT).On(s.C(exectioninfo.FieldID), joinT.C(resourceusage.ExecutionInfoPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(resourceusage.ExecutionInfoPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(resourceusage.ExecutionInfoPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*ResourceUsage]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ExectionInfo](ctx, query, qr, query.inters) + query.Where(exectioninfo.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "execution_info" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "exection_info_resource_usage" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (ruq *ResourceUsageQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedExecutionInfo tells the query-builder to eager-load the nodes that are connected to the "execution_info" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (ruq *ResourceUsageQuery) WithNamedExecutionInfo(name string, opts ...func(*ExectionInfoQuery)) *ResourceUsageQuery { - query := (&ExectionInfoClient{config: ruq.config}).Query() - for _, opt := range opts { - opt(query) - } - if ruq.withNamedExecutionInfo == nil { - ruq.withNamedExecutionInfo = make(map[string]*ExectionInfoQuery) - } - ruq.withNamedExecutionInfo[name] = query - return ruq -} - // ResourceUsageGroupBy is the group-by builder for ResourceUsage entities. type ResourceUsageGroupBy struct { selector diff --git a/ent/gen/ent/resourceusage_update.go b/ent/gen/ent/resourceusage_update.go index 442eae3..a446551 100644 --- a/ent/gen/ent/resourceusage_update.go +++ b/ent/gen/ent/resourceusage_update.go @@ -68,19 +68,23 @@ func (ruu *ResourceUsageUpdate) ClearValue() *ResourceUsageUpdate { return ruu } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (ruu *ResourceUsageUpdate) AddExecutionInfoIDs(ids ...int) *ResourceUsageUpdate { - ruu.mutation.AddExecutionInfoIDs(ids...) +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. +func (ruu *ResourceUsageUpdate) SetExecutionInfoID(id int) *ResourceUsageUpdate { + ruu.mutation.SetExecutionInfoID(id) return ruu } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. -func (ruu *ResourceUsageUpdate) AddExecutionInfo(e ...*ExectionInfo) *ResourceUsageUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. 
+func (ruu *ResourceUsageUpdate) SetNillableExecutionInfoID(id *int) *ResourceUsageUpdate { + if id != nil { + ruu = ruu.SetExecutionInfoID(*id) } - return ruu.AddExecutionInfoIDs(ids...) + return ruu +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (ruu *ResourceUsageUpdate) SetExecutionInfo(e *ExectionInfo) *ResourceUsageUpdate { + return ruu.SetExecutionInfoID(e.ID) } // Mutation returns the ResourceUsageMutation object of the builder. @@ -88,27 +92,12 @@ func (ruu *ResourceUsageUpdate) Mutation() *ResourceUsageMutation { return ruu.mutation } -// ClearExecutionInfo clears all "execution_info" edges to the ExectionInfo entity. +// ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. func (ruu *ResourceUsageUpdate) ClearExecutionInfo() *ResourceUsageUpdate { ruu.mutation.ClearExecutionInfo() return ruu } -// RemoveExecutionInfoIDs removes the "execution_info" edge to ExectionInfo entities by IDs. -func (ruu *ResourceUsageUpdate) RemoveExecutionInfoIDs(ids ...int) *ResourceUsageUpdate { - ruu.mutation.RemoveExecutionInfoIDs(ids...) - return ruu -} - -// RemoveExecutionInfo removes "execution_info" edges to ExectionInfo entities. -func (ruu *ResourceUsageUpdate) RemoveExecutionInfo(e ...*ExectionInfo) *ResourceUsageUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return ruu.RemoveExecutionInfoIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (ruu *ResourceUsageUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, ruu.sqlSave, ruu.mutation, ruu.hooks) @@ -159,39 +148,23 @@ func (ruu *ResourceUsageUpdate) sqlSave(ctx context.Context) (n int, err error) } if ruu.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := ruu.mutation.RemovedExecutionInfoIDs(); len(nodes) > 0 && !ruu.mutation.ExecutionInfoCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, + Columns: []string{resourceusage.ExecutionInfoColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := ruu.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, + Columns: []string{resourceusage.ExecutionInfoColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), @@ -262,19 +235,23 @@ func (ruuo *ResourceUsageUpdateOne) ClearValue() *ResourceUsageUpdateOne { return ruuo } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (ruuo *ResourceUsageUpdateOne) AddExecutionInfoIDs(ids ...int) *ResourceUsageUpdateOne { - ruuo.mutation.AddExecutionInfoIDs(ids...) 
+// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. +func (ruuo *ResourceUsageUpdateOne) SetExecutionInfoID(id int) *ResourceUsageUpdateOne { + ruuo.mutation.SetExecutionInfoID(id) return ruuo } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. -func (ruuo *ResourceUsageUpdateOne) AddExecutionInfo(e ...*ExectionInfo) *ResourceUsageUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. +func (ruuo *ResourceUsageUpdateOne) SetNillableExecutionInfoID(id *int) *ResourceUsageUpdateOne { + if id != nil { + ruuo = ruuo.SetExecutionInfoID(*id) } - return ruuo.AddExecutionInfoIDs(ids...) + return ruuo +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (ruuo *ResourceUsageUpdateOne) SetExecutionInfo(e *ExectionInfo) *ResourceUsageUpdateOne { + return ruuo.SetExecutionInfoID(e.ID) } // Mutation returns the ResourceUsageMutation object of the builder. @@ -282,27 +259,12 @@ func (ruuo *ResourceUsageUpdateOne) Mutation() *ResourceUsageMutation { return ruuo.mutation } -// ClearExecutionInfo clears all "execution_info" edges to the ExectionInfo entity. +// ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. func (ruuo *ResourceUsageUpdateOne) ClearExecutionInfo() *ResourceUsageUpdateOne { ruuo.mutation.ClearExecutionInfo() return ruuo } -// RemoveExecutionInfoIDs removes the "execution_info" edge to ExectionInfo entities by IDs. -func (ruuo *ResourceUsageUpdateOne) RemoveExecutionInfoIDs(ids ...int) *ResourceUsageUpdateOne { - ruuo.mutation.RemoveExecutionInfoIDs(ids...) - return ruuo -} - -// RemoveExecutionInfo removes "execution_info" edges to ExectionInfo entities. -func (ruuo *ResourceUsageUpdateOne) RemoveExecutionInfo(e ...*ExectionInfo) *ResourceUsageUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return ruuo.RemoveExecutionInfoIDs(ids...) -} - // Where appends a list predicates to the ResourceUsageUpdate builder. func (ruuo *ResourceUsageUpdateOne) Where(ps ...predicate.ResourceUsage) *ResourceUsageUpdateOne { ruuo.mutation.Where(ps...) 
@@ -383,39 +345,23 @@ func (ruuo *ResourceUsageUpdateOne) sqlSave(ctx context.Context) (_node *Resourc } if ruuo.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := ruuo.mutation.RemovedExecutionInfoIDs(); len(nodes) > 0 && !ruuo.mutation.ExecutionInfoCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, + Columns: []string{resourceusage.ExecutionInfoColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := ruuo.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: resourceusage.ExecutionInfoTable, - Columns: resourceusage.ExecutionInfoPrimaryKey, + Columns: []string{resourceusage.ExecutionInfoColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), diff --git a/ent/gen/ent/runnercount.go b/ent/gen/ent/runnercount.go index fe0612c..74210ea 100644 --- a/ent/gen/ent/runnercount.go +++ b/ent/gen/ent/runnercount.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/actionsummary" "github.com/buildbarn/bb-portal/ent/gen/ent/runnercount" ) @@ -24,28 +25,29 @@ type RunnerCount struct { ActionsExecuted int64 `json:"actions_executed,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the RunnerCountQuery when eager-loading is set. - Edges RunnerCountEdges `json:"edges"` - selectValues sql.SelectValues + Edges RunnerCountEdges `json:"edges"` + action_summary_runner_count *int + selectValues sql.SelectValues } // RunnerCountEdges holds the relations/edges for other nodes in the graph. type RunnerCountEdges struct { // ActionSummary holds the value of the action_summary edge. - ActionSummary []*ActionSummary `json:"action_summary,omitempty"` + ActionSummary *ActionSummary `json:"action_summary,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedActionSummary map[string][]*ActionSummary } // ActionSummaryOrErr returns the ActionSummary value or an error if the edge -// was not loaded in eager-loading. -func (e RunnerCountEdges) ActionSummaryOrErr() ([]*ActionSummary, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e RunnerCountEdges) ActionSummaryOrErr() (*ActionSummary, error) { + if e.ActionSummary != nil { return e.ActionSummary, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: actionsummary.Label} } return nil, &NotLoadedError{edge: "action_summary"} } @@ -59,6 +61,8 @@ func (*RunnerCount) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case runnercount.FieldName, runnercount.FieldExecKind: values[i] = new(sql.NullString) + case runnercount.ForeignKeys[0]: // action_summary_runner_count + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -98,6 +102,13 @@ func (rc *RunnerCount) assignValues(columns []string, values []any) error { } else if value.Valid { rc.ActionsExecuted = value.Int64 } + case runnercount.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field action_summary_runner_count", value) + } else if value.Valid { + rc.action_summary_runner_count = new(int) + *rc.action_summary_runner_count = int(value.Int64) + } default: rc.selectValues.Set(columns[i], values[i]) } @@ -151,29 +162,5 @@ func (rc *RunnerCount) String() string { return builder.String() } -// NamedActionSummary returns the ActionSummary named value or an error if the edge was not -// loaded in eager-loading with this name. -func (rc *RunnerCount) NamedActionSummary(name string) ([]*ActionSummary, error) { - if rc.Edges.namedActionSummary == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := rc.Edges.namedActionSummary[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (rc *RunnerCount) appendNamedActionSummary(name string, edges ...*ActionSummary) { - if rc.Edges.namedActionSummary == nil { - rc.Edges.namedActionSummary = make(map[string][]*ActionSummary) - } - if len(edges) == 0 { - rc.Edges.namedActionSummary[name] = []*ActionSummary{} - } else { - rc.Edges.namedActionSummary[name] = append(rc.Edges.namedActionSummary[name], edges...) - } -} - // RunnerCounts is a parsable slice of RunnerCount. type RunnerCounts []*RunnerCount diff --git a/ent/gen/ent/runnercount/runnercount.go b/ent/gen/ent/runnercount/runnercount.go index d9ff7c7..87d2404 100644 --- a/ent/gen/ent/runnercount/runnercount.go +++ b/ent/gen/ent/runnercount/runnercount.go @@ -22,11 +22,13 @@ const ( EdgeActionSummary = "action_summary" // Table holds the table name of the runnercount in the database. Table = "runner_counts" - // ActionSummaryTable is the table that holds the action_summary relation/edge. The primary key declared below. - ActionSummaryTable = "action_summary_runner_count" + // ActionSummaryTable is the table that holds the action_summary relation/edge. + ActionSummaryTable = "runner_counts" // ActionSummaryInverseTable is the table name for the ActionSummary entity. // It exists in this package in order to avoid circular dependency with the "actionsummary" package. ActionSummaryInverseTable = "action_summaries" + // ActionSummaryColumn is the table column denoting the action_summary relation/edge. + ActionSummaryColumn = "action_summary_runner_count" ) // Columns holds all SQL columns for runnercount fields. @@ -37,11 +39,11 @@ var Columns = []string{ FieldActionsExecuted, } -var ( - // ActionSummaryPrimaryKey and ActionSummaryColumn2 are the table columns denoting the - // primary key for the action_summary relation (M2M). 
- ActionSummaryPrimaryKey = []string{"action_summary_id", "runner_count_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "runner_counts" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "action_summary_runner_count", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -50,6 +52,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -76,23 +83,16 @@ func ByActionsExecuted(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldActionsExecuted, opts...).ToFunc() } -// ByActionSummaryCount orders the results by action_summary count. -func ByActionSummaryCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newActionSummaryStep(), opts...) - } -} - -// ByActionSummary orders the results by action_summary terms. -func ByActionSummary(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByActionSummaryField orders the results by action_summary field. +func ByActionSummaryField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newActionSummaryStep(), sql.OrderByField(field, opts...)) } } func newActionSummaryStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ActionSummaryInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionSummaryTable, ActionSummaryColumn), ) } diff --git a/ent/gen/ent/runnercount/where.go b/ent/gen/ent/runnercount/where.go index 0ff62fd..b3c1a04 100644 --- a/ent/gen/ent/runnercount/where.go +++ b/ent/gen/ent/runnercount/where.go @@ -273,7 +273,7 @@ func HasActionSummary() predicate.RunnerCount { return predicate.RunnerCount(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, ActionSummaryTable, ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, ActionSummaryTable, ActionSummaryColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/runnercount_create.go b/ent/gen/ent/runnercount_create.go index 949cd17..51e0214 100644 --- a/ent/gen/ent/runnercount_create.go +++ b/ent/gen/ent/runnercount_create.go @@ -61,19 +61,23 @@ func (rcc *RunnerCountCreate) SetNillableActionsExecuted(i *int64) *RunnerCountC return rcc } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (rcc *RunnerCountCreate) AddActionSummaryIDs(ids ...int) *RunnerCountCreate { - rcc.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (rcc *RunnerCountCreate) SetActionSummaryID(id int) *RunnerCountCreate { + rcc.mutation.SetActionSummaryID(id) return rcc } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (rcc *RunnerCountCreate) AddActionSummary(a ...*ActionSummary) *RunnerCountCreate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. 
+func (rcc *RunnerCountCreate) SetNillableActionSummaryID(id *int) *RunnerCountCreate { + if id != nil { + rcc = rcc.SetActionSummaryID(*id) } - return rcc.AddActionSummaryIDs(ids...) + return rcc +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (rcc *RunnerCountCreate) SetActionSummary(a *ActionSummary) *RunnerCountCreate { + return rcc.SetActionSummaryID(a.ID) } // Mutation returns the RunnerCountMutation object of the builder. @@ -150,10 +154,10 @@ func (rcc *RunnerCountCreate) createSpec() (*RunnerCount, *sqlgraph.CreateSpec) } if nodes := rcc.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, + Columns: []string{runnercount.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -162,6 +166,7 @@ func (rcc *RunnerCountCreate) createSpec() (*RunnerCount, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.action_summary_runner_count = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/runnercount_query.go b/ent/gen/ent/runnercount_query.go index 15b3b23..a7d6b52 100644 --- a/ent/gen/ent/runnercount_query.go +++ b/ent/gen/ent/runnercount_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // RunnerCountQuery is the builder for querying RunnerCount entities. type RunnerCountQuery struct { config - ctx *QueryContext - order []runnercount.OrderOption - inters []Interceptor - predicates []predicate.RunnerCount - withActionSummary *ActionSummaryQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*RunnerCount) error - withNamedActionSummary map[string]*ActionSummaryQuery + ctx *QueryContext + order []runnercount.OrderOption + inters []Interceptor + predicates []predicate.RunnerCount + withActionSummary *ActionSummaryQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*RunnerCount) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (rcq *RunnerCountQuery) QueryActionSummary() *ActionSummaryQuery { step := sqlgraph.NewStep( sqlgraph.From(runnercount.Table, runnercount.FieldID, selector), sqlgraph.To(actionsummary.Table, actionsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, runnercount.ActionSummaryTable, runnercount.ActionSummaryPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, runnercount.ActionSummaryTable, runnercount.ActionSummaryColumn), ) fromU = sqlgraph.SetNeighbors(rcq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (rcq *RunnerCountQuery) prepareQuery(ctx context.Context) error { func (rcq *RunnerCountQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*RunnerCount, error) { var ( nodes = []*RunnerCount{} + withFKs = rcq.withFKs _spec = rcq.querySpec() loadedTypes = [1]bool{ rcq.withActionSummary != nil, } ) + if rcq.withActionSummary != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, runnercount.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*RunnerCount).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (rcq *RunnerCountQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nodes, nil } if query := rcq.withActionSummary; query != nil { - if err := rcq.loadActionSummary(ctx, query, nodes, - func(n *RunnerCount) { n.Edges.ActionSummary = []*ActionSummary{} }, - func(n *RunnerCount, e *ActionSummary) { n.Edges.ActionSummary = append(n.Edges.ActionSummary, e) }); err != nil { - return nil, err - } - } - for name, query := range rcq.withNamedActionSummary { - if err := rcq.loadActionSummary(ctx, query, nodes, - func(n *RunnerCount) { n.appendNamedActionSummary(name) }, - func(n *RunnerCount, e *ActionSummary) { n.appendNamedActionSummary(name, e) }); err != nil { + if err := rcq.loadActionSummary(ctx, query, nodes, nil, + func(n *RunnerCount, e *ActionSummary) { n.Edges.ActionSummary = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (rcq *RunnerCountQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] } func (rcq *RunnerCountQuery) loadActionSummary(ctx context.Context, query *ActionSummaryQuery, nodes []*RunnerCount, init func(*RunnerCount), assign func(*RunnerCount, *ActionSummary)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*RunnerCount) - nids := make(map[int]map[*RunnerCount]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*RunnerCount) + for i := range nodes { + if nodes[i].action_summary_runner_count == nil { + continue } + fk := *nodes[i].action_summary_runner_count + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(runnercount.ActionSummaryTable) - s.Join(joinT).On(s.C(actionsummary.FieldID), joinT.C(runnercount.ActionSummaryPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(runnercount.ActionSummaryPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(runnercount.ActionSummaryPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*RunnerCount]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*ActionSummary](ctx, query, qr, query.inters) + query.Where(actionsummary.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "action_summary" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "action_summary_runner_count" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (rcq *RunnerCountQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedActionSummary tells the query-builder to eager-load the nodes that are connected to the "action_summary" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (rcq *RunnerCountQuery) WithNamedActionSummary(name string, opts ...func(*ActionSummaryQuery)) *RunnerCountQuery { - query := (&ActionSummaryClient{config: rcq.config}).Query() - for _, opt := range opts { - opt(query) - } - if rcq.withNamedActionSummary == nil { - rcq.withNamedActionSummary = make(map[string]*ActionSummaryQuery) - } - rcq.withNamedActionSummary[name] = query - return rcq -} - // RunnerCountGroupBy is the group-by builder for RunnerCount entities. type RunnerCountGroupBy struct { selector diff --git a/ent/gen/ent/runnercount_update.go b/ent/gen/ent/runnercount_update.go index e912b8d..e10f01a 100644 --- a/ent/gen/ent/runnercount_update.go +++ b/ent/gen/ent/runnercount_update.go @@ -95,19 +95,23 @@ func (rcu *RunnerCountUpdate) ClearActionsExecuted() *RunnerCountUpdate { return rcu } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (rcu *RunnerCountUpdate) AddActionSummaryIDs(ids ...int) *RunnerCountUpdate { - rcu.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. +func (rcu *RunnerCountUpdate) SetActionSummaryID(id int) *RunnerCountUpdate { + rcu.mutation.SetActionSummaryID(id) return rcu } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (rcu *RunnerCountUpdate) AddActionSummary(a ...*ActionSummary) *RunnerCountUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. 
+func (rcu *RunnerCountUpdate) SetNillableActionSummaryID(id *int) *RunnerCountUpdate { + if id != nil { + rcu = rcu.SetActionSummaryID(*id) } - return rcu.AddActionSummaryIDs(ids...) + return rcu +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (rcu *RunnerCountUpdate) SetActionSummary(a *ActionSummary) *RunnerCountUpdate { + return rcu.SetActionSummaryID(a.ID) } // Mutation returns the RunnerCountMutation object of the builder. @@ -115,27 +119,12 @@ func (rcu *RunnerCountUpdate) Mutation() *RunnerCountMutation { return rcu.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (rcu *RunnerCountUpdate) ClearActionSummary() *RunnerCountUpdate { rcu.mutation.ClearActionSummary() return rcu } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (rcu *RunnerCountUpdate) RemoveActionSummaryIDs(ids ...int) *RunnerCountUpdate { - rcu.mutation.RemoveActionSummaryIDs(ids...) - return rcu -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (rcu *RunnerCountUpdate) RemoveActionSummary(a ...*ActionSummary) *RunnerCountUpdate { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return rcu.RemoveActionSummaryIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (rcu *RunnerCountUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, rcu.sqlSave, rcu.mutation, rcu.hooks) @@ -195,39 +184,23 @@ func (rcu *RunnerCountUpdate) sqlSave(ctx context.Context) (n int, err error) { } if rcu.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := rcu.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !rcu.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, + Columns: []string{runnercount.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := rcu.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, + Columns: []string{runnercount.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), @@ -325,19 +298,23 @@ func (rcuo *RunnerCountUpdateOne) ClearActionsExecuted() *RunnerCountUpdateOne { return rcuo } -// AddActionSummaryIDs adds the "action_summary" edge to the ActionSummary entity by IDs. -func (rcuo *RunnerCountUpdateOne) AddActionSummaryIDs(ids ...int) *RunnerCountUpdateOne { - rcuo.mutation.AddActionSummaryIDs(ids...) +// SetActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID. 
+func (rcuo *RunnerCountUpdateOne) SetActionSummaryID(id int) *RunnerCountUpdateOne { + rcuo.mutation.SetActionSummaryID(id) return rcuo } -// AddActionSummary adds the "action_summary" edges to the ActionSummary entity. -func (rcuo *RunnerCountUpdateOne) AddActionSummary(a ...*ActionSummary) *RunnerCountUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID +// SetNillableActionSummaryID sets the "action_summary" edge to the ActionSummary entity by ID if the given value is not nil. +func (rcuo *RunnerCountUpdateOne) SetNillableActionSummaryID(id *int) *RunnerCountUpdateOne { + if id != nil { + rcuo = rcuo.SetActionSummaryID(*id) } - return rcuo.AddActionSummaryIDs(ids...) + return rcuo +} + +// SetActionSummary sets the "action_summary" edge to the ActionSummary entity. +func (rcuo *RunnerCountUpdateOne) SetActionSummary(a *ActionSummary) *RunnerCountUpdateOne { + return rcuo.SetActionSummaryID(a.ID) } // Mutation returns the RunnerCountMutation object of the builder. @@ -345,27 +322,12 @@ func (rcuo *RunnerCountUpdateOne) Mutation() *RunnerCountMutation { return rcuo.mutation } -// ClearActionSummary clears all "action_summary" edges to the ActionSummary entity. +// ClearActionSummary clears the "action_summary" edge to the ActionSummary entity. func (rcuo *RunnerCountUpdateOne) ClearActionSummary() *RunnerCountUpdateOne { rcuo.mutation.ClearActionSummary() return rcuo } -// RemoveActionSummaryIDs removes the "action_summary" edge to ActionSummary entities by IDs. -func (rcuo *RunnerCountUpdateOne) RemoveActionSummaryIDs(ids ...int) *RunnerCountUpdateOne { - rcuo.mutation.RemoveActionSummaryIDs(ids...) - return rcuo -} - -// RemoveActionSummary removes "action_summary" edges to ActionSummary entities. -func (rcuo *RunnerCountUpdateOne) RemoveActionSummary(a ...*ActionSummary) *RunnerCountUpdateOne { - ids := make([]int, len(a)) - for i := range a { - ids[i] = a[i].ID - } - return rcuo.RemoveActionSummaryIDs(ids...) -} - // Where appends a list predicates to the RunnerCountUpdate builder. func (rcuo *RunnerCountUpdateOne) Where(ps ...predicate.RunnerCount) *RunnerCountUpdateOne { rcuo.mutation.Where(ps...) 
@@ -455,39 +417,23 @@ func (rcuo *RunnerCountUpdateOne) sqlSave(ctx context.Context) (_node *RunnerCou } if rcuo.mutation.ActionSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := rcuo.mutation.RemovedActionSummaryIDs(); len(nodes) > 0 && !rcuo.mutation.ActionSummaryCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, + Columns: []string{runnercount.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := rcuo.mutation.ActionSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: runnercount.ActionSummaryTable, - Columns: runnercount.ActionSummaryPrimaryKey, + Columns: []string{runnercount.ActionSummaryColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(actionsummary.FieldID, field.TypeInt), diff --git a/ent/gen/ent/schema-viz.html b/ent/gen/ent/schema-viz.html index 7ab3a9f..ec1df79 100644 --- a/ent/gen/ent/schema-viz.html +++ b/ent/gen/ent/schema-viz.html @@ -70,7 +70,7 @@ } - const entGraph = JSON.parse("{\"nodes\":[{\"id\":\"ActionCacheStatistics\",\"fields\":[{\"name\":\"size_in_bytes\",\"type\":\"uint64\"},{\"name\":\"save_time_in_ms\",\"type\":\"uint64\"},{\"name\":\"load_time_in_ms\",\"type\":\"int64\"},{\"name\":\"hits\",\"type\":\"int32\"},{\"name\":\"misses\",\"type\":\"int32\"}]},{\"id\":\"ActionData\",\"fields\":[{\"name\":\"mnemonic\",\"type\":\"string\"},{\"name\":\"actions_executed\",\"type\":\"int64\"},{\"name\":\"actions_created\",\"type\":\"int64\"},{\"name\":\"first_started_ms\",\"type\":\"int64\"},{\"name\":\"last_ended_ms\",\"type\":\"int64\"},{\"name\":\"system_time\",\"type\":\"int64\"},{\"name\":\"user_time\",\"type\":\"int64\"}]},{\"id\":\"ActionSummary\",\"fields\":[{\"name\":\"actions_created\",\"type\":\"int64\"},{\"name\":\"actions_created_not_including_aspects\",\"type\":\"int64\"},{\"name\":\"actions_executed\",\"type\":\"int64\"},{\"name\":\"remote_cache_hits\",\"type\":\"int64\"}]},{\"id\":\"ArtifactMetrics\",\"fields\":null},{\"id\":\"BazelInvocation\",\"fields\":[{\"name\":\"invocation_id\",\"type\":\"uuid.UUID\"},{\"name\":\"started_at\",\"type\":\"time.Time\"},{\"name\":\"ended_at\",\"type\":\"time.Time\"},{\"name\":\"change_number\",\"type\":\"int\"},{\"name\":\"patchset_number\",\"type\":\"int\"},{\"name\":\"summary\",\"type\":\"summary.InvocationSummary\"},{\"name\":\"bep_completed\",\"type\":\"bool\"},{\"name\":\"step_label\",\"type\":\"string\"},{\"name\":\"related_files\",\"type\":\"map[string]string\"},{\"name\":\"user_email\",\"type\":\"string\"},{\"name\":\"user_ldap\",\"type\":\"string\"},{\"name\":\"build_logs\",\"type\":\"string\"},{\"name\":\"cpu\",\"type\":\"string\"},{\"name\":\"platform_name\",\"type\":\"string\"},{\"name\":\"configuration_mnemonic\",\"type\":\"string\"},{\"name\":\"num_fetches\",\"type\":\"int64\"},{\"name\":\"profile_name\",\"type\":\"string\"}]},{\"id
\":\"BazelInvocationProblem\",\"fields\":[{\"name\":\"problem_type\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"bep_events\",\"type\":\"json.RawMessage\"}]},{\"id\":\"Blob\",\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"size_bytes\",\"type\":\"int64\"},{\"name\":\"archiving_status\",\"type\":\"blob.ArchivingStatus\"},{\"name\":\"reason\",\"type\":\"string\"},{\"name\":\"archive_url\",\"type\":\"string\"}]},{\"id\":\"Build\",\"fields\":[{\"name\":\"build_url\",\"type\":\"string\"},{\"name\":\"build_uuid\",\"type\":\"uuid.UUID\"},{\"name\":\"env\",\"type\":\"map[string]string\"}]},{\"id\":\"BuildGraphMetrics\",\"fields\":[{\"name\":\"action_lookup_value_count\",\"type\":\"int32\"},{\"name\":\"action_lookup_value_count_not_including_aspects\",\"type\":\"int32\"},{\"name\":\"action_count\",\"type\":\"int32\"},{\"name\":\"action_count_not_including_aspects\",\"type\":\"int32\"},{\"name\":\"input_file_configured_target_count\",\"type\":\"int32\"},{\"name\":\"output_file_configured_target_count\",\"type\":\"int32\"},{\"name\":\"other_configured_target_count\",\"type\":\"int32\"},{\"name\":\"output_artifact_count\",\"type\":\"int32\"},{\"name\":\"post_invocation_skyframe_node_count\",\"type\":\"int32\"}]},{\"id\":\"CumulativeMetrics\",\"fields\":[{\"name\":\"num_analyses\",\"type\":\"int32\"},{\"name\":\"num_builds\",\"type\":\"int32\"}]},{\"id\":\"DynamicExecutionMetrics\",\"fields\":null},{\"id\":\"EvaluationStat\",\"fields\":[{\"name\":\"skyfunction_name\",\"type\":\"string\"},{\"name\":\"count\",\"type\":\"int64\"}]},{\"id\":\"EventFile\",\"fields\":[{\"name\":\"url\",\"type\":\"string\"},{\"name\":\"mod_time\",\"type\":\"time.Time\"},{\"name\":\"protocol\",\"type\":\"string\"},{\"name\":\"mime_type\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"reason\",\"type\":\"string\"}]},{\"id\":\"ExectionInfo\",\"fields\":[{\"name\":\"timeout_seconds\",\"type\":\"int32\"},{\"name\":\"strategy\",\"type\":\"string\"},{\"name\":\"cached_remotely\",\"type\":\"bool\"},{\"name\":\"exit_code\",\"type\":\"int32\"},{\"name\":\"hostname\",\"type\":\"string\"}]},{\"id\":\"FilesMetric\",\"fields\":[{\"name\":\"size_in_bytes\",\"type\":\"int64\"},{\"name\":\"count\",\"type\":\"int32\"}]},{\"id\":\"GarbageMetrics\",\"fields\":[{\"name\":\"type\",\"type\":\"string\"},{\"name\":\"garbage_collected\",\"type\":\"int64\"}]},{\"id\":\"MemoryMetrics\",\"fields\":[{\"name\":\"peak_post_gc_heap_size\",\"type\":\"int64\"},{\"name\":\"used_heap_size_post_build\",\"type\":\"int64\"},{\"name\":\"peak_post_gc_tenured_space_heap_size\",\"type\":\"int64\"}]},{\"id\":\"Metrics\",\"fields\":null},{\"id\":\"MissDetail\",\"fields\":[{\"name\":\"reason\",\"type\":\"missdetail.Reason\"},{\"name\":\"count\",\"type\":\"int32\"}]},{\"id\":\"NamedSetOfFiles\",\"fields\":null},{\"id\":\"NetworkMetrics\",\"fields\":null},{\"id\":\"OutputGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"incomplete\",\"type\":\"bool\"}]},{\"id\":\"PackageLoadMetrics\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"load_duration\",\"type\":\"int64\"},{\"name\":\"num_targets\",\"type\":\"uint64\"},{\"name\":\"computation_steps\",\"type\":\"uint64\"},{\"name\":\"num_transitive_loads\",\"type\":\"uint64\"},{\"name\":\"package_overhead\",\"type\":\"uint64\"}]},{\"id\":\"PackageMetrics\",\"fields\":[{\"name\":\"packages_loaded\",\"type\":\"int64\"}]},{\"id\":\"RaceStatistics\",\"fields\":[{\"name\":\"mnemonic\",\"type\":\"string\"},{\"name\":\"local_runne
r\",\"type\":\"string\"},{\"name\":\"remote_runner\",\"type\":\"string\"},{\"name\":\"local_wins\",\"type\":\"int64\"},{\"name\":\"renote_wins\",\"type\":\"int64\"}]},{\"id\":\"ResourceUsage\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]},{\"id\":\"RunnerCount\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"exec_kind\",\"type\":\"string\"},{\"name\":\"actions_executed\",\"type\":\"int64\"}]},{\"id\":\"SystemNetworkStats\",\"fields\":[{\"name\":\"bytes_sent\",\"type\":\"uint64\"},{\"name\":\"bytes_recv\",\"type\":\"uint64\"},{\"name\":\"packets_sent\",\"type\":\"uint64\"},{\"name\":\"packets_recv\",\"type\":\"uint64\"},{\"name\":\"peak_bytes_sent_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_bytes_recv_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_packets_sent_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_packets_recv_per_sec\",\"type\":\"uint64\"}]},{\"id\":\"TargetComplete\",\"fields\":[{\"name\":\"success\",\"type\":\"bool\"},{\"name\":\"tag\",\"type\":\"[]string\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"end_time_in_ms\",\"type\":\"int64\"},{\"name\":\"test_timeout_seconds\",\"type\":\"int64\"},{\"name\":\"test_timeout\",\"type\":\"int64\"},{\"name\":\"test_size\",\"type\":\"targetcomplete.TestSize\"}]},{\"id\":\"TargetConfigured\",\"fields\":[{\"name\":\"tag\",\"type\":\"[]string\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"start_time_in_ms\",\"type\":\"int64\"},{\"name\":\"test_size\",\"type\":\"targetconfigured.TestSize\"}]},{\"id\":\"TargetMetrics\",\"fields\":[{\"name\":\"targets_loaded\",\"type\":\"int64\"},{\"name\":\"targets_configured\",\"type\":\"int64\"},{\"name\":\"targets_configured_not_including_aspects\",\"type\":\"int64\"}]},{\"id\":\"TargetPair\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"duration_in_ms\",\"type\":\"int64\"},{\"name\":\"success\",\"type\":\"bool\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"test_size\",\"type\":\"targetpair.TestSize\"},{\"name\":\"abort_reason\",\"type\":\"targetpair.AbortReason\"}]},{\"id\":\"TestCollection\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"overall_status\",\"type\":\"testcollection.OverallStatus\"},{\"name\":\"strategy\",\"type\":\"string\"},{\"name\":\"cached_locally\",\"type\":\"bool\"},{\"name\":\"cached_remotely\",\"type\":\"bool\"},{\"name\":\"duration_ms\",\"type\":\"int64\"}]},{\"id\":\"TestFile\",\"fields\":[{\"name\":\"digest\",\"type\":\"string\"},{\"name\":\"file\",\"type\":\"string\"},{\"name\":\"length\",\"type\":\"int64\"},{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"prefix\",\"type\":\"[]string\"}]},{\"id\":\"TestResultBES\",\"fields\":[{\"name\":\"test_status\",\"type\":\"testresultbes.TestStatus\"},{\"name\":\"status_details\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"warning\",\"type\":\"[]string\"},{\"name\":\"cached_locally\",\"type\":\"bool\"},{\"name\":\"test_attempt_start_millis_epoch\",\"type\":\"int64\"},{\"name\":\"test_attempt_start\",\"type\":\"string\"},{\"name\":\"test_attempt_duration_millis\",\"type\":\"int64\"},{\"name\":\"test_attempt_duration\",\"type\":\"int64\"}]},{\"id\":\"TestSummary\",\"fields\":[{\"name\":\"overall_status\",\"type\":\"testsummary.OverallStatus\"},{\"name\":\"total_run_count\",\"type\":\"int32\"},{\"name\":\"run_count\",\"type\":\"int32\"},{\"name\":\"attempt_count\",\"type\":\"int32\"},{\"name\":\"shard_count\",\"type\":\"int32\"},{\"name\":\"total_num_cached\",\"
type\":\"int32\"},{\"name\":\"first_start_time\",\"type\":\"int64\"},{\"name\":\"last_stop_time\",\"type\":\"int64\"},{\"name\":\"total_run_duration\",\"type\":\"int64\"},{\"name\":\"label\",\"type\":\"string\"}]},{\"id\":\"TimingBreakdown\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"}]},{\"id\":\"TimingChild\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"}]},{\"id\":\"TimingMetrics\",\"fields\":[{\"name\":\"cpu_time_in_ms\",\"type\":\"int64\"},{\"name\":\"wall_time_in_ms\",\"type\":\"int64\"},{\"name\":\"analysis_phase_time_in_ms\",\"type\":\"int64\"},{\"name\":\"execution_phase_time_in_ms\",\"type\":\"int64\"},{\"name\":\"actions_execution_start_in_ms\",\"type\":\"int64\"}]}],\"edges\":[{\"from\":\"ActionCacheStatistics\",\"to\":\"MissDetail\",\"label\":\"miss_details\"},{\"from\":\"ActionSummary\",\"to\":\"ActionData\",\"label\":\"action_data\"},{\"from\":\"ActionSummary\",\"to\":\"RunnerCount\",\"label\":\"runner_count\"},{\"from\":\"ActionSummary\",\"to\":\"ActionCacheStatistics\",\"label\":\"action_cache_statistics\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"source_artifacts_read\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"output_artifacts_seen\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"output_artifacts_from_action_cache\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"top_level_artifacts\"},{\"from\":\"BazelInvocation\",\"to\":\"BazelInvocationProblem\",\"label\":\"problems\"},{\"from\":\"BazelInvocation\",\"to\":\"Metrics\",\"label\":\"metrics\"},{\"from\":\"BazelInvocation\",\"to\":\"TestCollection\",\"label\":\"test_collection\"},{\"from\":\"BazelInvocation\",\"to\":\"TargetPair\",\"label\":\"targets\"},{\"from\":\"Build\",\"to\":\"BazelInvocation\",\"label\":\"invocations\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"dirtied_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"changed_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"built_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"cleaned_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"evaluated_values\"},{\"from\":\"DynamicExecutionMetrics\",\"to\":\"RaceStatistics\",\"label\":\"race_statistics\"},{\"from\":\"EventFile\",\"to\":\"BazelInvocation\",\"label\":\"bazel_invocation\"},{\"from\":\"ExectionInfo\",\"to\":\"TimingBreakdown\",\"label\":\"timing_breakdown\"},{\"from\":\"ExectionInfo\",\"to\":\"ResourceUsage\",\"label\":\"resource_usage\"},{\"from\":\"MemoryMetrics\",\"to\":\"GarbageMetrics\",\"label\":\"garbage_metrics\"},{\"from\":\"Metrics\",\"to\":\"ActionSummary\",\"label\":\"action_summary\"},{\"from\":\"Metrics\",\"to\":\"MemoryMetrics\",\"label\":\"memory_metrics\"},{\"from\":\"Metrics\",\"to\":\"TargetMetrics\",\"label\":\"target_metrics\"},{\"from\":\"Metrics\",\"to\":\"PackageMetrics\",\"label\":\"package_metrics\"},{\"from\":\"Metrics\",\"to\":\"TimingMetrics\",\"label\":\"timing_metrics\"},{\"from\":\"Metrics\",\"to\":\"CumulativeMetrics\",\"label\":\"cumulative_metrics\"},{\"from\":\"Metrics\",\"to\":\"ArtifactMetrics\",\"label\":\"artifact_metrics\"},{\"from\":\"Metrics\",\"to\":\"NetworkMetrics\",\"label\":\"network_metrics\"},{\"from\":\"Metrics\",\"to\":\"DynamicExecutionMetrics\",\"label\":\"dynamic_execution_metrics\"},{\"from\":\"Metrics\",\"to\":\"BuildGraphMetrics\",\"la
bel\":\"build_graph_metrics\"},{\"from\":\"NamedSetOfFiles\",\"to\":\"TestFile\",\"label\":\"files\"},{\"from\":\"NamedSetOfFiles\",\"to\":\"NamedSetOfFiles\",\"label\":\"file_sets\"},{\"from\":\"NetworkMetrics\",\"to\":\"SystemNetworkStats\",\"label\":\"system_network_stats\"},{\"from\":\"OutputGroup\",\"to\":\"TestFile\",\"label\":\"inline_files\"},{\"from\":\"OutputGroup\",\"to\":\"NamedSetOfFiles\",\"label\":\"file_sets\"},{\"from\":\"PackageMetrics\",\"to\":\"PackageLoadMetrics\",\"label\":\"package_load_metrics\"},{\"from\":\"TargetComplete\",\"to\":\"TestFile\",\"label\":\"important_output\"},{\"from\":\"TargetComplete\",\"to\":\"TestFile\",\"label\":\"directory_output\"},{\"from\":\"TargetComplete\",\"to\":\"OutputGroup\",\"label\":\"output_group\"},{\"from\":\"TargetPair\",\"to\":\"TargetConfigured\",\"label\":\"configuration\"},{\"from\":\"TargetPair\",\"to\":\"TargetComplete\",\"label\":\"completion\"},{\"from\":\"TestCollection\",\"to\":\"TestSummary\",\"label\":\"test_summary\"},{\"from\":\"TestCollection\",\"to\":\"TestResultBES\",\"label\":\"test_results\"},{\"from\":\"TestResultBES\",\"to\":\"TestFile\",\"label\":\"test_action_output\"},{\"from\":\"TestResultBES\",\"to\":\"ExectionInfo\",\"label\":\"execution_info\"},{\"from\":\"TestSummary\",\"to\":\"TestFile\",\"label\":\"passed\"},{\"from\":\"TestSummary\",\"to\":\"TestFile\",\"label\":\"failed\"},{\"from\":\"TimingBreakdown\",\"to\":\"TimingChild\",\"label\":\"child\"}]}"); + const entGraph = JSON.parse("{\"nodes\":[{\"id\":\"ActionCacheStatistics\",\"fields\":[{\"name\":\"size_in_bytes\",\"type\":\"uint64\"},{\"name\":\"save_time_in_ms\",\"type\":\"uint64\"},{\"name\":\"load_time_in_ms\",\"type\":\"int64\"},{\"name\":\"hits\",\"type\":\"int32\"},{\"name\":\"misses\",\"type\":\"int32\"}]},{\"id\":\"ActionData\",\"fields\":[{\"name\":\"mnemonic\",\"type\":\"string\"},{\"name\":\"actions_executed\",\"type\":\"int64\"},{\"name\":\"actions_created\",\"type\":\"int64\"},{\"name\":\"first_started_ms\",\"type\":\"int64\"},{\"name\":\"last_ended_ms\",\"type\":\"int64\"},{\"name\":\"system_time\",\"type\":\"int64\"},{\"name\":\"user_time\",\"type\":\"int64\"}]},{\"id\":\"ActionSummary\",\"fields\":[{\"name\":\"actions_created\",\"type\":\"int64\"},{\"name\":\"actions_created_not_including_aspects\",\"type\":\"int64\"},{\"name\":\"actions_executed\",\"type\":\"int64\"},{\"name\":\"remote_cache_hits\",\"type\":\"int64\"}]},{\"id\":\"ArtifactMetrics\",\"fields\":null},{\"id\":\"BazelInvocation\",\"fields\":[{\"name\":\"invocation_id\",\"type\":\"uuid.UUID\"},{\"name\":\"started_at\",\"type\":\"time.Time\"},{\"name\":\"ended_at\",\"type\":\"time.Time\"},{\"name\":\"change_number\",\"type\":\"int\"},{\"name\":\"patchset_number\",\"type\":\"int\"},{\"name\":\"summary\",\"type\":\"summary.InvocationSummary\"},{\"name\":\"bep_completed\",\"type\":\"bool\"},{\"name\":\"step_label\",\"type\":\"string\"},{\"name\":\"related_files\",\"type\":\"map[string]string\"},{\"name\":\"user_email\",\"type\":\"string\"},{\"name\":\"user_ldap\",\"type\":\"string\"},{\"name\":\"build_logs\",\"type\":\"string\"},{\"name\":\"cpu\",\"type\":\"string\"},{\"name\":\"platform_name\",\"type\":\"string\"},{\"name\":\"configuration_mnemonic\",\"type\":\"string\"},{\"name\":\"num_fetches\",\"type\":\"int64\"},{\"name\":\"profile_name\",\"type\":\"string\"}]},{\"id\":\"BazelInvocationProblem\",\"fields\":[{\"name\":\"problem_type\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"bep_events\",\"type\":\"json.RawMessage\"}]},{\"id\":\"Blob\"
,\"fields\":[{\"name\":\"uri\",\"type\":\"string\"},{\"name\":\"size_bytes\",\"type\":\"int64\"},{\"name\":\"archiving_status\",\"type\":\"blob.ArchivingStatus\"},{\"name\":\"reason\",\"type\":\"string\"},{\"name\":\"archive_url\",\"type\":\"string\"}]},{\"id\":\"Build\",\"fields\":[{\"name\":\"build_url\",\"type\":\"string\"},{\"name\":\"build_uuid\",\"type\":\"uuid.UUID\"},{\"name\":\"env\",\"type\":\"map[string]string\"}]},{\"id\":\"BuildGraphMetrics\",\"fields\":[{\"name\":\"action_lookup_value_count\",\"type\":\"int32\"},{\"name\":\"action_lookup_value_count_not_including_aspects\",\"type\":\"int32\"},{\"name\":\"action_count\",\"type\":\"int32\"},{\"name\":\"action_count_not_including_aspects\",\"type\":\"int32\"},{\"name\":\"input_file_configured_target_count\",\"type\":\"int32\"},{\"name\":\"output_file_configured_target_count\",\"type\":\"int32\"},{\"name\":\"other_configured_target_count\",\"type\":\"int32\"},{\"name\":\"output_artifact_count\",\"type\":\"int32\"},{\"name\":\"post_invocation_skyframe_node_count\",\"type\":\"int32\"}]},{\"id\":\"CumulativeMetrics\",\"fields\":[{\"name\":\"num_analyses\",\"type\":\"int32\"},{\"name\":\"num_builds\",\"type\":\"int32\"}]},{\"id\":\"DynamicExecutionMetrics\",\"fields\":null},{\"id\":\"EvaluationStat\",\"fields\":[{\"name\":\"skyfunction_name\",\"type\":\"string\"},{\"name\":\"count\",\"type\":\"int64\"}]},{\"id\":\"EventFile\",\"fields\":[{\"name\":\"url\",\"type\":\"string\"},{\"name\":\"mod_time\",\"type\":\"time.Time\"},{\"name\":\"protocol\",\"type\":\"string\"},{\"name\":\"mime_type\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"reason\",\"type\":\"string\"}]},{\"id\":\"ExectionInfo\",\"fields\":[{\"name\":\"timeout_seconds\",\"type\":\"int32\"},{\"name\":\"strategy\",\"type\":\"string\"},{\"name\":\"cached_remotely\",\"type\":\"bool\"},{\"name\":\"exit_code\",\"type\":\"int32\"},{\"name\":\"hostname\",\"type\":\"string\"}]},{\"id\":\"FilesMetric\",\"fields\":[{\"name\":\"size_in_bytes\",\"type\":\"int64\"},{\"name\":\"count\",\"type\":\"int32\"}]},{\"id\":\"GarbageMetrics\",\"fields\":[{\"name\":\"type\",\"type\":\"string\"},{\"name\":\"garbage_collected\",\"type\":\"int64\"}]},{\"id\":\"MemoryMetrics\",\"fields\":[{\"name\":\"peak_post_gc_heap_size\",\"type\":\"int64\"},{\"name\":\"used_heap_size_post_build\",\"type\":\"int64\"},{\"name\":\"peak_post_gc_tenured_space_heap_size\",\"type\":\"int64\"}]},{\"id\":\"Metrics\",\"fields\":null},{\"id\":\"MissDetail\",\"fields\":[{\"name\":\"reason\",\"type\":\"missdetail.Reason\"},{\"name\":\"count\",\"type\":\"int32\"}]},{\"id\":\"NamedSetOfFiles\",\"fields\":null},{\"id\":\"NetworkMetrics\",\"fields\":null},{\"id\":\"OutputGroup\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"incomplete\",\"type\":\"bool\"}]},{\"id\":\"PackageLoadMetrics\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"load_duration\",\"type\":\"int64\"},{\"name\":\"num_targets\",\"type\":\"uint64\"},{\"name\":\"computation_steps\",\"type\":\"uint64\"},{\"name\":\"num_transitive_loads\",\"type\":\"uint64\"},{\"name\":\"package_overhead\",\"type\":\"uint64\"}]},{\"id\":\"PackageMetrics\",\"fields\":[{\"name\":\"packages_loaded\",\"type\":\"int64\"}]},{\"id\":\"RaceStatistics\",\"fields\":[{\"name\":\"mnemonic\",\"type\":\"string\"},{\"name\":\"local_runner\",\"type\":\"string\"},{\"name\":\"remote_runner\",\"type\":\"string\"},{\"name\":\"local_wins\",\"type\":\"int64\"},{\"name\":\"renote_wins\",\"type\":\"int64\"}]},{\"id\":\"ResourceUsage\",\"fields\":
[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]},{\"id\":\"RunnerCount\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"exec_kind\",\"type\":\"string\"},{\"name\":\"actions_executed\",\"type\":\"int64\"}]},{\"id\":\"SystemNetworkStats\",\"fields\":[{\"name\":\"bytes_sent\",\"type\":\"uint64\"},{\"name\":\"bytes_recv\",\"type\":\"uint64\"},{\"name\":\"packets_sent\",\"type\":\"uint64\"},{\"name\":\"packets_recv\",\"type\":\"uint64\"},{\"name\":\"peak_bytes_sent_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_bytes_recv_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_packets_sent_per_sec\",\"type\":\"uint64\"},{\"name\":\"peak_packets_recv_per_sec\",\"type\":\"uint64\"}]},{\"id\":\"TargetComplete\",\"fields\":[{\"name\":\"success\",\"type\":\"bool\"},{\"name\":\"tag\",\"type\":\"[]string\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"end_time_in_ms\",\"type\":\"int64\"},{\"name\":\"test_timeout_seconds\",\"type\":\"int64\"},{\"name\":\"test_timeout\",\"type\":\"int64\"},{\"name\":\"test_size\",\"type\":\"targetcomplete.TestSize\"}]},{\"id\":\"TargetConfigured\",\"fields\":[{\"name\":\"tag\",\"type\":\"[]string\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"start_time_in_ms\",\"type\":\"int64\"},{\"name\":\"test_size\",\"type\":\"targetconfigured.TestSize\"}]},{\"id\":\"TargetMetrics\",\"fields\":[{\"name\":\"targets_loaded\",\"type\":\"int64\"},{\"name\":\"targets_configured\",\"type\":\"int64\"},{\"name\":\"targets_configured_not_including_aspects\",\"type\":\"int64\"}]},{\"id\":\"TargetPair\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"duration_in_ms\",\"type\":\"int64\"},{\"name\":\"success\",\"type\":\"bool\"},{\"name\":\"target_kind\",\"type\":\"string\"},{\"name\":\"test_size\",\"type\":\"targetpair.TestSize\"},{\"name\":\"abort_reason\",\"type\":\"targetpair.AbortReason\"}]},{\"id\":\"TestCollection\",\"fields\":[{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"overall_status\",\"type\":\"testcollection.OverallStatus\"},{\"name\":\"strategy\",\"type\":\"string\"},{\"name\":\"cached_locally\",\"type\":\"bool\"},{\"name\":\"cached_remotely\",\"type\":\"bool\"},{\"name\":\"first_seen\",\"type\":\"time.Time\"},{\"name\":\"duration_ms\",\"type\":\"int64\"}]},{\"id\":\"TestFile\",\"fields\":[{\"name\":\"digest\",\"type\":\"string\"},{\"name\":\"file\",\"type\":\"string\"},{\"name\":\"length\",\"type\":\"int64\"},{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"prefix\",\"type\":\"[]string\"}]},{\"id\":\"TestResultBES\",\"fields\":[{\"name\":\"test_status\",\"type\":\"testresultbes.TestStatus\"},{\"name\":\"status_details\",\"type\":\"string\"},{\"name\":\"label\",\"type\":\"string\"},{\"name\":\"warning\",\"type\":\"[]string\"},{\"name\":\"cached_locally\",\"type\":\"bool\"},{\"name\":\"test_attempt_start_millis_epoch\",\"type\":\"int64\"},{\"name\":\"test_attempt_start\",\"type\":\"string\"},{\"name\":\"test_attempt_duration_millis\",\"type\":\"int64\"},{\"name\":\"test_attempt_duration\",\"type\":\"int64\"}]},{\"id\":\"TestSummary\",\"fields\":[{\"name\":\"overall_status\",\"type\":\"testsummary.OverallStatus\"},{\"name\":\"total_run_count\",\"type\":\"int32\"},{\"name\":\"run_count\",\"type\":\"int32\"},{\"name\":\"attempt_count\",\"type\":\"int32\"},{\"name\":\"shard_count\",\"type\":\"int32\"},{\"name\":\"total_num_cached\",\"type\":\"int32\"},{\"name\":\"first_start_time\",\"type\":\"int64\"},{\"name\":\"last_stop_time\",\"type\":\"int64\"},{\"name\":\"total_run_duration\",\"ty
pe\":\"int64\"},{\"name\":\"label\",\"type\":\"string\"}]},{\"id\":\"TimingBreakdown\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"}]},{\"id\":\"TimingChild\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"}]},{\"id\":\"TimingMetrics\",\"fields\":[{\"name\":\"cpu_time_in_ms\",\"type\":\"int64\"},{\"name\":\"wall_time_in_ms\",\"type\":\"int64\"},{\"name\":\"analysis_phase_time_in_ms\",\"type\":\"int64\"},{\"name\":\"execution_phase_time_in_ms\",\"type\":\"int64\"},{\"name\":\"actions_execution_start_in_ms\",\"type\":\"int64\"}]}],\"edges\":[{\"from\":\"ActionCacheStatistics\",\"to\":\"MissDetail\",\"label\":\"miss_details\"},{\"from\":\"ActionSummary\",\"to\":\"ActionData\",\"label\":\"action_data\"},{\"from\":\"ActionSummary\",\"to\":\"RunnerCount\",\"label\":\"runner_count\"},{\"from\":\"ActionSummary\",\"to\":\"ActionCacheStatistics\",\"label\":\"action_cache_statistics\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"source_artifacts_read\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"output_artifacts_seen\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"output_artifacts_from_action_cache\"},{\"from\":\"ArtifactMetrics\",\"to\":\"FilesMetric\",\"label\":\"top_level_artifacts\"},{\"from\":\"BazelInvocation\",\"to\":\"BazelInvocationProblem\",\"label\":\"problems\"},{\"from\":\"BazelInvocation\",\"to\":\"Metrics\",\"label\":\"metrics\"},{\"from\":\"BazelInvocation\",\"to\":\"TestCollection\",\"label\":\"test_collection\"},{\"from\":\"BazelInvocation\",\"to\":\"TargetPair\",\"label\":\"targets\"},{\"from\":\"Build\",\"to\":\"BazelInvocation\",\"label\":\"invocations\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"dirtied_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"changed_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"built_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"cleaned_values\"},{\"from\":\"BuildGraphMetrics\",\"to\":\"EvaluationStat\",\"label\":\"evaluated_values\"},{\"from\":\"DynamicExecutionMetrics\",\"to\":\"RaceStatistics\",\"label\":\"race_statistics\"},{\"from\":\"EventFile\",\"to\":\"BazelInvocation\",\"label\":\"bazel_invocation\"},{\"from\":\"ExectionInfo\",\"to\":\"TimingBreakdown\",\"label\":\"timing_breakdown\"},{\"from\":\"ExectionInfo\",\"to\":\"ResourceUsage\",\"label\":\"resource_usage\"},{\"from\":\"MemoryMetrics\",\"to\":\"GarbageMetrics\",\"label\":\"garbage_metrics\"},{\"from\":\"Metrics\",\"to\":\"ActionSummary\",\"label\":\"action_summary\"},{\"from\":\"Metrics\",\"to\":\"MemoryMetrics\",\"label\":\"memory_metrics\"},{\"from\":\"Metrics\",\"to\":\"TargetMetrics\",\"label\":\"target_metrics\"},{\"from\":\"Metrics\",\"to\":\"PackageMetrics\",\"label\":\"package_metrics\"},{\"from\":\"Metrics\",\"to\":\"TimingMetrics\",\"label\":\"timing_metrics\"},{\"from\":\"Metrics\",\"to\":\"CumulativeMetrics\",\"label\":\"cumulative_metrics\"},{\"from\":\"Metrics\",\"to\":\"ArtifactMetrics\",\"label\":\"artifact_metrics\"},{\"from\":\"Metrics\",\"to\":\"NetworkMetrics\",\"label\":\"network_metrics\"},{\"from\":\"Metrics\",\"to\":\"DynamicExecutionMetrics\",\"label\":\"dynamic_execution_metrics\"},{\"from\":\"Metrics\",\"to\":\"BuildGraphMetrics\",\"label\":\"build_graph_metrics\"},{\"from\":\"NamedSetOfFiles\",\"to\":\"TestFile\",\"label\":\"files\"},{\"from\":\"NamedSetOfFiles\",\"to\":\"NamedSetOfFile
s\",\"label\":\"file_sets\"},{\"from\":\"NetworkMetrics\",\"to\":\"SystemNetworkStats\",\"label\":\"system_network_stats\"},{\"from\":\"OutputGroup\",\"to\":\"TestFile\",\"label\":\"inline_files\"},{\"from\":\"OutputGroup\",\"to\":\"NamedSetOfFiles\",\"label\":\"file_sets\"},{\"from\":\"PackageMetrics\",\"to\":\"PackageLoadMetrics\",\"label\":\"package_load_metrics\"},{\"from\":\"TargetComplete\",\"to\":\"TestFile\",\"label\":\"important_output\"},{\"from\":\"TargetComplete\",\"to\":\"TestFile\",\"label\":\"directory_output\"},{\"from\":\"TargetComplete\",\"to\":\"OutputGroup\",\"label\":\"output_group\"},{\"from\":\"TargetPair\",\"to\":\"TargetConfigured\",\"label\":\"configuration\"},{\"from\":\"TargetPair\",\"to\":\"TargetComplete\",\"label\":\"completion\"},{\"from\":\"TestCollection\",\"to\":\"TestSummary\",\"label\":\"test_summary\"},{\"from\":\"TestCollection\",\"to\":\"TestResultBES\",\"label\":\"test_results\"},{\"from\":\"TestResultBES\",\"to\":\"TestFile\",\"label\":\"test_action_output\"},{\"from\":\"TestResultBES\",\"to\":\"ExectionInfo\",\"label\":\"execution_info\"},{\"from\":\"TestSummary\",\"to\":\"TestFile\",\"label\":\"passed\"},{\"from\":\"TestSummary\",\"to\":\"TestFile\",\"label\":\"failed\"},{\"from\":\"TimingBreakdown\",\"to\":\"TimingChild\",\"label\":\"child\"}]}"); const nodes = new vis.DataSet((entGraph.nodes || []).map(n => ({ id: n.id, diff --git a/ent/gen/ent/systemnetworkstats/systemnetworkstats.go b/ent/gen/ent/systemnetworkstats/systemnetworkstats.go index 8191c77..e4c0e68 100644 --- a/ent/gen/ent/systemnetworkstats/systemnetworkstats.go +++ b/ent/gen/ent/systemnetworkstats/systemnetworkstats.go @@ -133,6 +133,6 @@ func newNetworkMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(NetworkMetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, NetworkMetricsTable, NetworkMetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, NetworkMetricsTable, NetworkMetricsColumn), ) } diff --git a/ent/gen/ent/systemnetworkstats/where.go b/ent/gen/ent/systemnetworkstats/where.go index 33eac37..2ed1d07 100644 --- a/ent/gen/ent/systemnetworkstats/where.go +++ b/ent/gen/ent/systemnetworkstats/where.go @@ -498,7 +498,7 @@ func HasNetworkMetrics() predicate.SystemNetworkStats { return predicate.SystemNetworkStats(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, NetworkMetricsTable, NetworkMetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, NetworkMetricsTable, NetworkMetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/systemnetworkstats_create.go b/ent/gen/ent/systemnetworkstats_create.go index ea6d267..53fd49b 100644 --- a/ent/gen/ent/systemnetworkstats_create.go +++ b/ent/gen/ent/systemnetworkstats_create.go @@ -244,7 +244,7 @@ func (snsc *SystemNetworkStatsCreate) createSpec() (*SystemNetworkStats, *sqlgra } if nodes := snsc.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: systemnetworkstats.NetworkMetricsTable, Columns: []string{systemnetworkstats.NetworkMetricsColumn}, diff --git a/ent/gen/ent/systemnetworkstats_query.go b/ent/gen/ent/systemnetworkstats_query.go index c9688f5..1325a44 100644 --- a/ent/gen/ent/systemnetworkstats_query.go +++ b/ent/gen/ent/systemnetworkstats_query.go @@ -76,7 +76,7 @@ func (snsq *SystemNetworkStatsQuery) QueryNetworkMetrics() *NetworkMetricsQuery step := sqlgraph.NewStep( 
sqlgraph.From(systemnetworkstats.Table, systemnetworkstats.FieldID, selector), sqlgraph.To(networkmetrics.Table, networkmetrics.FieldID), - sqlgraph.Edge(sqlgraph.M2O, true, systemnetworkstats.NetworkMetricsTable, systemnetworkstats.NetworkMetricsColumn), + sqlgraph.Edge(sqlgraph.O2O, true, systemnetworkstats.NetworkMetricsTable, systemnetworkstats.NetworkMetricsColumn), ) fromU = sqlgraph.SetNeighbors(snsq.driver.Dialect(), step) return fromU, nil diff --git a/ent/gen/ent/systemnetworkstats_update.go b/ent/gen/ent/systemnetworkstats_update.go index 40ab1a8..6bf3ab4 100644 --- a/ent/gen/ent/systemnetworkstats_update.go +++ b/ent/gen/ent/systemnetworkstats_update.go @@ -384,7 +384,7 @@ func (snsu *SystemNetworkStatsUpdate) sqlSave(ctx context.Context) (n int, err e } if snsu.mutation.NetworkMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: systemnetworkstats.NetworkMetricsTable, Columns: []string{systemnetworkstats.NetworkMetricsColumn}, @@ -397,7 +397,7 @@ func (snsu *SystemNetworkStatsUpdate) sqlSave(ctx context.Context) (n int, err e } if nodes := snsu.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: systemnetworkstats.NetworkMetricsTable, Columns: []string{systemnetworkstats.NetworkMetricsColumn}, @@ -817,7 +817,7 @@ func (snsuo *SystemNetworkStatsUpdateOne) sqlSave(ctx context.Context) (_node *S } if snsuo.mutation.NetworkMetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: systemnetworkstats.NetworkMetricsTable, Columns: []string{systemnetworkstats.NetworkMetricsColumn}, @@ -830,7 +830,7 @@ func (snsuo *SystemNetworkStatsUpdateOne) sqlSave(ctx context.Context) (_node *S } if nodes := snsuo.mutation.NetworkMetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: true, Table: systemnetworkstats.NetworkMetricsTable, Columns: []string{systemnetworkstats.NetworkMetricsColumn}, diff --git a/ent/gen/ent/targetcomplete.go b/ent/gen/ent/targetcomplete.go index 6be8623..b9a2a24 100644 --- a/ent/gen/ent/targetcomplete.go +++ b/ent/gen/ent/targetcomplete.go @@ -11,6 +11,7 @@ import ( "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/outputgroup" "github.com/buildbarn/bb-portal/ent/gen/ent/targetcomplete" + "github.com/buildbarn/bb-portal/ent/gen/ent/targetpair" ) // TargetComplete is the model entity for the TargetComplete schema. @@ -34,15 +35,15 @@ type TargetComplete struct { TestSize targetcomplete.TestSize `json:"test_size,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TargetCompleteQuery when eager-loading is set. - Edges TargetCompleteEdges `json:"edges"` - target_complete_output_group *int - selectValues sql.SelectValues + Edges TargetCompleteEdges `json:"edges"` + target_pair_completion *int + selectValues sql.SelectValues } // TargetCompleteEdges holds the relations/edges for other nodes in the graph. type TargetCompleteEdges struct { // TargetPair holds the value of the target_pair edge. - TargetPair []*TargetPair `json:"target_pair,omitempty"` + TargetPair *TargetPair `json:"target_pair,omitempty"` // ImportantOutput holds the value of the important_output edge. ImportantOutput []*TestFile `json:"important_output,omitempty"` // DirectoryOutput holds the value of the directory_output edge. 
@@ -55,16 +56,17 @@ type TargetCompleteEdges struct { // totalCount holds the count of the edges above. totalCount [4]map[string]int - namedTargetPair map[string][]*TargetPair namedImportantOutput map[string][]*TestFile namedDirectoryOutput map[string][]*TestFile } // TargetPairOrErr returns the TargetPair value or an error if the edge -// was not loaded in eager-loading. -func (e TargetCompleteEdges) TargetPairOrErr() ([]*TargetPair, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TargetCompleteEdges) TargetPairOrErr() (*TargetPair, error) { + if e.TargetPair != nil { return e.TargetPair, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: targetpair.Label} } return nil, &NotLoadedError{edge: "target_pair"} } @@ -111,7 +113,7 @@ func (*TargetComplete) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case targetcomplete.FieldTargetKind, targetcomplete.FieldTestSize: values[i] = new(sql.NullString) - case targetcomplete.ForeignKeys[0]: // target_complete_output_group + case targetcomplete.ForeignKeys[0]: // target_pair_completion values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -180,10 +182,10 @@ func (tc *TargetComplete) assignValues(columns []string, values []any) error { } case targetcomplete.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field target_complete_output_group", value) + return fmt.Errorf("unexpected type %T for edge-field target_pair_completion", value) } else if value.Valid { - tc.target_complete_output_group = new(int) - *tc.target_complete_output_group = int(value.Int64) + tc.target_pair_completion = new(int) + *tc.target_pair_completion = int(value.Int64) } default: tc.selectValues.Set(columns[i], values[i]) @@ -265,30 +267,6 @@ func (tc *TargetComplete) String() string { return builder.String() } -// NamedTargetPair returns the TargetPair named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tc *TargetComplete) NamedTargetPair(name string) ([]*TargetPair, error) { - if tc.Edges.namedTargetPair == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tc.Edges.namedTargetPair[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tc *TargetComplete) appendNamedTargetPair(name string, edges ...*TargetPair) { - if tc.Edges.namedTargetPair == nil { - tc.Edges.namedTargetPair = make(map[string][]*TargetPair) - } - if len(edges) == 0 { - tc.Edges.namedTargetPair[name] = []*TargetPair{} - } else { - tc.Edges.namedTargetPair[name] = append(tc.Edges.namedTargetPair[name], edges...) - } -} - // NamedImportantOutput returns the ImportantOutput named value or an error if the edge was not // loaded in eager-loading with this name. func (tc *TargetComplete) NamedImportantOutput(name string) ([]*TestFile, error) { diff --git a/ent/gen/ent/targetcomplete/targetcomplete.go b/ent/gen/ent/targetcomplete/targetcomplete.go index 33b0c96..bd1b689 100644 --- a/ent/gen/ent/targetcomplete/targetcomplete.go +++ b/ent/gen/ent/targetcomplete/targetcomplete.go @@ -41,7 +41,7 @@ const ( // Table holds the table name of the targetcomplete in the database. Table = "target_completes" // TargetPairTable is the table that holds the target_pair relation/edge. - TargetPairTable = "target_pairs" + TargetPairTable = "target_completes" // TargetPairInverseTable is the table name for the TargetPair entity. 
// It exists in this package in order to avoid circular dependency with the "targetpair" package. TargetPairInverseTable = "target_pairs" @@ -62,7 +62,7 @@ const ( // DirectoryOutputColumn is the table column denoting the directory_output relation/edge. DirectoryOutputColumn = "target_complete_directory_output" // OutputGroupTable is the table that holds the output_group relation/edge. - OutputGroupTable = "target_completes" + OutputGroupTable = "output_groups" // OutputGroupInverseTable is the table name for the OutputGroup entity. // It exists in this package in order to avoid circular dependency with the "outputgroup" package. OutputGroupInverseTable = "output_groups" @@ -85,7 +85,7 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "target_completes" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "target_complete_output_group", + "target_pair_completion", } // ValidColumn reports if the column name is valid (part of the table columns). @@ -167,17 +167,10 @@ func ByTestSize(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldTestSize, opts...).ToFunc() } -// ByTargetPairCount orders the results by target_pair count. -func ByTargetPairCount(opts ...sql.OrderTermOption) OrderOption { +// ByTargetPairField orders the results by target_pair field. +func ByTargetPairField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTargetPairStep(), opts...) - } -} - -// ByTargetPair orders the results by target_pair terms. -func ByTargetPair(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTargetPairStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newTargetPairStep(), sql.OrderByField(field, opts...)) } } @@ -219,7 +212,7 @@ func newTargetPairStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TargetPairInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetPairTable, TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetPairTable, TargetPairColumn), ) } func newImportantOutputStep() *sqlgraph.Step { @@ -240,7 +233,7 @@ func newOutputGroupStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(OutputGroupInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, OutputGroupTable, OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, false, OutputGroupTable, OutputGroupColumn), ) } diff --git a/ent/gen/ent/targetcomplete/where.go b/ent/gen/ent/targetcomplete/where.go index 1b6a5e1..7025b54 100644 --- a/ent/gen/ent/targetcomplete/where.go +++ b/ent/gen/ent/targetcomplete/where.go @@ -368,7 +368,7 @@ func HasTargetPair() predicate.TargetComplete { return predicate.TargetComplete(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetPairTable, TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetPairTable, TargetPairColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -437,7 +437,7 @@ func HasOutputGroup() predicate.TargetComplete { return predicate.TargetComplete(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, OutputGroupTable, OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, false, OutputGroupTable, OutputGroupColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/targetcomplete_create.go b/ent/gen/ent/targetcomplete_create.go index df2b939..a425339 100644 --- a/ent/gen/ent/targetcomplete_create.go +++ b/ent/gen/ent/targetcomplete_create.go @@ -111,19 +111,23 @@ func (tcc *TargetCompleteCreate) SetNillableTestSize(ts *targetcomplete.TestSize return tcc } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. -func (tcc *TargetCompleteCreate) AddTargetPairIDs(ids ...int) *TargetCompleteCreate { - tcc.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcc *TargetCompleteCreate) SetTargetPairID(id int) *TargetCompleteCreate { + tcc.mutation.SetTargetPairID(id) return tcc } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcc *TargetCompleteCreate) AddTargetPair(t ...*TargetPair) *TargetCompleteCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcc *TargetCompleteCreate) SetNillableTargetPairID(id *int) *TargetCompleteCreate { + if id != nil { + tcc = tcc.SetTargetPairID(*id) } - return tcc.AddTargetPairIDs(ids...) + return tcc +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcc *TargetCompleteCreate) SetTargetPair(t *TargetPair) *TargetCompleteCreate { + return tcc.SetTargetPairID(t.ID) } // AddImportantOutputIDs adds the "important_output" edge to the TestFile entity by IDs. 
@@ -270,7 +274,7 @@ func (tcc *TargetCompleteCreate) createSpec() (*TargetComplete, *sqlgraph.Create } if nodes := tcc.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetcomplete.TargetPairTable, Columns: []string{targetcomplete.TargetPairColumn}, @@ -282,6 +286,7 @@ func (tcc *TargetCompleteCreate) createSpec() (*TargetComplete, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.target_pair_completion = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tcc.mutation.ImportantOutputIDs(); len(nodes) > 0 { @@ -318,7 +323,7 @@ func (tcc *TargetCompleteCreate) createSpec() (*TargetComplete, *sqlgraph.Create } if nodes := tcc.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetcomplete.OutputGroupTable, Columns: []string{targetcomplete.OutputGroupColumn}, @@ -330,7 +335,6 @@ func (tcc *TargetCompleteCreate) createSpec() (*TargetComplete, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.target_complete_output_group = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/targetcomplete_query.go b/ent/gen/ent/targetcomplete_query.go index 8352b65..6a5e9ca 100644 --- a/ent/gen/ent/targetcomplete_query.go +++ b/ent/gen/ent/targetcomplete_query.go @@ -32,7 +32,6 @@ type TargetCompleteQuery struct { withFKs bool modifiers []func(*sql.Selector) loadTotal []func(context.Context, []*TargetComplete) error - withNamedTargetPair map[string]*TargetPairQuery withNamedImportantOutput map[string]*TestFileQuery withNamedDirectoryOutput map[string]*TestFileQuery // intermediate query (i.e. traversal path). 
@@ -85,7 +84,7 @@ func (tcq *TargetCompleteQuery) QueryTargetPair() *TargetPairQuery { step := sqlgraph.NewStep( sqlgraph.From(targetcomplete.Table, targetcomplete.FieldID, selector), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, targetcomplete.TargetPairTable, targetcomplete.TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, targetcomplete.TargetPairTable, targetcomplete.TargetPairColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -151,7 +150,7 @@ func (tcq *TargetCompleteQuery) QueryOutputGroup() *OutputGroupQuery { step := sqlgraph.NewStep( sqlgraph.From(targetcomplete.Table, targetcomplete.FieldID, selector), sqlgraph.To(outputgroup.Table, outputgroup.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetcomplete.OutputGroupTable, targetcomplete.OutputGroupColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetcomplete.OutputGroupTable, targetcomplete.OutputGroupColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -491,7 +490,7 @@ func (tcq *TargetCompleteQuery) sqlAll(ctx context.Context, hooks ...queryHook) tcq.withOutputGroup != nil, } ) - if tcq.withOutputGroup != nil { + if tcq.withTargetPair != nil { withFKs = true } if withFKs { @@ -519,9 +518,8 @@ func (tcq *TargetCompleteQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := tcq.withTargetPair; query != nil { - if err := tcq.loadTargetPair(ctx, query, nodes, - func(n *TargetComplete) { n.Edges.TargetPair = []*TargetPair{} }, - func(n *TargetComplete, e *TargetPair) { n.Edges.TargetPair = append(n.Edges.TargetPair, e) }); err != nil { + if err := tcq.loadTargetPair(ctx, query, nodes, nil, + func(n *TargetComplete, e *TargetPair) { n.Edges.TargetPair = e }); err != nil { return nil, err } } @@ -545,13 +543,6 @@ func (tcq *TargetCompleteQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nil, err } } - for name, query := range tcq.withNamedTargetPair { - if err := tcq.loadTargetPair(ctx, query, nodes, - func(n *TargetComplete) { n.appendNamedTargetPair(name) }, - func(n *TargetComplete, e *TargetPair) { n.appendNamedTargetPair(name, e) }); err != nil { - return nil, err - } - } for name, query := range tcq.withNamedImportantOutput { if err := tcq.loadImportantOutput(ctx, query, nodes, func(n *TargetComplete) { n.appendNamedImportantOutput(name) }, @@ -575,33 +566,34 @@ func (tcq *TargetCompleteQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (tcq *TargetCompleteQuery) loadTargetPair(ctx context.Context, query *TargetPairQuery, nodes []*TargetComplete, init func(*TargetComplete), assign func(*TargetComplete, *TargetPair)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*TargetComplete) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TargetComplete) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].target_pair_completion == nil { + continue } + fk := *nodes[i].target_pair_completion + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.TargetPair(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(targetcomplete.TargetPairColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(targetpair.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := 
n.target_pair_completion - if fk == nil { - return fmt.Errorf(`foreign-key "target_pair_completion" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "target_pair_completion" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "target_pair_completion" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } @@ -668,34 +660,30 @@ func (tcq *TargetCompleteQuery) loadDirectoryOutput(ctx context.Context, query * return nil } func (tcq *TargetCompleteQuery) loadOutputGroup(ctx context.Context, query *OutputGroupQuery, nodes []*TargetComplete, init func(*TargetComplete), assign func(*TargetComplete, *OutputGroup)) error { - ids := make([]int, 0, len(nodes)) - nodeids := make(map[int][]*TargetComplete) + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TargetComplete) for i := range nodes { - if nodes[i].target_complete_output_group == nil { - continue - } - fk := *nodes[i].target_complete_output_group - if _, ok := nodeids[fk]; !ok { - ids = append(ids, fk) - } - nodeids[fk] = append(nodeids[fk], nodes[i]) - } - if len(ids) == 0 { - return nil + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - query.Where(outputgroup.IDIn(ids...)) + query.withFKs = true + query.Where(predicate.OutputGroup(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(targetcomplete.OutputGroupColumn), fks...)) + })) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nodeids[n.ID] - if !ok { - return fmt.Errorf(`unexpected foreign-key "target_complete_output_group" returned %v`, n.ID) + fk := n.target_complete_output_group + if fk == nil { + return fmt.Errorf(`foreign-key "target_complete_output_group" is nil for node %v`, n.ID) } - for i := range nodes { - assign(nodes[i], n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "target_complete_output_group" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -784,20 +772,6 @@ func (tcq *TargetCompleteQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTargetPair tells the query-builder to eager-load the nodes that are connected to the "target_pair" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tcq *TargetCompleteQuery) WithNamedTargetPair(name string, opts ...func(*TargetPairQuery)) *TargetCompleteQuery { - query := (&TargetPairClient{config: tcq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tcq.withNamedTargetPair == nil { - tcq.withNamedTargetPair = make(map[string]*TargetPairQuery) - } - tcq.withNamedTargetPair[name] = query - return tcq -} - // WithNamedImportantOutput tells the query-builder to eager-load the nodes that are connected to the "important_output" // edge with the given name. The optional arguments are used to configure the query builder of the edge. 
func (tcq *TargetCompleteQuery) WithNamedImportantOutput(name string, opts ...func(*TestFileQuery)) *TargetCompleteQuery { diff --git a/ent/gen/ent/targetcomplete_update.go b/ent/gen/ent/targetcomplete_update.go index d5228a3..3a0ac32 100644 --- a/ent/gen/ent/targetcomplete_update.go +++ b/ent/gen/ent/targetcomplete_update.go @@ -190,19 +190,23 @@ func (tcu *TargetCompleteUpdate) ClearTestSize() *TargetCompleteUpdate { return tcu } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. -func (tcu *TargetCompleteUpdate) AddTargetPairIDs(ids ...int) *TargetCompleteUpdate { - tcu.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcu *TargetCompleteUpdate) SetTargetPairID(id int) *TargetCompleteUpdate { + tcu.mutation.SetTargetPairID(id) return tcu } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcu *TargetCompleteUpdate) AddTargetPair(t ...*TargetPair) *TargetCompleteUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcu *TargetCompleteUpdate) SetNillableTargetPairID(id *int) *TargetCompleteUpdate { + if id != nil { + tcu = tcu.SetTargetPairID(*id) } - return tcu.AddTargetPairIDs(ids...) + return tcu +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcu *TargetCompleteUpdate) SetTargetPair(t *TargetPair) *TargetCompleteUpdate { + return tcu.SetTargetPairID(t.ID) } // AddImportantOutputIDs adds the "important_output" edge to the TestFile entity by IDs. @@ -259,27 +263,12 @@ func (tcu *TargetCompleteUpdate) Mutation() *TargetCompleteMutation { return tcu.mutation } -// ClearTargetPair clears all "target_pair" edges to the TargetPair entity. +// ClearTargetPair clears the "target_pair" edge to the TargetPair entity. func (tcu *TargetCompleteUpdate) ClearTargetPair() *TargetCompleteUpdate { tcu.mutation.ClearTargetPair() return tcu } -// RemoveTargetPairIDs removes the "target_pair" edge to TargetPair entities by IDs. -func (tcu *TargetCompleteUpdate) RemoveTargetPairIDs(ids ...int) *TargetCompleteUpdate { - tcu.mutation.RemoveTargetPairIDs(ids...) - return tcu -} - -// RemoveTargetPair removes "target_pair" edges to TargetPair entities. -func (tcu *TargetCompleteUpdate) RemoveTargetPair(t ...*TargetPair) *TargetCompleteUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcu.RemoveTargetPairIDs(ids...) -} - // ClearImportantOutput clears all "important_output" edges to the TestFile entity. 
func (tcu *TargetCompleteUpdate) ClearImportantOutput() *TargetCompleteUpdate { tcu.mutation.ClearImportantOutput() @@ -435,7 +424,7 @@ func (tcu *TargetCompleteUpdate) sqlSave(ctx context.Context) (n int, err error) } if tcu.mutation.TargetPairCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetcomplete.TargetPairTable, Columns: []string{targetcomplete.TargetPairColumn}, @@ -446,25 +435,9 @@ func (tcu *TargetCompleteUpdate) sqlSave(ctx context.Context) (n int, err error) } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tcu.mutation.RemovedTargetPairIDs(); len(nodes) > 0 && !tcu.mutation.TargetPairCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: targetcomplete.TargetPairTable, - Columns: []string{targetcomplete.TargetPairColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tcu.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetcomplete.TargetPairTable, Columns: []string{targetcomplete.TargetPairColumn}, @@ -570,7 +543,7 @@ func (tcu *TargetCompleteUpdate) sqlSave(ctx context.Context) (n int, err error) } if tcu.mutation.OutputGroupCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetcomplete.OutputGroupTable, Columns: []string{targetcomplete.OutputGroupColumn}, @@ -583,7 +556,7 @@ func (tcu *TargetCompleteUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := tcu.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetcomplete.OutputGroupTable, Columns: []string{targetcomplete.OutputGroupColumn}, @@ -776,19 +749,23 @@ func (tcuo *TargetCompleteUpdateOne) ClearTestSize() *TargetCompleteUpdateOne { return tcuo } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. -func (tcuo *TargetCompleteUpdateOne) AddTargetPairIDs(ids ...int) *TargetCompleteUpdateOne { - tcuo.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcuo *TargetCompleteUpdateOne) SetTargetPairID(id int) *TargetCompleteUpdateOne { + tcuo.mutation.SetTargetPairID(id) return tcuo } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcuo *TargetCompleteUpdateOne) AddTargetPair(t ...*TargetPair) *TargetCompleteUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcuo *TargetCompleteUpdateOne) SetNillableTargetPairID(id *int) *TargetCompleteUpdateOne { + if id != nil { + tcuo = tcuo.SetTargetPairID(*id) } - return tcuo.AddTargetPairIDs(ids...) + return tcuo +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcuo *TargetCompleteUpdateOne) SetTargetPair(t *TargetPair) *TargetCompleteUpdateOne { + return tcuo.SetTargetPairID(t.ID) } // AddImportantOutputIDs adds the "important_output" edge to the TestFile entity by IDs. 
@@ -845,27 +822,12 @@ func (tcuo *TargetCompleteUpdateOne) Mutation() *TargetCompleteMutation { return tcuo.mutation } -// ClearTargetPair clears all "target_pair" edges to the TargetPair entity. +// ClearTargetPair clears the "target_pair" edge to the TargetPair entity. func (tcuo *TargetCompleteUpdateOne) ClearTargetPair() *TargetCompleteUpdateOne { tcuo.mutation.ClearTargetPair() return tcuo } -// RemoveTargetPairIDs removes the "target_pair" edge to TargetPair entities by IDs. -func (tcuo *TargetCompleteUpdateOne) RemoveTargetPairIDs(ids ...int) *TargetCompleteUpdateOne { - tcuo.mutation.RemoveTargetPairIDs(ids...) - return tcuo -} - -// RemoveTargetPair removes "target_pair" edges to TargetPair entities. -func (tcuo *TargetCompleteUpdateOne) RemoveTargetPair(t ...*TargetPair) *TargetCompleteUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcuo.RemoveTargetPairIDs(ids...) -} - // ClearImportantOutput clears all "important_output" edges to the TestFile entity. func (tcuo *TargetCompleteUpdateOne) ClearImportantOutput() *TargetCompleteUpdateOne { tcuo.mutation.ClearImportantOutput() @@ -1051,7 +1013,7 @@ func (tcuo *TargetCompleteUpdateOne) sqlSave(ctx context.Context) (_node *Target } if tcuo.mutation.TargetPairCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetcomplete.TargetPairTable, Columns: []string{targetcomplete.TargetPairColumn}, @@ -1062,25 +1024,9 @@ func (tcuo *TargetCompleteUpdateOne) sqlSave(ctx context.Context) (_node *Target } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tcuo.mutation.RemovedTargetPairIDs(); len(nodes) > 0 && !tcuo.mutation.TargetPairCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: targetcomplete.TargetPairTable, - Columns: []string{targetcomplete.TargetPairColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tcuo.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetcomplete.TargetPairTable, Columns: []string{targetcomplete.TargetPairColumn}, @@ -1186,7 +1132,7 @@ func (tcuo *TargetCompleteUpdateOne) sqlSave(ctx context.Context) (_node *Target } if tcuo.mutation.OutputGroupCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetcomplete.OutputGroupTable, Columns: []string{targetcomplete.OutputGroupColumn}, @@ -1199,7 +1145,7 @@ func (tcuo *TargetCompleteUpdateOne) sqlSave(ctx context.Context) (_node *Target } if nodes := tcuo.mutation.OutputGroupIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetcomplete.OutputGroupTable, Columns: []string{targetcomplete.OutputGroupColumn}, diff --git a/ent/gen/ent/targetconfigured.go b/ent/gen/ent/targetconfigured.go index 4859e4b..2e7f9c9 100644 --- a/ent/gen/ent/targetconfigured.go +++ b/ent/gen/ent/targetconfigured.go @@ -10,6 +10,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/targetconfigured" + "github.com/buildbarn/bb-portal/ent/gen/ent/targetpair" ) // TargetConfigured is the model entity for the TargetConfigured schema. 
@@ -27,28 +28,29 @@ type TargetConfigured struct { TestSize targetconfigured.TestSize `json:"test_size,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TargetConfiguredQuery when eager-loading is set. - Edges TargetConfiguredEdges `json:"edges"` - selectValues sql.SelectValues + Edges TargetConfiguredEdges `json:"edges"` + target_pair_configuration *int + selectValues sql.SelectValues } // TargetConfiguredEdges holds the relations/edges for other nodes in the graph. type TargetConfiguredEdges struct { // TargetPair holds the value of the target_pair edge. - TargetPair []*TargetPair `json:"target_pair,omitempty"` + TargetPair *TargetPair `json:"target_pair,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedTargetPair map[string][]*TargetPair } // TargetPairOrErr returns the TargetPair value or an error if the edge -// was not loaded in eager-loading. -func (e TargetConfiguredEdges) TargetPairOrErr() ([]*TargetPair, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TargetConfiguredEdges) TargetPairOrErr() (*TargetPair, error) { + if e.TargetPair != nil { return e.TargetPair, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: targetpair.Label} } return nil, &NotLoadedError{edge: "target_pair"} } @@ -64,6 +66,8 @@ func (*TargetConfigured) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case targetconfigured.FieldTargetKind, targetconfigured.FieldTestSize: values[i] = new(sql.NullString) + case targetconfigured.ForeignKeys[0]: // target_pair_configuration + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -111,6 +115,13 @@ func (tc *TargetConfigured) assignValues(columns []string, values []any) error { } else if value.Valid { tc.TestSize = targetconfigured.TestSize(value.String) } + case targetconfigured.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field target_pair_configuration", value) + } else if value.Valid { + tc.target_pair_configuration = new(int) + *tc.target_pair_configuration = int(value.Int64) + } default: tc.selectValues.Set(columns[i], values[i]) } @@ -167,29 +178,5 @@ func (tc *TargetConfigured) String() string { return builder.String() } -// NamedTargetPair returns the TargetPair named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tc *TargetConfigured) NamedTargetPair(name string) ([]*TargetPair, error) { - if tc.Edges.namedTargetPair == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tc.Edges.namedTargetPair[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tc *TargetConfigured) appendNamedTargetPair(name string, edges ...*TargetPair) { - if tc.Edges.namedTargetPair == nil { - tc.Edges.namedTargetPair = make(map[string][]*TargetPair) - } - if len(edges) == 0 { - tc.Edges.namedTargetPair[name] = []*TargetPair{} - } else { - tc.Edges.namedTargetPair[name] = append(tc.Edges.namedTargetPair[name], edges...) - } -} - // TargetConfigureds is a parsable slice of TargetConfigured. 
type TargetConfigureds []*TargetConfigured diff --git a/ent/gen/ent/targetconfigured/targetconfigured.go b/ent/gen/ent/targetconfigured/targetconfigured.go index 61dabf2..daa837f 100644 --- a/ent/gen/ent/targetconfigured/targetconfigured.go +++ b/ent/gen/ent/targetconfigured/targetconfigured.go @@ -29,7 +29,7 @@ const ( // Table holds the table name of the targetconfigured in the database. Table = "target_configureds" // TargetPairTable is the table that holds the target_pair relation/edge. - TargetPairTable = "target_pairs" + TargetPairTable = "target_configureds" // TargetPairInverseTable is the table name for the TargetPair entity. // It exists in this package in order to avoid circular dependency with the "targetpair" package. TargetPairInverseTable = "target_pairs" @@ -46,6 +46,12 @@ var Columns = []string{ FieldTestSize, } +// ForeignKeys holds the SQL foreign-keys that are owned by the "target_configureds" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "target_pair_configuration", +} + // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -53,6 +59,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -105,24 +116,17 @@ func ByTestSize(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldTestSize, opts...).ToFunc() } -// ByTargetPairCount orders the results by target_pair count. -func ByTargetPairCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTargetPairStep(), opts...) - } -} - -// ByTargetPair orders the results by target_pair terms. -func ByTargetPair(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTargetPairField orders the results by target_pair field. +func ByTargetPairField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTargetPairStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newTargetPairStep(), sql.OrderByField(field, opts...)) } } func newTargetPairStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TargetPairInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetPairTable, TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetPairTable, TargetPairColumn), ) } diff --git a/ent/gen/ent/targetconfigured/where.go b/ent/gen/ent/targetconfigured/where.go index d507407..e39db19 100644 --- a/ent/gen/ent/targetconfigured/where.go +++ b/ent/gen/ent/targetconfigured/where.go @@ -233,7 +233,7 @@ func HasTargetPair() predicate.TargetConfigured { return predicate.TargetConfigured(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TargetPairTable, TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TargetPairTable, TargetPairColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/targetconfigured_create.go b/ent/gen/ent/targetconfigured_create.go index 3998bbc..25f4728 100644 --- a/ent/gen/ent/targetconfigured_create.go +++ b/ent/gen/ent/targetconfigured_create.go @@ -67,19 +67,23 @@ func (tcc *TargetConfiguredCreate) SetNillableTestSize(ts *targetconfigured.Test return tcc } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. 
-func (tcc *TargetConfiguredCreate) AddTargetPairIDs(ids ...int) *TargetConfiguredCreate { - tcc.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcc *TargetConfiguredCreate) SetTargetPairID(id int) *TargetConfiguredCreate { + tcc.mutation.SetTargetPairID(id) return tcc } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcc *TargetConfiguredCreate) AddTargetPair(t ...*TargetPair) *TargetConfiguredCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcc *TargetConfiguredCreate) SetNillableTargetPairID(id *int) *TargetConfiguredCreate { + if id != nil { + tcc = tcc.SetTargetPairID(*id) } - return tcc.AddTargetPairIDs(ids...) + return tcc +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcc *TargetConfiguredCreate) SetTargetPair(t *TargetPair) *TargetConfiguredCreate { + return tcc.SetTargetPairID(t.ID) } // Mutation returns the TargetConfiguredMutation object of the builder. @@ -165,7 +169,7 @@ func (tcc *TargetConfiguredCreate) createSpec() (*TargetConfigured, *sqlgraph.Cr } if nodes := tcc.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetconfigured.TargetPairTable, Columns: []string{targetconfigured.TargetPairColumn}, @@ -177,6 +181,7 @@ func (tcc *TargetConfiguredCreate) createSpec() (*TargetConfigured, *sqlgraph.Cr for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.target_pair_configuration = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/targetconfigured_query.go b/ent/gen/ent/targetconfigured_query.go index 090938f..e6fdab0 100644 --- a/ent/gen/ent/targetconfigured_query.go +++ b/ent/gen/ent/targetconfigured_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // TargetConfiguredQuery is the builder for querying TargetConfigured entities. type TargetConfiguredQuery struct { config - ctx *QueryContext - order []targetconfigured.OrderOption - inters []Interceptor - predicates []predicate.TargetConfigured - withTargetPair *TargetPairQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TargetConfigured) error - withNamedTargetPair map[string]*TargetPairQuery + ctx *QueryContext + order []targetconfigured.OrderOption + inters []Interceptor + predicates []predicate.TargetConfigured + withTargetPair *TargetPairQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TargetConfigured) error // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (tcq *TargetConfiguredQuery) QueryTargetPair() *TargetPairQuery { step := sqlgraph.NewStep( sqlgraph.From(targetconfigured.Table, targetconfigured.FieldID, selector), sqlgraph.To(targetpair.Table, targetpair.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, targetconfigured.TargetPairTable, targetconfigured.TargetPairColumn), + sqlgraph.Edge(sqlgraph.O2O, true, targetconfigured.TargetPairTable, targetconfigured.TargetPairColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (tcq *TargetConfiguredQuery) prepareQuery(ctx context.Context) error { func (tcq *TargetConfiguredQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TargetConfigured, error) { var ( nodes = []*TargetConfigured{} + withFKs = tcq.withFKs _spec = tcq.querySpec() loadedTypes = [1]bool{ tcq.withTargetPair != nil, } ) + if tcq.withTargetPair != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, targetconfigured.ForeignKeys...) + } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TargetConfigured).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (tcq *TargetConfiguredQuery) sqlAll(ctx context.Context, hooks ...queryHook return nodes, nil } if query := tcq.withTargetPair; query != nil { - if err := tcq.loadTargetPair(ctx, query, nodes, - func(n *TargetConfigured) { n.Edges.TargetPair = []*TargetPair{} }, - func(n *TargetConfigured, e *TargetPair) { n.Edges.TargetPair = append(n.Edges.TargetPair, e) }); err != nil { - return nil, err - } - } - for name, query := range tcq.withNamedTargetPair { - if err := tcq.loadTargetPair(ctx, query, nodes, - func(n *TargetConfigured) { n.appendNamedTargetPair(name) }, - func(n *TargetConfigured, e *TargetPair) { n.appendNamedTargetPair(name, e) }); err != nil { + if err := tcq.loadTargetPair(ctx, query, nodes, nil, + func(n *TargetConfigured, e *TargetPair) { n.Edges.TargetPair = e }); err != nil { return nil, err } } @@ -421,33 +419,34 @@ func (tcq *TargetConfiguredQuery) sqlAll(ctx context.Context, hooks ...queryHook } func (tcq *TargetConfiguredQuery) loadTargetPair(ctx context.Context, query *TargetPairQuery, nodes []*TargetConfigured, init func(*TargetConfigured), assign func(*TargetConfigured, *TargetPair)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*TargetConfigured) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TargetConfigured) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].target_pair_configuration == nil { + continue } + fk := *nodes[i].target_pair_configuration + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.TargetPair(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(targetconfigured.TargetPairColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(targetpair.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.target_pair_configuration - if fk == nil { - return fmt.Errorf(`foreign-key "target_pair_configuration" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "target_pair_configuration" returned %v for node %v`, *fk, n.ID) + 
return fmt.Errorf(`unexpected foreign-key "target_pair_configuration" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } @@ -536,20 +535,6 @@ func (tcq *TargetConfiguredQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTargetPair tells the query-builder to eager-load the nodes that are connected to the "target_pair" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tcq *TargetConfiguredQuery) WithNamedTargetPair(name string, opts ...func(*TargetPairQuery)) *TargetConfiguredQuery { - query := (&TargetPairClient{config: tcq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tcq.withNamedTargetPair == nil { - tcq.withNamedTargetPair = make(map[string]*TargetPairQuery) - } - tcq.withNamedTargetPair[name] = query - return tcq -} - // TargetConfiguredGroupBy is the group-by builder for TargetConfigured entities. type TargetConfiguredGroupBy struct { selector diff --git a/ent/gen/ent/targetconfigured_update.go b/ent/gen/ent/targetconfigured_update.go index 980e0ff..6b13e03 100644 --- a/ent/gen/ent/targetconfigured_update.go +++ b/ent/gen/ent/targetconfigured_update.go @@ -114,19 +114,23 @@ func (tcu *TargetConfiguredUpdate) ClearTestSize() *TargetConfiguredUpdate { return tcu } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. -func (tcu *TargetConfiguredUpdate) AddTargetPairIDs(ids ...int) *TargetConfiguredUpdate { - tcu.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcu *TargetConfiguredUpdate) SetTargetPairID(id int) *TargetConfiguredUpdate { + tcu.mutation.SetTargetPairID(id) return tcu } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcu *TargetConfiguredUpdate) AddTargetPair(t ...*TargetPair) *TargetConfiguredUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcu *TargetConfiguredUpdate) SetNillableTargetPairID(id *int) *TargetConfiguredUpdate { + if id != nil { + tcu = tcu.SetTargetPairID(*id) } - return tcu.AddTargetPairIDs(ids...) + return tcu +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcu *TargetConfiguredUpdate) SetTargetPair(t *TargetPair) *TargetConfiguredUpdate { + return tcu.SetTargetPairID(t.ID) } // Mutation returns the TargetConfiguredMutation object of the builder. @@ -134,27 +138,12 @@ func (tcu *TargetConfiguredUpdate) Mutation() *TargetConfiguredMutation { return tcu.mutation } -// ClearTargetPair clears all "target_pair" edges to the TargetPair entity. +// ClearTargetPair clears the "target_pair" edge to the TargetPair entity. func (tcu *TargetConfiguredUpdate) ClearTargetPair() *TargetConfiguredUpdate { tcu.mutation.ClearTargetPair() return tcu } -// RemoveTargetPairIDs removes the "target_pair" edge to TargetPair entities by IDs. -func (tcu *TargetConfiguredUpdate) RemoveTargetPairIDs(ids ...int) *TargetConfiguredUpdate { - tcu.mutation.RemoveTargetPairIDs(ids...) - return tcu -} - -// RemoveTargetPair removes "target_pair" edges to TargetPair entities. -func (tcu *TargetConfiguredUpdate) RemoveTargetPair(t ...*TargetPair) *TargetConfiguredUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcu.RemoveTargetPairIDs(ids...) 
-} - // Save executes the query and returns the number of nodes affected by the update operation. func (tcu *TargetConfiguredUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, tcu.sqlSave, tcu.mutation, tcu.hooks) @@ -238,7 +227,7 @@ func (tcu *TargetConfiguredUpdate) sqlSave(ctx context.Context) (n int, err erro } if tcu.mutation.TargetPairCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetconfigured.TargetPairTable, Columns: []string{targetconfigured.TargetPairColumn}, @@ -249,25 +238,9 @@ func (tcu *TargetConfiguredUpdate) sqlSave(ctx context.Context) (n int, err erro } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tcu.mutation.RemovedTargetPairIDs(); len(nodes) > 0 && !tcu.mutation.TargetPairCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: targetconfigured.TargetPairTable, - Columns: []string{targetconfigured.TargetPairColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tcu.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetconfigured.TargetPairTable, Columns: []string{targetconfigured.TargetPairColumn}, @@ -386,19 +359,23 @@ func (tcuo *TargetConfiguredUpdateOne) ClearTestSize() *TargetConfiguredUpdateOn return tcuo } -// AddTargetPairIDs adds the "target_pair" edge to the TargetPair entity by IDs. -func (tcuo *TargetConfiguredUpdateOne) AddTargetPairIDs(ids ...int) *TargetConfiguredUpdateOne { - tcuo.mutation.AddTargetPairIDs(ids...) +// SetTargetPairID sets the "target_pair" edge to the TargetPair entity by ID. +func (tcuo *TargetConfiguredUpdateOne) SetTargetPairID(id int) *TargetConfiguredUpdateOne { + tcuo.mutation.SetTargetPairID(id) return tcuo } -// AddTargetPair adds the "target_pair" edges to the TargetPair entity. -func (tcuo *TargetConfiguredUpdateOne) AddTargetPair(t ...*TargetPair) *TargetConfiguredUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTargetPairID sets the "target_pair" edge to the TargetPair entity by ID if the given value is not nil. +func (tcuo *TargetConfiguredUpdateOne) SetNillableTargetPairID(id *int) *TargetConfiguredUpdateOne { + if id != nil { + tcuo = tcuo.SetTargetPairID(*id) } - return tcuo.AddTargetPairIDs(ids...) + return tcuo +} + +// SetTargetPair sets the "target_pair" edge to the TargetPair entity. +func (tcuo *TargetConfiguredUpdateOne) SetTargetPair(t *TargetPair) *TargetConfiguredUpdateOne { + return tcuo.SetTargetPairID(t.ID) } // Mutation returns the TargetConfiguredMutation object of the builder. @@ -406,27 +383,12 @@ func (tcuo *TargetConfiguredUpdateOne) Mutation() *TargetConfiguredMutation { return tcuo.mutation } -// ClearTargetPair clears all "target_pair" edges to the TargetPair entity. +// ClearTargetPair clears the "target_pair" edge to the TargetPair entity. func (tcuo *TargetConfiguredUpdateOne) ClearTargetPair() *TargetConfiguredUpdateOne { tcuo.mutation.ClearTargetPair() return tcuo } -// RemoveTargetPairIDs removes the "target_pair" edge to TargetPair entities by IDs. -func (tcuo *TargetConfiguredUpdateOne) RemoveTargetPairIDs(ids ...int) *TargetConfiguredUpdateOne { - tcuo.mutation.RemoveTargetPairIDs(ids...) 
- return tcuo -} - -// RemoveTargetPair removes "target_pair" edges to TargetPair entities. -func (tcuo *TargetConfiguredUpdateOne) RemoveTargetPair(t ...*TargetPair) *TargetConfiguredUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcuo.RemoveTargetPairIDs(ids...) -} - // Where appends a list predicates to the TargetConfiguredUpdate builder. func (tcuo *TargetConfiguredUpdateOne) Where(ps ...predicate.TargetConfigured) *TargetConfiguredUpdateOne { tcuo.mutation.Where(ps...) @@ -540,7 +502,7 @@ func (tcuo *TargetConfiguredUpdateOne) sqlSave(ctx context.Context) (_node *Targ } if tcuo.mutation.TargetPairCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetconfigured.TargetPairTable, Columns: []string{targetconfigured.TargetPairColumn}, @@ -551,25 +513,9 @@ func (tcuo *TargetConfiguredUpdateOne) sqlSave(ctx context.Context) (_node *Targ } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tcuo.mutation.RemovedTargetPairIDs(); len(nodes) > 0 && !tcuo.mutation.TargetPairCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: targetconfigured.TargetPairTable, - Columns: []string{targetconfigured.TargetPairColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(targetpair.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tcuo.mutation.TargetPairIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetconfigured.TargetPairTable, Columns: []string{targetconfigured.TargetPairColumn}, diff --git a/ent/gen/ent/targetmetrics.go b/ent/gen/ent/targetmetrics.go index df5a2cb..a7e7166 100644 --- a/ent/gen/ent/targetmetrics.go +++ b/ent/gen/ent/targetmetrics.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" "github.com/buildbarn/bb-portal/ent/gen/ent/targetmetrics" ) @@ -24,28 +25,29 @@ type TargetMetrics struct { TargetsConfiguredNotIncludingAspects int64 `json:"targets_configured_not_including_aspects,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TargetMetricsQuery when eager-loading is set. - Edges TargetMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges TargetMetricsEdges `json:"edges"` + metrics_target_metrics *int + selectValues sql.SelectValues } // TargetMetricsEdges holds the relations/edges for other nodes in the graph. type TargetMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedMetrics map[string][]*Metrics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e TargetMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e TargetMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -57,6 +59,8 @@ func (*TargetMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case targetmetrics.FieldID, targetmetrics.FieldTargetsLoaded, targetmetrics.FieldTargetsConfigured, targetmetrics.FieldTargetsConfiguredNotIncludingAspects: values[i] = new(sql.NullInt64) + case targetmetrics.ForeignKeys[0]: // metrics_target_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -96,6 +100,13 @@ func (tm *TargetMetrics) assignValues(columns []string, values []any) error { } else if value.Valid { tm.TargetsConfiguredNotIncludingAspects = value.Int64 } + case targetmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_target_metrics", value) + } else if value.Valid { + tm.metrics_target_metrics = new(int) + *tm.metrics_target_metrics = int(value.Int64) + } default: tm.selectValues.Set(columns[i], values[i]) } @@ -149,29 +160,5 @@ func (tm *TargetMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tm *TargetMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if tm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tm *TargetMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if tm.Edges.namedMetrics == nil { - tm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - tm.Edges.namedMetrics[name] = []*Metrics{} - } else { - tm.Edges.namedMetrics[name] = append(tm.Edges.namedMetrics[name], edges...) - } -} - // TargetMetricsSlice is a parsable slice of TargetMetrics. type TargetMetricsSlice []*TargetMetrics diff --git a/ent/gen/ent/targetmetrics/targetmetrics.go b/ent/gen/ent/targetmetrics/targetmetrics.go index a3fe0b9..e96fc55 100644 --- a/ent/gen/ent/targetmetrics/targetmetrics.go +++ b/ent/gen/ent/targetmetrics/targetmetrics.go @@ -22,11 +22,13 @@ const ( EdgeMetrics = "metrics" // Table holds the table name of the targetmetrics in the database. Table = "target_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_target_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "target_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_target_metrics" ) // Columns holds all SQL columns for targetmetrics fields. @@ -37,11 +39,11 @@ var Columns = []string{ FieldTargetsConfiguredNotIncludingAspects, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). 
- MetricsPrimaryKey = []string{"metrics_id", "target_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "target_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_target_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -50,6 +52,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -76,23 +83,16 @@ func ByTargetsConfiguredNotIncludingAspects(opts ...sql.OrderTermOption) OrderOp return sql.OrderByField(FieldTargetsConfiguredNotIncludingAspects, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } diff --git a/ent/gen/ent/targetmetrics/where.go b/ent/gen/ent/targetmetrics/where.go index 571effe..8c62927 100644 --- a/ent/gen/ent/targetmetrics/where.go +++ b/ent/gen/ent/targetmetrics/where.go @@ -223,7 +223,7 @@ func HasMetrics() predicate.TargetMetrics { return predicate.TargetMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/targetmetrics_create.go b/ent/gen/ent/targetmetrics_create.go index a4e9307..699bc14 100644 --- a/ent/gen/ent/targetmetrics_create.go +++ b/ent/gen/ent/targetmetrics_create.go @@ -61,19 +61,23 @@ func (tmc *TargetMetricsCreate) SetNillableTargetsConfiguredNotIncludingAspects( return tmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmc *TargetMetricsCreate) AddMetricIDs(ids ...int) *TargetMetricsCreate { - tmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmc *TargetMetricsCreate) SetMetricsID(id int) *TargetMetricsCreate { + tmc.mutation.SetMetricsID(id) return tmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmc *TargetMetricsCreate) AddMetrics(m ...*Metrics) *TargetMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (tmc *TargetMetricsCreate) SetNillableMetricsID(id *int) *TargetMetricsCreate { + if id != nil { + tmc = tmc.SetMetricsID(*id) } - return tmc.AddMetricIDs(ids...) 
+ return tmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (tmc *TargetMetricsCreate) SetMetrics(m *Metrics) *TargetMetricsCreate { + return tmc.SetMetricsID(m.ID) } // Mutation returns the TargetMetricsMutation object of the builder. @@ -150,10 +154,10 @@ func (tmc *TargetMetricsCreate) createSpec() (*TargetMetrics, *sqlgraph.CreateSp } if nodes := tmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, + Columns: []string{targetmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -162,6 +166,7 @@ func (tmc *TargetMetricsCreate) createSpec() (*TargetMetrics, *sqlgraph.CreateSp for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_target_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/targetmetrics_query.go b/ent/gen/ent/targetmetrics_query.go index a65957f..94e1ff2 100644 --- a/ent/gen/ent/targetmetrics_query.go +++ b/ent/gen/ent/targetmetrics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // TargetMetricsQuery is the builder for querying TargetMetrics entities. type TargetMetricsQuery struct { config - ctx *QueryContext - order []targetmetrics.OrderOption - inters []Interceptor - predicates []predicate.TargetMetrics - withMetrics *MetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TargetMetrics) error - withNamedMetrics map[string]*MetricsQuery + ctx *QueryContext + order []targetmetrics.OrderOption + inters []Interceptor + predicates []predicate.TargetMetrics + withMetrics *MetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TargetMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (tmq *TargetMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(targetmetrics.Table, targetmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, targetmetrics.MetricsTable, targetmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, targetmetrics.MetricsTable, targetmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(tmq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (tmq *TargetMetricsQuery) prepareQuery(ctx context.Context) error { func (tmq *TargetMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TargetMetrics, error) { var ( nodes = []*TargetMetrics{} + withFKs = tmq.withFKs _spec = tmq.querySpec() loadedTypes = [1]bool{ tmq.withMetrics != nil, } ) + if tmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, targetmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TargetMetrics).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (tmq *TargetMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nodes, nil } if query := tmq.withMetrics; query != nil { - if err := tmq.loadMetrics(ctx, query, nodes, - func(n *TargetMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *TargetMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { - return nil, err - } - } - for name, query := range tmq.withNamedMetrics { - if err := tmq.loadMetrics(ctx, query, nodes, - func(n *TargetMetrics) { n.appendNamedMetrics(name) }, - func(n *TargetMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { + if err := tmq.loadMetrics(ctx, query, nodes, nil, + func(n *TargetMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (tmq *TargetMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( } func (tmq *TargetMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*TargetMetrics, init func(*TargetMetrics), assign func(*TargetMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TargetMetrics) - nids := make(map[int]map[*TargetMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TargetMetrics) + for i := range nodes { + if nodes[i].metrics_target_metrics == nil { + continue } + fk := *nodes[i].metrics_target_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(targetmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(targetmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(targetmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(targetmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TargetMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_target_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (tmq *TargetMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tmq *TargetMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *TargetMetricsQuery { - query := (&MetricsClient{config: tmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tmq.withNamedMetrics == nil { - tmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - tmq.withNamedMetrics[name] = query - return tmq -} - // TargetMetricsGroupBy is the group-by builder for TargetMetrics entities. type TargetMetricsGroupBy struct { selector diff --git a/ent/gen/ent/targetmetrics_update.go b/ent/gen/ent/targetmetrics_update.go index 5313405..5c2609c 100644 --- a/ent/gen/ent/targetmetrics_update.go +++ b/ent/gen/ent/targetmetrics_update.go @@ -109,19 +109,23 @@ func (tmu *TargetMetricsUpdate) ClearTargetsConfiguredNotIncludingAspects() *Tar return tmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmu *TargetMetricsUpdate) AddMetricIDs(ids ...int) *TargetMetricsUpdate { - tmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmu *TargetMetricsUpdate) SetMetricsID(id int) *TargetMetricsUpdate { + tmu.mutation.SetMetricsID(id) return tmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmu *TargetMetricsUpdate) AddMetrics(m ...*Metrics) *TargetMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (tmu *TargetMetricsUpdate) SetNillableMetricsID(id *int) *TargetMetricsUpdate { + if id != nil { + tmu = tmu.SetMetricsID(*id) } - return tmu.AddMetricIDs(ids...) + return tmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. 
+func (tmu *TargetMetricsUpdate) SetMetrics(m *Metrics) *TargetMetricsUpdate { + return tmu.SetMetricsID(m.ID) } // Mutation returns the TargetMetricsMutation object of the builder. @@ -129,27 +133,12 @@ func (tmu *TargetMetricsUpdate) Mutation() *TargetMetricsMutation { return tmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (tmu *TargetMetricsUpdate) ClearMetrics() *TargetMetricsUpdate { tmu.mutation.ClearMetrics() return tmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (tmu *TargetMetricsUpdate) RemoveMetricIDs(ids ...int) *TargetMetricsUpdate { - tmu.mutation.RemoveMetricIDs(ids...) - return tmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (tmu *TargetMetricsUpdate) RemoveMetrics(m ...*Metrics) *TargetMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return tmu.RemoveMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (tmu *TargetMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, tmu.sqlSave, tmu.mutation, tmu.hooks) @@ -215,39 +204,23 @@ func (tmu *TargetMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if tmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !tmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, + Columns: []string{targetmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, + Columns: []string{targetmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -359,19 +332,23 @@ func (tmuo *TargetMetricsUpdateOne) ClearTargetsConfiguredNotIncludingAspects() return tmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmuo *TargetMetricsUpdateOne) AddMetricIDs(ids ...int) *TargetMetricsUpdateOne { - tmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmuo *TargetMetricsUpdateOne) SetMetricsID(id int) *TargetMetricsUpdateOne { + tmuo.mutation.SetMetricsID(id) return tmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmuo *TargetMetricsUpdateOne) AddMetrics(m ...*Metrics) *TargetMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. 
+func (tmuo *TargetMetricsUpdateOne) SetNillableMetricsID(id *int) *TargetMetricsUpdateOne { + if id != nil { + tmuo = tmuo.SetMetricsID(*id) } - return tmuo.AddMetricIDs(ids...) + return tmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (tmuo *TargetMetricsUpdateOne) SetMetrics(m *Metrics) *TargetMetricsUpdateOne { + return tmuo.SetMetricsID(m.ID) } // Mutation returns the TargetMetricsMutation object of the builder. @@ -379,27 +356,12 @@ func (tmuo *TargetMetricsUpdateOne) Mutation() *TargetMetricsMutation { return tmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (tmuo *TargetMetricsUpdateOne) ClearMetrics() *TargetMetricsUpdateOne { tmuo.mutation.ClearMetrics() return tmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (tmuo *TargetMetricsUpdateOne) RemoveMetricIDs(ids ...int) *TargetMetricsUpdateOne { - tmuo.mutation.RemoveMetricIDs(ids...) - return tmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (tmuo *TargetMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *TargetMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return tmuo.RemoveMetricIDs(ids...) -} - // Where appends a list predicates to the TargetMetricsUpdate builder. func (tmuo *TargetMetricsUpdateOne) Where(ps ...predicate.TargetMetrics) *TargetMetricsUpdateOne { tmuo.mutation.Where(ps...) @@ -495,39 +457,23 @@ func (tmuo *TargetMetricsUpdateOne) sqlSave(ctx context.Context) (_node *TargetM } if tmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !tmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, + Columns: []string{targetmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: targetmetrics.MetricsTable, - Columns: targetmetrics.MetricsPrimaryKey, + Columns: []string{targetmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), diff --git a/ent/gen/ent/targetpair.go b/ent/gen/ent/targetpair.go index c608cf6..9f2e1f5 100644 --- a/ent/gen/ent/targetpair.go +++ b/ent/gen/ent/targetpair.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/bazelinvocation" "github.com/buildbarn/bb-portal/ent/gen/ent/targetcomplete" "github.com/buildbarn/bb-portal/ent/gen/ent/targetconfigured" "github.com/buildbarn/bb-portal/ent/gen/ent/targetpair" @@ -32,16 +33,15 @@ type TargetPair struct { AbortReason targetpair.AbortReason `json:"abort_reason,omitempty"` // Edges holds the relations/edges for other nodes 
in the graph. // The values are being populated by the TargetPairQuery when eager-loading is set. - Edges TargetPairEdges `json:"edges"` - target_pair_configuration *int - target_pair_completion *int - selectValues sql.SelectValues + Edges TargetPairEdges `json:"edges"` + bazel_invocation_targets *int + selectValues sql.SelectValues } // TargetPairEdges holds the relations/edges for other nodes in the graph. type TargetPairEdges struct { // BazelInvocation holds the value of the bazel_invocation edge. - BazelInvocation []*BazelInvocation `json:"bazel_invocation,omitempty"` + BazelInvocation *BazelInvocation `json:"bazel_invocation,omitempty"` // Configuration holds the value of the configuration edge. Configuration *TargetConfigured `json:"configuration,omitempty"` // Completion holds the value of the completion edge. @@ -51,15 +51,15 @@ type TargetPairEdges struct { loadedTypes [3]bool // totalCount holds the count of the edges above. totalCount [3]map[string]int - - namedBazelInvocation map[string][]*BazelInvocation } // BazelInvocationOrErr returns the BazelInvocation value or an error if the edge -// was not loaded in eager-loading. -func (e TargetPairEdges) BazelInvocationOrErr() ([]*BazelInvocation, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TargetPairEdges) BazelInvocationOrErr() (*BazelInvocation, error) { + if e.BazelInvocation != nil { return e.BazelInvocation, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: bazelinvocation.Label} } return nil, &NotLoadedError{edge: "bazel_invocation"} } @@ -97,9 +97,7 @@ func (*TargetPair) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case targetpair.FieldLabel, targetpair.FieldTargetKind, targetpair.FieldTestSize, targetpair.FieldAbortReason: values[i] = new(sql.NullString) - case targetpair.ForeignKeys[0]: // target_pair_configuration - values[i] = new(sql.NullInt64) - case targetpair.ForeignKeys[1]: // target_pair_completion + case targetpair.ForeignKeys[0]: // bazel_invocation_targets values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -160,17 +158,10 @@ func (tp *TargetPair) assignValues(columns []string, values []any) error { } case targetpair.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field target_pair_configuration", value) - } else if value.Valid { - tp.target_pair_configuration = new(int) - *tp.target_pair_configuration = int(value.Int64) - } - case targetpair.ForeignKeys[1]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field target_pair_completion", value) + return fmt.Errorf("unexpected type %T for edge-field bazel_invocation_targets", value) } else if value.Valid { - tp.target_pair_completion = new(int) - *tp.target_pair_completion = int(value.Int64) + tp.bazel_invocation_targets = new(int) + *tp.bazel_invocation_targets = int(value.Int64) } default: tp.selectValues.Set(columns[i], values[i]) @@ -244,29 +235,5 @@ func (tp *TargetPair) String() string { return builder.String() } -// NamedBazelInvocation returns the BazelInvocation named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (tp *TargetPair) NamedBazelInvocation(name string) ([]*BazelInvocation, error) { - if tp.Edges.namedBazelInvocation == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tp.Edges.namedBazelInvocation[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tp *TargetPair) appendNamedBazelInvocation(name string, edges ...*BazelInvocation) { - if tp.Edges.namedBazelInvocation == nil { - tp.Edges.namedBazelInvocation = make(map[string][]*BazelInvocation) - } - if len(edges) == 0 { - tp.Edges.namedBazelInvocation[name] = []*BazelInvocation{} - } else { - tp.Edges.namedBazelInvocation[name] = append(tp.Edges.namedBazelInvocation[name], edges...) - } -} - // TargetPairs is a parsable slice of TargetPair. type TargetPairs []*TargetPair diff --git a/ent/gen/ent/targetpair/targetpair.go b/ent/gen/ent/targetpair/targetpair.go index 884bc65..9357b39 100644 --- a/ent/gen/ent/targetpair/targetpair.go +++ b/ent/gen/ent/targetpair/targetpair.go @@ -36,20 +36,22 @@ const ( EdgeCompletion = "completion" // Table holds the table name of the targetpair in the database. Table = "target_pairs" - // BazelInvocationTable is the table that holds the bazel_invocation relation/edge. The primary key declared below. - BazelInvocationTable = "bazel_invocation_targets" + // BazelInvocationTable is the table that holds the bazel_invocation relation/edge. + BazelInvocationTable = "target_pairs" // BazelInvocationInverseTable is the table name for the BazelInvocation entity. // It exists in this package in order to avoid circular dependency with the "bazelinvocation" package. BazelInvocationInverseTable = "bazel_invocations" + // BazelInvocationColumn is the table column denoting the bazel_invocation relation/edge. + BazelInvocationColumn = "bazel_invocation_targets" // ConfigurationTable is the table that holds the configuration relation/edge. - ConfigurationTable = "target_pairs" + ConfigurationTable = "target_configureds" // ConfigurationInverseTable is the table name for the TargetConfigured entity. // It exists in this package in order to avoid circular dependency with the "targetconfigured" package. ConfigurationInverseTable = "target_configureds" // ConfigurationColumn is the table column denoting the configuration relation/edge. ConfigurationColumn = "target_pair_configuration" // CompletionTable is the table that holds the completion relation/edge. - CompletionTable = "target_pairs" + CompletionTable = "target_completes" // CompletionInverseTable is the table name for the TargetComplete entity. // It exists in this package in order to avoid circular dependency with the "targetcomplete" package. CompletionInverseTable = "target_completes" @@ -71,16 +73,9 @@ var Columns = []string{ // ForeignKeys holds the SQL foreign-keys that are owned by the "target_pairs" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "target_pair_configuration", - "target_pair_completion", + "bazel_invocation_targets", } -var ( - // BazelInvocationPrimaryKey and BazelInvocationColumn2 are the table columns denoting the - // primary key for the bazel_invocation relation (M2M). - BazelInvocationPrimaryKey = []string{"bazel_invocation_id", "target_pair_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). 
func ValidColumn(column string) bool { for i := range Columns { @@ -201,17 +196,10 @@ func ByAbortReason(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldAbortReason, opts...).ToFunc() } -// ByBazelInvocationCount orders the results by bazel_invocation count. -func ByBazelInvocationCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newBazelInvocationStep(), opts...) - } -} - -// ByBazelInvocation orders the results by bazel_invocation terms. -func ByBazelInvocation(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByBazelInvocationField orders the results by bazel_invocation field. +func ByBazelInvocationField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newBazelInvocationStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newBazelInvocationStep(), sql.OrderByField(field, opts...)) } } @@ -232,21 +220,21 @@ func newBazelInvocationStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(BazelInvocationInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BazelInvocationTable, BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, BazelInvocationTable, BazelInvocationColumn), ) } func newConfigurationStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ConfigurationInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, ConfigurationTable, ConfigurationColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ConfigurationTable, ConfigurationColumn), ) } func newCompletionStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(CompletionInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, CompletionTable, CompletionColumn), + sqlgraph.Edge(sqlgraph.O2O, false, CompletionTable, CompletionColumn), ) } diff --git a/ent/gen/ent/targetpair/where.go b/ent/gen/ent/targetpair/where.go index ae4d966..a5a6550 100644 --- a/ent/gen/ent/targetpair/where.go +++ b/ent/gen/ent/targetpair/where.go @@ -353,7 +353,7 @@ func HasBazelInvocation() predicate.TargetPair { return predicate.TargetPair(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BazelInvocationTable, BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, BazelInvocationTable, BazelInvocationColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -376,7 +376,7 @@ func HasConfiguration() predicate.TargetPair { return predicate.TargetPair(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, ConfigurationTable, ConfigurationColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ConfigurationTable, ConfigurationColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -399,7 +399,7 @@ func HasCompletion() predicate.TargetPair { return predicate.TargetPair(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, CompletionTable, CompletionColumn), + sqlgraph.Edge(sqlgraph.O2O, false, CompletionTable, CompletionColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/targetpair_create.go b/ent/gen/ent/targetpair_create.go index b7430a2..102120e 100644 --- a/ent/gen/ent/targetpair_create.go +++ b/ent/gen/ent/targetpair_create.go @@ -105,19 +105,23 @@ func (tpc *TargetPairCreate) SetNillableAbortReason(tr *targetpair.AbortReason) 
return tpc } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tpc *TargetPairCreate) AddBazelInvocationIDs(ids ...int) *TargetPairCreate { - tpc.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. +func (tpc *TargetPairCreate) SetBazelInvocationID(id int) *TargetPairCreate { + tpc.mutation.SetBazelInvocationID(id) return tpc } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. -func (tpc *TargetPairCreate) AddBazelInvocation(b ...*BazelInvocation) *TargetPairCreate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. +func (tpc *TargetPairCreate) SetNillableBazelInvocationID(id *int) *TargetPairCreate { + if id != nil { + tpc = tpc.SetBazelInvocationID(*id) } - return tpc.AddBazelInvocationIDs(ids...) + return tpc +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tpc *TargetPairCreate) SetBazelInvocation(b *BazelInvocation) *TargetPairCreate { + return tpc.SetBazelInvocationID(b.ID) } // SetConfigurationID sets the "configuration" edge to the TargetConfigured entity by ID. @@ -267,10 +271,10 @@ func (tpc *TargetPairCreate) createSpec() (*TargetPair, *sqlgraph.CreateSpec) { } if nodes := tpc.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, + Columns: []string{targetpair.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -279,11 +283,12 @@ func (tpc *TargetPairCreate) createSpec() (*TargetPair, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.bazel_invocation_targets = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tpc.mutation.ConfigurationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.ConfigurationTable, Columns: []string{targetpair.ConfigurationColumn}, @@ -295,12 +300,11 @@ func (tpc *TargetPairCreate) createSpec() (*TargetPair, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.target_pair_configuration = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tpc.mutation.CompletionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.CompletionTable, Columns: []string{targetpair.CompletionColumn}, @@ -312,7 +316,6 @@ func (tpc *TargetPairCreate) createSpec() (*TargetPair, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.target_pair_completion = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/targetpair_query.go b/ent/gen/ent/targetpair_query.go index 64dad54..3d2c588 100644 --- a/ent/gen/ent/targetpair_query.go +++ b/ent/gen/ent/targetpair_query.go @@ -21,17 +21,16 @@ import ( // TargetPairQuery is the builder for querying TargetPair entities. 
type TargetPairQuery struct { config - ctx *QueryContext - order []targetpair.OrderOption - inters []Interceptor - predicates []predicate.TargetPair - withBazelInvocation *BazelInvocationQuery - withConfiguration *TargetConfiguredQuery - withCompletion *TargetCompleteQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TargetPair) error - withNamedBazelInvocation map[string]*BazelInvocationQuery + ctx *QueryContext + order []targetpair.OrderOption + inters []Interceptor + predicates []predicate.TargetPair + withBazelInvocation *BazelInvocationQuery + withConfiguration *TargetConfiguredQuery + withCompletion *TargetCompleteQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TargetPair) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -82,7 +81,7 @@ func (tpq *TargetPairQuery) QueryBazelInvocation() *BazelInvocationQuery { step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, selector), sqlgraph.To(bazelinvocation.Table, bazelinvocation.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, targetpair.BazelInvocationTable, targetpair.BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, targetpair.BazelInvocationTable, targetpair.BazelInvocationColumn), ) fromU = sqlgraph.SetNeighbors(tpq.driver.Dialect(), step) return fromU, nil @@ -104,7 +103,7 @@ func (tpq *TargetPairQuery) QueryConfiguration() *TargetConfiguredQuery { step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, selector), sqlgraph.To(targetconfigured.Table, targetconfigured.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetpair.ConfigurationTable, targetpair.ConfigurationColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetpair.ConfigurationTable, targetpair.ConfigurationColumn), ) fromU = sqlgraph.SetNeighbors(tpq.driver.Dialect(), step) return fromU, nil @@ -126,7 +125,7 @@ func (tpq *TargetPairQuery) QueryCompletion() *TargetCompleteQuery { step := sqlgraph.NewStep( sqlgraph.From(targetpair.Table, targetpair.FieldID, selector), sqlgraph.To(targetcomplete.Table, targetcomplete.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, targetpair.CompletionTable, targetpair.CompletionColumn), + sqlgraph.Edge(sqlgraph.O2O, false, targetpair.CompletionTable, targetpair.CompletionColumn), ) fromU = sqlgraph.SetNeighbors(tpq.driver.Dialect(), step) return fromU, nil @@ -453,7 +452,7 @@ func (tpq *TargetPairQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* tpq.withCompletion != nil, } ) - if tpq.withConfiguration != nil || tpq.withCompletion != nil { + if tpq.withBazelInvocation != nil { withFKs = true } if withFKs { @@ -481,9 +480,8 @@ func (tpq *TargetPairQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* return nodes, nil } if query := tpq.withBazelInvocation; query != nil { - if err := tpq.loadBazelInvocation(ctx, query, nodes, - func(n *TargetPair) { n.Edges.BazelInvocation = []*BazelInvocation{} }, - func(n *TargetPair, e *BazelInvocation) { n.Edges.BazelInvocation = append(n.Edges.BazelInvocation, e) }); err != nil { + if err := tpq.loadBazelInvocation(ctx, query, nodes, nil, + func(n *TargetPair, e *BazelInvocation) { n.Edges.BazelInvocation = e }); err != nil { return nil, err } } @@ -499,13 +497,6 @@ func (tpq *TargetPairQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* return nil, err } } - for name, query := range tpq.withNamedBazelInvocation { - if err := tpq.loadBazelInvocation(ctx, query, 
nodes, - func(n *TargetPair) { n.appendNamedBazelInvocation(name) }, - func(n *TargetPair, e *BazelInvocation) { n.appendNamedBazelInvocation(name, e) }); err != nil { - return nil, err - } - } for i := range tpq.loadTotal { if err := tpq.loadTotal[i](ctx, nodes); err != nil { return nil, err @@ -515,74 +506,13 @@ func (tpq *TargetPairQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]* } func (tpq *TargetPairQuery) loadBazelInvocation(ctx context.Context, query *BazelInvocationQuery, nodes []*TargetPair, init func(*TargetPair), assign func(*TargetPair, *BazelInvocation)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TargetPair) - nids := make(map[int]map[*TargetPair]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(targetpair.BazelInvocationTable) - s.Join(joinT).On(s.C(bazelinvocation.FieldID), joinT.C(targetpair.BazelInvocationPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(targetpair.BazelInvocationPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(targetpair.BazelInvocationPrimaryKey[1])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TargetPair]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*BazelInvocation](ctx, query, qr, query.inters) - if err != nil { - return err - } - for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "bazel_invocation" node returned %v`, n.ID) - } - for kn := range nodes { - assign(kn, n) - } - } - return nil -} -func (tpq *TargetPairQuery) loadConfiguration(ctx context.Context, query *TargetConfiguredQuery, nodes []*TargetPair, init func(*TargetPair), assign func(*TargetPair, *TargetConfigured)) error { ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*TargetPair) for i := range nodes { - if nodes[i].target_pair_configuration == nil { + if nodes[i].bazel_invocation_targets == nil { continue } - fk := *nodes[i].target_pair_configuration + fk := *nodes[i].bazel_invocation_targets if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } @@ -591,7 +521,7 @@ func (tpq *TargetPairQuery) loadConfiguration(ctx context.Context, query *Target if len(ids) == 0 { return nil } - query.Where(targetconfigured.IDIn(ids...)) + query.Where(bazelinvocation.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err @@ -599,7 +529,7 @@ func (tpq *TargetPairQuery) loadConfiguration(ctx context.Context, query *Target for _, n := range neighbors { nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected foreign-key "target_pair_configuration" returned %v`, n.ID) + return 
fmt.Errorf(`unexpected foreign-key "bazel_invocation_targets" returned %v`, n.ID) } for i := range nodes { assign(nodes[i], n) @@ -607,35 +537,59 @@ func (tpq *TargetPairQuery) loadConfiguration(ctx context.Context, query *Target } return nil } -func (tpq *TargetPairQuery) loadCompletion(ctx context.Context, query *TargetCompleteQuery, nodes []*TargetPair, init func(*TargetPair), assign func(*TargetPair, *TargetComplete)) error { - ids := make([]int, 0, len(nodes)) - nodeids := make(map[int][]*TargetPair) +func (tpq *TargetPairQuery) loadConfiguration(ctx context.Context, query *TargetConfiguredQuery, nodes []*TargetPair, init func(*TargetPair), assign func(*TargetPair, *TargetConfigured)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TargetPair) for i := range nodes { - if nodes[i].target_pair_completion == nil { - continue + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + } + query.withFKs = true + query.Where(predicate.TargetConfigured(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(targetpair.ConfigurationColumn), fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.target_pair_configuration + if fk == nil { + return fmt.Errorf(`foreign-key "target_pair_configuration" is nil for node %v`, n.ID) } - fk := *nodes[i].target_pair_completion - if _, ok := nodeids[fk]; !ok { - ids = append(ids, fk) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "target_pair_configuration" returned %v for node %v`, *fk, n.ID) } - nodeids[fk] = append(nodeids[fk], nodes[i]) + assign(node, n) } - if len(ids) == 0 { - return nil + return nil +} +func (tpq *TargetPairQuery) loadCompletion(ctx context.Context, query *TargetCompleteQuery, nodes []*TargetPair, init func(*TargetPair), assign func(*TargetPair, *TargetComplete)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TargetPair) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - query.Where(targetcomplete.IDIn(ids...)) + query.withFKs = true + query.Where(predicate.TargetComplete(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(targetpair.CompletionColumn), fks...)) + })) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nodeids[n.ID] - if !ok { - return fmt.Errorf(`unexpected foreign-key "target_pair_completion" returned %v`, n.ID) + fk := n.target_pair_completion + if fk == nil { + return fmt.Errorf(`foreign-key "target_pair_completion" is nil for node %v`, n.ID) } - for i := range nodes { - assign(nodes[i], n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "target_pair_completion" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -724,20 +678,6 @@ func (tpq *TargetPairQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedBazelInvocation tells the query-builder to eager-load the nodes that are connected to the "bazel_invocation" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
-func (tpq *TargetPairQuery) WithNamedBazelInvocation(name string, opts ...func(*BazelInvocationQuery)) *TargetPairQuery { - query := (&BazelInvocationClient{config: tpq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tpq.withNamedBazelInvocation == nil { - tpq.withNamedBazelInvocation = make(map[string]*BazelInvocationQuery) - } - tpq.withNamedBazelInvocation[name] = query - return tpq -} - // TargetPairGroupBy is the group-by builder for TargetPair entities. type TargetPairGroupBy struct { selector diff --git a/ent/gen/ent/targetpair_update.go b/ent/gen/ent/targetpair_update.go index 810045e..ba202b4 100644 --- a/ent/gen/ent/targetpair_update.go +++ b/ent/gen/ent/targetpair_update.go @@ -157,19 +157,23 @@ func (tpu *TargetPairUpdate) ClearAbortReason() *TargetPairUpdate { return tpu } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tpu *TargetPairUpdate) AddBazelInvocationIDs(ids ...int) *TargetPairUpdate { - tpu.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. +func (tpu *TargetPairUpdate) SetBazelInvocationID(id int) *TargetPairUpdate { + tpu.mutation.SetBazelInvocationID(id) return tpu } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. -func (tpu *TargetPairUpdate) AddBazelInvocation(b ...*BazelInvocation) *TargetPairUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. +func (tpu *TargetPairUpdate) SetNillableBazelInvocationID(id *int) *TargetPairUpdate { + if id != nil { + tpu = tpu.SetBazelInvocationID(*id) } - return tpu.AddBazelInvocationIDs(ids...) + return tpu +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tpu *TargetPairUpdate) SetBazelInvocation(b *BazelInvocation) *TargetPairUpdate { + return tpu.SetBazelInvocationID(b.ID) } // SetConfigurationID sets the "configuration" edge to the TargetConfigured entity by ID. @@ -215,27 +219,12 @@ func (tpu *TargetPairUpdate) Mutation() *TargetPairMutation { return tpu.mutation } -// ClearBazelInvocation clears all "bazel_invocation" edges to the BazelInvocation entity. +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. func (tpu *TargetPairUpdate) ClearBazelInvocation() *TargetPairUpdate { tpu.mutation.ClearBazelInvocation() return tpu } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to BazelInvocation entities by IDs. -func (tpu *TargetPairUpdate) RemoveBazelInvocationIDs(ids ...int) *TargetPairUpdate { - tpu.mutation.RemoveBazelInvocationIDs(ids...) - return tpu -} - -// RemoveBazelInvocation removes "bazel_invocation" edges to BazelInvocation entities. -func (tpu *TargetPairUpdate) RemoveBazelInvocation(b ...*BazelInvocation) *TargetPairUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return tpu.RemoveBazelInvocationIDs(ids...) -} - // ClearConfiguration clears the "configuration" edge to the TargetConfigured entity. 
func (tpu *TargetPairUpdate) ClearConfiguration() *TargetPairUpdate { tpu.mutation.ClearConfiguration() @@ -343,39 +332,23 @@ func (tpu *TargetPairUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tpu.mutation.BazelInvocationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tpu.mutation.RemovedBazelInvocationIDs(); len(nodes) > 0 && !tpu.mutation.BazelInvocationCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, + Columns: []string{targetpair.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tpu.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, + Columns: []string{targetpair.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -388,7 +361,7 @@ func (tpu *TargetPairUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tpu.mutation.ConfigurationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.ConfigurationTable, Columns: []string{targetpair.ConfigurationColumn}, @@ -401,7 +374,7 @@ func (tpu *TargetPairUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := tpu.mutation.ConfigurationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.ConfigurationTable, Columns: []string{targetpair.ConfigurationColumn}, @@ -417,7 +390,7 @@ func (tpu *TargetPairUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tpu.mutation.CompletionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.CompletionTable, Columns: []string{targetpair.CompletionColumn}, @@ -430,7 +403,7 @@ func (tpu *TargetPairUpdate) sqlSave(ctx context.Context) (n int, err error) { } if nodes := tpu.mutation.CompletionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.CompletionTable, Columns: []string{targetpair.CompletionColumn}, @@ -591,19 +564,23 @@ func (tpuo *TargetPairUpdateOne) ClearAbortReason() *TargetPairUpdateOne { return tpuo } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tpuo *TargetPairUpdateOne) AddBazelInvocationIDs(ids ...int) *TargetPairUpdateOne { - tpuo.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. 
+func (tpuo *TargetPairUpdateOne) SetBazelInvocationID(id int) *TargetPairUpdateOne { + tpuo.mutation.SetBazelInvocationID(id) return tpuo } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. -func (tpuo *TargetPairUpdateOne) AddBazelInvocation(b ...*BazelInvocation) *TargetPairUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. +func (tpuo *TargetPairUpdateOne) SetNillableBazelInvocationID(id *int) *TargetPairUpdateOne { + if id != nil { + tpuo = tpuo.SetBazelInvocationID(*id) } - return tpuo.AddBazelInvocationIDs(ids...) + return tpuo +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tpuo *TargetPairUpdateOne) SetBazelInvocation(b *BazelInvocation) *TargetPairUpdateOne { + return tpuo.SetBazelInvocationID(b.ID) } // SetConfigurationID sets the "configuration" edge to the TargetConfigured entity by ID. @@ -649,27 +626,12 @@ func (tpuo *TargetPairUpdateOne) Mutation() *TargetPairMutation { return tpuo.mutation } -// ClearBazelInvocation clears all "bazel_invocation" edges to the BazelInvocation entity. +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. func (tpuo *TargetPairUpdateOne) ClearBazelInvocation() *TargetPairUpdateOne { tpuo.mutation.ClearBazelInvocation() return tpuo } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to BazelInvocation entities by IDs. -func (tpuo *TargetPairUpdateOne) RemoveBazelInvocationIDs(ids ...int) *TargetPairUpdateOne { - tpuo.mutation.RemoveBazelInvocationIDs(ids...) - return tpuo -} - -// RemoveBazelInvocation removes "bazel_invocation" edges to BazelInvocation entities. -func (tpuo *TargetPairUpdateOne) RemoveBazelInvocation(b ...*BazelInvocation) *TargetPairUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return tpuo.RemoveBazelInvocationIDs(ids...) -} - // ClearConfiguration clears the "configuration" edge to the TargetConfigured entity. 
func (tpuo *TargetPairUpdateOne) ClearConfiguration() *TargetPairUpdateOne { tpuo.mutation.ClearConfiguration() @@ -807,39 +769,23 @@ func (tpuo *TargetPairUpdateOne) sqlSave(ctx context.Context) (_node *TargetPair } if tpuo.mutation.BazelInvocationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tpuo.mutation.RemovedBazelInvocationIDs(); len(nodes) > 0 && !tpuo.mutation.BazelInvocationCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, + Columns: []string{targetpair.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tpuo.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: targetpair.BazelInvocationTable, - Columns: targetpair.BazelInvocationPrimaryKey, + Columns: []string{targetpair.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -852,7 +798,7 @@ func (tpuo *TargetPairUpdateOne) sqlSave(ctx context.Context) (_node *TargetPair } if tpuo.mutation.ConfigurationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.ConfigurationTable, Columns: []string{targetpair.ConfigurationColumn}, @@ -865,7 +811,7 @@ func (tpuo *TargetPairUpdateOne) sqlSave(ctx context.Context) (_node *TargetPair } if nodes := tpuo.mutation.ConfigurationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.ConfigurationTable, Columns: []string{targetpair.ConfigurationColumn}, @@ -881,7 +827,7 @@ func (tpuo *TargetPairUpdateOne) sqlSave(ctx context.Context) (_node *TargetPair } if tpuo.mutation.CompletionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.CompletionTable, Columns: []string{targetpair.CompletionColumn}, @@ -894,7 +840,7 @@ func (tpuo *TargetPairUpdateOne) sqlSave(ctx context.Context) (_node *TargetPair } if nodes := tpuo.mutation.CompletionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: targetpair.CompletionTable, Columns: []string{targetpair.CompletionColumn}, diff --git a/ent/gen/ent/testcollection.go b/ent/gen/ent/testcollection.go index 9347658..afd3b6f 100644 --- a/ent/gen/ent/testcollection.go +++ b/ent/gen/ent/testcollection.go @@ -5,9 +5,11 @@ package ent import ( "fmt" "strings" + "time" "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/bazelinvocation" "github.com/buildbarn/bb-portal/ent/gen/ent/testcollection" "github.com/buildbarn/bb-portal/ent/gen/ent/testsummary" ) @@ -27,19 +29,21 @@ type TestCollection struct { CachedLocally bool `json:"cached_locally,omitempty"` // CachedRemotely holds the value of the "cached_remotely" field. 
CachedRemotely bool `json:"cached_remotely,omitempty"` + // FirstSeen holds the value of the "first_seen" field. + FirstSeen *time.Time `json:"first_seen,omitempty"` // DurationMs holds the value of the "duration_ms" field. DurationMs int64 `json:"duration_ms,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TestCollectionQuery when eager-loading is set. - Edges TestCollectionEdges `json:"edges"` - test_collection_test_summary *int - selectValues sql.SelectValues + Edges TestCollectionEdges `json:"edges"` + bazel_invocation_test_collection *int + selectValues sql.SelectValues } // TestCollectionEdges holds the relations/edges for other nodes in the graph. type TestCollectionEdges struct { // BazelInvocation holds the value of the bazel_invocation edge. - BazelInvocation []*BazelInvocation `json:"bazel_invocation,omitempty"` + BazelInvocation *BazelInvocation `json:"bazel_invocation,omitempty"` // TestSummary holds the value of the test_summary edge. TestSummary *TestSummary `json:"test_summary,omitempty"` // TestResults holds the value of the test_results edge. @@ -50,15 +54,16 @@ type TestCollectionEdges struct { // totalCount holds the count of the edges above. totalCount [3]map[string]int - namedBazelInvocation map[string][]*BazelInvocation - namedTestResults map[string][]*TestResultBES + namedTestResults map[string][]*TestResultBES } // BazelInvocationOrErr returns the BazelInvocation value or an error if the edge -// was not loaded in eager-loading. -func (e TestCollectionEdges) BazelInvocationOrErr() ([]*BazelInvocation, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TestCollectionEdges) BazelInvocationOrErr() (*BazelInvocation, error) { + if e.BazelInvocation != nil { return e.BazelInvocation, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: bazelinvocation.Label} } return nil, &NotLoadedError{edge: "bazel_invocation"} } @@ -94,7 +99,9 @@ func (*TestCollection) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case testcollection.FieldLabel, testcollection.FieldOverallStatus, testcollection.FieldStrategy: values[i] = new(sql.NullString) - case testcollection.ForeignKeys[0]: // test_collection_test_summary + case testcollection.FieldFirstSeen: + values[i] = new(sql.NullTime) + case testcollection.ForeignKeys[0]: // bazel_invocation_test_collection values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -147,6 +154,13 @@ func (tc *TestCollection) assignValues(columns []string, values []any) error { } else if value.Valid { tc.CachedRemotely = value.Bool } + case testcollection.FieldFirstSeen: + if value, ok := values[i].(*sql.NullTime); !ok { + return fmt.Errorf("unexpected type %T for field first_seen", values[i]) + } else if value.Valid { + tc.FirstSeen = new(time.Time) + *tc.FirstSeen = value.Time + } case testcollection.FieldDurationMs: if value, ok := values[i].(*sql.NullInt64); !ok { return fmt.Errorf("unexpected type %T for field duration_ms", values[i]) @@ -155,10 +169,10 @@ func (tc *TestCollection) assignValues(columns []string, values []any) error { } case testcollection.ForeignKeys[0]: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field test_collection_test_summary", value) + return fmt.Errorf("unexpected type %T for edge-field bazel_invocation_test_collection", value) } else if value.Valid { - tc.test_collection_test_summary = 
new(int) - *tc.test_collection_test_summary = int(value.Int64) + tc.bazel_invocation_test_collection = new(int) + *tc.bazel_invocation_test_collection = int(value.Int64) } default: tc.selectValues.Set(columns[i], values[i]) @@ -226,36 +240,17 @@ func (tc *TestCollection) String() string { builder.WriteString("cached_remotely=") builder.WriteString(fmt.Sprintf("%v", tc.CachedRemotely)) builder.WriteString(", ") + if v := tc.FirstSeen; v != nil { + builder.WriteString("first_seen=") + builder.WriteString(v.Format(time.ANSIC)) + } + builder.WriteString(", ") builder.WriteString("duration_ms=") builder.WriteString(fmt.Sprintf("%v", tc.DurationMs)) builder.WriteByte(')') return builder.String() } -// NamedBazelInvocation returns the BazelInvocation named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tc *TestCollection) NamedBazelInvocation(name string) ([]*BazelInvocation, error) { - if tc.Edges.namedBazelInvocation == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tc.Edges.namedBazelInvocation[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tc *TestCollection) appendNamedBazelInvocation(name string, edges ...*BazelInvocation) { - if tc.Edges.namedBazelInvocation == nil { - tc.Edges.namedBazelInvocation = make(map[string][]*BazelInvocation) - } - if len(edges) == 0 { - tc.Edges.namedBazelInvocation[name] = []*BazelInvocation{} - } else { - tc.Edges.namedBazelInvocation[name] = append(tc.Edges.namedBazelInvocation[name], edges...) - } -} - // NamedTestResults returns the TestResults named value or an error if the edge was not // loaded in eager-loading with this name. func (tc *TestCollection) NamedTestResults(name string) ([]*TestResultBES, error) { diff --git a/ent/gen/ent/testcollection/testcollection.go b/ent/gen/ent/testcollection/testcollection.go index 7d25b79..cf3f30d 100644 --- a/ent/gen/ent/testcollection/testcollection.go +++ b/ent/gen/ent/testcollection/testcollection.go @@ -26,6 +26,8 @@ const ( FieldCachedLocally = "cached_locally" // FieldCachedRemotely holds the string denoting the cached_remotely field in the database. FieldCachedRemotely = "cached_remotely" + // FieldFirstSeen holds the string denoting the first_seen field in the database. + FieldFirstSeen = "first_seen" // FieldDurationMs holds the string denoting the duration_ms field in the database. FieldDurationMs = "duration_ms" // EdgeBazelInvocation holds the string denoting the bazel_invocation edge name in mutations. @@ -36,13 +38,15 @@ const ( EdgeTestResults = "test_results" // Table holds the table name of the testcollection in the database. Table = "test_collections" - // BazelInvocationTable is the table that holds the bazel_invocation relation/edge. The primary key declared below. - BazelInvocationTable = "bazel_invocation_test_collection" + // BazelInvocationTable is the table that holds the bazel_invocation relation/edge. + BazelInvocationTable = "test_collections" // BazelInvocationInverseTable is the table name for the BazelInvocation entity. // It exists in this package in order to avoid circular dependency with the "bazelinvocation" package. BazelInvocationInverseTable = "bazel_invocations" + // BazelInvocationColumn is the table column denoting the bazel_invocation relation/edge. + BazelInvocationColumn = "bazel_invocation_test_collection" // TestSummaryTable is the table that holds the test_summary relation/edge. 
- TestSummaryTable = "test_collections" + TestSummaryTable = "test_summaries" // TestSummaryInverseTable is the table name for the TestSummary entity. // It exists in this package in order to avoid circular dependency with the "testsummary" package. TestSummaryInverseTable = "test_summaries" @@ -65,21 +69,16 @@ var Columns = []string{ FieldStrategy, FieldCachedLocally, FieldCachedRemotely, + FieldFirstSeen, FieldDurationMs, } // ForeignKeys holds the SQL foreign-keys that are owned by the "test_collections" // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ - "test_collection_test_summary", + "bazel_invocation_test_collection", } -var ( - // BazelInvocationPrimaryKey and BazelInvocationColumn2 are the table columns denoting the - // primary key for the bazel_invocation relation (M2M). - BazelInvocationPrimaryKey = []string{"bazel_invocation_id", "test_collection_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -161,22 +160,20 @@ func ByCachedRemotely(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldCachedRemotely, opts...).ToFunc() } +// ByFirstSeen orders the results by the first_seen field. +func ByFirstSeen(opts ...sql.OrderTermOption) OrderOption { + return sql.OrderByField(FieldFirstSeen, opts...).ToFunc() +} + // ByDurationMs orders the results by the duration_ms field. func ByDurationMs(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldDurationMs, opts...).ToFunc() } -// ByBazelInvocationCount orders the results by bazel_invocation count. -func ByBazelInvocationCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newBazelInvocationStep(), opts...) - } -} - -// ByBazelInvocation orders the results by bazel_invocation terms. -func ByBazelInvocation(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByBazelInvocationField orders the results by bazel_invocation field. +func ByBazelInvocationField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newBazelInvocationStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newBazelInvocationStep(), sql.OrderByField(field, opts...)) } } @@ -204,14 +201,14 @@ func newBazelInvocationStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(BazelInvocationInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BazelInvocationTable, BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, BazelInvocationTable, BazelInvocationColumn), ) } func newTestSummaryStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestSummaryInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, TestSummaryTable, TestSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, TestSummaryTable, TestSummaryColumn), ) } func newTestResultsStep() *sqlgraph.Step { diff --git a/ent/gen/ent/testcollection/where.go b/ent/gen/ent/testcollection/where.go index a79f4dd..e66d7e8 100644 --- a/ent/gen/ent/testcollection/where.go +++ b/ent/gen/ent/testcollection/where.go @@ -3,6 +3,8 @@ package testcollection import ( + "time" + "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" "github.com/buildbarn/bb-portal/ent/gen/ent/predicate" @@ -68,6 +70,11 @@ func CachedRemotely(v bool) predicate.TestCollection { return predicate.TestCollection(sql.FieldEQ(FieldCachedRemotely, v)) } +// FirstSeen applies equality check predicate on the "first_seen" field. It's identical to FirstSeenEQ. +func FirstSeen(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldEQ(FieldFirstSeen, v)) +} + // DurationMs applies equality check predicate on the "duration_ms" field. It's identical to DurationMsEQ. func DurationMs(v int64) predicate.TestCollection { return predicate.TestCollection(sql.FieldEQ(FieldDurationMs, v)) @@ -293,6 +300,56 @@ func CachedRemotelyNotNil() predicate.TestCollection { return predicate.TestCollection(sql.FieldNotNull(FieldCachedRemotely)) } +// FirstSeenEQ applies the EQ predicate on the "first_seen" field. +func FirstSeenEQ(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldEQ(FieldFirstSeen, v)) +} + +// FirstSeenNEQ applies the NEQ predicate on the "first_seen" field. +func FirstSeenNEQ(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldNEQ(FieldFirstSeen, v)) +} + +// FirstSeenIn applies the In predicate on the "first_seen" field. +func FirstSeenIn(vs ...time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldIn(FieldFirstSeen, vs...)) +} + +// FirstSeenNotIn applies the NotIn predicate on the "first_seen" field. +func FirstSeenNotIn(vs ...time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldNotIn(FieldFirstSeen, vs...)) +} + +// FirstSeenGT applies the GT predicate on the "first_seen" field. +func FirstSeenGT(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldGT(FieldFirstSeen, v)) +} + +// FirstSeenGTE applies the GTE predicate on the "first_seen" field. +func FirstSeenGTE(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldGTE(FieldFirstSeen, v)) +} + +// FirstSeenLT applies the LT predicate on the "first_seen" field. +func FirstSeenLT(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldLT(FieldFirstSeen, v)) +} + +// FirstSeenLTE applies the LTE predicate on the "first_seen" field. 
+func FirstSeenLTE(v time.Time) predicate.TestCollection { + return predicate.TestCollection(sql.FieldLTE(FieldFirstSeen, v)) +} + +// FirstSeenIsNil applies the IsNil predicate on the "first_seen" field. +func FirstSeenIsNil() predicate.TestCollection { + return predicate.TestCollection(sql.FieldIsNull(FieldFirstSeen)) +} + +// FirstSeenNotNil applies the NotNil predicate on the "first_seen" field. +func FirstSeenNotNil() predicate.TestCollection { + return predicate.TestCollection(sql.FieldNotNull(FieldFirstSeen)) +} + // DurationMsEQ applies the EQ predicate on the "duration_ms" field. func DurationMsEQ(v int64) predicate.TestCollection { return predicate.TestCollection(sql.FieldEQ(FieldDurationMs, v)) @@ -348,7 +405,7 @@ func HasBazelInvocation() predicate.TestCollection { return predicate.TestCollection(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, BazelInvocationTable, BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, BazelInvocationTable, BazelInvocationColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -371,7 +428,7 @@ func HasTestSummary() predicate.TestCollection { return predicate.TestCollection(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, TestSummaryTable, TestSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, TestSummaryTable, TestSummaryColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/testcollection_create.go b/ent/gen/ent/testcollection_create.go index 4fb1a06..3f26993 100644 --- a/ent/gen/ent/testcollection_create.go +++ b/ent/gen/ent/testcollection_create.go @@ -5,6 +5,7 @@ package ent import ( "context" "fmt" + "time" "entgo.io/ent/dialect/sql/sqlgraph" "entgo.io/ent/schema/field" @@ -91,6 +92,20 @@ func (tcc *TestCollectionCreate) SetNillableCachedRemotely(b *bool) *TestCollect return tcc } +// SetFirstSeen sets the "first_seen" field. +func (tcc *TestCollectionCreate) SetFirstSeen(t time.Time) *TestCollectionCreate { + tcc.mutation.SetFirstSeen(t) + return tcc +} + +// SetNillableFirstSeen sets the "first_seen" field if the given value is not nil. +func (tcc *TestCollectionCreate) SetNillableFirstSeen(t *time.Time) *TestCollectionCreate { + if t != nil { + tcc.SetFirstSeen(*t) + } + return tcc +} + // SetDurationMs sets the "duration_ms" field. func (tcc *TestCollectionCreate) SetDurationMs(i int64) *TestCollectionCreate { tcc.mutation.SetDurationMs(i) @@ -105,19 +120,23 @@ func (tcc *TestCollectionCreate) SetNillableDurationMs(i *int64) *TestCollection return tcc } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tcc *TestCollectionCreate) AddBazelInvocationIDs(ids ...int) *TestCollectionCreate { - tcc.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. +func (tcc *TestCollectionCreate) SetBazelInvocationID(id int) *TestCollectionCreate { + tcc.mutation.SetBazelInvocationID(id) return tcc } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. -func (tcc *TestCollectionCreate) AddBazelInvocation(b ...*BazelInvocation) *TestCollectionCreate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. 
+func (tcc *TestCollectionCreate) SetNillableBazelInvocationID(id *int) *TestCollectionCreate { + if id != nil { + tcc = tcc.SetBazelInvocationID(*id) } - return tcc.AddBazelInvocationIDs(ids...) + return tcc +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tcc *TestCollectionCreate) SetBazelInvocation(b *BazelInvocation) *TestCollectionCreate { + return tcc.SetBazelInvocationID(b.ID) } // SetTestSummaryID sets the "test_summary" edge to the TestSummary entity by ID. @@ -248,16 +267,20 @@ func (tcc *TestCollectionCreate) createSpec() (*TestCollection, *sqlgraph.Create _spec.SetField(testcollection.FieldCachedRemotely, field.TypeBool, value) _node.CachedRemotely = value } + if value, ok := tcc.mutation.FirstSeen(); ok { + _spec.SetField(testcollection.FieldFirstSeen, field.TypeTime, value) + _node.FirstSeen = &value + } if value, ok := tcc.mutation.DurationMs(); ok { _spec.SetField(testcollection.FieldDurationMs, field.TypeInt64, value) _node.DurationMs = value } if nodes := tcc.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, + Columns: []string{testcollection.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -266,11 +289,12 @@ func (tcc *TestCollectionCreate) createSpec() (*TestCollection, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.bazel_invocation_test_collection = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tcc.mutation.TestSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testcollection.TestSummaryTable, Columns: []string{testcollection.TestSummaryColumn}, @@ -282,7 +306,6 @@ func (tcc *TestCollectionCreate) createSpec() (*TestCollection, *sqlgraph.Create for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.test_collection_test_summary = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tcc.mutation.TestResultsIDs(); len(nodes) > 0 { diff --git a/ent/gen/ent/testcollection_query.go b/ent/gen/ent/testcollection_query.go index c3936e6..e4d1a36 100644 --- a/ent/gen/ent/testcollection_query.go +++ b/ent/gen/ent/testcollection_query.go @@ -21,18 +21,17 @@ import ( // TestCollectionQuery is the builder for querying TestCollection entities. type TestCollectionQuery struct { config - ctx *QueryContext - order []testcollection.OrderOption - inters []Interceptor - predicates []predicate.TestCollection - withBazelInvocation *BazelInvocationQuery - withTestSummary *TestSummaryQuery - withTestResults *TestResultBESQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TestCollection) error - withNamedBazelInvocation map[string]*BazelInvocationQuery - withNamedTestResults map[string]*TestResultBESQuery + ctx *QueryContext + order []testcollection.OrderOption + inters []Interceptor + predicates []predicate.TestCollection + withBazelInvocation *BazelInvocationQuery + withTestSummary *TestSummaryQuery + withTestResults *TestResultBESQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TestCollection) error + withNamedTestResults map[string]*TestResultBESQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -83,7 +82,7 @@ func (tcq *TestCollectionQuery) QueryBazelInvocation() *BazelInvocationQuery { step := sqlgraph.NewStep( sqlgraph.From(testcollection.Table, testcollection.FieldID, selector), sqlgraph.To(bazelinvocation.Table, bazelinvocation.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, testcollection.BazelInvocationTable, testcollection.BazelInvocationPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, testcollection.BazelInvocationTable, testcollection.BazelInvocationColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -105,7 +104,7 @@ func (tcq *TestCollectionQuery) QueryTestSummary() *TestSummaryQuery { step := sqlgraph.NewStep( sqlgraph.From(testcollection.Table, testcollection.FieldID, selector), sqlgraph.To(testsummary.Table, testsummary.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, testcollection.TestSummaryTable, testcollection.TestSummaryColumn), + sqlgraph.Edge(sqlgraph.O2O, false, testcollection.TestSummaryTable, testcollection.TestSummaryColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -454,7 +453,7 @@ func (tcq *TestCollectionQuery) sqlAll(ctx context.Context, hooks ...queryHook) tcq.withTestResults != nil, } ) - if tcq.withTestSummary != nil { + if tcq.withBazelInvocation != nil { withFKs = true } if withFKs { @@ -482,11 +481,8 @@ func (tcq *TestCollectionQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := tcq.withBazelInvocation; query != nil { - if err := tcq.loadBazelInvocation(ctx, query, nodes, - func(n *TestCollection) { n.Edges.BazelInvocation = []*BazelInvocation{} }, - func(n *TestCollection, e *BazelInvocation) { - n.Edges.BazelInvocation = append(n.Edges.BazelInvocation, e) - }); err != nil { + if err := tcq.loadBazelInvocation(ctx, query, nodes, nil, + func(n *TestCollection, e *BazelInvocation) { n.Edges.BazelInvocation = e }); err != nil { return nil, err } } @@ -503,13 +499,6 @@ func (tcq *TestCollectionQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nil, err } } - for name, query := range tcq.withNamedBazelInvocation { - if err := tcq.loadBazelInvocation(ctx, query, nodes, - func(n *TestCollection) { n.appendNamedBazelInvocation(name) }, - func(n *TestCollection, e *BazelInvocation) { n.appendNamedBazelInvocation(name, e) }); err != nil { - return nil, err - } - } for name, query := range tcq.withNamedTestResults { if err := tcq.loadTestResults(ctx, query, nodes, func(n *TestCollection) { n.appendNamedTestResults(name) }, @@ -526,74 +515,13 @@ func (tcq *TestCollectionQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (tcq *TestCollectionQuery) loadBazelInvocation(ctx context.Context, query *BazelInvocationQuery, nodes []*TestCollection, init func(*TestCollection), assign func(*TestCollection, *BazelInvocation)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TestCollection) - nids := make(map[int]map[*TestCollection]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) - } - } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(testcollection.BazelInvocationTable) - s.Join(joinT).On(s.C(bazelinvocation.FieldID), joinT.C(testcollection.BazelInvocationPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(testcollection.BazelInvocationPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(testcollection.BazelInvocationPrimaryKey[1])) - 
s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TestCollection]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*BazelInvocation](ctx, query, qr, query.inters) - if err != nil { - return err - } - for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "bazel_invocation" node returned %v`, n.ID) - } - for kn := range nodes { - assign(kn, n) - } - } - return nil -} -func (tcq *TestCollectionQuery) loadTestSummary(ctx context.Context, query *TestSummaryQuery, nodes []*TestCollection, init func(*TestCollection), assign func(*TestCollection, *TestSummary)) error { ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*TestCollection) for i := range nodes { - if nodes[i].test_collection_test_summary == nil { + if nodes[i].bazel_invocation_test_collection == nil { continue } - fk := *nodes[i].test_collection_test_summary + fk := *nodes[i].bazel_invocation_test_collection if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } @@ -602,7 +530,7 @@ func (tcq *TestCollectionQuery) loadTestSummary(ctx context.Context, query *Test if len(ids) == 0 { return nil } - query.Where(testsummary.IDIn(ids...)) + query.Where(bazelinvocation.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err @@ -610,7 +538,7 @@ func (tcq *TestCollectionQuery) loadTestSummary(ctx context.Context, query *Test for _, n := range neighbors { nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected foreign-key "test_collection_test_summary" returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "bazel_invocation_test_collection" returned %v`, n.ID) } for i := range nodes { assign(nodes[i], n) @@ -618,6 +546,34 @@ func (tcq *TestCollectionQuery) loadTestSummary(ctx context.Context, query *Test } return nil } +func (tcq *TestCollectionQuery) loadTestSummary(ctx context.Context, query *TestSummaryQuery, nodes []*TestCollection, init func(*TestCollection), assign func(*TestCollection, *TestSummary)) error { + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TestCollection) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] + } + query.withFKs = true + query.Where(predicate.TestSummary(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(testcollection.TestSummaryColumn), fks...)) + })) + neighbors, err := query.All(ctx) + if err != nil { + return err + } + for _, n := range neighbors { + fk := n.test_collection_test_summary + if fk == nil { + return fmt.Errorf(`foreign-key "test_collection_test_summary" is nil for node %v`, n.ID) + } + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "test_collection_test_summary" returned %v for node 
%v`, *fk, n.ID) + } + assign(node, n) + } + return nil +} func (tcq *TestCollectionQuery) loadTestResults(ctx context.Context, query *TestResultBESQuery, nodes []*TestCollection, init func(*TestCollection), assign func(*TestCollection, *TestResultBES)) error { fks := make([]driver.Value, 0, len(nodes)) nodeids := make(map[int]*TestCollection) @@ -734,20 +690,6 @@ func (tcq *TestCollectionQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedBazelInvocation tells the query-builder to eager-load the nodes that are connected to the "bazel_invocation" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tcq *TestCollectionQuery) WithNamedBazelInvocation(name string, opts ...func(*BazelInvocationQuery)) *TestCollectionQuery { - query := (&BazelInvocationClient{config: tcq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tcq.withNamedBazelInvocation == nil { - tcq.withNamedBazelInvocation = make(map[string]*BazelInvocationQuery) - } - tcq.withNamedBazelInvocation[name] = query - return tcq -} - // WithNamedTestResults tells the query-builder to eager-load the nodes that are connected to the "test_results" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (tcq *TestCollectionQuery) WithNamedTestResults(name string, opts ...func(*TestResultBESQuery)) *TestCollectionQuery { diff --git a/ent/gen/ent/testcollection_update.go b/ent/gen/ent/testcollection_update.go index 1e6dca1..0fa0c65 100644 --- a/ent/gen/ent/testcollection_update.go +++ b/ent/gen/ent/testcollection_update.go @@ -6,6 +6,7 @@ import ( "context" "errors" "fmt" + "time" "entgo.io/ent/dialect/sql" "entgo.io/ent/dialect/sql/sqlgraph" @@ -130,6 +131,26 @@ func (tcu *TestCollectionUpdate) ClearCachedRemotely() *TestCollectionUpdate { return tcu } +// SetFirstSeen sets the "first_seen" field. +func (tcu *TestCollectionUpdate) SetFirstSeen(t time.Time) *TestCollectionUpdate { + tcu.mutation.SetFirstSeen(t) + return tcu +} + +// SetNillableFirstSeen sets the "first_seen" field if the given value is not nil. +func (tcu *TestCollectionUpdate) SetNillableFirstSeen(t *time.Time) *TestCollectionUpdate { + if t != nil { + tcu.SetFirstSeen(*t) + } + return tcu +} + +// ClearFirstSeen clears the value of the "first_seen" field. +func (tcu *TestCollectionUpdate) ClearFirstSeen() *TestCollectionUpdate { + tcu.mutation.ClearFirstSeen() + return tcu +} + // SetDurationMs sets the "duration_ms" field. func (tcu *TestCollectionUpdate) SetDurationMs(i int64) *TestCollectionUpdate { tcu.mutation.ResetDurationMs() @@ -157,19 +178,23 @@ func (tcu *TestCollectionUpdate) ClearDurationMs() *TestCollectionUpdate { return tcu } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tcu *TestCollectionUpdate) AddBazelInvocationIDs(ids ...int) *TestCollectionUpdate { - tcu.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. +func (tcu *TestCollectionUpdate) SetBazelInvocationID(id int) *TestCollectionUpdate { + tcu.mutation.SetBazelInvocationID(id) return tcu } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. 
-func (tcu *TestCollectionUpdate) AddBazelInvocation(b ...*BazelInvocation) *TestCollectionUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. +func (tcu *TestCollectionUpdate) SetNillableBazelInvocationID(id *int) *TestCollectionUpdate { + if id != nil { + tcu = tcu.SetBazelInvocationID(*id) } - return tcu.AddBazelInvocationIDs(ids...) + return tcu +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tcu *TestCollectionUpdate) SetBazelInvocation(b *BazelInvocation) *TestCollectionUpdate { + return tcu.SetBazelInvocationID(b.ID) } // SetTestSummaryID sets the "test_summary" edge to the TestSummary entity by ID. @@ -211,27 +236,12 @@ func (tcu *TestCollectionUpdate) Mutation() *TestCollectionMutation { return tcu.mutation } -// ClearBazelInvocation clears all "bazel_invocation" edges to the BazelInvocation entity. +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. func (tcu *TestCollectionUpdate) ClearBazelInvocation() *TestCollectionUpdate { tcu.mutation.ClearBazelInvocation() return tcu } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to BazelInvocation entities by IDs. -func (tcu *TestCollectionUpdate) RemoveBazelInvocationIDs(ids ...int) *TestCollectionUpdate { - tcu.mutation.RemoveBazelInvocationIDs(ids...) - return tcu -} - -// RemoveBazelInvocation removes "bazel_invocation" edges to BazelInvocation entities. -func (tcu *TestCollectionUpdate) RemoveBazelInvocation(b ...*BazelInvocation) *TestCollectionUpdate { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return tcu.RemoveBazelInvocationIDs(ids...) -} - // ClearTestSummary clears the "test_summary" edge to the TestSummary entity. 
func (tcu *TestCollectionUpdate) ClearTestSummary() *TestCollectionUpdate { tcu.mutation.ClearTestSummary() @@ -338,6 +348,12 @@ func (tcu *TestCollectionUpdate) sqlSave(ctx context.Context) (n int, err error) if tcu.mutation.CachedRemotelyCleared() { _spec.ClearField(testcollection.FieldCachedRemotely, field.TypeBool) } + if value, ok := tcu.mutation.FirstSeen(); ok { + _spec.SetField(testcollection.FieldFirstSeen, field.TypeTime, value) + } + if tcu.mutation.FirstSeenCleared() { + _spec.ClearField(testcollection.FieldFirstSeen, field.TypeTime) + } if value, ok := tcu.mutation.DurationMs(); ok { _spec.SetField(testcollection.FieldDurationMs, field.TypeInt64, value) } @@ -349,39 +365,23 @@ func (tcu *TestCollectionUpdate) sqlSave(ctx context.Context) (n int, err error) } if tcu.mutation.BazelInvocationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tcu.mutation.RemovedBazelInvocationIDs(); len(nodes) > 0 && !tcu.mutation.BazelInvocationCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, + Columns: []string{testcollection.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tcu.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, + Columns: []string{testcollection.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -394,7 +394,7 @@ func (tcu *TestCollectionUpdate) sqlSave(ctx context.Context) (n int, err error) } if tcu.mutation.TestSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testcollection.TestSummaryTable, Columns: []string{testcollection.TestSummaryColumn}, @@ -407,7 +407,7 @@ func (tcu *TestCollectionUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := tcu.mutation.TestSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testcollection.TestSummaryTable, Columns: []string{testcollection.TestSummaryColumn}, @@ -586,6 +586,26 @@ func (tcuo *TestCollectionUpdateOne) ClearCachedRemotely() *TestCollectionUpdate return tcuo } +// SetFirstSeen sets the "first_seen" field. +func (tcuo *TestCollectionUpdateOne) SetFirstSeen(t time.Time) *TestCollectionUpdateOne { + tcuo.mutation.SetFirstSeen(t) + return tcuo +} + +// SetNillableFirstSeen sets the "first_seen" field if the given value is not nil. +func (tcuo *TestCollectionUpdateOne) SetNillableFirstSeen(t *time.Time) *TestCollectionUpdateOne { + if t != nil { + tcuo.SetFirstSeen(*t) + } + return tcuo +} + +// ClearFirstSeen clears the value of the "first_seen" field. 
+func (tcuo *TestCollectionUpdateOne) ClearFirstSeen() *TestCollectionUpdateOne { + tcuo.mutation.ClearFirstSeen() + return tcuo +} + // SetDurationMs sets the "duration_ms" field. func (tcuo *TestCollectionUpdateOne) SetDurationMs(i int64) *TestCollectionUpdateOne { tcuo.mutation.ResetDurationMs() @@ -613,19 +633,23 @@ func (tcuo *TestCollectionUpdateOne) ClearDurationMs() *TestCollectionUpdateOne return tcuo } -// AddBazelInvocationIDs adds the "bazel_invocation" edge to the BazelInvocation entity by IDs. -func (tcuo *TestCollectionUpdateOne) AddBazelInvocationIDs(ids ...int) *TestCollectionUpdateOne { - tcuo.mutation.AddBazelInvocationIDs(ids...) +// SetBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID. +func (tcuo *TestCollectionUpdateOne) SetBazelInvocationID(id int) *TestCollectionUpdateOne { + tcuo.mutation.SetBazelInvocationID(id) return tcuo } -// AddBazelInvocation adds the "bazel_invocation" edges to the BazelInvocation entity. -func (tcuo *TestCollectionUpdateOne) AddBazelInvocation(b ...*BazelInvocation) *TestCollectionUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID +// SetNillableBazelInvocationID sets the "bazel_invocation" edge to the BazelInvocation entity by ID if the given value is not nil. +func (tcuo *TestCollectionUpdateOne) SetNillableBazelInvocationID(id *int) *TestCollectionUpdateOne { + if id != nil { + tcuo = tcuo.SetBazelInvocationID(*id) } - return tcuo.AddBazelInvocationIDs(ids...) + return tcuo +} + +// SetBazelInvocation sets the "bazel_invocation" edge to the BazelInvocation entity. +func (tcuo *TestCollectionUpdateOne) SetBazelInvocation(b *BazelInvocation) *TestCollectionUpdateOne { + return tcuo.SetBazelInvocationID(b.ID) } // SetTestSummaryID sets the "test_summary" edge to the TestSummary entity by ID. @@ -667,27 +691,12 @@ func (tcuo *TestCollectionUpdateOne) Mutation() *TestCollectionMutation { return tcuo.mutation } -// ClearBazelInvocation clears all "bazel_invocation" edges to the BazelInvocation entity. +// ClearBazelInvocation clears the "bazel_invocation" edge to the BazelInvocation entity. func (tcuo *TestCollectionUpdateOne) ClearBazelInvocation() *TestCollectionUpdateOne { tcuo.mutation.ClearBazelInvocation() return tcuo } -// RemoveBazelInvocationIDs removes the "bazel_invocation" edge to BazelInvocation entities by IDs. -func (tcuo *TestCollectionUpdateOne) RemoveBazelInvocationIDs(ids ...int) *TestCollectionUpdateOne { - tcuo.mutation.RemoveBazelInvocationIDs(ids...) - return tcuo -} - -// RemoveBazelInvocation removes "bazel_invocation" edges to BazelInvocation entities. -func (tcuo *TestCollectionUpdateOne) RemoveBazelInvocation(b ...*BazelInvocation) *TestCollectionUpdateOne { - ids := make([]int, len(b)) - for i := range b { - ids[i] = b[i].ID - } - return tcuo.RemoveBazelInvocationIDs(ids...) -} - // ClearTestSummary clears the "test_summary" edge to the TestSummary entity. 
func (tcuo *TestCollectionUpdateOne) ClearTestSummary() *TestCollectionUpdateOne { tcuo.mutation.ClearTestSummary() @@ -824,6 +833,12 @@ func (tcuo *TestCollectionUpdateOne) sqlSave(ctx context.Context) (_node *TestCo if tcuo.mutation.CachedRemotelyCleared() { _spec.ClearField(testcollection.FieldCachedRemotely, field.TypeBool) } + if value, ok := tcuo.mutation.FirstSeen(); ok { + _spec.SetField(testcollection.FieldFirstSeen, field.TypeTime, value) + } + if tcuo.mutation.FirstSeenCleared() { + _spec.ClearField(testcollection.FieldFirstSeen, field.TypeTime) + } if value, ok := tcuo.mutation.DurationMs(); ok { _spec.SetField(testcollection.FieldDurationMs, field.TypeInt64, value) } @@ -835,39 +850,23 @@ func (tcuo *TestCollectionUpdateOne) sqlSave(ctx context.Context) (_node *TestCo } if tcuo.mutation.BazelInvocationCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tcuo.mutation.RemovedBazelInvocationIDs(); len(nodes) > 0 && !tcuo.mutation.BazelInvocationCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, + Columns: []string{testcollection.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tcuo.mutation.BazelInvocationIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testcollection.BazelInvocationTable, - Columns: testcollection.BazelInvocationPrimaryKey, + Columns: []string{testcollection.BazelInvocationColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(bazelinvocation.FieldID, field.TypeInt), @@ -880,7 +879,7 @@ func (tcuo *TestCollectionUpdateOne) sqlSave(ctx context.Context) (_node *TestCo } if tcuo.mutation.TestSummaryCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testcollection.TestSummaryTable, Columns: []string{testcollection.TestSummaryColumn}, @@ -893,7 +892,7 @@ func (tcuo *TestCollectionUpdateOne) sqlSave(ctx context.Context) (_node *TestCo } if nodes := tcuo.mutation.TestSummaryIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testcollection.TestSummaryTable, Columns: []string{testcollection.TestSummaryColumn}, diff --git a/ent/gen/ent/testfile.go b/ent/gen/ent/testfile.go index bca03d3..98c5ec8 100644 --- a/ent/gen/ent/testfile.go +++ b/ent/gen/ent/testfile.go @@ -10,6 +10,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" "github.com/buildbarn/bb-portal/ent/gen/ent/testfile" + "github.com/buildbarn/bb-portal/ent/gen/ent/testresultbes" ) // TestFile is the model entity for the TestFile schema. @@ -29,34 +30,35 @@ type TestFile struct { Prefix []string `json:"prefix,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TestFileQuery when eager-loading is set. 
- Edges TestFileEdges `json:"edges"` - named_set_of_files_files *int - output_group_inline_files *int - target_complete_important_output *int - target_complete_directory_output *int - test_summary_passed *int - test_summary_failed *int - selectValues sql.SelectValues + Edges TestFileEdges `json:"edges"` + named_set_of_files_files *int + output_group_inline_files *int + target_complete_important_output *int + target_complete_directory_output *int + test_result_bes_test_action_output *int + test_summary_passed *int + test_summary_failed *int + selectValues sql.SelectValues } // TestFileEdges holds the relations/edges for other nodes in the graph. type TestFileEdges struct { // TestResult holds the value of the test_result edge. - TestResult []*TestResultBES `json:"test_result,omitempty"` + TestResult *TestResultBES `json:"test_result,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedTestResult map[string][]*TestResultBES } // TestResultOrErr returns the TestResult value or an error if the edge -// was not loaded in eager-loading. -func (e TestFileEdges) TestResultOrErr() ([]*TestResultBES, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TestFileEdges) TestResultOrErr() (*TestResultBES, error) { + if e.TestResult != nil { return e.TestResult, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: testresultbes.Label} } return nil, &NotLoadedError{edge: "test_result"} } @@ -80,9 +82,11 @@ func (*TestFile) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case testfile.ForeignKeys[3]: // target_complete_directory_output values[i] = new(sql.NullInt64) - case testfile.ForeignKeys[4]: // test_summary_passed + case testfile.ForeignKeys[4]: // test_result_bes_test_action_output + values[i] = new(sql.NullInt64) + case testfile.ForeignKeys[5]: // test_summary_passed values[i] = new(sql.NullInt64) - case testfile.ForeignKeys[5]: // test_summary_failed + case testfile.ForeignKeys[6]: // test_summary_failed values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) @@ -166,13 +170,20 @@ func (tf *TestFile) assignValues(columns []string, values []any) error { *tf.target_complete_directory_output = int(value.Int64) } case testfile.ForeignKeys[4]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field test_result_bes_test_action_output", value) + } else if value.Valid { + tf.test_result_bes_test_action_output = new(int) + *tf.test_result_bes_test_action_output = int(value.Int64) + } + case testfile.ForeignKeys[5]: if value, ok := values[i].(*sql.NullInt64); !ok { return fmt.Errorf("unexpected type %T for edge-field test_summary_passed", value) } else if value.Valid { tf.test_summary_passed = new(int) *tf.test_summary_passed = int(value.Int64) } - case testfile.ForeignKeys[5]: + case testfile.ForeignKeys[6]: if value, ok := values[i].(*sql.NullInt64); !ok { return fmt.Errorf("unexpected type %T for edge-field test_summary_failed", value) } else if value.Valid { @@ -238,29 +249,5 @@ func (tf *TestFile) String() string { return builder.String() } -// NamedTestResult returns the TestResult named value or an error if the edge was not -// loaded in eager-loading with this name. 
-func (tf *TestFile) NamedTestResult(name string) ([]*TestResultBES, error) { - if tf.Edges.namedTestResult == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tf.Edges.namedTestResult[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tf *TestFile) appendNamedTestResult(name string, edges ...*TestResultBES) { - if tf.Edges.namedTestResult == nil { - tf.Edges.namedTestResult = make(map[string][]*TestResultBES) - } - if len(edges) == 0 { - tf.Edges.namedTestResult[name] = []*TestResultBES{} - } else { - tf.Edges.namedTestResult[name] = append(tf.Edges.namedTestResult[name], edges...) - } -} - // TestFiles is a parsable slice of TestFile. type TestFiles []*TestFile diff --git a/ent/gen/ent/testfile/testfile.go b/ent/gen/ent/testfile/testfile.go index 1aa981e..460a7b2 100644 --- a/ent/gen/ent/testfile/testfile.go +++ b/ent/gen/ent/testfile/testfile.go @@ -26,11 +26,13 @@ const ( EdgeTestResult = "test_result" // Table holds the table name of the testfile in the database. Table = "test_files" - // TestResultTable is the table that holds the test_result relation/edge. The primary key declared below. - TestResultTable = "test_result_bes_test_action_output" + // TestResultTable is the table that holds the test_result relation/edge. + TestResultTable = "test_files" // TestResultInverseTable is the table name for the TestResultBES entity. // It exists in this package in order to avoid circular dependency with the "testresultbes" package. TestResultInverseTable = "test_result_be_ss" + // TestResultColumn is the table column denoting the test_result relation/edge. + TestResultColumn = "test_result_bes_test_action_output" ) // Columns holds all SQL columns for testfile fields. @@ -50,16 +52,11 @@ var ForeignKeys = []string{ "output_group_inline_files", "target_complete_important_output", "target_complete_directory_output", + "test_result_bes_test_action_output", "test_summary_passed", "test_summary_failed", } -var ( - // TestResultPrimaryKey and TestResultColumn2 are the table columns denoting the - // primary key for the test_result relation (M2M). - TestResultPrimaryKey = []string{"test_result_bes_id", "test_file_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -103,23 +100,16 @@ func ByName(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldName, opts...).ToFunc() } -// ByTestResultCount orders the results by test_result count. -func ByTestResultCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTestResultStep(), opts...) - } -} - -// ByTestResult orders the results by test_result terms. -func ByTestResult(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTestResultField orders the results by test_result field. +func ByTestResultField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTestResultStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newTestResultStep(), sql.OrderByField(field, opts...)) } } func newTestResultStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestResultInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, TestResultTable, TestResultPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, TestResultTable, TestResultColumn), ) } diff --git a/ent/gen/ent/testfile/where.go b/ent/gen/ent/testfile/where.go index 0151045..d8118a9 100644 --- a/ent/gen/ent/testfile/where.go +++ b/ent/gen/ent/testfile/where.go @@ -363,7 +363,7 @@ func HasTestResult() predicate.TestFile { return predicate.TestFile(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, TestResultTable, TestResultPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, TestResultTable, TestResultColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/testfile_create.go b/ent/gen/ent/testfile_create.go index 1ffe34d..b934d3c 100644 --- a/ent/gen/ent/testfile_create.go +++ b/ent/gen/ent/testfile_create.go @@ -81,19 +81,23 @@ func (tfc *TestFileCreate) SetPrefix(s []string) *TestFileCreate { return tfc } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (tfc *TestFileCreate) AddTestResultIDs(ids ...int) *TestFileCreate { - tfc.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (tfc *TestFileCreate) SetTestResultID(id int) *TestFileCreate { + tfc.mutation.SetTestResultID(id) return tfc } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. -func (tfc *TestFileCreate) AddTestResult(t ...*TestResultBES) *TestFileCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (tfc *TestFileCreate) SetNillableTestResultID(id *int) *TestFileCreate { + if id != nil { + tfc = tfc.SetTestResultID(*id) } - return tfc.AddTestResultIDs(ids...) + return tfc +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (tfc *TestFileCreate) SetTestResult(t *TestResultBES) *TestFileCreate { + return tfc.SetTestResultID(t.ID) } // Mutation returns the TestFileMutation object of the builder. @@ -178,10 +182,10 @@ func (tfc *TestFileCreate) createSpec() (*TestFile, *sqlgraph.CreateSpec) { } if nodes := tfc.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, + Columns: []string{testfile.TestResultColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), @@ -190,6 +194,7 @@ func (tfc *TestFileCreate) createSpec() (*TestFile, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.test_result_bes_test_action_output = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/testfile_query.go b/ent/gen/ent/testfile_query.go index ace6489..42f62c9 100644 --- a/ent/gen/ent/testfile_query.go +++ b/ent/gen/ent/testfile_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,15 +18,14 @@ import ( // TestFileQuery is the builder for querying TestFile entities. 
type TestFileQuery struct { config - ctx *QueryContext - order []testfile.OrderOption - inters []Interceptor - predicates []predicate.TestFile - withTestResult *TestResultBESQuery - withFKs bool - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TestFile) error - withNamedTestResult map[string]*TestResultBESQuery + ctx *QueryContext + order []testfile.OrderOption + inters []Interceptor + predicates []predicate.TestFile + withTestResult *TestResultBESQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TestFile) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -78,7 +76,7 @@ func (tfq *TestFileQuery) QueryTestResult() *TestResultBESQuery { step := sqlgraph.NewStep( sqlgraph.From(testfile.Table, testfile.FieldID, selector), sqlgraph.To(testresultbes.Table, testresultbes.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, testfile.TestResultTable, testfile.TestResultPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, testfile.TestResultTable, testfile.TestResultColumn), ) fromU = sqlgraph.SetNeighbors(tfq.driver.Dialect(), step) return fromU, nil @@ -379,6 +377,9 @@ func (tfq *TestFileQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Te tfq.withTestResult != nil, } ) + if tfq.withTestResult != nil { + withFKs = true + } if withFKs { _spec.Node.Columns = append(_spec.Node.Columns, testfile.ForeignKeys...) } @@ -404,16 +405,8 @@ func (tfq *TestFileQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Te return nodes, nil } if query := tfq.withTestResult; query != nil { - if err := tfq.loadTestResult(ctx, query, nodes, - func(n *TestFile) { n.Edges.TestResult = []*TestResultBES{} }, - func(n *TestFile, e *TestResultBES) { n.Edges.TestResult = append(n.Edges.TestResult, e) }); err != nil { - return nil, err - } - } - for name, query := range tfq.withNamedTestResult { - if err := tfq.loadTestResult(ctx, query, nodes, - func(n *TestFile) { n.appendNamedTestResult(name) }, - func(n *TestFile, e *TestResultBES) { n.appendNamedTestResult(name, e) }); err != nil { + if err := tfq.loadTestResult(ctx, query, nodes, nil, + func(n *TestFile, e *TestResultBES) { n.Edges.TestResult = e }); err != nil { return nil, err } } @@ -426,62 +419,33 @@ func (tfq *TestFileQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Te } func (tfq *TestFileQuery) loadTestResult(ctx context.Context, query *TestResultBESQuery, nodes []*TestFile, init func(*TestFile), assign func(*TestFile, *TestResultBES)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TestFile) - nids := make(map[int]map[*TestFile]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TestFile) + for i := range nodes { + if nodes[i].test_result_bes_test_action_output == nil { + continue + } + fk := *nodes[i].test_result_bes_test_action_output + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(testfile.TestResultTable) - s.Join(joinT).On(s.C(testresultbes.FieldID), joinT.C(testfile.TestResultPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(testfile.TestResultPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(testfile.TestResultPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TestFile]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TestResultBES](ctx, query, qr, query.inters) + query.Where(testresultbes.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "test_result" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "test_result_bes_test_action_output" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -571,20 +535,6 @@ func (tfq *TestFileQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTestResult tells the query-builder to eager-load the nodes that are connected to the "test_result" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tfq *TestFileQuery) WithNamedTestResult(name string, opts ...func(*TestResultBESQuery)) *TestFileQuery { - query := (&TestResultBESClient{config: tfq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tfq.withNamedTestResult == nil { - tfq.withNamedTestResult = make(map[string]*TestResultBESQuery) - } - tfq.withNamedTestResult[name] = query - return tfq -} - // TestFileGroupBy is the group-by builder for TestFile entities. type TestFileGroupBy struct { selector diff --git a/ent/gen/ent/testfile_update.go b/ent/gen/ent/testfile_update.go index 3da3d87..706d42d 100644 --- a/ent/gen/ent/testfile_update.go +++ b/ent/gen/ent/testfile_update.go @@ -134,19 +134,23 @@ func (tfu *TestFileUpdate) ClearPrefix() *TestFileUpdate { return tfu } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (tfu *TestFileUpdate) AddTestResultIDs(ids ...int) *TestFileUpdate { - tfu.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (tfu *TestFileUpdate) SetTestResultID(id int) *TestFileUpdate { + tfu.mutation.SetTestResultID(id) return tfu } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. -func (tfu *TestFileUpdate) AddTestResult(t ...*TestResultBES) *TestFileUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (tfu *TestFileUpdate) SetNillableTestResultID(id *int) *TestFileUpdate { + if id != nil { + tfu = tfu.SetTestResultID(*id) } - return tfu.AddTestResultIDs(ids...) 
+ return tfu +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (tfu *TestFileUpdate) SetTestResult(t *TestResultBES) *TestFileUpdate { + return tfu.SetTestResultID(t.ID) } // Mutation returns the TestFileMutation object of the builder. @@ -154,27 +158,12 @@ func (tfu *TestFileUpdate) Mutation() *TestFileMutation { return tfu.mutation } -// ClearTestResult clears all "test_result" edges to the TestResultBES entity. +// ClearTestResult clears the "test_result" edge to the TestResultBES entity. func (tfu *TestFileUpdate) ClearTestResult() *TestFileUpdate { tfu.mutation.ClearTestResult() return tfu } -// RemoveTestResultIDs removes the "test_result" edge to TestResultBES entities by IDs. -func (tfu *TestFileUpdate) RemoveTestResultIDs(ids ...int) *TestFileUpdate { - tfu.mutation.RemoveTestResultIDs(ids...) - return tfu -} - -// RemoveTestResult removes "test_result" edges to TestResultBES entities. -func (tfu *TestFileUpdate) RemoveTestResult(t ...*TestResultBES) *TestFileUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tfu.RemoveTestResultIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (tfu *TestFileUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, tfu.sqlSave, tfu.mutation, tfu.hooks) @@ -251,39 +240,23 @@ func (tfu *TestFileUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tfu.mutation.TestResultCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tfu.mutation.RemovedTestResultIDs(); len(nodes) > 0 && !tfu.mutation.TestResultCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, + Columns: []string{testfile.TestResultColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tfu.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, + Columns: []string{testfile.TestResultColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), @@ -419,19 +392,23 @@ func (tfuo *TestFileUpdateOne) ClearPrefix() *TestFileUpdateOne { return tfuo } -// AddTestResultIDs adds the "test_result" edge to the TestResultBES entity by IDs. -func (tfuo *TestFileUpdateOne) AddTestResultIDs(ids ...int) *TestFileUpdateOne { - tfuo.mutation.AddTestResultIDs(ids...) +// SetTestResultID sets the "test_result" edge to the TestResultBES entity by ID. +func (tfuo *TestFileUpdateOne) SetTestResultID(id int) *TestFileUpdateOne { + tfuo.mutation.SetTestResultID(id) return tfuo } -// AddTestResult adds the "test_result" edges to the TestResultBES entity. 
-func (tfuo *TestFileUpdateOne) AddTestResult(t ...*TestResultBES) *TestFileUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestResultID sets the "test_result" edge to the TestResultBES entity by ID if the given value is not nil. +func (tfuo *TestFileUpdateOne) SetNillableTestResultID(id *int) *TestFileUpdateOne { + if id != nil { + tfuo = tfuo.SetTestResultID(*id) } - return tfuo.AddTestResultIDs(ids...) + return tfuo +} + +// SetTestResult sets the "test_result" edge to the TestResultBES entity. +func (tfuo *TestFileUpdateOne) SetTestResult(t *TestResultBES) *TestFileUpdateOne { + return tfuo.SetTestResultID(t.ID) } // Mutation returns the TestFileMutation object of the builder. @@ -439,27 +416,12 @@ func (tfuo *TestFileUpdateOne) Mutation() *TestFileMutation { return tfuo.mutation } -// ClearTestResult clears all "test_result" edges to the TestResultBES entity. +// ClearTestResult clears the "test_result" edge to the TestResultBES entity. func (tfuo *TestFileUpdateOne) ClearTestResult() *TestFileUpdateOne { tfuo.mutation.ClearTestResult() return tfuo } -// RemoveTestResultIDs removes the "test_result" edge to TestResultBES entities by IDs. -func (tfuo *TestFileUpdateOne) RemoveTestResultIDs(ids ...int) *TestFileUpdateOne { - tfuo.mutation.RemoveTestResultIDs(ids...) - return tfuo -} - -// RemoveTestResult removes "test_result" edges to TestResultBES entities. -func (tfuo *TestFileUpdateOne) RemoveTestResult(t ...*TestResultBES) *TestFileUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tfuo.RemoveTestResultIDs(ids...) -} - // Where appends a list predicates to the TestFileUpdate builder. func (tfuo *TestFileUpdateOne) Where(ps ...predicate.TestFile) *TestFileUpdateOne { tfuo.mutation.Where(ps...) 
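The hunks above drop the generated M2M helpers for the "test_result" edge (AddTestResultIDs/RemoveTestResult) in favor of SetTestResultID/SetNillableTestResultID/SetTestResult/ClearTestResult, and the EdgeSpecs switch from M2M over a join-table primary key to M2O over a single foreign-key column on test_files. In ent this is the code shape produced when an edge is declared Unique in the schema; the same pattern accounts for the other M2M-to-M2O/O2O switches in this patch (bazel_invocation on TestCollection, test_collection on TestSummary, execution_info on TestResultBES). Below is a minimal, hypothetical sketch of schema edges that would make entc regenerate roughly this shape; the type and edge names are inferred from the generated identifiers, not copied from the repository's ent/schema package.

// Hypothetical schema sketch (not the actual ent/schema files in this repo),
// illustrating the kind of edge declarations that make entc emit the
// M2O/O2M builders and EdgeSpecs seen in this diff.
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// TestResultBES owns the edge to its output files. Leaving the To-edge
// non-unique keeps it O2M on this side, matching the generated
// sqlgraph.O2M specs for "test_action_output" above.
type TestResultBES struct{ ent.Schema }

func (TestResultBES) Edges() []ent.Edge {
	return []ent.Edge{
		edge.To("test_action_output", TestFile.Type),
	}
}

// TestFile holds the inverse edge. Marking it Unique() is what replaces the
// generated AddTestResultIDs/RemoveTestResult (M2M) API with
// SetTestResultID/SetTestResult/ClearTestResult (M2O), and stores the
// relation in the test_result_bes_test_action_output column on test_files
// instead of a join table.
type TestFile struct{ ent.Schema }

func (TestFile) Edges() []ent.Edge {
	return []ent.Edge{
		edge.From("test_result", TestResultBES.Type).
			Ref("test_action_output").
			Unique(),
	}
}

After a schema change along these lines, rerunning ent's code generation would rewrite the builders, predicates, and query loaders roughly as shown in the remaining hunks of this patch.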
@@ -566,39 +528,23 @@ func (tfuo *TestFileUpdateOne) sqlSave(ctx context.Context) (_node *TestFile, er } if tfuo.mutation.TestResultCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tfuo.mutation.RemovedTestResultIDs(); len(nodes) > 0 && !tfuo.mutation.TestResultCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, + Columns: []string{testfile.TestResultColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tfuo.mutation.TestResultIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: testfile.TestResultTable, - Columns: testfile.TestResultPrimaryKey, + Columns: []string{testfile.TestResultColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testresultbes.FieldID, field.TypeInt), diff --git a/ent/gen/ent/testresultbes.go b/ent/gen/ent/testresultbes.go index 1d4cdf9..2c53077 100644 --- a/ent/gen/ent/testresultbes.go +++ b/ent/gen/ent/testresultbes.go @@ -39,10 +39,9 @@ type TestResultBES struct { TestAttemptDuration int64 `json:"test_attempt_duration,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TestResultBESQuery when eager-loading is set. - Edges TestResultBESEdges `json:"edges"` - test_collection_test_results *int - test_result_bes_execution_info *int - selectValues sql.SelectValues + Edges TestResultBESEdges `json:"edges"` + test_collection_test_results *int + selectValues sql.SelectValues } // TestResultBESEdges holds the relations/edges for other nodes in the graph. @@ -108,8 +107,6 @@ func (*TestResultBES) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullString) case testresultbes.ForeignKeys[0]: // test_collection_test_results values[i] = new(sql.NullInt64) - case testresultbes.ForeignKeys[1]: // test_result_bes_execution_info - values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -194,13 +191,6 @@ func (trb *TestResultBES) assignValues(columns []string, values []any) error { trb.test_collection_test_results = new(int) *trb.test_collection_test_results = int(value.Int64) } - case testresultbes.ForeignKeys[1]: - if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field test_result_bes_execution_info", value) - } else if value.Valid { - trb.test_result_bes_execution_info = new(int) - *trb.test_result_bes_execution_info = int(value.Int64) - } default: trb.selectValues.Set(columns[i], values[i]) } diff --git a/ent/gen/ent/testresultbes/testresultbes.go b/ent/gen/ent/testresultbes/testresultbes.go index 27f3079..abca9d1 100644 --- a/ent/gen/ent/testresultbes/testresultbes.go +++ b/ent/gen/ent/testresultbes/testresultbes.go @@ -49,13 +49,15 @@ const ( TestCollectionInverseTable = "test_collections" // TestCollectionColumn is the table column denoting the test_collection relation/edge. 
TestCollectionColumn = "test_collection_test_results" - // TestActionOutputTable is the table that holds the test_action_output relation/edge. The primary key declared below. - TestActionOutputTable = "test_result_bes_test_action_output" + // TestActionOutputTable is the table that holds the test_action_output relation/edge. + TestActionOutputTable = "test_files" // TestActionOutputInverseTable is the table name for the TestFile entity. // It exists in this package in order to avoid circular dependency with the "testfile" package. TestActionOutputInverseTable = "test_files" + // TestActionOutputColumn is the table column denoting the test_action_output relation/edge. + TestActionOutputColumn = "test_result_bes_test_action_output" // ExecutionInfoTable is the table that holds the execution_info relation/edge. - ExecutionInfoTable = "test_result_be_ss" + ExecutionInfoTable = "exection_infos" // ExecutionInfoInverseTable is the table name for the ExectionInfo entity. // It exists in this package in order to avoid circular dependency with the "exectioninfo" package. ExecutionInfoInverseTable = "exection_infos" @@ -81,15 +83,8 @@ var Columns = []string{ // table and are not defined as standalone fields in the schema. var ForeignKeys = []string{ "test_collection_test_results", - "test_result_bes_execution_info", } -var ( - // TestActionOutputPrimaryKey and TestActionOutputColumn2 are the table columns denoting the - // primary key for the test_action_output relation (M2M). - TestActionOutputPrimaryKey = []string{"test_result_bes_id", "test_file_id"} -) - // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { for i := range Columns { @@ -224,14 +219,14 @@ func newTestActionOutputStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestActionOutputInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TestActionOutputTable, TestActionOutputPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TestActionOutputTable, TestActionOutputColumn), ) } func newExecutionInfoStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ExecutionInfoInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, ExecutionInfoTable, ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ExecutionInfoTable, ExecutionInfoColumn), ) } diff --git a/ent/gen/ent/testresultbes/where.go b/ent/gen/ent/testresultbes/where.go index 6177cf9..3d669a8 100644 --- a/ent/gen/ent/testresultbes/where.go +++ b/ent/gen/ent/testresultbes/where.go @@ -546,7 +546,7 @@ func HasTestActionOutput() predicate.TestResultBES { return predicate.TestResultBES(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, TestActionOutputTable, TestActionOutputPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, TestActionOutputTable, TestActionOutputColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -569,7 +569,7 @@ func HasExecutionInfo() predicate.TestResultBES { return predicate.TestResultBES(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, ExecutionInfoTable, ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, false, ExecutionInfoTable, ExecutionInfoColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/testresultbes_create.go b/ent/gen/ent/testresultbes_create.go index 0b7b5f3..13c16db 100644 --- a/ent/gen/ent/testresultbes_create.go +++ 
b/ent/gen/ent/testresultbes_create.go @@ -321,10 +321,10 @@ func (trbc *TestResultBESCreate) createSpec() (*TestResultBES, *sqlgraph.CreateS } if nodes := trbc.mutation.TestActionOutputIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -337,7 +337,7 @@ func (trbc *TestResultBESCreate) createSpec() (*TestResultBES, *sqlgraph.CreateS } if nodes := trbc.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testresultbes.ExecutionInfoTable, Columns: []string{testresultbes.ExecutionInfoColumn}, @@ -349,7 +349,6 @@ func (trbc *TestResultBESCreate) createSpec() (*TestResultBES, *sqlgraph.CreateS for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.test_result_bes_execution_info = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/testresultbes_query.go b/ent/gen/ent/testresultbes_query.go index c6262ad..dc49878 100644 --- a/ent/gen/ent/testresultbes_query.go +++ b/ent/gen/ent/testresultbes_query.go @@ -104,7 +104,7 @@ func (trbq *TestResultBESQuery) QueryTestActionOutput() *TestFileQuery { step := sqlgraph.NewStep( sqlgraph.From(testresultbes.Table, testresultbes.FieldID, selector), sqlgraph.To(testfile.Table, testfile.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, testresultbes.TestActionOutputTable, testresultbes.TestActionOutputPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, testresultbes.TestActionOutputTable, testresultbes.TestActionOutputColumn), ) fromU = sqlgraph.SetNeighbors(trbq.driver.Dialect(), step) return fromU, nil @@ -126,7 +126,7 @@ func (trbq *TestResultBESQuery) QueryExecutionInfo() *ExectionInfoQuery { step := sqlgraph.NewStep( sqlgraph.From(testresultbes.Table, testresultbes.FieldID, selector), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.M2O, false, testresultbes.ExecutionInfoTable, testresultbes.ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, false, testresultbes.ExecutionInfoTable, testresultbes.ExecutionInfoColumn), ) fromU = sqlgraph.SetNeighbors(trbq.driver.Dialect(), step) return fromU, nil @@ -453,7 +453,7 @@ func (trbq *TestResultBESQuery) sqlAll(ctx context.Context, hooks ...queryHook) trbq.withExecutionInfo != nil, } ) - if trbq.withTestCollection != nil || trbq.withExecutionInfo != nil { + if trbq.withTestCollection != nil { withFKs = true } if withFKs { @@ -547,95 +547,61 @@ func (trbq *TestResultBESQuery) loadTestCollection(ctx context.Context, query *T return nil } func (trbq *TestResultBESQuery) loadTestActionOutput(ctx context.Context, query *TestFileQuery, nodes []*TestResultBES, init func(*TestResultBES), assign func(*TestResultBES, *TestFile)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TestResultBES) - nids := make(map[int]map[*TestResultBES]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TestResultBES) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := 
sql.Table(testresultbes.TestActionOutputTable) - s.Join(joinT).On(s.C(testfile.FieldID), joinT.C(testresultbes.TestActionOutputPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(testresultbes.TestActionOutputPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(testresultbes.TestActionOutputPrimaryKey[0])) - s.AppendSelect(columns...) - s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TestResultBES]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TestFile](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TestFile(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(testresultbes.TestActionOutputColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "test_action_output" node returned %v`, n.ID) + fk := n.test_result_bes_test_action_output + if fk == nil { + return fmt.Errorf(`foreign-key "test_result_bes_test_action_output" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "test_result_bes_test_action_output" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } func (trbq *TestResultBESQuery) loadExecutionInfo(ctx context.Context, query *ExectionInfoQuery, nodes []*TestResultBES, init func(*TestResultBES), assign func(*TestResultBES, *ExectionInfo)) error { - ids := make([]int, 0, len(nodes)) - nodeids := make(map[int][]*TestResultBES) + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TestResultBES) for i := range nodes { - if nodes[i].test_result_bes_execution_info == nil { - continue - } - fk := *nodes[i].test_result_bes_execution_info - if _, ok := nodeids[fk]; !ok { - ids = append(ids, fk) - } - nodeids[fk] = append(nodeids[fk], nodes[i]) - } - if len(ids) == 0 { - return nil + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] } - query.Where(exectioninfo.IDIn(ids...)) + query.withFKs = true + query.Where(predicate.ExectionInfo(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(testresultbes.ExecutionInfoColumn), fks...)) + })) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nodeids[n.ID] - if !ok { - return fmt.Errorf(`unexpected foreign-key "test_result_bes_execution_info" returned %v`, n.ID) + fk := n.test_result_bes_execution_info + if fk == nil { + return fmt.Errorf(`foreign-key "test_result_bes_execution_info" is nil for node %v`, n.ID) } - for i := range nodes { - assign(nodes[i], n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key 
"test_result_bes_execution_info" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } diff --git a/ent/gen/ent/testresultbes_update.go b/ent/gen/ent/testresultbes_update.go index 449170c..1bab09a 100644 --- a/ent/gen/ent/testresultbes_update.go +++ b/ent/gen/ent/testresultbes_update.go @@ -469,10 +469,10 @@ func (trbu *TestResultBESUpdate) sqlSave(ctx context.Context) (n int, err error) } if trbu.mutation.TestActionOutputCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -482,10 +482,10 @@ func (trbu *TestResultBESUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := trbu.mutation.RemovedTestActionOutputIDs(); len(nodes) > 0 && !trbu.mutation.TestActionOutputCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -498,10 +498,10 @@ func (trbu *TestResultBESUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := trbu.mutation.TestActionOutputIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -514,7 +514,7 @@ func (trbu *TestResultBESUpdate) sqlSave(ctx context.Context) (n int, err error) } if trbu.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testresultbes.ExecutionInfoTable, Columns: []string{testresultbes.ExecutionInfoColumn}, @@ -527,7 +527,7 @@ func (trbu *TestResultBESUpdate) sqlSave(ctx context.Context) (n int, err error) } if nodes := trbu.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testresultbes.ExecutionInfoTable, Columns: []string{testresultbes.ExecutionInfoColumn}, @@ -1029,10 +1029,10 @@ func (trbuo *TestResultBESUpdateOne) sqlSave(ctx context.Context) (_node *TestRe } if trbuo.mutation.TestActionOutputCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -1042,10 +1042,10 @@ func (trbuo *TestResultBESUpdateOne) sqlSave(ctx context.Context) (_node *TestRe } if nodes := trbuo.mutation.RemovedTestActionOutputIDs(); len(nodes) > 0 && !trbuo.mutation.TestActionOutputCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, 
Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -1058,10 +1058,10 @@ func (trbuo *TestResultBESUpdateOne) sqlSave(ctx context.Context) (_node *TestRe } if nodes := trbuo.mutation.TestActionOutputIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: testresultbes.TestActionOutputTable, - Columns: testresultbes.TestActionOutputPrimaryKey, + Columns: []string{testresultbes.TestActionOutputColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(testfile.FieldID, field.TypeInt), @@ -1074,7 +1074,7 @@ func (trbuo *TestResultBESUpdateOne) sqlSave(ctx context.Context) (_node *TestRe } if trbuo.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testresultbes.ExecutionInfoTable, Columns: []string{testresultbes.ExecutionInfoColumn}, @@ -1087,7 +1087,7 @@ func (trbuo *TestResultBESUpdateOne) sqlSave(ctx context.Context) (_node *TestRe } if nodes := trbuo.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2O, + Rel: sqlgraph.O2O, Inverse: false, Table: testresultbes.ExecutionInfoTable, Columns: []string{testresultbes.ExecutionInfoColumn}, diff --git a/ent/gen/ent/testsummary.go b/ent/gen/ent/testsummary.go index 4439687..cab6a9a 100644 --- a/ent/gen/ent/testsummary.go +++ b/ent/gen/ent/testsummary.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/testcollection" "github.com/buildbarn/bb-portal/ent/gen/ent/testsummary" ) @@ -38,14 +39,15 @@ type TestSummary struct { Label string `json:"label,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TestSummaryQuery when eager-loading is set. - Edges TestSummaryEdges `json:"edges"` - selectValues sql.SelectValues + Edges TestSummaryEdges `json:"edges"` + test_collection_test_summary *int + selectValues sql.SelectValues } // TestSummaryEdges holds the relations/edges for other nodes in the graph. type TestSummaryEdges struct { // TestCollection holds the value of the test_collection edge. - TestCollection []*TestCollection `json:"test_collection,omitempty"` + TestCollection *TestCollection `json:"test_collection,omitempty"` // Passed holds the value of the passed edge. Passed []*TestFile `json:"passed,omitempty"` // Failed holds the value of the failed edge. @@ -56,16 +58,17 @@ type TestSummaryEdges struct { // totalCount holds the count of the edges above. totalCount [3]map[string]int - namedTestCollection map[string][]*TestCollection - namedPassed map[string][]*TestFile - namedFailed map[string][]*TestFile + namedPassed map[string][]*TestFile + namedFailed map[string][]*TestFile } // TestCollectionOrErr returns the TestCollection value or an error if the edge -// was not loaded in eager-loading. -func (e TestSummaryEdges) TestCollectionOrErr() ([]*TestCollection, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e TestSummaryEdges) TestCollectionOrErr() (*TestCollection, error) { + if e.TestCollection != nil { return e.TestCollection, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: testcollection.Label} } return nil, &NotLoadedError{edge: "test_collection"} } @@ -97,6 +100,8 @@ func (*TestSummary) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case testsummary.FieldOverallStatus, testsummary.FieldLabel: values[i] = new(sql.NullString) + case testsummary.ForeignKeys[0]: // test_collection_test_summary + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -178,6 +183,13 @@ func (ts *TestSummary) assignValues(columns []string, values []any) error { } else if value.Valid { ts.Label = value.String } + case testsummary.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field test_collection_test_summary", value) + } else if value.Valid { + ts.test_collection_test_summary = new(int) + *ts.test_collection_test_summary = int(value.Int64) + } default: ts.selectValues.Set(columns[i], values[i]) } @@ -262,30 +274,6 @@ func (ts *TestSummary) String() string { return builder.String() } -// NamedTestCollection returns the TestCollection named value or an error if the edge was not -// loaded in eager-loading with this name. -func (ts *TestSummary) NamedTestCollection(name string) ([]*TestCollection, error) { - if ts.Edges.namedTestCollection == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := ts.Edges.namedTestCollection[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (ts *TestSummary) appendNamedTestCollection(name string, edges ...*TestCollection) { - if ts.Edges.namedTestCollection == nil { - ts.Edges.namedTestCollection = make(map[string][]*TestCollection) - } - if len(edges) == 0 { - ts.Edges.namedTestCollection[name] = []*TestCollection{} - } else { - ts.Edges.namedTestCollection[name] = append(ts.Edges.namedTestCollection[name], edges...) - } -} - // NamedPassed returns the Passed named value or an error if the edge was not // loaded in eager-loading with this name. func (ts *TestSummary) NamedPassed(name string) ([]*TestFile, error) { diff --git a/ent/gen/ent/testsummary/testsummary.go b/ent/gen/ent/testsummary/testsummary.go index e51f8e3..2383687 100644 --- a/ent/gen/ent/testsummary/testsummary.go +++ b/ent/gen/ent/testsummary/testsummary.go @@ -45,7 +45,7 @@ const ( // Table holds the table name of the testsummary in the database. Table = "test_summaries" // TestCollectionTable is the table that holds the test_collection relation/edge. - TestCollectionTable = "test_collections" + TestCollectionTable = "test_summaries" // TestCollectionInverseTable is the table name for the TestCollection entity. // It exists in this package in order to avoid circular dependency with the "testcollection" package. TestCollectionInverseTable = "test_collections" @@ -82,6 +82,12 @@ var Columns = []string{ FieldLabel, } +// ForeignKeys holds the SQL foreign-keys that are owned by the "test_summaries" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "test_collection_test_summary", +} + // ValidColumn reports if the column name is valid (part of the table columns). 
func ValidColumn(column string) bool { for i := range Columns { @@ -89,6 +95,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -183,17 +194,10 @@ func ByLabel(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldLabel, opts...).ToFunc() } -// ByTestCollectionCount orders the results by test_collection count. -func ByTestCollectionCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTestCollectionStep(), opts...) - } -} - -// ByTestCollection orders the results by test_collection terms. -func ByTestCollection(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTestCollectionField orders the results by test_collection field. +func ByTestCollectionField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTestCollectionStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newTestCollectionStep(), sql.OrderByField(field, opts...)) } } @@ -228,7 +232,7 @@ func newTestCollectionStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TestCollectionInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TestCollectionTable, TestCollectionColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TestCollectionTable, TestCollectionColumn), ) } func newPassedStep() *sqlgraph.Step { diff --git a/ent/gen/ent/testsummary/where.go b/ent/gen/ent/testsummary/where.go index 2766657..6d6534d 100644 --- a/ent/gen/ent/testsummary/where.go +++ b/ent/gen/ent/testsummary/where.go @@ -603,7 +603,7 @@ func HasTestCollection() predicate.TestSummary { return predicate.TestSummary(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, TestCollectionTable, TestCollectionColumn), + sqlgraph.Edge(sqlgraph.O2O, true, TestCollectionTable, TestCollectionColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/testsummary_create.go b/ent/gen/ent/testsummary_create.go index 3c7d5b0..d4d98d2 100644 --- a/ent/gen/ent/testsummary_create.go +++ b/ent/gen/ent/testsummary_create.go @@ -160,19 +160,23 @@ func (tsc *TestSummaryCreate) SetNillableLabel(s *string) *TestSummaryCreate { return tsc } -// AddTestCollectionIDs adds the "test_collection" edge to the TestCollection entity by IDs. -func (tsc *TestSummaryCreate) AddTestCollectionIDs(ids ...int) *TestSummaryCreate { - tsc.mutation.AddTestCollectionIDs(ids...) +// SetTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID. +func (tsc *TestSummaryCreate) SetTestCollectionID(id int) *TestSummaryCreate { + tsc.mutation.SetTestCollectionID(id) return tsc } -// AddTestCollection adds the "test_collection" edges to the TestCollection entity. -func (tsc *TestSummaryCreate) AddTestCollection(t ...*TestCollection) *TestSummaryCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID if the given value is not nil. +func (tsc *TestSummaryCreate) SetNillableTestCollectionID(id *int) *TestSummaryCreate { + if id != nil { + tsc = tsc.SetTestCollectionID(*id) } - return tsc.AddTestCollectionIDs(ids...) + return tsc +} + +// SetTestCollection sets the "test_collection" edge to the TestCollection entity. 
+func (tsc *TestSummaryCreate) SetTestCollection(t *TestCollection) *TestSummaryCreate { + return tsc.SetTestCollectionID(t.ID) } // AddPassedIDs adds the "passed" edge to the TestFile entity by IDs. @@ -321,7 +325,7 @@ func (tsc *TestSummaryCreate) createSpec() (*TestSummary, *sqlgraph.CreateSpec) } if nodes := tsc.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: testsummary.TestCollectionTable, Columns: []string{testsummary.TestCollectionColumn}, @@ -333,6 +337,7 @@ func (tsc *TestSummaryCreate) createSpec() (*TestSummary, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.test_collection_test_summary = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tsc.mutation.PassedIDs(); len(nodes) > 0 { diff --git a/ent/gen/ent/testsummary_query.go b/ent/gen/ent/testsummary_query.go index 08735eb..98ee1e4 100644 --- a/ent/gen/ent/testsummary_query.go +++ b/ent/gen/ent/testsummary_query.go @@ -20,18 +20,18 @@ import ( // TestSummaryQuery is the builder for querying TestSummary entities. type TestSummaryQuery struct { config - ctx *QueryContext - order []testsummary.OrderOption - inters []Interceptor - predicates []predicate.TestSummary - withTestCollection *TestCollectionQuery - withPassed *TestFileQuery - withFailed *TestFileQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TestSummary) error - withNamedTestCollection map[string]*TestCollectionQuery - withNamedPassed map[string]*TestFileQuery - withNamedFailed map[string]*TestFileQuery + ctx *QueryContext + order []testsummary.OrderOption + inters []Interceptor + predicates []predicate.TestSummary + withTestCollection *TestCollectionQuery + withPassed *TestFileQuery + withFailed *TestFileQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TestSummary) error + withNamedPassed map[string]*TestFileQuery + withNamedFailed map[string]*TestFileQuery // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -82,7 +82,7 @@ func (tsq *TestSummaryQuery) QueryTestCollection() *TestCollectionQuery { step := sqlgraph.NewStep( sqlgraph.From(testsummary.Table, testsummary.FieldID, selector), sqlgraph.To(testcollection.Table, testcollection.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, testsummary.TestCollectionTable, testsummary.TestCollectionColumn), + sqlgraph.Edge(sqlgraph.O2O, true, testsummary.TestCollectionTable, testsummary.TestCollectionColumn), ) fromU = sqlgraph.SetNeighbors(tsq.driver.Dialect(), step) return fromU, nil @@ -445,6 +445,7 @@ func (tsq *TestSummaryQuery) prepareQuery(ctx context.Context) error { func (tsq *TestSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TestSummary, error) { var ( nodes = []*TestSummary{} + withFKs = tsq.withFKs _spec = tsq.querySpec() loadedTypes = [3]bool{ tsq.withTestCollection != nil, @@ -452,6 +453,12 @@ func (tsq *TestSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] tsq.withFailed != nil, } ) + if tsq.withTestCollection != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, testsummary.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TestSummary).scanValues(nil, columns) } @@ -474,9 +481,8 @@ func (tsq *TestSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nodes, nil } if query := tsq.withTestCollection; query != nil { - if err := tsq.loadTestCollection(ctx, query, nodes, - func(n *TestSummary) { n.Edges.TestCollection = []*TestCollection{} }, - func(n *TestSummary, e *TestCollection) { n.Edges.TestCollection = append(n.Edges.TestCollection, e) }); err != nil { + if err := tsq.loadTestCollection(ctx, query, nodes, nil, + func(n *TestSummary, e *TestCollection) { n.Edges.TestCollection = e }); err != nil { return nil, err } } @@ -494,13 +500,6 @@ func (tsq *TestSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nil, err } } - for name, query := range tsq.withNamedTestCollection { - if err := tsq.loadTestCollection(ctx, query, nodes, - func(n *TestSummary) { n.appendNamedTestCollection(name) }, - func(n *TestSummary, e *TestCollection) { n.appendNamedTestCollection(name, e) }); err != nil { - return nil, err - } - } for name, query := range tsq.withNamedPassed { if err := tsq.loadPassed(ctx, query, nodes, func(n *TestSummary) { n.appendNamedPassed(name) }, @@ -524,33 +523,34 @@ func (tsq *TestSummaryQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] } func (tsq *TestSummaryQuery) loadTestCollection(ctx context.Context, query *TestCollectionQuery, nodes []*TestSummary, init func(*TestSummary), assign func(*TestSummary, *TestCollection)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*TestSummary) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TestSummary) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if nodes[i].test_collection_test_summary == nil { + continue + } + fk := *nodes[i].test_collection_test_summary + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.TestCollection(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(testsummary.TestCollectionColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(testcollection.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.test_collection_test_summary - if fk == nil { - return fmt.Errorf(`foreign-key "test_collection_test_summary" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "test_collection_test_summary" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "test_collection_test_summary" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } @@ -701,20 +701,6 @@ func (tsq *TestSummaryQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTestCollection tells the query-builder to eager-load the nodes that are connected to the "test_collection" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. 
-func (tsq *TestSummaryQuery) WithNamedTestCollection(name string, opts ...func(*TestCollectionQuery)) *TestSummaryQuery { - query := (&TestCollectionClient{config: tsq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tsq.withNamedTestCollection == nil { - tsq.withNamedTestCollection = make(map[string]*TestCollectionQuery) - } - tsq.withNamedTestCollection[name] = query - return tsq -} - // WithNamedPassed tells the query-builder to eager-load the nodes that are connected to the "passed" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (tsq *TestSummaryQuery) WithNamedPassed(name string, opts ...func(*TestFileQuery)) *TestSummaryQuery { diff --git a/ent/gen/ent/testsummary_update.go b/ent/gen/ent/testsummary_update.go index 1ea2e12..8003d18 100644 --- a/ent/gen/ent/testsummary_update.go +++ b/ent/gen/ent/testsummary_update.go @@ -285,19 +285,23 @@ func (tsu *TestSummaryUpdate) ClearLabel() *TestSummaryUpdate { return tsu } -// AddTestCollectionIDs adds the "test_collection" edge to the TestCollection entity by IDs. -func (tsu *TestSummaryUpdate) AddTestCollectionIDs(ids ...int) *TestSummaryUpdate { - tsu.mutation.AddTestCollectionIDs(ids...) +// SetTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID. +func (tsu *TestSummaryUpdate) SetTestCollectionID(id int) *TestSummaryUpdate { + tsu.mutation.SetTestCollectionID(id) return tsu } -// AddTestCollection adds the "test_collection" edges to the TestCollection entity. -func (tsu *TestSummaryUpdate) AddTestCollection(t ...*TestCollection) *TestSummaryUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID if the given value is not nil. +func (tsu *TestSummaryUpdate) SetNillableTestCollectionID(id *int) *TestSummaryUpdate { + if id != nil { + tsu = tsu.SetTestCollectionID(*id) } - return tsu.AddTestCollectionIDs(ids...) + return tsu +} + +// SetTestCollection sets the "test_collection" edge to the TestCollection entity. +func (tsu *TestSummaryUpdate) SetTestCollection(t *TestCollection) *TestSummaryUpdate { + return tsu.SetTestCollectionID(t.ID) } // AddPassedIDs adds the "passed" edge to the TestFile entity by IDs. @@ -335,27 +339,12 @@ func (tsu *TestSummaryUpdate) Mutation() *TestSummaryMutation { return tsu.mutation } -// ClearTestCollection clears all "test_collection" edges to the TestCollection entity. +// ClearTestCollection clears the "test_collection" edge to the TestCollection entity. func (tsu *TestSummaryUpdate) ClearTestCollection() *TestSummaryUpdate { tsu.mutation.ClearTestCollection() return tsu } -// RemoveTestCollectionIDs removes the "test_collection" edge to TestCollection entities by IDs. -func (tsu *TestSummaryUpdate) RemoveTestCollectionIDs(ids ...int) *TestSummaryUpdate { - tsu.mutation.RemoveTestCollectionIDs(ids...) - return tsu -} - -// RemoveTestCollection removes "test_collection" edges to TestCollection entities. -func (tsu *TestSummaryUpdate) RemoveTestCollection(t ...*TestCollection) *TestSummaryUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tsu.RemoveTestCollectionIDs(ids...) -} - // ClearPassed clears all "passed" edges to the TestFile entity. 
func (tsu *TestSummaryUpdate) ClearPassed() *TestSummaryUpdate { tsu.mutation.ClearPassed() @@ -533,20 +522,7 @@ func (tsu *TestSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tsu.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: testsummary.TestCollectionTable, - Columns: []string{testsummary.TestCollectionColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tsu.mutation.RemovedTestCollectionIDs(); len(nodes) > 0 && !tsu.mutation.TestCollectionCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: testsummary.TestCollectionTable, Columns: []string{testsummary.TestCollectionColumn}, @@ -555,14 +531,11 @@ func (tsu *TestSummaryUpdate) sqlSave(ctx context.Context) (n int, err error) { IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tsu.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: testsummary.TestCollectionTable, Columns: []string{testsummary.TestCollectionColumn}, @@ -942,19 +915,23 @@ func (tsuo *TestSummaryUpdateOne) ClearLabel() *TestSummaryUpdateOne { return tsuo } -// AddTestCollectionIDs adds the "test_collection" edge to the TestCollection entity by IDs. -func (tsuo *TestSummaryUpdateOne) AddTestCollectionIDs(ids ...int) *TestSummaryUpdateOne { - tsuo.mutation.AddTestCollectionIDs(ids...) +// SetTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID. +func (tsuo *TestSummaryUpdateOne) SetTestCollectionID(id int) *TestSummaryUpdateOne { + tsuo.mutation.SetTestCollectionID(id) return tsuo } -// AddTestCollection adds the "test_collection" edges to the TestCollection entity. -func (tsuo *TestSummaryUpdateOne) AddTestCollection(t ...*TestCollection) *TestSummaryUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTestCollectionID sets the "test_collection" edge to the TestCollection entity by ID if the given value is not nil. +func (tsuo *TestSummaryUpdateOne) SetNillableTestCollectionID(id *int) *TestSummaryUpdateOne { + if id != nil { + tsuo = tsuo.SetTestCollectionID(*id) } - return tsuo.AddTestCollectionIDs(ids...) + return tsuo +} + +// SetTestCollection sets the "test_collection" edge to the TestCollection entity. +func (tsuo *TestSummaryUpdateOne) SetTestCollection(t *TestCollection) *TestSummaryUpdateOne { + return tsuo.SetTestCollectionID(t.ID) } // AddPassedIDs adds the "passed" edge to the TestFile entity by IDs. @@ -992,27 +969,12 @@ func (tsuo *TestSummaryUpdateOne) Mutation() *TestSummaryMutation { return tsuo.mutation } -// ClearTestCollection clears all "test_collection" edges to the TestCollection entity. +// ClearTestCollection clears the "test_collection" edge to the TestCollection entity. func (tsuo *TestSummaryUpdateOne) ClearTestCollection() *TestSummaryUpdateOne { tsuo.mutation.ClearTestCollection() return tsuo } -// RemoveTestCollectionIDs removes the "test_collection" edge to TestCollection entities by IDs. 
-func (tsuo *TestSummaryUpdateOne) RemoveTestCollectionIDs(ids ...int) *TestSummaryUpdateOne { - tsuo.mutation.RemoveTestCollectionIDs(ids...) - return tsuo -} - -// RemoveTestCollection removes "test_collection" edges to TestCollection entities. -func (tsuo *TestSummaryUpdateOne) RemoveTestCollection(t ...*TestCollection) *TestSummaryUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tsuo.RemoveTestCollectionIDs(ids...) -} - // ClearPassed clears all "passed" edges to the TestFile entity. func (tsuo *TestSummaryUpdateOne) ClearPassed() *TestSummaryUpdateOne { tsuo.mutation.ClearPassed() @@ -1220,20 +1182,7 @@ func (tsuo *TestSummaryUpdateOne) sqlSave(ctx context.Context) (_node *TestSumma } if tsuo.mutation.TestCollectionCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: testsummary.TestCollectionTable, - Columns: []string{testsummary.TestCollectionColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tsuo.mutation.RemovedTestCollectionIDs(); len(nodes) > 0 && !tsuo.mutation.TestCollectionCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: testsummary.TestCollectionTable, Columns: []string{testsummary.TestCollectionColumn}, @@ -1242,14 +1191,11 @@ func (tsuo *TestSummaryUpdateOne) sqlSave(ctx context.Context) (_node *TestSumma IDSpec: sqlgraph.NewFieldSpec(testcollection.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tsuo.mutation.TestCollectionIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: testsummary.TestCollectionTable, Columns: []string{testsummary.TestCollectionColumn}, diff --git a/ent/gen/ent/timingbreakdown.go b/ent/gen/ent/timingbreakdown.go index 6b9a291..4950a1c 100644 --- a/ent/gen/ent/timingbreakdown.go +++ b/ent/gen/ent/timingbreakdown.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/exectioninfo" "github.com/buildbarn/bb-portal/ent/gen/ent/timingbreakdown" ) @@ -22,14 +23,15 @@ type TimingBreakdown struct { Time string `json:"time,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TimingBreakdownQuery when eager-loading is set. - Edges TimingBreakdownEdges `json:"edges"` - selectValues sql.SelectValues + Edges TimingBreakdownEdges `json:"edges"` + exection_info_timing_breakdown *int + selectValues sql.SelectValues } // TimingBreakdownEdges holds the relations/edges for other nodes in the graph. type TimingBreakdownEdges struct { // ExecutionInfo holds the value of the execution_info edge. - ExecutionInfo []*ExectionInfo `json:"execution_info,omitempty"` + ExecutionInfo *ExectionInfo `json:"execution_info,omitempty"` // Child holds the value of the child edge. Child []*TimingChild `json:"child,omitempty"` // loadedTypes holds the information for reporting if a @@ -38,15 +40,16 @@ type TimingBreakdownEdges struct { // totalCount holds the count of the edges above. 
totalCount [2]map[string]int - namedExecutionInfo map[string][]*ExectionInfo - namedChild map[string][]*TimingChild + namedChild map[string][]*TimingChild } // ExecutionInfoOrErr returns the ExecutionInfo value or an error if the edge -// was not loaded in eager-loading. -func (e TimingBreakdownEdges) ExecutionInfoOrErr() ([]*ExectionInfo, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TimingBreakdownEdges) ExecutionInfoOrErr() (*ExectionInfo, error) { + if e.ExecutionInfo != nil { return e.ExecutionInfo, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: exectioninfo.Label} } return nil, &NotLoadedError{edge: "execution_info"} } @@ -69,6 +72,8 @@ func (*TimingBreakdown) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case timingbreakdown.FieldName, timingbreakdown.FieldTime: values[i] = new(sql.NullString) + case timingbreakdown.ForeignKeys[0]: // exection_info_timing_breakdown + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -102,6 +107,13 @@ func (tb *TimingBreakdown) assignValues(columns []string, values []any) error { } else if value.Valid { tb.Time = value.String } + case timingbreakdown.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field exection_info_timing_breakdown", value) + } else if value.Valid { + tb.exection_info_timing_breakdown = new(int) + *tb.exection_info_timing_breakdown = int(value.Int64) + } default: tb.selectValues.Set(columns[i], values[i]) } @@ -157,30 +169,6 @@ func (tb *TimingBreakdown) String() string { return builder.String() } -// NamedExecutionInfo returns the ExecutionInfo named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tb *TimingBreakdown) NamedExecutionInfo(name string) ([]*ExectionInfo, error) { - if tb.Edges.namedExecutionInfo == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tb.Edges.namedExecutionInfo[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tb *TimingBreakdown) appendNamedExecutionInfo(name string, edges ...*ExectionInfo) { - if tb.Edges.namedExecutionInfo == nil { - tb.Edges.namedExecutionInfo = make(map[string][]*ExectionInfo) - } - if len(edges) == 0 { - tb.Edges.namedExecutionInfo[name] = []*ExectionInfo{} - } else { - tb.Edges.namedExecutionInfo[name] = append(tb.Edges.namedExecutionInfo[name], edges...) - } -} - // NamedChild returns the Child named value or an error if the edge was not // loaded in eager-loading with this name. func (tb *TimingBreakdown) NamedChild(name string) ([]*TimingChild, error) { diff --git a/ent/gen/ent/timingbreakdown/timingbreakdown.go b/ent/gen/ent/timingbreakdown/timingbreakdown.go index 211df71..ee4782a 100644 --- a/ent/gen/ent/timingbreakdown/timingbreakdown.go +++ b/ent/gen/ent/timingbreakdown/timingbreakdown.go @@ -23,17 +23,19 @@ const ( // Table holds the table name of the timingbreakdown in the database. Table = "timing_breakdowns" // ExecutionInfoTable is the table that holds the execution_info relation/edge. - ExecutionInfoTable = "exection_infos" + ExecutionInfoTable = "timing_breakdowns" // ExecutionInfoInverseTable is the table name for the ExectionInfo entity. // It exists in this package in order to avoid circular dependency with the "exectioninfo" package. 
ExecutionInfoInverseTable = "exection_infos" // ExecutionInfoColumn is the table column denoting the execution_info relation/edge. ExecutionInfoColumn = "exection_info_timing_breakdown" - // ChildTable is the table that holds the child relation/edge. The primary key declared below. - ChildTable = "timing_breakdown_child" + // ChildTable is the table that holds the child relation/edge. + ChildTable = "timing_childs" // ChildInverseTable is the table name for the TimingChild entity. // It exists in this package in order to avoid circular dependency with the "timingchild" package. ChildInverseTable = "timing_childs" + // ChildColumn is the table column denoting the child relation/edge. + ChildColumn = "timing_breakdown_child" ) // Columns holds all SQL columns for timingbreakdown fields. @@ -43,11 +45,11 @@ var Columns = []string{ FieldTime, } -var ( - // ChildPrimaryKey and ChildColumn2 are the table columns denoting the - // primary key for the child relation (M2M). - ChildPrimaryKey = []string{"timing_breakdown_id", "timing_child_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "timing_breakdowns" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "exection_info_timing_breakdown", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -56,6 +58,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -77,17 +84,10 @@ func ByTime(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldTime, opts...).ToFunc() } -// ByExecutionInfoCount orders the results by execution_info count. -func ByExecutionInfoCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newExecutionInfoStep(), opts...) - } -} - -// ByExecutionInfo orders the results by execution_info terms. -func ByExecutionInfo(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByExecutionInfoField orders the results by execution_info field. +func ByExecutionInfoField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newExecutionInfoStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newExecutionInfoStep(), sql.OrderByField(field, opts...)) } } @@ -108,13 +108,13 @@ func newExecutionInfoStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ExecutionInfoInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, ExecutionInfoTable, ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, true, ExecutionInfoTable, ExecutionInfoColumn), ) } func newChildStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(ChildInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ChildTable, ChildPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ChildTable, ChildColumn), ) } diff --git a/ent/gen/ent/timingbreakdown/where.go b/ent/gen/ent/timingbreakdown/where.go index 563c69b..0c9401b 100644 --- a/ent/gen/ent/timingbreakdown/where.go +++ b/ent/gen/ent/timingbreakdown/where.go @@ -218,7 +218,7 @@ func HasExecutionInfo() predicate.TimingBreakdown { return predicate.TimingBreakdown(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, ExecutionInfoTable, ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, true, ExecutionInfoTable, ExecutionInfoColumn), ) sqlgraph.HasNeighbors(s, step) }) @@ -241,7 +241,7 @@ func HasChild() predicate.TimingBreakdown { return predicate.TimingBreakdown(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, ChildTable, ChildPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, ChildTable, ChildColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/timingbreakdown_create.go b/ent/gen/ent/timingbreakdown_create.go index 7a98b5b..d1875ba 100644 --- a/ent/gen/ent/timingbreakdown_create.go +++ b/ent/gen/ent/timingbreakdown_create.go @@ -48,19 +48,23 @@ func (tbc *TimingBreakdownCreate) SetNillableTime(s *string) *TimingBreakdownCre return tbc } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (tbc *TimingBreakdownCreate) AddExecutionInfoIDs(ids ...int) *TimingBreakdownCreate { - tbc.mutation.AddExecutionInfoIDs(ids...) +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. +func (tbc *TimingBreakdownCreate) SetExecutionInfoID(id int) *TimingBreakdownCreate { + tbc.mutation.SetExecutionInfoID(id) return tbc } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. -func (tbc *TimingBreakdownCreate) AddExecutionInfo(e ...*ExectionInfo) *TimingBreakdownCreate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. +func (tbc *TimingBreakdownCreate) SetNillableExecutionInfoID(id *int) *TimingBreakdownCreate { + if id != nil { + tbc = tbc.SetExecutionInfoID(*id) } - return tbc.AddExecutionInfoIDs(ids...) + return tbc +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (tbc *TimingBreakdownCreate) SetExecutionInfo(e *ExectionInfo) *TimingBreakdownCreate { + return tbc.SetExecutionInfoID(e.ID) } // AddChildIDs adds the "child" edge to the TimingChild entity by IDs. 
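With the execution_info edge now unique, TimingBreakdownCreate exposes SetExecutionInfoID, SetNillableExecutionInfoID, and SetExecutionInfo in place of the removed AddExecutionInfoIDs/AddExecutionInfo. A minimal usage sketch of the new create path; the client accessor (client.TimingBreakdown) and the field values are assumptions based on the standard generated ent client, not something this patch shows directly:

package example

import (
	"context"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
)

// createBreakdown creates a TimingBreakdown and attaches it to an existing
// ExectionInfo through the now one-to-one execution_info edge.
func createBreakdown(ctx context.Context, client *ent.Client, ei *ent.ExectionInfo) (*ent.TimingBreakdown, error) {
	return client.TimingBreakdown.
		Create().
		SetName("phase").     // illustrative value
		SetTime("1.2s").      // illustrative value
		SetExecutionInfo(ei). // replaces the removed AddExecutionInfo(e ...*ExectionInfo)
		Save(ctx)
}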
@@ -148,7 +152,7 @@ func (tbc *TimingBreakdownCreate) createSpec() (*TimingBreakdown, *sqlgraph.Crea } if nodes := tbc.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingbreakdown.ExecutionInfoTable, Columns: []string{timingbreakdown.ExecutionInfoColumn}, @@ -160,14 +164,15 @@ func (tbc *TimingBreakdownCreate) createSpec() (*TimingBreakdown, *sqlgraph.Crea for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.exection_info_timing_breakdown = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } if nodes := tbc.mutation.ChildIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), diff --git a/ent/gen/ent/timingbreakdown_query.go b/ent/gen/ent/timingbreakdown_query.go index 5337f29..d9e4bc1 100644 --- a/ent/gen/ent/timingbreakdown_query.go +++ b/ent/gen/ent/timingbreakdown_query.go @@ -20,16 +20,16 @@ import ( // TimingBreakdownQuery is the builder for querying TimingBreakdown entities. type TimingBreakdownQuery struct { config - ctx *QueryContext - order []timingbreakdown.OrderOption - inters []Interceptor - predicates []predicate.TimingBreakdown - withExecutionInfo *ExectionInfoQuery - withChild *TimingChildQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TimingBreakdown) error - withNamedExecutionInfo map[string]*ExectionInfoQuery - withNamedChild map[string]*TimingChildQuery + ctx *QueryContext + order []timingbreakdown.OrderOption + inters []Interceptor + predicates []predicate.TimingBreakdown + withExecutionInfo *ExectionInfoQuery + withChild *TimingChildQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TimingBreakdown) error + withNamedChild map[string]*TimingChildQuery // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -80,7 +80,7 @@ func (tbq *TimingBreakdownQuery) QueryExecutionInfo() *ExectionInfoQuery { step := sqlgraph.NewStep( sqlgraph.From(timingbreakdown.Table, timingbreakdown.FieldID, selector), sqlgraph.To(exectioninfo.Table, exectioninfo.FieldID), - sqlgraph.Edge(sqlgraph.O2M, true, timingbreakdown.ExecutionInfoTable, timingbreakdown.ExecutionInfoColumn), + sqlgraph.Edge(sqlgraph.O2O, true, timingbreakdown.ExecutionInfoTable, timingbreakdown.ExecutionInfoColumn), ) fromU = sqlgraph.SetNeighbors(tbq.driver.Dialect(), step) return fromU, nil @@ -102,7 +102,7 @@ func (tbq *TimingBreakdownQuery) QueryChild() *TimingChildQuery { step := sqlgraph.NewStep( sqlgraph.From(timingbreakdown.Table, timingbreakdown.FieldID, selector), sqlgraph.To(timingchild.Table, timingchild.FieldID), - sqlgraph.Edge(sqlgraph.M2M, false, timingbreakdown.ChildTable, timingbreakdown.ChildPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2M, false, timingbreakdown.ChildTable, timingbreakdown.ChildColumn), ) fromU = sqlgraph.SetNeighbors(tbq.driver.Dialect(), step) return fromU, nil @@ -409,12 +409,19 @@ func (tbq *TimingBreakdownQuery) prepareQuery(ctx context.Context) error { func (tbq *TimingBreakdownQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TimingBreakdown, error) { var ( nodes = []*TimingBreakdown{} + withFKs = tbq.withFKs _spec = tbq.querySpec() loadedTypes = [2]bool{ tbq.withExecutionInfo != nil, tbq.withChild != nil, } ) + if tbq.withExecutionInfo != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, timingbreakdown.ForeignKeys...) + } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TimingBreakdown).scanValues(nil, columns) } @@ -437,9 +444,8 @@ func (tbq *TimingBreakdownQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nodes, nil } if query := tbq.withExecutionInfo; query != nil { - if err := tbq.loadExecutionInfo(ctx, query, nodes, - func(n *TimingBreakdown) { n.Edges.ExecutionInfo = []*ExectionInfo{} }, - func(n *TimingBreakdown, e *ExectionInfo) { n.Edges.ExecutionInfo = append(n.Edges.ExecutionInfo, e) }); err != nil { + if err := tbq.loadExecutionInfo(ctx, query, nodes, nil, + func(n *TimingBreakdown, e *ExectionInfo) { n.Edges.ExecutionInfo = e }); err != nil { return nil, err } } @@ -450,13 +456,6 @@ func (tbq *TimingBreakdownQuery) sqlAll(ctx context.Context, hooks ...queryHook) return nil, err } } - for name, query := range tbq.withNamedExecutionInfo { - if err := tbq.loadExecutionInfo(ctx, query, nodes, - func(n *TimingBreakdown) { n.appendNamedExecutionInfo(name) }, - func(n *TimingBreakdown, e *ExectionInfo) { n.appendNamedExecutionInfo(name, e) }); err != nil { - return nil, err - } - } for name, query := range tbq.withNamedChild { if err := tbq.loadChild(ctx, query, nodes, func(n *TimingBreakdown) { n.appendNamedChild(name) }, @@ -473,94 +472,65 @@ func (tbq *TimingBreakdownQuery) sqlAll(ctx context.Context, hooks ...queryHook) } func (tbq *TimingBreakdownQuery) loadExecutionInfo(ctx context.Context, query *ExectionInfoQuery, nodes []*TimingBreakdown, init func(*TimingBreakdown), assign func(*TimingBreakdown, *ExectionInfo)) error { - fks := make([]driver.Value, 0, len(nodes)) - nodeids := make(map[int]*TimingBreakdown) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TimingBreakdown) for i := range nodes { - fks = append(fks, nodes[i].ID) - nodeids[nodes[i].ID] = nodes[i] - if init != nil { - init(nodes[i]) + if 
nodes[i].exection_info_timing_breakdown == nil { + continue + } + fk := *nodes[i].exection_info_timing_breakdown + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.withFKs = true - query.Where(predicate.ExectionInfo(func(s *sql.Selector) { - s.Where(sql.InValues(s.C(timingbreakdown.ExecutionInfoColumn), fks...)) - })) + if len(ids) == 0 { + return nil + } + query.Where(exectioninfo.IDIn(ids...)) neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - fk := n.exection_info_timing_breakdown - if fk == nil { - return fmt.Errorf(`foreign-key "exection_info_timing_breakdown" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected referenced foreign-key "exection_info_timing_breakdown" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "exection_info_timing_breakdown" returned %v`, n.ID) + } + for i := range nodes { + assign(nodes[i], n) } - assign(node, n) } return nil } func (tbq *TimingBreakdownQuery) loadChild(ctx context.Context, query *TimingChildQuery, nodes []*TimingBreakdown, init func(*TimingBreakdown), assign func(*TimingBreakdown, *TimingChild)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TimingBreakdown) - nids := make(map[int]map[*TimingBreakdown]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node + fks := make([]driver.Value, 0, len(nodes)) + nodeids := make(map[int]*TimingBreakdown) + for i := range nodes { + fks = append(fks, nodes[i].ID) + nodeids[nodes[i].ID] = nodes[i] if init != nil { - init(node) + init(nodes[i]) } } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(timingbreakdown.ChildTable) - s.Join(joinT).On(s.C(timingchild.FieldID), joinT.C(timingbreakdown.ChildPrimaryKey[1])) - s.Where(sql.InValues(joinT.C(timingbreakdown.ChildPrimaryKey[0]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(timingbreakdown.ChildPrimaryKey[0])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err - } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TimingBreakdown]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TimingChild](ctx, query, qr, query.inters) + query.withFKs = true + query.Where(predicate.TimingChild(func(s *sql.Selector) { + s.Where(sql.InValues(s.C(timingbreakdown.ChildColumn), fks...)) + })) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] - if !ok { - return fmt.Errorf(`unexpected "child" node returned %v`, n.ID) + fk := n.timing_breakdown_child + if fk == nil { + return fmt.Errorf(`foreign-key "timing_breakdown_child" is nil for node %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + node, ok := nodeids[*fk] + if !ok { + return fmt.Errorf(`unexpected referenced foreign-key "timing_breakdown_child" returned %v for node %v`, *fk, n.ID) } + assign(node, n) } return nil } @@ -649,20 +619,6 @@ func (tbq *TimingBreakdownQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedExecutionInfo tells the query-builder to eager-load the nodes that are connected to the "execution_info" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tbq *TimingBreakdownQuery) WithNamedExecutionInfo(name string, opts ...func(*ExectionInfoQuery)) *TimingBreakdownQuery { - query := (&ExectionInfoClient{config: tbq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tbq.withNamedExecutionInfo == nil { - tbq.withNamedExecutionInfo = make(map[string]*ExectionInfoQuery) - } - tbq.withNamedExecutionInfo[name] = query - return tbq -} - // WithNamedChild tells the query-builder to eager-load the nodes that are connected to the "child" // edge with the given name. The optional arguments are used to configure the query builder of the edge. func (tbq *TimingBreakdownQuery) WithNamedChild(name string, opts ...func(*TimingChildQuery)) *TimingBreakdownQuery { diff --git a/ent/gen/ent/timingbreakdown_update.go b/ent/gen/ent/timingbreakdown_update.go index 0fa5fe7..d7e9c5f 100644 --- a/ent/gen/ent/timingbreakdown_update.go +++ b/ent/gen/ent/timingbreakdown_update.go @@ -69,19 +69,23 @@ func (tbu *TimingBreakdownUpdate) ClearTime() *TimingBreakdownUpdate { return tbu } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (tbu *TimingBreakdownUpdate) AddExecutionInfoIDs(ids ...int) *TimingBreakdownUpdate { - tbu.mutation.AddExecutionInfoIDs(ids...) +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. 
+func (tbu *TimingBreakdownUpdate) SetExecutionInfoID(id int) *TimingBreakdownUpdate { + tbu.mutation.SetExecutionInfoID(id) return tbu } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. -func (tbu *TimingBreakdownUpdate) AddExecutionInfo(e ...*ExectionInfo) *TimingBreakdownUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. +func (tbu *TimingBreakdownUpdate) SetNillableExecutionInfoID(id *int) *TimingBreakdownUpdate { + if id != nil { + tbu = tbu.SetExecutionInfoID(*id) } - return tbu.AddExecutionInfoIDs(ids...) + return tbu +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (tbu *TimingBreakdownUpdate) SetExecutionInfo(e *ExectionInfo) *TimingBreakdownUpdate { + return tbu.SetExecutionInfoID(e.ID) } // AddChildIDs adds the "child" edge to the TimingChild entity by IDs. @@ -104,27 +108,12 @@ func (tbu *TimingBreakdownUpdate) Mutation() *TimingBreakdownMutation { return tbu.mutation } -// ClearExecutionInfo clears all "execution_info" edges to the ExectionInfo entity. +// ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. func (tbu *TimingBreakdownUpdate) ClearExecutionInfo() *TimingBreakdownUpdate { tbu.mutation.ClearExecutionInfo() return tbu } -// RemoveExecutionInfoIDs removes the "execution_info" edge to ExectionInfo entities by IDs. -func (tbu *TimingBreakdownUpdate) RemoveExecutionInfoIDs(ids ...int) *TimingBreakdownUpdate { - tbu.mutation.RemoveExecutionInfoIDs(ids...) - return tbu -} - -// RemoveExecutionInfo removes "execution_info" edges to ExectionInfo entities. -func (tbu *TimingBreakdownUpdate) RemoveExecutionInfo(e ...*ExectionInfo) *TimingBreakdownUpdate { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return tbu.RemoveExecutionInfoIDs(ids...) -} - // ClearChild clears all "child" edges to the TimingChild entity. 
func (tbu *TimingBreakdownUpdate) ClearChild() *TimingBreakdownUpdate { tbu.mutation.ClearChild() @@ -196,7 +185,7 @@ func (tbu *TimingBreakdownUpdate) sqlSave(ctx context.Context) (n int, err error } if tbu.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingbreakdown.ExecutionInfoTable, Columns: []string{timingbreakdown.ExecutionInfoColumn}, @@ -207,25 +196,9 @@ func (tbu *TimingBreakdownUpdate) sqlSave(ctx context.Context) (n int, err error } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tbu.mutation.RemovedExecutionInfoIDs(); len(nodes) > 0 && !tbu.mutation.ExecutionInfoCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: timingbreakdown.ExecutionInfoTable, - Columns: []string{timingbreakdown.ExecutionInfoColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tbu.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingbreakdown.ExecutionInfoTable, Columns: []string{timingbreakdown.ExecutionInfoColumn}, @@ -241,10 +214,10 @@ func (tbu *TimingBreakdownUpdate) sqlSave(ctx context.Context) (n int, err error } if tbu.mutation.ChildCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), @@ -254,10 +227,10 @@ func (tbu *TimingBreakdownUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := tbu.mutation.RemovedChildIDs(); len(nodes) > 0 && !tbu.mutation.ChildCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), @@ -270,10 +243,10 @@ func (tbu *TimingBreakdownUpdate) sqlSave(ctx context.Context) (n int, err error } if nodes := tbu.mutation.ChildIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), @@ -344,19 +317,23 @@ func (tbuo *TimingBreakdownUpdateOne) ClearTime() *TimingBreakdownUpdateOne { return tbuo } -// AddExecutionInfoIDs adds the "execution_info" edge to the ExectionInfo entity by IDs. -func (tbuo *TimingBreakdownUpdateOne) AddExecutionInfoIDs(ids ...int) *TimingBreakdownUpdateOne { - tbuo.mutation.AddExecutionInfoIDs(ids...) +// SetExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID. +func (tbuo *TimingBreakdownUpdateOne) SetExecutionInfoID(id int) *TimingBreakdownUpdateOne { + tbuo.mutation.SetExecutionInfoID(id) return tbuo } -// AddExecutionInfo adds the "execution_info" edges to the ExectionInfo entity. 
-func (tbuo *TimingBreakdownUpdateOne) AddExecutionInfo(e ...*ExectionInfo) *TimingBreakdownUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID +// SetNillableExecutionInfoID sets the "execution_info" edge to the ExectionInfo entity by ID if the given value is not nil. +func (tbuo *TimingBreakdownUpdateOne) SetNillableExecutionInfoID(id *int) *TimingBreakdownUpdateOne { + if id != nil { + tbuo = tbuo.SetExecutionInfoID(*id) } - return tbuo.AddExecutionInfoIDs(ids...) + return tbuo +} + +// SetExecutionInfo sets the "execution_info" edge to the ExectionInfo entity. +func (tbuo *TimingBreakdownUpdateOne) SetExecutionInfo(e *ExectionInfo) *TimingBreakdownUpdateOne { + return tbuo.SetExecutionInfoID(e.ID) } // AddChildIDs adds the "child" edge to the TimingChild entity by IDs. @@ -379,27 +356,12 @@ func (tbuo *TimingBreakdownUpdateOne) Mutation() *TimingBreakdownMutation { return tbuo.mutation } -// ClearExecutionInfo clears all "execution_info" edges to the ExectionInfo entity. +// ClearExecutionInfo clears the "execution_info" edge to the ExectionInfo entity. func (tbuo *TimingBreakdownUpdateOne) ClearExecutionInfo() *TimingBreakdownUpdateOne { tbuo.mutation.ClearExecutionInfo() return tbuo } -// RemoveExecutionInfoIDs removes the "execution_info" edge to ExectionInfo entities by IDs. -func (tbuo *TimingBreakdownUpdateOne) RemoveExecutionInfoIDs(ids ...int) *TimingBreakdownUpdateOne { - tbuo.mutation.RemoveExecutionInfoIDs(ids...) - return tbuo -} - -// RemoveExecutionInfo removes "execution_info" edges to ExectionInfo entities. -func (tbuo *TimingBreakdownUpdateOne) RemoveExecutionInfo(e ...*ExectionInfo) *TimingBreakdownUpdateOne { - ids := make([]int, len(e)) - for i := range e { - ids[i] = e[i].ID - } - return tbuo.RemoveExecutionInfoIDs(ids...) -} - // ClearChild clears all "child" edges to the TimingChild entity. 
func (tbuo *TimingBreakdownUpdateOne) ClearChild() *TimingBreakdownUpdateOne { tbuo.mutation.ClearChild() @@ -501,7 +463,7 @@ func (tbuo *TimingBreakdownUpdateOne) sqlSave(ctx context.Context) (_node *Timin } if tbuo.mutation.ExecutionInfoCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingbreakdown.ExecutionInfoTable, Columns: []string{timingbreakdown.ExecutionInfoColumn}, @@ -512,25 +474,9 @@ func (tbuo *TimingBreakdownUpdateOne) sqlSave(ctx context.Context) (_node *Timin } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } - if nodes := tbuo.mutation.RemovedExecutionInfoIDs(); len(nodes) > 0 && !tbuo.mutation.ExecutionInfoCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, - Inverse: true, - Table: timingbreakdown.ExecutionInfoTable, - Columns: []string{timingbreakdown.ExecutionInfoColumn}, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(exectioninfo.FieldID, field.TypeInt), - }, - } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } if nodes := tbuo.mutation.ExecutionInfoIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.O2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingbreakdown.ExecutionInfoTable, Columns: []string{timingbreakdown.ExecutionInfoColumn}, @@ -546,10 +492,10 @@ func (tbuo *TimingBreakdownUpdateOne) sqlSave(ctx context.Context) (_node *Timin } if tbuo.mutation.ChildCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), @@ -559,10 +505,10 @@ func (tbuo *TimingBreakdownUpdateOne) sqlSave(ctx context.Context) (_node *Timin } if nodes := tbuo.mutation.RemovedChildIDs(); len(nodes) > 0 && !tbuo.mutation.ChildCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), @@ -575,10 +521,10 @@ func (tbuo *TimingBreakdownUpdateOne) sqlSave(ctx context.Context) (_node *Timin } if nodes := tbuo.mutation.ChildIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2M, Inverse: false, Table: timingbreakdown.ChildTable, - Columns: timingbreakdown.ChildPrimaryKey, + Columns: []string{timingbreakdown.ChildColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingchild.FieldID, field.TypeInt), diff --git a/ent/gen/ent/timingchild.go b/ent/gen/ent/timingchild.go index 34743a4..5172293 100644 --- a/ent/gen/ent/timingchild.go +++ b/ent/gen/ent/timingchild.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/timingbreakdown" "github.com/buildbarn/bb-portal/ent/gen/ent/timingchild" ) @@ -22,28 +23,29 @@ type TimingChild struct { Time string `json:"time,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TimingChildQuery when eager-loading is set. 
- Edges TimingChildEdges `json:"edges"` - selectValues sql.SelectValues + Edges TimingChildEdges `json:"edges"` + timing_breakdown_child *int + selectValues sql.SelectValues } // TimingChildEdges holds the relations/edges for other nodes in the graph. type TimingChildEdges struct { // TimingBreakdown holds the value of the timing_breakdown edge. - TimingBreakdown []*TimingBreakdown `json:"timing_breakdown,omitempty"` + TimingBreakdown *TimingBreakdown `json:"timing_breakdown,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedTimingBreakdown map[string][]*TimingBreakdown } // TimingBreakdownOrErr returns the TimingBreakdown value or an error if the edge -// was not loaded in eager-loading. -func (e TimingChildEdges) TimingBreakdownOrErr() ([]*TimingBreakdown, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. +func (e TimingChildEdges) TimingBreakdownOrErr() (*TimingBreakdown, error) { + if e.TimingBreakdown != nil { return e.TimingBreakdown, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: timingbreakdown.Label} } return nil, &NotLoadedError{edge: "timing_breakdown"} } @@ -57,6 +59,8 @@ func (*TimingChild) scanValues(columns []string) ([]any, error) { values[i] = new(sql.NullInt64) case timingchild.FieldName, timingchild.FieldTime: values[i] = new(sql.NullString) + case timingchild.ForeignKeys[0]: // timing_breakdown_child + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -90,6 +94,13 @@ func (tc *TimingChild) assignValues(columns []string, values []any) error { } else if value.Valid { tc.Time = value.String } + case timingchild.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field timing_breakdown_child", value) + } else if value.Valid { + tc.timing_breakdown_child = new(int) + *tc.timing_breakdown_child = int(value.Int64) + } default: tc.selectValues.Set(columns[i], values[i]) } @@ -140,29 +151,5 @@ func (tc *TimingChild) String() string { return builder.String() } -// NamedTimingBreakdown returns the TimingBreakdown named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tc *TimingChild) NamedTimingBreakdown(name string) ([]*TimingBreakdown, error) { - if tc.Edges.namedTimingBreakdown == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tc.Edges.namedTimingBreakdown[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tc *TimingChild) appendNamedTimingBreakdown(name string, edges ...*TimingBreakdown) { - if tc.Edges.namedTimingBreakdown == nil { - tc.Edges.namedTimingBreakdown = make(map[string][]*TimingBreakdown) - } - if len(edges) == 0 { - tc.Edges.namedTimingBreakdown[name] = []*TimingBreakdown{} - } else { - tc.Edges.namedTimingBreakdown[name] = append(tc.Edges.namedTimingBreakdown[name], edges...) - } -} - // TimingChilds is a parsable slice of TimingChild. type TimingChilds []*TimingChild diff --git a/ent/gen/ent/timingchild/timingchild.go b/ent/gen/ent/timingchild/timingchild.go index 0fdfc33..06a3f2b 100644 --- a/ent/gen/ent/timingchild/timingchild.go +++ b/ent/gen/ent/timingchild/timingchild.go @@ -20,11 +20,13 @@ const ( EdgeTimingBreakdown = "timing_breakdown" // Table holds the table name of the timingchild in the database. 
Table = "timing_childs" - // TimingBreakdownTable is the table that holds the timing_breakdown relation/edge. The primary key declared below. - TimingBreakdownTable = "timing_breakdown_child" + // TimingBreakdownTable is the table that holds the timing_breakdown relation/edge. + TimingBreakdownTable = "timing_childs" // TimingBreakdownInverseTable is the table name for the TimingBreakdown entity. // It exists in this package in order to avoid circular dependency with the "timingbreakdown" package. TimingBreakdownInverseTable = "timing_breakdowns" + // TimingBreakdownColumn is the table column denoting the timing_breakdown relation/edge. + TimingBreakdownColumn = "timing_breakdown_child" ) // Columns holds all SQL columns for timingchild fields. @@ -34,11 +36,11 @@ var Columns = []string{ FieldTime, } -var ( - // TimingBreakdownPrimaryKey and TimingBreakdownColumn2 are the table columns denoting the - // primary key for the timing_breakdown relation (M2M). - TimingBreakdownPrimaryKey = []string{"timing_breakdown_id", "timing_child_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "timing_childs" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "timing_breakdown_child", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -47,6 +49,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -68,23 +75,16 @@ func ByTime(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldTime, opts...).ToFunc() } -// ByTimingBreakdownCount orders the results by timing_breakdown count. -func ByTimingBreakdownCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newTimingBreakdownStep(), opts...) - } -} - -// ByTimingBreakdown orders the results by timing_breakdown terms. -func ByTimingBreakdown(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByTimingBreakdownField orders the results by timing_breakdown field. +func ByTimingBreakdownField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newTimingBreakdownStep(), append([]sql.OrderTerm{term}, terms...)...) 
+ sqlgraph.OrderByNeighborTerms(s, newTimingBreakdownStep(), sql.OrderByField(field, opts...)) } } func newTimingBreakdownStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(TimingBreakdownInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, TimingBreakdownTable, TimingBreakdownPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, TimingBreakdownTable, TimingBreakdownColumn), ) } diff --git a/ent/gen/ent/timingchild/where.go b/ent/gen/ent/timingchild/where.go index 382b73d..f76aac4 100644 --- a/ent/gen/ent/timingchild/where.go +++ b/ent/gen/ent/timingchild/where.go @@ -218,7 +218,7 @@ func HasTimingBreakdown() predicate.TimingChild { return predicate.TimingChild(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, TimingBreakdownTable, TimingBreakdownPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, TimingBreakdownTable, TimingBreakdownColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/timingchild_create.go b/ent/gen/ent/timingchild_create.go index 52172ee..3b46f35 100644 --- a/ent/gen/ent/timingchild_create.go +++ b/ent/gen/ent/timingchild_create.go @@ -47,19 +47,23 @@ func (tcc *TimingChildCreate) SetNillableTime(s *string) *TimingChildCreate { return tcc } -// AddTimingBreakdownIDs adds the "timing_breakdown" edge to the TimingBreakdown entity by IDs. -func (tcc *TimingChildCreate) AddTimingBreakdownIDs(ids ...int) *TimingChildCreate { - tcc.mutation.AddTimingBreakdownIDs(ids...) +// SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. +func (tcc *TimingChildCreate) SetTimingBreakdownID(id int) *TimingChildCreate { + tcc.mutation.SetTimingBreakdownID(id) return tcc } -// AddTimingBreakdown adds the "timing_breakdown" edges to the TimingBreakdown entity. -func (tcc *TimingChildCreate) AddTimingBreakdown(t ...*TimingBreakdown) *TimingChildCreate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID if the given value is not nil. +func (tcc *TimingChildCreate) SetNillableTimingBreakdownID(id *int) *TimingChildCreate { + if id != nil { + tcc = tcc.SetTimingBreakdownID(*id) } - return tcc.AddTimingBreakdownIDs(ids...) + return tcc +} + +// SetTimingBreakdown sets the "timing_breakdown" edge to the TimingBreakdown entity. +func (tcc *TimingChildCreate) SetTimingBreakdown(t *TimingBreakdown) *TimingChildCreate { + return tcc.SetTimingBreakdownID(t.ID) } // Mutation returns the TimingChildMutation object of the builder. 
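The timing_breakdown edge on TimingChild follows the same pattern: the create builder now takes a single parent via SetTimingBreakdown/SetTimingBreakdownID instead of the removed AddTimingBreakdown variadic form. A short sketch under the same assumptions as the earlier one (standard generated ent client; illustrative field values):

package example

import (
	"context"

	"github.com/buildbarn/bb-portal/ent/gen/ent"
)

// createChild attaches a new TimingChild to its parent TimingBreakdown via the
// unique timing_breakdown edge.
func createChild(ctx context.Context, client *ent.Client, tb *ent.TimingBreakdown) (*ent.TimingChild, error) {
	return client.TimingChild.
		Create().
		SetName("subphase").    // illustrative value
		SetTime("300ms").       // illustrative value
		SetTimingBreakdown(tb). // or SetTimingBreakdownID(tb.ID)
		Save(ctx)
}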
@@ -132,10 +136,10 @@ func (tcc *TimingChildCreate) createSpec() (*TimingChild, *sqlgraph.CreateSpec) } if nodes := tcc.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, + Columns: []string{timingchild.TimingBreakdownColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), @@ -144,6 +148,7 @@ func (tcc *TimingChildCreate) createSpec() (*TimingChild, *sqlgraph.CreateSpec) for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.timing_breakdown_child = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/timingchild_query.go b/ent/gen/ent/timingchild_query.go index 411990d..d972d4d 100644 --- a/ent/gen/ent/timingchild_query.go +++ b/ent/gen/ent/timingchild_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // TimingChildQuery is the builder for querying TimingChild entities. type TimingChildQuery struct { config - ctx *QueryContext - order []timingchild.OrderOption - inters []Interceptor - predicates []predicate.TimingChild - withTimingBreakdown *TimingBreakdownQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TimingChild) error - withNamedTimingBreakdown map[string]*TimingBreakdownQuery + ctx *QueryContext + order []timingchild.OrderOption + inters []Interceptor + predicates []predicate.TimingChild + withTimingBreakdown *TimingBreakdownQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TimingChild) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (tcq *TimingChildQuery) QueryTimingBreakdown() *TimingBreakdownQuery { step := sqlgraph.NewStep( sqlgraph.From(timingchild.Table, timingchild.FieldID, selector), sqlgraph.To(timingbreakdown.Table, timingbreakdown.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, timingchild.TimingBreakdownTable, timingchild.TimingBreakdownPrimaryKey...), + sqlgraph.Edge(sqlgraph.M2O, true, timingchild.TimingBreakdownTable, timingchild.TimingBreakdownColumn), ) fromU = sqlgraph.SetNeighbors(tcq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (tcq *TimingChildQuery) prepareQuery(ctx context.Context) error { func (tcq *TimingChildQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TimingChild, error) { var ( nodes = []*TimingChild{} + withFKs = tcq.withFKs _spec = tcq.querySpec() loadedTypes = [1]bool{ tcq.withTimingBreakdown != nil, } ) + if tcq.withTimingBreakdown != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, timingchild.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TimingChild).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (tcq *TimingChildQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] return nodes, nil } if query := tcq.withTimingBreakdown; query != nil { - if err := tcq.loadTimingBreakdown(ctx, query, nodes, - func(n *TimingChild) { n.Edges.TimingBreakdown = []*TimingBreakdown{} }, - func(n *TimingChild, e *TimingBreakdown) { n.Edges.TimingBreakdown = append(n.Edges.TimingBreakdown, e) }); err != nil { - return nil, err - } - } - for name, query := range tcq.withNamedTimingBreakdown { - if err := tcq.loadTimingBreakdown(ctx, query, nodes, - func(n *TimingChild) { n.appendNamedTimingBreakdown(name) }, - func(n *TimingChild, e *TimingBreakdown) { n.appendNamedTimingBreakdown(name, e) }); err != nil { + if err := tcq.loadTimingBreakdown(ctx, query, nodes, nil, + func(n *TimingChild, e *TimingBreakdown) { n.Edges.TimingBreakdown = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (tcq *TimingChildQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([] } func (tcq *TimingChildQuery) loadTimingBreakdown(ctx context.Context, query *TimingBreakdownQuery, nodes []*TimingChild, init func(*TimingChild), assign func(*TimingChild, *TimingBreakdown)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TimingChild) - nids := make(map[int]map[*TimingChild]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TimingChild) + for i := range nodes { + if nodes[i].timing_breakdown_child == nil { + continue } + fk := *nodes[i].timing_breakdown_child + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(timingchild.TimingBreakdownTable) - s.Join(joinT).On(s.C(timingbreakdown.FieldID), joinT.C(timingchild.TimingBreakdownPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(timingchild.TimingBreakdownPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(timingchild.TimingBreakdownPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TimingChild]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*TimingBreakdown](ctx, query, qr, query.inters) + query.Where(timingbreakdown.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "timing_breakdown" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "timing_breakdown_child" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (tcq *TimingChildQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedTimingBreakdown tells the query-builder to eager-load the nodes that are connected to the "timing_breakdown" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tcq *TimingChildQuery) WithNamedTimingBreakdown(name string, opts ...func(*TimingBreakdownQuery)) *TimingChildQuery { - query := (&TimingBreakdownClient{config: tcq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tcq.withNamedTimingBreakdown == nil { - tcq.withNamedTimingBreakdown = make(map[string]*TimingBreakdownQuery) - } - tcq.withNamedTimingBreakdown[name] = query - return tcq -} - // TimingChildGroupBy is the group-by builder for TimingChild entities. type TimingChildGroupBy struct { selector diff --git a/ent/gen/ent/timingchild_update.go b/ent/gen/ent/timingchild_update.go index 00d9967..4303eac 100644 --- a/ent/gen/ent/timingchild_update.go +++ b/ent/gen/ent/timingchild_update.go @@ -68,19 +68,23 @@ func (tcu *TimingChildUpdate) ClearTime() *TimingChildUpdate { return tcu } -// AddTimingBreakdownIDs adds the "timing_breakdown" edge to the TimingBreakdown entity by IDs. -func (tcu *TimingChildUpdate) AddTimingBreakdownIDs(ids ...int) *TimingChildUpdate { - tcu.mutation.AddTimingBreakdownIDs(ids...) +// SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. +func (tcu *TimingChildUpdate) SetTimingBreakdownID(id int) *TimingChildUpdate { + tcu.mutation.SetTimingBreakdownID(id) return tcu } -// AddTimingBreakdown adds the "timing_breakdown" edges to the TimingBreakdown entity. -func (tcu *TimingChildUpdate) AddTimingBreakdown(t ...*TimingBreakdown) *TimingChildUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID if the given value is not nil. 
+func (tcu *TimingChildUpdate) SetNillableTimingBreakdownID(id *int) *TimingChildUpdate { + if id != nil { + tcu = tcu.SetTimingBreakdownID(*id) } - return tcu.AddTimingBreakdownIDs(ids...) + return tcu +} + +// SetTimingBreakdown sets the "timing_breakdown" edge to the TimingBreakdown entity. +func (tcu *TimingChildUpdate) SetTimingBreakdown(t *TimingBreakdown) *TimingChildUpdate { + return tcu.SetTimingBreakdownID(t.ID) } // Mutation returns the TimingChildMutation object of the builder. @@ -88,27 +92,12 @@ func (tcu *TimingChildUpdate) Mutation() *TimingChildMutation { return tcu.mutation } -// ClearTimingBreakdown clears all "timing_breakdown" edges to the TimingBreakdown entity. +// ClearTimingBreakdown clears the "timing_breakdown" edge to the TimingBreakdown entity. func (tcu *TimingChildUpdate) ClearTimingBreakdown() *TimingChildUpdate { tcu.mutation.ClearTimingBreakdown() return tcu } -// RemoveTimingBreakdownIDs removes the "timing_breakdown" edge to TimingBreakdown entities by IDs. -func (tcu *TimingChildUpdate) RemoveTimingBreakdownIDs(ids ...int) *TimingChildUpdate { - tcu.mutation.RemoveTimingBreakdownIDs(ids...) - return tcu -} - -// RemoveTimingBreakdown removes "timing_breakdown" edges to TimingBreakdown entities. -func (tcu *TimingChildUpdate) RemoveTimingBreakdown(t ...*TimingBreakdown) *TimingChildUpdate { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcu.RemoveTimingBreakdownIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (tcu *TimingChildUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, tcu.sqlSave, tcu.mutation, tcu.hooks) @@ -159,39 +148,23 @@ func (tcu *TimingChildUpdate) sqlSave(ctx context.Context) (n int, err error) { } if tcu.mutation.TimingBreakdownCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tcu.mutation.RemovedTimingBreakdownIDs(); len(nodes) > 0 && !tcu.mutation.TimingBreakdownCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, + Columns: []string{timingchild.TimingBreakdownColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tcu.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, + Columns: []string{timingchild.TimingBreakdownColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), @@ -262,19 +235,23 @@ func (tcuo *TimingChildUpdateOne) ClearTime() *TimingChildUpdateOne { return tcuo } -// AddTimingBreakdownIDs adds the "timing_breakdown" edge to the TimingBreakdown entity by IDs. -func (tcuo *TimingChildUpdateOne) AddTimingBreakdownIDs(ids ...int) *TimingChildUpdateOne { - tcuo.mutation.AddTimingBreakdownIDs(ids...) 
+// SetTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID. +func (tcuo *TimingChildUpdateOne) SetTimingBreakdownID(id int) *TimingChildUpdateOne { + tcuo.mutation.SetTimingBreakdownID(id) return tcuo } -// AddTimingBreakdown adds the "timing_breakdown" edges to the TimingBreakdown entity. -func (tcuo *TimingChildUpdateOne) AddTimingBreakdown(t ...*TimingBreakdown) *TimingChildUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID +// SetNillableTimingBreakdownID sets the "timing_breakdown" edge to the TimingBreakdown entity by ID if the given value is not nil. +func (tcuo *TimingChildUpdateOne) SetNillableTimingBreakdownID(id *int) *TimingChildUpdateOne { + if id != nil { + tcuo = tcuo.SetTimingBreakdownID(*id) } - return tcuo.AddTimingBreakdownIDs(ids...) + return tcuo +} + +// SetTimingBreakdown sets the "timing_breakdown" edge to the TimingBreakdown entity. +func (tcuo *TimingChildUpdateOne) SetTimingBreakdown(t *TimingBreakdown) *TimingChildUpdateOne { + return tcuo.SetTimingBreakdownID(t.ID) } // Mutation returns the TimingChildMutation object of the builder. @@ -282,27 +259,12 @@ func (tcuo *TimingChildUpdateOne) Mutation() *TimingChildMutation { return tcuo.mutation } -// ClearTimingBreakdown clears all "timing_breakdown" edges to the TimingBreakdown entity. +// ClearTimingBreakdown clears the "timing_breakdown" edge to the TimingBreakdown entity. func (tcuo *TimingChildUpdateOne) ClearTimingBreakdown() *TimingChildUpdateOne { tcuo.mutation.ClearTimingBreakdown() return tcuo } -// RemoveTimingBreakdownIDs removes the "timing_breakdown" edge to TimingBreakdown entities by IDs. -func (tcuo *TimingChildUpdateOne) RemoveTimingBreakdownIDs(ids ...int) *TimingChildUpdateOne { - tcuo.mutation.RemoveTimingBreakdownIDs(ids...) - return tcuo -} - -// RemoveTimingBreakdown removes "timing_breakdown" edges to TimingBreakdown entities. -func (tcuo *TimingChildUpdateOne) RemoveTimingBreakdown(t ...*TimingBreakdown) *TimingChildUpdateOne { - ids := make([]int, len(t)) - for i := range t { - ids[i] = t[i].ID - } - return tcuo.RemoveTimingBreakdownIDs(ids...) -} - // Where appends a list predicates to the TimingChildUpdate builder. func (tcuo *TimingChildUpdateOne) Where(ps ...predicate.TimingChild) *TimingChildUpdateOne { tcuo.mutation.Where(ps...) 
@@ -383,39 +345,23 @@ func (tcuo *TimingChildUpdateOne) sqlSave(ctx context.Context) (_node *TimingChi } if tcuo.mutation.TimingBreakdownCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tcuo.mutation.RemovedTimingBreakdownIDs(); len(nodes) > 0 && !tcuo.mutation.TimingBreakdownCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, + Columns: []string{timingchild.TimingBreakdownColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tcuo.mutation.TimingBreakdownIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.M2O, Inverse: true, Table: timingchild.TimingBreakdownTable, - Columns: timingchild.TimingBreakdownPrimaryKey, + Columns: []string{timingchild.TimingBreakdownColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(timingbreakdown.FieldID, field.TypeInt), diff --git a/ent/gen/ent/timingmetrics.go b/ent/gen/ent/timingmetrics.go index 4fcd0f8..c8aec2e 100644 --- a/ent/gen/ent/timingmetrics.go +++ b/ent/gen/ent/timingmetrics.go @@ -8,6 +8,7 @@ import ( "entgo.io/ent" "entgo.io/ent/dialect/sql" + "github.com/buildbarn/bb-portal/ent/gen/ent/metrics" "github.com/buildbarn/bb-portal/ent/gen/ent/timingmetrics" ) @@ -28,28 +29,29 @@ type TimingMetrics struct { ActionsExecutionStartInMs int64 `json:"actions_execution_start_in_ms,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the TimingMetricsQuery when eager-loading is set. - Edges TimingMetricsEdges `json:"edges"` - selectValues sql.SelectValues + Edges TimingMetricsEdges `json:"edges"` + metrics_timing_metrics *int + selectValues sql.SelectValues } // TimingMetricsEdges holds the relations/edges for other nodes in the graph. type TimingMetricsEdges struct { // Metrics holds the value of the metrics edge. - Metrics []*Metrics `json:"metrics,omitempty"` + Metrics *Metrics `json:"metrics,omitempty"` // loadedTypes holds the information for reporting if a // type was loaded (or requested) in eager-loading or not. loadedTypes [1]bool // totalCount holds the count of the edges above. totalCount [1]map[string]int - - namedMetrics map[string][]*Metrics } // MetricsOrErr returns the Metrics value or an error if the edge -// was not loaded in eager-loading. -func (e TimingMetricsEdges) MetricsOrErr() ([]*Metrics, error) { - if e.loadedTypes[0] { +// was not loaded in eager-loading, or loaded but was not found. 
+func (e TimingMetricsEdges) MetricsOrErr() (*Metrics, error) { + if e.Metrics != nil { return e.Metrics, nil + } else if e.loadedTypes[0] { + return nil, &NotFoundError{label: metrics.Label} } return nil, &NotLoadedError{edge: "metrics"} } @@ -61,6 +63,8 @@ func (*TimingMetrics) scanValues(columns []string) ([]any, error) { switch columns[i] { case timingmetrics.FieldID, timingmetrics.FieldCPUTimeInMs, timingmetrics.FieldWallTimeInMs, timingmetrics.FieldAnalysisPhaseTimeInMs, timingmetrics.FieldExecutionPhaseTimeInMs, timingmetrics.FieldActionsExecutionStartInMs: values[i] = new(sql.NullInt64) + case timingmetrics.ForeignKeys[0]: // metrics_timing_metrics + values[i] = new(sql.NullInt64) default: values[i] = new(sql.UnknownType) } @@ -112,6 +116,13 @@ func (tm *TimingMetrics) assignValues(columns []string, values []any) error { } else if value.Valid { tm.ActionsExecutionStartInMs = value.Int64 } + case timingmetrics.ForeignKeys[0]: + if value, ok := values[i].(*sql.NullInt64); !ok { + return fmt.Errorf("unexpected type %T for edge-field metrics_timing_metrics", value) + } else if value.Valid { + tm.metrics_timing_metrics = new(int) + *tm.metrics_timing_metrics = int(value.Int64) + } default: tm.selectValues.Set(columns[i], values[i]) } @@ -171,29 +182,5 @@ func (tm *TimingMetrics) String() string { return builder.String() } -// NamedMetrics returns the Metrics named value or an error if the edge was not -// loaded in eager-loading with this name. -func (tm *TimingMetrics) NamedMetrics(name string) ([]*Metrics, error) { - if tm.Edges.namedMetrics == nil { - return nil, &NotLoadedError{edge: name} - } - nodes, ok := tm.Edges.namedMetrics[name] - if !ok { - return nil, &NotLoadedError{edge: name} - } - return nodes, nil -} - -func (tm *TimingMetrics) appendNamedMetrics(name string, edges ...*Metrics) { - if tm.Edges.namedMetrics == nil { - tm.Edges.namedMetrics = make(map[string][]*Metrics) - } - if len(edges) == 0 { - tm.Edges.namedMetrics[name] = []*Metrics{} - } else { - tm.Edges.namedMetrics[name] = append(tm.Edges.namedMetrics[name], edges...) - } -} - // TimingMetricsSlice is a parsable slice of TimingMetrics. type TimingMetricsSlice []*TimingMetrics diff --git a/ent/gen/ent/timingmetrics/timingmetrics.go b/ent/gen/ent/timingmetrics/timingmetrics.go index c7760b6..2d17c54 100644 --- a/ent/gen/ent/timingmetrics/timingmetrics.go +++ b/ent/gen/ent/timingmetrics/timingmetrics.go @@ -26,11 +26,13 @@ const ( EdgeMetrics = "metrics" // Table holds the table name of the timingmetrics in the database. Table = "timing_metrics" - // MetricsTable is the table that holds the metrics relation/edge. The primary key declared below. - MetricsTable = "metrics_timing_metrics" + // MetricsTable is the table that holds the metrics relation/edge. + MetricsTable = "timing_metrics" // MetricsInverseTable is the table name for the Metrics entity. // It exists in this package in order to avoid circular dependency with the "metrics" package. MetricsInverseTable = "metrics" + // MetricsColumn is the table column denoting the metrics relation/edge. + MetricsColumn = "metrics_timing_metrics" ) // Columns holds all SQL columns for timingmetrics fields. @@ -43,11 +45,11 @@ var Columns = []string{ FieldActionsExecutionStartInMs, } -var ( - // MetricsPrimaryKey and MetricsColumn2 are the table columns denoting the - // primary key for the metrics relation (M2M). 
- MetricsPrimaryKey = []string{"metrics_id", "timing_metrics_id"} -) +// ForeignKeys holds the SQL foreign-keys that are owned by the "timing_metrics" +// table and are not defined as standalone fields in the schema. +var ForeignKeys = []string{ + "metrics_timing_metrics", +} // ValidColumn reports if the column name is valid (part of the table columns). func ValidColumn(column string) bool { @@ -56,6 +58,11 @@ func ValidColumn(column string) bool { return true } } + for i := range ForeignKeys { + if column == ForeignKeys[i] { + return true + } + } return false } @@ -92,23 +99,16 @@ func ByActionsExecutionStartInMs(opts ...sql.OrderTermOption) OrderOption { return sql.OrderByField(FieldActionsExecutionStartInMs, opts...).ToFunc() } -// ByMetricsCount orders the results by metrics count. -func ByMetricsCount(opts ...sql.OrderTermOption) OrderOption { - return func(s *sql.Selector) { - sqlgraph.OrderByNeighborsCount(s, newMetricsStep(), opts...) - } -} - -// ByMetrics orders the results by metrics terms. -func ByMetrics(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption { +// ByMetricsField orders the results by metrics field. +func ByMetricsField(field string, opts ...sql.OrderTermOption) OrderOption { return func(s *sql.Selector) { - sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), append([]sql.OrderTerm{term}, terms...)...) + sqlgraph.OrderByNeighborTerms(s, newMetricsStep(), sql.OrderByField(field, opts...)) } } func newMetricsStep() *sqlgraph.Step { return sqlgraph.NewStep( sqlgraph.From(Table, FieldID), sqlgraph.To(MetricsInverseTable, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) } diff --git a/ent/gen/ent/timingmetrics/where.go b/ent/gen/ent/timingmetrics/where.go index cbe7748..bf33a7a 100644 --- a/ent/gen/ent/timingmetrics/where.go +++ b/ent/gen/ent/timingmetrics/where.go @@ -333,7 +333,7 @@ func HasMetrics() predicate.TimingMetrics { return predicate.TimingMetrics(func(s *sql.Selector) { step := sqlgraph.NewStep( sqlgraph.From(Table, FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, MetricsTable, MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, MetricsTable, MetricsColumn), ) sqlgraph.HasNeighbors(s, step) }) diff --git a/ent/gen/ent/timingmetrics_create.go b/ent/gen/ent/timingmetrics_create.go index 1dffcfa..452a2df 100644 --- a/ent/gen/ent/timingmetrics_create.go +++ b/ent/gen/ent/timingmetrics_create.go @@ -89,19 +89,23 @@ func (tmc *TimingMetricsCreate) SetNillableActionsExecutionStartInMs(i *int64) * return tmc } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmc *TimingMetricsCreate) AddMetricIDs(ids ...int) *TimingMetricsCreate { - tmc.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmc *TimingMetricsCreate) SetMetricsID(id int) *TimingMetricsCreate { + tmc.mutation.SetMetricsID(id) return tmc } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmc *TimingMetricsCreate) AddMetrics(m ...*Metrics) *TimingMetricsCreate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (tmc *TimingMetricsCreate) SetNillableMetricsID(id *int) *TimingMetricsCreate { + if id != nil { + tmc = tmc.SetMetricsID(*id) } - return tmc.AddMetricIDs(ids...) + return tmc +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. 
+func (tmc *TimingMetricsCreate) SetMetrics(m *Metrics) *TimingMetricsCreate { + return tmc.SetMetricsID(m.ID) } // Mutation returns the TimingMetricsMutation object of the builder. @@ -186,10 +190,10 @@ func (tmc *TimingMetricsCreate) createSpec() (*TimingMetrics, *sqlgraph.CreateSp } if nodes := tmc.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, + Columns: []string{timingmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -198,6 +202,7 @@ func (tmc *TimingMetricsCreate) createSpec() (*TimingMetrics, *sqlgraph.CreateSp for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } + _node.metrics_timing_metrics = &nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/ent/gen/ent/timingmetrics_query.go b/ent/gen/ent/timingmetrics_query.go index 6222832..a4f4eb1 100644 --- a/ent/gen/ent/timingmetrics_query.go +++ b/ent/gen/ent/timingmetrics_query.go @@ -4,7 +4,6 @@ package ent import ( "context" - "database/sql/driver" "fmt" "math" @@ -19,14 +18,14 @@ import ( // TimingMetricsQuery is the builder for querying TimingMetrics entities. type TimingMetricsQuery struct { config - ctx *QueryContext - order []timingmetrics.OrderOption - inters []Interceptor - predicates []predicate.TimingMetrics - withMetrics *MetricsQuery - modifiers []func(*sql.Selector) - loadTotal []func(context.Context, []*TimingMetrics) error - withNamedMetrics map[string]*MetricsQuery + ctx *QueryContext + order []timingmetrics.OrderOption + inters []Interceptor + predicates []predicate.TimingMetrics + withMetrics *MetricsQuery + withFKs bool + modifiers []func(*sql.Selector) + loadTotal []func(context.Context, []*TimingMetrics) error // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -77,7 +76,7 @@ func (tmq *TimingMetricsQuery) QueryMetrics() *MetricsQuery { step := sqlgraph.NewStep( sqlgraph.From(timingmetrics.Table, timingmetrics.FieldID, selector), sqlgraph.To(metrics.Table, metrics.FieldID), - sqlgraph.Edge(sqlgraph.M2M, true, timingmetrics.MetricsTable, timingmetrics.MetricsPrimaryKey...), + sqlgraph.Edge(sqlgraph.O2O, true, timingmetrics.MetricsTable, timingmetrics.MetricsColumn), ) fromU = sqlgraph.SetNeighbors(tmq.driver.Dialect(), step) return fromU, nil @@ -372,11 +371,18 @@ func (tmq *TimingMetricsQuery) prepareQuery(ctx context.Context) error { func (tmq *TimingMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*TimingMetrics, error) { var ( nodes = []*TimingMetrics{} + withFKs = tmq.withFKs _spec = tmq.querySpec() loadedTypes = [1]bool{ tmq.withMetrics != nil, } ) + if tmq.withMetrics != nil { + withFKs = true + } + if withFKs { + _spec.Node.Columns = append(_spec.Node.Columns, timingmetrics.ForeignKeys...) 
+ } _spec.ScanValues = func(columns []string) ([]any, error) { return (*TimingMetrics).scanValues(nil, columns) } @@ -399,16 +405,8 @@ func (tmq *TimingMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( return nodes, nil } if query := tmq.withMetrics; query != nil { - if err := tmq.loadMetrics(ctx, query, nodes, - func(n *TimingMetrics) { n.Edges.Metrics = []*Metrics{} }, - func(n *TimingMetrics, e *Metrics) { n.Edges.Metrics = append(n.Edges.Metrics, e) }); err != nil { - return nil, err - } - } - for name, query := range tmq.withNamedMetrics { - if err := tmq.loadMetrics(ctx, query, nodes, - func(n *TimingMetrics) { n.appendNamedMetrics(name) }, - func(n *TimingMetrics, e *Metrics) { n.appendNamedMetrics(name, e) }); err != nil { + if err := tmq.loadMetrics(ctx, query, nodes, nil, + func(n *TimingMetrics, e *Metrics) { n.Edges.Metrics = e }); err != nil { return nil, err } } @@ -421,62 +419,33 @@ func (tmq *TimingMetricsQuery) sqlAll(ctx context.Context, hooks ...queryHook) ( } func (tmq *TimingMetricsQuery) loadMetrics(ctx context.Context, query *MetricsQuery, nodes []*TimingMetrics, init func(*TimingMetrics), assign func(*TimingMetrics, *Metrics)) error { - edgeIDs := make([]driver.Value, len(nodes)) - byID := make(map[int]*TimingMetrics) - nids := make(map[int]map[*TimingMetrics]struct{}) - for i, node := range nodes { - edgeIDs[i] = node.ID - byID[node.ID] = node - if init != nil { - init(node) + ids := make([]int, 0, len(nodes)) + nodeids := make(map[int][]*TimingMetrics) + for i := range nodes { + if nodes[i].metrics_timing_metrics == nil { + continue } + fk := *nodes[i].metrics_timing_metrics + if _, ok := nodeids[fk]; !ok { + ids = append(ids, fk) + } + nodeids[fk] = append(nodeids[fk], nodes[i]) } - query.Where(func(s *sql.Selector) { - joinT := sql.Table(timingmetrics.MetricsTable) - s.Join(joinT).On(s.C(metrics.FieldID), joinT.C(timingmetrics.MetricsPrimaryKey[0])) - s.Where(sql.InValues(joinT.C(timingmetrics.MetricsPrimaryKey[1]), edgeIDs...)) - columns := s.SelectedColumns() - s.Select(joinT.C(timingmetrics.MetricsPrimaryKey[1])) - s.AppendSelect(columns...) 
- s.SetDistinct(false) - }) - if err := query.prepareQuery(ctx); err != nil { - return err + if len(ids) == 0 { + return nil } - qr := QuerierFunc(func(ctx context.Context, q Query) (Value, error) { - return query.sqlAll(ctx, func(_ context.Context, spec *sqlgraph.QuerySpec) { - assign := spec.Assign - values := spec.ScanValues - spec.ScanValues = func(columns []string) ([]any, error) { - values, err := values(columns[1:]) - if err != nil { - return nil, err - } - return append([]any{new(sql.NullInt64)}, values...), nil - } - spec.Assign = func(columns []string, values []any) error { - outValue := int(values[0].(*sql.NullInt64).Int64) - inValue := int(values[1].(*sql.NullInt64).Int64) - if nids[inValue] == nil { - nids[inValue] = map[*TimingMetrics]struct{}{byID[outValue]: {}} - return assign(columns[1:], values[1:]) - } - nids[inValue][byID[outValue]] = struct{}{} - return nil - } - }) - }) - neighbors, err := withInterceptors[[]*Metrics](ctx, query, qr, query.inters) + query.Where(metrics.IDIn(ids...)) + neighbors, err := query.All(ctx) if err != nil { return err } for _, n := range neighbors { - nodes, ok := nids[n.ID] + nodes, ok := nodeids[n.ID] if !ok { - return fmt.Errorf(`unexpected "metrics" node returned %v`, n.ID) + return fmt.Errorf(`unexpected foreign-key "metrics_timing_metrics" returned %v`, n.ID) } - for kn := range nodes { - assign(kn, n) + for i := range nodes { + assign(nodes[i], n) } } return nil @@ -566,20 +535,6 @@ func (tmq *TimingMetricsQuery) sqlQuery(ctx context.Context) *sql.Selector { return selector } -// WithNamedMetrics tells the query-builder to eager-load the nodes that are connected to the "metrics" -// edge with the given name. The optional arguments are used to configure the query builder of the edge. -func (tmq *TimingMetricsQuery) WithNamedMetrics(name string, opts ...func(*MetricsQuery)) *TimingMetricsQuery { - query := (&MetricsClient{config: tmq.config}).Query() - for _, opt := range opts { - opt(query) - } - if tmq.withNamedMetrics == nil { - tmq.withNamedMetrics = make(map[string]*MetricsQuery) - } - tmq.withNamedMetrics[name] = query - return tmq -} - // TimingMetricsGroupBy is the group-by builder for TimingMetrics entities. type TimingMetricsGroupBy struct { selector diff --git a/ent/gen/ent/timingmetrics_update.go b/ent/gen/ent/timingmetrics_update.go index adcb5d3..c7377fd 100644 --- a/ent/gen/ent/timingmetrics_update.go +++ b/ent/gen/ent/timingmetrics_update.go @@ -163,19 +163,23 @@ func (tmu *TimingMetricsUpdate) ClearActionsExecutionStartInMs() *TimingMetricsU return tmu } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmu *TimingMetricsUpdate) AddMetricIDs(ids ...int) *TimingMetricsUpdate { - tmu.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmu *TimingMetricsUpdate) SetMetricsID(id int) *TimingMetricsUpdate { + tmu.mutation.SetMetricsID(id) return tmu } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmu *TimingMetricsUpdate) AddMetrics(m ...*Metrics) *TimingMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. +func (tmu *TimingMetricsUpdate) SetNillableMetricsID(id *int) *TimingMetricsUpdate { + if id != nil { + tmu = tmu.SetMetricsID(*id) } - return tmu.AddMetricIDs(ids...) + return tmu +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. 
+func (tmu *TimingMetricsUpdate) SetMetrics(m *Metrics) *TimingMetricsUpdate { + return tmu.SetMetricsID(m.ID) } // Mutation returns the TimingMetricsMutation object of the builder. @@ -183,27 +187,12 @@ func (tmu *TimingMetricsUpdate) Mutation() *TimingMetricsMutation { return tmu.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (tmu *TimingMetricsUpdate) ClearMetrics() *TimingMetricsUpdate { tmu.mutation.ClearMetrics() return tmu } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (tmu *TimingMetricsUpdate) RemoveMetricIDs(ids ...int) *TimingMetricsUpdate { - tmu.mutation.RemoveMetricIDs(ids...) - return tmu -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (tmu *TimingMetricsUpdate) RemoveMetrics(m ...*Metrics) *TimingMetricsUpdate { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return tmu.RemoveMetricIDs(ids...) -} - // Save executes the query and returns the number of nodes affected by the update operation. func (tmu *TimingMetricsUpdate) Save(ctx context.Context) (int, error) { return withHooks(ctx, tmu.sqlSave, tmu.mutation, tmu.hooks) @@ -287,39 +276,23 @@ func (tmu *TimingMetricsUpdate) sqlSave(ctx context.Context) (n int, err error) } if tmu.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tmu.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !tmu.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, + Columns: []string{timingmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tmu.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, + Columns: []string{timingmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), @@ -485,19 +458,23 @@ func (tmuo *TimingMetricsUpdateOne) ClearActionsExecutionStartInMs() *TimingMetr return tmuo } -// AddMetricIDs adds the "metrics" edge to the Metrics entity by IDs. -func (tmuo *TimingMetricsUpdateOne) AddMetricIDs(ids ...int) *TimingMetricsUpdateOne { - tmuo.mutation.AddMetricIDs(ids...) +// SetMetricsID sets the "metrics" edge to the Metrics entity by ID. +func (tmuo *TimingMetricsUpdateOne) SetMetricsID(id int) *TimingMetricsUpdateOne { + tmuo.mutation.SetMetricsID(id) return tmuo } -// AddMetrics adds the "metrics" edges to the Metrics entity. -func (tmuo *TimingMetricsUpdateOne) AddMetrics(m ...*Metrics) *TimingMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID +// SetNillableMetricsID sets the "metrics" edge to the Metrics entity by ID if the given value is not nil. 
+func (tmuo *TimingMetricsUpdateOne) SetNillableMetricsID(id *int) *TimingMetricsUpdateOne { + if id != nil { + tmuo = tmuo.SetMetricsID(*id) } - return tmuo.AddMetricIDs(ids...) + return tmuo +} + +// SetMetrics sets the "metrics" edge to the Metrics entity. +func (tmuo *TimingMetricsUpdateOne) SetMetrics(m *Metrics) *TimingMetricsUpdateOne { + return tmuo.SetMetricsID(m.ID) } // Mutation returns the TimingMetricsMutation object of the builder. @@ -505,27 +482,12 @@ func (tmuo *TimingMetricsUpdateOne) Mutation() *TimingMetricsMutation { return tmuo.mutation } -// ClearMetrics clears all "metrics" edges to the Metrics entity. +// ClearMetrics clears the "metrics" edge to the Metrics entity. func (tmuo *TimingMetricsUpdateOne) ClearMetrics() *TimingMetricsUpdateOne { tmuo.mutation.ClearMetrics() return tmuo } -// RemoveMetricIDs removes the "metrics" edge to Metrics entities by IDs. -func (tmuo *TimingMetricsUpdateOne) RemoveMetricIDs(ids ...int) *TimingMetricsUpdateOne { - tmuo.mutation.RemoveMetricIDs(ids...) - return tmuo -} - -// RemoveMetrics removes "metrics" edges to Metrics entities. -func (tmuo *TimingMetricsUpdateOne) RemoveMetrics(m ...*Metrics) *TimingMetricsUpdateOne { - ids := make([]int, len(m)) - for i := range m { - ids[i] = m[i].ID - } - return tmuo.RemoveMetricIDs(ids...) -} - // Where appends a list predicates to the TimingMetricsUpdate builder. func (tmuo *TimingMetricsUpdateOne) Where(ps ...predicate.TimingMetrics) *TimingMetricsUpdateOne { tmuo.mutation.Where(ps...) @@ -639,39 +601,23 @@ func (tmuo *TimingMetricsUpdateOne) sqlSave(ctx context.Context) (_node *TimingM } if tmuo.mutation.MetricsCleared() { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, - Inverse: true, - Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, - Bidi: false, - Target: &sqlgraph.EdgeTarget{ - IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), - }, - } - _spec.Edges.Clear = append(_spec.Edges.Clear, edge) - } - if nodes := tmuo.mutation.RemovedMetricsIDs(); len(nodes) > 0 && !tmuo.mutation.MetricsCleared() { - edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, + Columns: []string{timingmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), }, } - for _, k := range nodes { - edge.Target.Nodes = append(edge.Target.Nodes, k) - } _spec.Edges.Clear = append(_spec.Edges.Clear, edge) } if nodes := tmuo.mutation.MetricsIDs(); len(nodes) > 0 { edge := &sqlgraph.EdgeSpec{ - Rel: sqlgraph.M2M, + Rel: sqlgraph.O2O, Inverse: true, Table: timingmetrics.MetricsTable, - Columns: timingmetrics.MetricsPrimaryKey, + Columns: []string{timingmetrics.MetricsColumn}, Bidi: false, Target: &sqlgraph.EdgeTarget{ IDSpec: sqlgraph.NewFieldSpec(metrics.FieldID, field.TypeInt), diff --git a/ent/schema/actioncachestatistics.go b/ent/schema/actioncachestatistics.go index 0ca66b9..f0949b7 100644 --- a/ent/schema/actioncachestatistics.go +++ b/ent/schema/actioncachestatistics.go @@ -38,7 +38,8 @@ func (ActionCacheStatistics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the associated action summary. edge.From("action_summary", ActionSummary.Type). - Ref("action_cache_statistics"), + Ref("action_cache_statistics"). + Unique(), // Breakdown of the cache misses based on the reasons behind them. 
edge.To("miss_details", MissDetail.Type), diff --git a/ent/schema/actiondata.go b/ent/schema/actiondata.go index 5dd1932..1c29b68 100644 --- a/ent/schema/actiondata.go +++ b/ent/schema/actiondata.go @@ -48,6 +48,7 @@ func (ActionData) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the associated action summary. edge.From("action_summary", ActionSummary.Type). - Ref("action_data"), + Ref("action_data"). + Unique(), } } diff --git a/ent/schema/actionsummary.go b/ent/schema/actionsummary.go index ba7f983..7a39fd6 100644 --- a/ent/schema/actionsummary.go +++ b/ent/schema/actionsummary.go @@ -52,6 +52,7 @@ func (ActionSummary) Edges() []ent.Edge { edge.To("runner_count", RunnerCount.Type), // Information about the action cache behavior during a single invocation. - edge.To("action_cache_statistics", ActionCacheStatistics.Type), + edge.To("action_cache_statistics", ActionCacheStatistics.Type). + Unique(), } } diff --git a/ent/schema/artifactmetrics.go b/ent/schema/artifactmetrics.go index 43b4771..e031e69 100644 --- a/ent/schema/artifactmetrics.go +++ b/ent/schema/artifactmetrics.go @@ -19,26 +19,32 @@ func (ArtifactMetrics) Fields() []ent.Field { func (ArtifactMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object - edge.From("metrics", Metrics.Type).Ref("artifact_metrics"), + edge.From("metrics", Metrics.Type). + Ref("artifact_metrics"). + Unique(), // Measures all source files newly read this build. Does not include // unchanged sources on incremental builds. - edge.To("source_artifacts_read", FilesMetric.Type), + edge.To("source_artifacts_read", FilesMetric.Type). + Unique(), // Measures all output artifacts from executed actions. This includes // actions that were cached locally (via the action cache) or remotely (via // a remote cache or executor), but does *not* include outputs of actions // that were cached internally in Skyframe. - edge.To("output_artifacts_seen", FilesMetric.Type), + edge.To("output_artifacts_seen", FilesMetric.Type). + Unique(), // Measures all output artifacts from actions that were cached locally // via the action cache. These artifacts were already present on disk at the // start of the build. Does not include Skyframe-cached actions' outputs. - edge.To("output_artifacts_from_action_cache", FilesMetric.Type), + edge.To("output_artifacts_from_action_cache", FilesMetric.Type). + Unique(), // Measures all artifacts that belong to a top-level output group. Does not // deduplicate, so if there are two top-level targets in this build that // share an artifact, it will be counted twice. - edge.To("top_level_artifacts", FilesMetric.Type), + edge.To("top_level_artifacts", FilesMetric.Type). + Unique(), } } diff --git a/ent/schema/bazelinvocation.go b/ent/schema/bazelinvocation.go index 6bf7ba8..421a3cc 100644 --- a/ent/schema/bazelinvocation.go +++ b/ent/schema/bazelinvocation.go @@ -24,7 +24,7 @@ func (BazelInvocation) Fields() []ent.Field { field.UUID("invocation_id", uuid.UUID{}).Unique().Immutable(), // Time the event started. - field.Time("started_at"), + field.Time("started_at").Annotations(entgql.OrderField("STARTED_AT")), // Time the event ended field.Time("ended_at").Optional(), @@ -54,7 +54,7 @@ func (BazelInvocation) Fields() []ent.Field { field.String("user_email").Optional(), // Ldap (username) of the user who launched the invocation if provided. - field.String("user_ldap").Optional(), + field.String("user_ldap").Optional().Annotations(entgql.OrderField("USER_LDAP")), // The full logs from the build.. 
field.String("build_logs").Optional(), diff --git a/ent/schema/buildgraphmetrics.go b/ent/schema/buildgraphmetrics.go index 1c6e32c..1695e23 100644 --- a/ent/schema/buildgraphmetrics.go +++ b/ent/schema/buildgraphmetrics.go @@ -75,35 +75,41 @@ func (BuildGraphMetrics) Fields() []ent.Field { func (BuildGraphMetrics) Edges() []ent.Edge { return []ent.Edge{ edge.From("metrics", Metrics.Type). - Ref("build_graph_metrics"), + Ref("build_graph_metrics"). + Unique(), // NOTE: these are all missing from the proto, but i'm including them here for now for completeness // Dirtied Values. // Number of SkyValues that were dirtied during the build. Dirtied nodes are // those that transitively depend on a node that changed by itself (e.g. one // representing a file in the file system) - edge.To("dirtied_values", EvaluationStat.Type), + edge.To("dirtied_values", EvaluationStat.Type). + Unique(), // Changed Values. // Number of SkyValues that changed by themselves. For example, when a file // on the file system changes, the SkyValue representing it will change. - edge.To("changed_values", EvaluationStat.Type), + edge.To("changed_values", EvaluationStat.Type). + Unique(), // Built Values. // Number of SkyValues that were built. This means that they were evaluated // and were found to have changed from their previous version. - edge.To("built_values", EvaluationStat.Type), + edge.To("built_values", EvaluationStat.Type). + Unique(), // Cleaned Values. // Number of SkyValues that were evaluated and found clean, i.e. equal to // their previous version. - edge.To("cleaned_values", EvaluationStat.Type), + edge.To("cleaned_values", EvaluationStat.Type). + Unique(), // Evaluated Values. // Number of evaluations to build SkyValues. This includes restarted // evaluations, which means there can be multiple evaluations per built // SkyValue. Subtract built_values from this number to get the number of // restarted evaluations. - edge.To("evaluated_values", EvaluationStat.Type), + edge.To("evaluated_values", EvaluationStat.Type). + Unique(), } } diff --git a/ent/schema/cumulativemetrics.go b/ent/schema/cumulativemetrics.go index dcc865b..5dc1660 100644 --- a/ent/schema/cumulativemetrics.go +++ b/ent/schema/cumulativemetrics.go @@ -32,6 +32,8 @@ func (CumulativeMetrics) Fields() []ent.Field { func (CumulativeMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metircs object. - edge.From("metrics", Metrics.Type).Ref("cumulative_metrics"), + edge.From("metrics", Metrics.Type). + Ref("cumulative_metrics"). + Unique(), } } diff --git a/ent/schema/dynamicexecutionmetrics.go b/ent/schema/dynamicexecutionmetrics.go index 3dfdb38..cc39433 100644 --- a/ent/schema/dynamicexecutionmetrics.go +++ b/ent/schema/dynamicexecutionmetrics.go @@ -19,7 +19,9 @@ func (DynamicExecutionMetrics) Fields() []ent.Field { func (DynamicExecutionMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object. - edge.From("metrics", Metrics.Type).Ref("dynamic_execution_metrics"), + edge.From("metrics", Metrics.Type). + Ref("dynamic_execution_metrics"). + Unique(), // Race statistics grouped by mnemonic, local_name, remote_name. edge.To("race_statistics", RaceStatistics.Type), diff --git a/ent/schema/evaluationstat.go b/ent/schema/evaluationstat.go index b1d3b9d..ebb2820 100644 --- a/ent/schema/evaluationstat.go +++ b/ent/schema/evaluationstat.go @@ -55,6 +55,7 @@ func (EvaluationStat) Edges() []ent.Edge { // evaluations, which means there can be multiple evaluations per built // SkyValue. 
Subtract built_values from this number to get the number of // restarted evaluations. - Ref("evaluated_values"), + Ref("evaluated_values"). + Unique(), } } diff --git a/ent/schema/executioninfo.go b/ent/schema/executioninfo.go index a7c9055..ab68e67 100644 --- a/ent/schema/executioninfo.go +++ b/ent/schema/executioninfo.go @@ -37,7 +37,9 @@ func (ExectionInfo) Fields() []ent.Field { func (ExectionInfo) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the test result - edge.From("test_result", TestResultBES.Type).Ref("execution_info"), + edge.From("test_result", TestResultBES.Type). + Ref("execution_info"). + Unique(), // Represents a hierarchical timing breakdown of an activity. // The top level time should be the total time of the activity. diff --git a/ent/schema/filesmetric.go b/ent/schema/filesmetric.go index 74b21cd..1099864 100644 --- a/ent/schema/filesmetric.go +++ b/ent/schema/filesmetric.go @@ -48,6 +48,7 @@ func (FilesMetric) Edges() []ent.Edge { // Measures all artifacts that belong to a top-level output group. Does not // deduplicate, so if there are two top-level targets in this build that // share an artifact, it will be counted twice. - Ref("top_level_artifacts"), + Ref("top_level_artifacts"). + Unique(), } } diff --git a/ent/schema/garbagemetrics.go b/ent/schema/garbagemetrics.go index df69e8d..6b0161b 100644 --- a/ent/schema/garbagemetrics.go +++ b/ent/schema/garbagemetrics.go @@ -27,6 +27,7 @@ func (GarbageMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the memory metrics object edge.From("memory_metrics", MemoryMetrics.Type). - Ref("garbage_metrics"), + Ref("garbage_metrics"). + Unique(), } } diff --git a/ent/schema/memorymetrics.go b/ent/schema/memorymetrics.go index a746136..6c26b4f 100644 --- a/ent/schema/memorymetrics.go +++ b/ent/schema/memorymetrics.go @@ -33,7 +33,8 @@ func (MemoryMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the memory metrics object edge.From("metrics", Metrics.Type). - Ref("memory_metrics"), + Ref("memory_metrics"). + Unique(), // Metrics about garbage collection edge.To("garbage_metrics", GarbageMetrics.Type), diff --git a/ent/schema/metrics.go b/ent/schema/metrics.go index 7cfaff5..88c4ae5 100644 --- a/ent/schema/metrics.go +++ b/ent/schema/metrics.go @@ -26,34 +26,44 @@ func (Metrics) Edges() []ent.Edge { Unique(), // The action summmary with details about actions executed. - edge.To("action_summary", ActionSummary.Type), + edge.To("action_summary", ActionSummary.Type). + Unique(), // Details about memory usage and garbage collections. - edge.To("memory_metrics", MemoryMetrics.Type), + edge.To("memory_metrics", MemoryMetrics.Type). + Unique(), // Target metrics. - edge.To("target_metrics", TargetMetrics.Type), + edge.To("target_metrics", TargetMetrics.Type). + Unique(), // Package metrics. - edge.To("package_metrics", PackageMetrics.Type), + edge.To("package_metrics", PackageMetrics.Type). + Unique(), // Timing metrics. - edge.To("timing_metrics", TimingMetrics.Type), + edge.To("timing_metrics", TimingMetrics.Type). + Unique(), // Cumulative metrics. - edge.To("cumulative_metrics", CumulativeMetrics.Type), + edge.To("cumulative_metrics", CumulativeMetrics.Type). + Unique(), // Artifact metrics. - edge.To("artifact_metrics", ArtifactMetrics.Type), + edge.To("artifact_metrics", ArtifactMetrics.Type). + Unique(), // Network metrics if available. - edge.To("network_metrics", NetworkMetrics.Type), + edge.To("network_metrics", NetworkMetrics.Type). + Unique(), // Dynamic execution metrics if available. 
- edge.To("dynamic_execution_metrics", DynamicExecutionMetrics.Type), + edge.To("dynamic_execution_metrics", DynamicExecutionMetrics.Type). + Unique(), // Build graph metrics. - edge.To("build_graph_metrics", BuildGraphMetrics.Type), + edge.To("build_graph_metrics", BuildGraphMetrics.Type). + Unique(), } } diff --git a/ent/schema/missdetail.go b/ent/schema/missdetail.go index 284ac93..f160e95 100644 --- a/ent/schema/missdetail.go +++ b/ent/schema/missdetail.go @@ -35,6 +35,8 @@ func (MissDetail) Fields() []ent.Field { func (MissDetail) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the action cache statistics object. - edge.From("action_cache_statistics", ActionCacheStatistics.Type).Ref("miss_details"), + edge.From("action_cache_statistics", ActionCacheStatistics.Type). + Ref("miss_details"). + Unique(), } } diff --git a/ent/schema/namedsetoffiles.go b/ent/schema/namedsetoffiles.go index 37646dd..ed56933 100644 --- a/ent/schema/namedsetoffiles.go +++ b/ent/schema/namedsetoffiles.go @@ -20,7 +20,8 @@ func (NamedSetOfFiles) Edges() []ent.Edge { return []ent.Edge{ // Edge back to output group. edge.From("output_group", OutputGroup.Type). - Ref("file_sets"), + Ref("file_sets"). + Unique(), // Files that belong to this named set of files. edge.To("files", TestFile.Type), diff --git a/ent/schema/networkmetrics.go b/ent/schema/networkmetrics.go index 88014ad..16efaa0 100644 --- a/ent/schema/networkmetrics.go +++ b/ent/schema/networkmetrics.go @@ -19,9 +19,12 @@ func (NetworkMetrics) Fields() []ent.Field { func (NetworkMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object. - edge.From("metrics", Metrics.Type).Ref("network_metrics"), + edge.From("metrics", Metrics.Type). + Ref("network_metrics"). + Unique(), // Information about host network. - edge.To("system_network_stats", SystemNetworkStats.Type), + edge.To("system_network_stats", SystemNetworkStats.Type). + Unique(), } } diff --git a/ent/schema/outputgroup.go b/ent/schema/outputgroup.go index 0380ec8..626eab8 100644 --- a/ent/schema/outputgroup.go +++ b/ent/schema/outputgroup.go @@ -29,7 +29,8 @@ func (OutputGroup) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the target completion object. edge.From("target_complete", TargetComplete.Type). - Ref("output_group"), + Ref("output_group"). + Unique(), // Inline Files. // Inlined files that belong to this output group, requested via diff --git a/ent/schema/packageloadmetrics.go b/ent/schema/packageloadmetrics.go index 8663d0b..1d617a8 100644 --- a/ent/schema/packageloadmetrics.go +++ b/ent/schema/packageloadmetrics.go @@ -39,6 +39,8 @@ func (PackageLoadMetrics) Fields() []ent.Field { func (PackageLoadMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the package metrics - edge.From("package_metrics", PackageMetrics.Type).Ref("package_load_metrics"), + edge.From("package_metrics", PackageMetrics.Type). + Ref("package_load_metrics"). + Unique(), } } diff --git a/ent/schema/packagemetrics.go b/ent/schema/packagemetrics.go index 1a1cd93..6b2488b 100644 --- a/ent/schema/packagemetrics.go +++ b/ent/schema/packagemetrics.go @@ -36,7 +36,9 @@ func (PackageMetrics) Fields() []ent.Field { func (PackageMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object. - edge.From("metrics", Metrics.Type).Ref("package_metrics"), + edge.From("metrics", Metrics.Type). + Ref("package_metrics"). + Unique(), // Loading time metrics per package. 
edge.To("package_load_metrics", PackageLoadMetrics.Type), diff --git a/ent/schema/racestatistics.go b/ent/schema/racestatistics.go index 675c797..81aa116 100644 --- a/ent/schema/racestatistics.go +++ b/ent/schema/racestatistics.go @@ -36,6 +36,8 @@ func (RaceStatistics) Fields() []ent.Field { func (RaceStatistics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the dynamic execution metrics object. - edge.From("dynamic_execution_metrics", DynamicExecutionMetrics.Type).Ref("race_statistics"), + edge.From("dynamic_execution_metrics", DynamicExecutionMetrics.Type). + Ref("race_statistics"). + Unique(), } } diff --git a/ent/schema/resourceusage.go b/ent/schema/resourceusage.go index 54a6668..5b1b9b6 100644 --- a/ent/schema/resourceusage.go +++ b/ent/schema/resourceusage.go @@ -28,6 +28,7 @@ func (ResourceUsage) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the execution info. edge.From("execution_info", ExectionInfo.Type). - Ref("resource_usage"), + Ref("resource_usage"). + Unique(), } } diff --git a/ent/schema/runnercount.go b/ent/schema/runnercount.go index 1571f41..94f8d23 100644 --- a/ent/schema/runnercount.go +++ b/ent/schema/runnercount.go @@ -32,7 +32,8 @@ func (RunnerCount) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the action summary. edge.From("action_summary", ActionSummary.Type). - Ref("runner_count"), + Ref("runner_count"). + Unique(), } } diff --git a/ent/schema/systemnetworkstats.go b/ent/schema/systemnetworkstats.go index 3c83a16..46e37fe 100644 --- a/ent/schema/systemnetworkstats.go +++ b/ent/schema/systemnetworkstats.go @@ -44,6 +44,8 @@ func (SystemNetworkStats) Fields() []ent.Field { func (SystemNetworkStats) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the network metrics object. - edge.From("network_metrics", NetworkMetrics.Type).Ref("system_network_stats").Unique(), + edge.From("network_metrics", NetworkMetrics.Type). + Ref("system_network_stats"). + Unique(), } } diff --git a/ent/schema/targetcomplete.go b/ent/schema/targetcomplete.go index 2c65723..45e65da 100644 --- a/ent/schema/targetcomplete.go +++ b/ent/schema/targetcomplete.go @@ -55,7 +55,8 @@ func (TargetComplete) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the target pair. edge.From("target_pair", TargetPair.Type). - Ref("completion"), + Ref("completion"). + Unique(), // Temporarily, also report the important outputs directly. // This is only to allow existing clients help transition to the deduplicated representation; diff --git a/ent/schema/targetconfigured.go b/ent/schema/targetconfigured.go index be8cf47..e4b9c3c 100644 --- a/ent/schema/targetconfigured.go +++ b/ent/schema/targetconfigured.go @@ -44,6 +44,7 @@ func (TargetConfigured) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the target pair. edge.From("target_pair", TargetPair.Type). - Ref("configuration"), + Ref("configuration"). + Unique(), } } diff --git a/ent/schema/targetmetrics.go b/ent/schema/targetmetrics.go index 0f79b91..653932e 100644 --- a/ent/schema/targetmetrics.go +++ b/ent/schema/targetmetrics.go @@ -35,6 +35,8 @@ func (TargetMetrics) Fields() []ent.Field { func (TargetMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object. - edge.From("metrics", Metrics.Type).Ref("target_metrics"), + edge.From("metrics", Metrics.Type). + Ref("target_metrics"). 
+ Unique(), } } diff --git a/ent/schema/targetpair.go b/ent/schema/targetpair.go index fc3fd8b..4f4e9c2 100644 --- a/ent/schema/targetpair.go +++ b/ent/schema/targetpair.go @@ -60,12 +60,15 @@ func (TargetPair) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the bazel invocation. edge.From("bazel_invocation", BazelInvocation.Type). - Ref("targets"), + Ref("targets"). + Unique(), // Edge to the target configuration object. - edge.To("configuration", TargetConfigured.Type).Unique(), + edge.To("configuration", TargetConfigured.Type). + Unique(), // Edge to the target completed object. - edge.To("completion", TargetComplete.Type).Unique(), + edge.To("completion", TargetComplete.Type). + Unique(), } } diff --git a/ent/schema/testcollection.go b/ent/schema/testcollection.go index bc43dae..9c38da9 100644 --- a/ent/schema/testcollection.go +++ b/ent/schema/testcollection.go @@ -1,7 +1,9 @@ package schema import ( + "entgo.io/contrib/entgql" "entgo.io/ent" + "entgo.io/ent/schema" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" ) @@ -15,7 +17,8 @@ type TestCollection struct { func (TestCollection) Fields() []ent.Field { return []ent.Field{ // The label associated with this test. - field.String("label").Optional(), + field.String("label"). + Optional(), // The overall status of the test. field.Enum("overall_status"). @@ -32,16 +35,26 @@ func (TestCollection) Fields() []ent.Field { Default("NO_STATUS"), // The strategy of the test. - field.String("strategy").Optional(), + field.String("strategy"). + Optional(), // If the test was cached locally. - field.Bool("cached_locally").Optional(), + field.Bool("cached_locally"). + Optional(), // If the test was cached remotely. - field.Bool("cached_remotely").Optional(), + field.Bool("cached_remotely"). + Optional(), + + field.Time("first_seen"). + Optional(). + Nillable(). + Annotations(entgql.OrderField("FIRST_SEEN")), // The test duration in milliseconds. - field.Int64("duration_ms").Optional(), + field.Int64("duration_ms"). + Optional(). + Annotations(entgql.OrderField("DURATION")), } } @@ -50,12 +63,20 @@ func (TestCollection) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the bazel invocaiton. edge.From("bazel_invocation", BazelInvocation.Type). - Ref("test_collection"), + Ref("test_collection").Unique(), // The test summary aossicated with the test. edge.To("test_summary", TestSummary.Type).Unique(), - // A collection of test results associated. + // A collection of test results associated with this collection edge.To("test_results", TestResultBES.Type), } } + +// Annotations of the Test Collection +func (TestCollection) Annotations() []schema.Annotation { + return []schema.Annotation{ + entgql.RelayConnection(), + entgql.QueryField("findTests"), + } +} diff --git a/ent/schema/testfile.go b/ent/schema/testfile.go index f273556..cdaa3a9 100644 --- a/ent/schema/testfile.go +++ b/ent/schema/testfile.go @@ -42,6 +42,8 @@ func (TestFile) Fields() []ent.Field { func (TestFile) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the test result. - edge.From("test_result", TestResultBES.Type).Ref("test_action_output"), + edge.From("test_result", TestResultBES.Type). + Ref("test_action_output"). + Unique(), } } diff --git a/ent/schema/testresult.go b/ent/schema/testresult.go index a2c7c0b..9c1f0fb 100644 --- a/ent/schema/testresult.go +++ b/ent/schema/testresult.go @@ -71,13 +71,16 @@ func (TestResultBES) Fields() []ent.Field { func (TestResultBES) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the test collection. 
- edge.From("test_collection", TestCollection.Type).Ref("test_results").Unique(), + edge.From("test_collection", TestCollection.Type). + Ref("test_results"). + Unique(), // Files (logs, test.xml, undeclared outputs, etc) generated by that test action. edge.To("test_action_output", TestFile.Type), // Message providing optional meta data on the execution of the test action, // if available. - edge.To("execution_info", ExectionInfo.Type).Unique(), + edge.To("execution_info", ExectionInfo.Type). + Unique(), } } diff --git a/ent/schema/testsummary.go b/ent/schema/testsummary.go index e1f2ce7..c7d6040 100644 --- a/ent/schema/testsummary.go +++ b/ent/schema/testsummary.go @@ -66,7 +66,8 @@ func (TestSummary) Edges() []ent.Edge { return []ent.Edge{ // Edge back tot he test collection. edge.From("test_collection", TestCollection.Type). - Ref("test_summary"), + Ref("test_summary"). + Unique(), // Path to logs of passed runs. edge.To("passed", TestFile.Type), diff --git a/ent/schema/timingbreakdown.go b/ent/schema/timingbreakdown.go index 68edda3..7f602fc 100644 --- a/ent/schema/timingbreakdown.go +++ b/ent/schema/timingbreakdown.go @@ -27,7 +27,9 @@ func (TimingBreakdown) Fields() []ent.Field { func (TimingBreakdown) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the execution info object - edge.From("execution_info", ExectionInfo.Type).Ref("timing_breakdown"), + edge.From("execution_info", ExectionInfo.Type). + Ref("timing_breakdown"). + Unique(), // Timing children (this could probably be better reempleted as a node to itself. // except the relationship to the executio info object. maybe we don't care about that? diff --git a/ent/schema/timingchild.go b/ent/schema/timingchild.go index 2d5ee01..8a6b989 100644 --- a/ent/schema/timingchild.go +++ b/ent/schema/timingchild.go @@ -26,6 +26,8 @@ func (TimingChild) Fields() []ent.Field { // Edges of TimingChild. func (TimingChild) Edges() []ent.Edge { return []ent.Edge{ - edge.From("timing_breakdown", TimingBreakdown.Type).Ref("child"), + edge.From("timing_breakdown", TimingBreakdown.Type). + Ref("child"). + Unique(), } } diff --git a/ent/schema/timingmetrics.go b/ent/schema/timingmetrics.go index 8bbec9d..40c887d 100644 --- a/ent/schema/timingmetrics.go +++ b/ent/schema/timingmetrics.go @@ -43,6 +43,8 @@ func (TimingMetrics) Fields() []ent.Field { func (TimingMetrics) Edges() []ent.Edge { return []ent.Edge{ // Edge back to the metrics object. - edge.From("metrics", Metrics.Type).Ref("timing_metrics"), + edge.From("metrics", Metrics.Type). + Ref("timing_metrics"). 
+ Unique(), } } diff --git a/frontend/.env b/frontend/.env index b015e87..26bb66d 100644 --- a/frontend/.env +++ b/frontend/.env @@ -1,3 +1,6 @@ NEXT_PUBLIC_BES_BACKEND_URL=http://localhost:8081 NEXT_PUBLIC_BES_GRPC_BACKEND_URL=grpc://localhost:8082 NEXT_PUBLIC_BROWSER_URL=https://browser.example.com +NEXT_PUBLIC_COMPANY_NAME="Example Co" +NEXT_PUBLIC_COMPANY_SLACK_CHANNEL_NAME=ExampleCoBuild +NEXT_PUBLIC_COMPANY_SLACK_CHANNEL_URL=https://exampleco.enterprise.slack.com/archives/CXXXXXXX \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..8355fb1 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,9937 @@ +{ + "name": "frontend", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.1.0", + "dependencies": { + "@apollo/client": "^3.10.2", + "@apollo/experimental-nextjs-app-support": "^0.10.0", + "ansi_up": "^6.0.2", + "antd": "^5.17.0", + "dayjs": "^1.11.11", + "framer-motion": "^11.1.9", + "graphql": "^16.8.1", + "linkify-html": "^4.1.3", + "lodash": "^4.17.21", + "next": "14.2.3", + "react": "^18", + "react-countup": "^6.5.3", + "react-dom": "^18", + "recharts": "^2.12.7", + "uuid": "^9.0.1", + "zod": "^3.23.8" + }, + "devDependencies": { + "@graphql-codegen/cli": "^5.0.2", + "@graphql-codegen/client-preset": "^4.2.5", + "@types/lodash": "^4.17.4", + "@types/node": "^20", + "@types/react": "^18", + "@types/react-dom": "^18", + "@types/uuid": "^9.0.8", + "env-cmd": "^10.1.0", + "eslint": "^8", + "eslint-config-next": "14.2.3", + "typescript": "^5" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@ant-design/colors": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-7.0.2.tgz", + "integrity": "sha512-7KJkhTiPiLHSu+LmMJnehfJ6242OCxSlR3xHVBecYxnMW8MS/878NXct1GqYARyL59fyeFdKRxXTfvR9SnDgJg==", + "dependencies": { + "@ctrl/tinycolor": "^3.6.1" + } + }, + "node_modules/@ant-design/cssinjs": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/@ant-design/cssinjs/-/cssinjs-1.20.0.tgz", + "integrity": "sha512-uG3iWzJxgNkADdZmc6W0Ci3iQAUOvLMcM8SnnmWq3r6JeocACft4ChnY/YWvI2Y+rG/68QBla/O+udke1yH3vg==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "@emotion/hash": "^0.8.0", + "@emotion/unitless": "^0.7.5", + "classnames": "^2.3.1", + "csstype": "^3.1.3", + "rc-util": "^5.35.0", + "stylis": "^4.0.13" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" + } + }, + "node_modules/@ant-design/icons": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@ant-design/icons/-/icons-5.3.7.tgz", + "integrity": "sha512-bCPXTAg66f5bdccM4TT21SQBDO1Ek2gho9h3nO9DAKXJP4sq+5VBjrQMSxMVXSB3HyEz+cUbHQ5+6ogxCOpaew==", + "dependencies": { + "@ant-design/colors": "^7.0.0", + "@ant-design/icons-svg": "^4.4.0", + "@babel/runtime": "^7.11.2", + "classnames": "^2.2.6", + "rc-util": "^5.31.1" + }, + "engines": { + "node": ">=8" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" + } + }, + "node_modules/@ant-design/icons-svg": { + "version": 
"4.4.2", + "resolved": "https://registry.npmjs.org/@ant-design/icons-svg/-/icons-svg-4.4.2.tgz", + "integrity": "sha512-vHbT+zJEVzllwP+CM+ul7reTEfBR0vgxFe7+lREAsAA7YGsYpboiq2sQNeQeRvh09GfQgs/GyFEvZpJ9cLXpXA==" + }, + "node_modules/@ant-design/react-slick": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ant-design/react-slick/-/react-slick-1.1.2.tgz", + "integrity": "sha512-EzlvzE6xQUBrZuuhSAFTdsr4P2bBBHGZwKFemEfq8gIGyIQCxalYfZW/T2ORbtQx5rU69o+WycP3exY/7T1hGA==", + "dependencies": { + "@babel/runtime": "^7.10.4", + "classnames": "^2.2.5", + "json2mq": "^0.2.0", + "resize-observer-polyfill": "^1.5.1", + "throttle-debounce": "^5.0.0" + }, + "peerDependencies": { + "react": ">=16.9.0" + } + }, + "node_modules/@apollo/client": { + "version": "3.10.2", + "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.10.2.tgz", + "integrity": "sha512-J4e2RQN4DgMthCqBtBOXOawYTWmdJprAWqv8rdYLmeW4/Pnrlrl4Hvb/zvENlitDAiRUU33Dc/3OJqIMEJAxNw==", + "dependencies": { + "@graphql-typed-document-node/core": "^3.1.1", + "@wry/caches": "^1.0.0", + "@wry/equality": "^0.5.6", + "@wry/trie": "^0.5.0", + "graphql-tag": "^2.12.6", + "hoist-non-react-statics": "^3.3.2", + "optimism": "^0.18.0", + "prop-types": "^15.7.2", + "rehackt": "^0.1.0", + "response-iterator": "^0.2.6", + "symbol-observable": "^4.0.0", + "ts-invariant": "^0.10.3", + "tslib": "^2.3.0", + "zen-observable-ts": "^1.2.5" + }, + "peerDependencies": { + "graphql": "^15.0.0 || ^16.0.0", + "graphql-ws": "^5.5.5", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", + "subscriptions-transport-ws": "^0.9.0 || ^0.11.0" + }, + "peerDependenciesMeta": { + "graphql-ws": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "subscriptions-transport-ws": { + "optional": true + } + } + }, + "node_modules/@apollo/client-react-streaming": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@apollo/client-react-streaming/-/client-react-streaming-0.10.0.tgz", + "integrity": "sha512-iZ2jYghRS71xFv6O3Js5Ojrrmk4SnIEKwPRKIswQyAtqjHrfvUTyXCDzxrhPcGQe/y7su/XcE7Xp0kOp7yTnlg==", + "dependencies": { + "superjson": "^1.12.2 || ^2.0.0", + "ts-invariant": "^0.10.3" + }, + "peerDependencies": { + "@apollo/client": "^3.9.6", + "react": "^18" + } + }, + "node_modules/@apollo/experimental-nextjs-app-support": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/@apollo/experimental-nextjs-app-support/-/experimental-nextjs-app-support-0.10.0.tgz", + "integrity": "sha512-S3mfZRnAAAaKwA8RNckS4TWYLX5utpmRTwG3WGFtpooYx8QQG8xft0p0a9eTQ53Jrw3nSMJc/wOOsT/5noMCQg==", + "dependencies": { + "@apollo/client-react-streaming": "0.10.0" + }, + "peerDependencies": { + "@apollo/client": "^3.9.6", + "next": "^13.4.1 || ^14.0.0", + "react": "^18" + } + }, + "node_modules/@ardatan/relay-compiler": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/@ardatan/relay-compiler/-/relay-compiler-12.0.0.tgz", + "integrity": "sha512-9anThAaj1dQr6IGmzBMcfzOQKTa5artjuPmw8NYK/fiGEMjADbSguBY2FMDykt+QhilR3wc9VA/3yVju7JHg7Q==", + "dev": true, + "dependencies": { + "@babel/core": "^7.14.0", + "@babel/generator": "^7.14.0", + "@babel/parser": "^7.14.0", + "@babel/runtime": "^7.0.0", + "@babel/traverse": "^7.14.0", + "@babel/types": "^7.0.0", + "babel-preset-fbjs": "^3.4.0", + "chalk": "^4.0.0", + "fb-watchman": "^2.0.0", + "fbjs": "^3.0.0", + "glob": "^7.1.1", + "immutable": "~3.7.6", + "invariant": "^2.2.4", + "nullthrows": "^1.1.1", + "relay-runtime": 
"12.0.0", + "signedsource": "^1.0.0", + "yargs": "^15.3.1" + }, + "bin": { + "relay-compiler": "bin/relay-compiler" + }, + "peerDependencies": { + "graphql": "*" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": 
"^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "node_modules/@ardatan/relay-compiler/node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@ardatan/relay-compiler/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@ardatan/sync-fetch": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/@ardatan/sync-fetch/-/sync-fetch-0.0.1.tgz", + "integrity": "sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA==", + "dev": true, + "dependencies": { + "node-fetch": "^2.6.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.24.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.2.tgz", + "integrity": "sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.24.2", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.24.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.4.tgz", + "integrity": "sha512-vg8Gih2MLK+kOkHJp4gBEIkyaIi00jgWot2D9QOmmfLC8jINSOzmCLta6Bvz/JSBCqnegV0L80jhxkol5GWNfQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.5.tgz", + "integrity": "sha512-tVQRucExLQ02Boi4vdPp49svNGcfL2GhdTCT9aldhXgCJVAI21EtRfBettiuLUwce/7r6bFdgs6JFkcdTiFttA==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.24.2", + "@babel/generator": "^7.24.5", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-module-transforms": "^7.24.5", + "@babel/helpers": "^7.24.5", + "@babel/parser": "^7.24.5", + "@babel/template": "^7.24.0", + "@babel/traverse": "^7.24.5", + "@babel/types": "^7.24.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/json5": { + "version": "2.2.3", + "resolved": 
"https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@babel/generator": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.5.tgz", + "integrity": "sha512-x32i4hEXvr+iI0NEoEfDKzlemF8AmtOP8CcrRaEcpzysWuoEb1KknpcvMsHKPONoKZiDuItklgWhB18xEhr9PA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.5", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz", + "integrity": "sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.23.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz", + "integrity": "sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.23.5", + "@babel/helper-validator-option": "^7.23.5", + "browserslist": "^4.22.2", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.5.tgz", + "integrity": "sha512-uRc4Cv8UQWnE4NXlYTIIdM7wfFkOqlFztcC/gVXDKohKoVB3OyonfelUBaJzSwpBntZ2KYGF/9S7asCHsXwW6g==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-member-expression-to-functions": "^7.24.5", + "@babel/helper-optimise-call-expression": "^7.22.5", + "@babel/helper-replace-supers": "^7.24.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.24.5", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", + "integrity": 
"sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", + "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", + "dev": true, + "dependencies": { + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.5.tgz", + "integrity": "sha512-4owRteeihKWKamtqg4JmWSsEZU445xpFRXPEwp44HbgbxdWlUV1b4Agg4lkA806Lil5XM/e+FJyS0vj5T6vmcA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", + "integrity": "sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.5.tgz", + "integrity": "sha512-9GxeY8c2d2mdQUP1Dye0ks3VDyIMS98kt/llQ2nUId8IsWqTF0l1LkSX0/uP7l7MCDrzXS009Hyhe2gzTiGW8A==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-module-imports": "^7.24.3", + "@babel/helper-simple-access": "^7.24.5", + "@babel/helper-split-export-declaration": "^7.24.5", + "@babel/helper-validator-identifier": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz", + "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.5.tgz", + "integrity": "sha512-xjNLDopRzW2o6ba0gKbkZq5YWEBaK3PCyTOY1K2P/O07LGMhMqlMXPxwN4S5/RhWuCobT8z0jrlKGlYmeR1OhQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.1.tgz", + "integrity": 
"sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-member-expression-to-functions": "^7.23.0", + "@babel/helper-optimise-call-expression": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.5.tgz", + "integrity": "sha512-uH3Hmf5q5n7n8mz7arjUlDOCbttY/DW4DYhE6FUsjKJ/oYC1kQQUvwEQWxRwUpX9qQKRXeqLwWxrqilMrf32sQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz", + "integrity": "sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.5.tgz", + "integrity": "sha512-5CHncttXohrHk8GWOFCcCl4oRD9fKosWlIRgWm4ql9VYioKm52Mk2xsmoohvm7f3JoiLSM5ZgJuRaf5QZZYd3Q==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.1.tgz", + "integrity": "sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz", + "integrity": "sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.23.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz", + "integrity": "sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.5.tgz", + "integrity": "sha512-CiQmBMMpMQHwM5m01YnrM6imUG1ebgYJ+fAIW4FZe6m4qHTPaRHti+R8cggAwkdz4oXhtO4/K9JWlh+8hIfR2Q==", + "dev": true, + "dependencies": { + "@babel/template": "^7.24.0", + "@babel/traverse": "^7.24.5", + "@babel/types": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.5.tgz", + "integrity": "sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw==", + "dev": true, + "dependencies": { + 
"@babel/helper-validator-identifier": "^7.24.5", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/parser": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.5.tgz", + "integrity": "sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-proposal-class-properties": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", + "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-class-properties instead.", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-object-rest-spread": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz", + "integrity": "sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-object-rest-spread instead.", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.20.5", + "@babel/helper-compilation-targets": "^7.20.7", + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.20.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-flow": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.24.1.tgz", + "integrity": "sha512-sxi2kLTI5DeW5vDtMUsk4mTPwvlUDbjOnoWayhynCwrw4QXRld4QEYwqzY8JmQXaJUtgUuCIurtSRH5sn4c7mA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.1.tgz", + "integrity": "sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.1.tgz", + "integrity": "sha512-2eCtxZXf+kbkMIsXS4poTvT4Yu5rXiRa+9xGVT56raghjmBTKMpFNc9R4IDiB4emao9eO22Ox7CxuJG7BgExqA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": 
"^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.1.tgz", + "integrity": "sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.1.tgz", + "integrity": "sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.5.tgz", + "integrity": "sha512-sMfBc3OxghjC95BkYrYocHL3NaOplrcaunblzwXhGmlPwpmfsxr4vK+mBBt49r+S240vahmv+kUxkeKgs+haCw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.5.tgz", + "integrity": "sha512-gWkLP25DFj2dwe9Ck8uwMOpko4YsqyfZJrOmqqcegeDYEbp7rmn4U6UQZNj08UF6MaX39XenSpKRCvpDRBtZ7Q==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-plugin-utils": "^7.24.5", + "@babel/helper-replace-supers": "^7.24.1", + "@babel/helper-split-export-declaration": "^7.24.5", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.1.tgz", + "integrity": "sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/template": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.5.tgz", + "integrity": 
"sha512-SZuuLyfxvsm+Ah57I/i1HVjveBENYK9ue8MJ7qkc7ndoNjqquJiElzA7f5yaAXjyW2hKojosOTAQQRX50bPSVg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-flow-strip-types": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.24.1.tgz", + "integrity": "sha512-iIYPIWt3dUmUKKE10s3W+jsQ3icFkw0JyRVyY1B7G4yK/nngAOHLVx8xlhA6b/Jzl/Y0nis8gjqhqKtRDQqHWQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-flow": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.1.tgz", + "integrity": "sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.1.tgz", + "integrity": "sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.1.tgz", + "integrity": "sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.1.tgz", + "integrity": "sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.1.tgz", + "integrity": "sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-simple-access": "^7.22.5" + }, + "engines": { + "node": 
">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.1.tgz", + "integrity": "sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-replace-supers": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.5.tgz", + "integrity": "sha512-9Co00MqZ2aoky+4j2jhofErthm6QVLKbpQrvz20c3CH9KQCLHyNB+t2ya4/UrRpQGR+Wrwjg9foopoeSdnHOkA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.1.tgz", + "integrity": "sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-display-name": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.1.tgz", + "integrity": "sha512-mvoQg2f9p2qlpDQRBC7M3c3XTr0k7cp/0+kFKKO/7Gtu0LSw16eKB+Fabe2bDT/UpsyasTBBkAnbdsLrkD5XMw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx": { + "version": "7.23.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.23.4.tgz", + "integrity": "sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-module-imports": "^7.22.15", + "@babel/helper-plugin-utils": "^7.22.5", + "@babel/plugin-syntax-jsx": "^7.23.3", + "@babel/types": "^7.23.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.1.tgz", + "integrity": "sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.1.tgz", + "integrity": 
"sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.1.tgz", + "integrity": "sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.24.5", + "license": "MIT", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", + "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.23.5", + "@babel/parser": "^7.24.0", + "@babel/types": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.5.tgz", + "integrity": "sha512-7aaBLeDQ4zYcUFDUD41lJc1fG8+5IU9DaNSJAgal866FGvmD5EbWQgnEC6kO1gGLsX0esNkfnJSndbTXA3r7UA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.2", + "@babel/generator": "^7.24.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.24.5", + "@babel/parser": "^7.24.5", + "@babel/types": "^7.24.5", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.24.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.5.tgz", + "integrity": "sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.24.1", + "@babel/helper-validator-identifier": "^7.24.5", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@ctrl/tinycolor": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.6.1.tgz", + "integrity": "sha512-SITSV6aIXsuVNV3f3O0f2n/cgyEDWoSqtZMYiAmcsYHydcKrOz3gUxB/iXd/Qf08+IZX4KpgNbvUdMBmWz+kcA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/@emotion/hash": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz", + "integrity": "sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==" + }, + "node_modules/@emotion/unitless": { + 
"version": "0.7.5", + "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", + "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@graphql-codegen/add": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/add/-/add-5.0.2.tgz", + "integrity": "sha512-ouBkSvMFUhda5VoKumo/ZvsZM9P5ZTyDsI8LW18VxSNWOjrTeLXBWHG8Gfaai0HwhflPtCYVABbriEcOmrRShQ==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/cli": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-5.0.2.tgz", + "integrity": "sha512-MBIaFqDiLKuO4ojN6xxG9/xL9wmfD3ZjZ7RsPjwQnSHBCUXnEkdKvX+JVpx87Pq29Ycn8wTJUguXnTZ7Di0Mlw==", + "dev": true, + "dependencies": { + "@babel/generator": "^7.18.13", + "@babel/template": "^7.18.10", + "@babel/types": "^7.18.13", + "@graphql-codegen/client-preset": "^4.2.2", + "@graphql-codegen/core": "^4.0.2", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/apollo-engine-loader": "^8.0.0", + "@graphql-tools/code-file-loader": "^8.0.0", + "@graphql-tools/git-loader": "^8.0.0", + "@graphql-tools/github-loader": "^8.0.0", + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/prisma-loader": "^8.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", + "@whatwg-node/fetch": "^0.8.0", + "chalk": "^4.1.0", + "cosmiconfig": "^8.1.3", + "debounce": "^1.2.0", + "detect-indent": "^6.0.0", + "graphql-config": "^5.0.2", + "inquirer": "^8.0.0", + "is-glob": "^4.0.1", + "jiti": "^1.17.1", + "json-to-pretty-yaml": "^1.2.2", + "listr2": "^4.0.5", + "log-symbols": "^4.0.0", + "micromatch": "^4.0.5", + "shell-quote": "^1.7.3", + "string-env-interpolation": "^1.0.1", + "ts-log": "^2.2.3", + "tslib": "^2.4.0", + "yaml": "^2.3.1", + "yargs": "^17.0.0" + }, + "bin": { + "gql-gen": "cjs/bin.js", + "graphql-code-generator": "cjs/bin.js", + "graphql-codegen": "cjs/bin.js", + "graphql-codegen-esm": "esm/bin.js" + }, + "peerDependencies": { + "@parcel/watcher": "^2.1.0", + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || 
^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "peerDependenciesMeta": { + "@parcel/watcher": { + "optional": true + } + } + }, + "node_modules/@graphql-codegen/client-preset": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/@graphql-codegen/client-preset/-/client-preset-4.2.5.tgz", + "integrity": "sha512-hAdB6HN8EDmkoBtr0bPUN/7NH6svzqbcTDMWBCRXPESXkl7y80po+IXrXUjsSrvhKG8xkNXgJNz/2mjwHzywcA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/template": "^7.20.7", + "@graphql-codegen/add": "^5.0.2", + "@graphql-codegen/gql-tag-operations": "4.0.6", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typed-document-node": "^5.0.6", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/typescript-operations": "^4.2.0", + "@graphql-codegen/visitor-plugin-common": "^5.1.0", + "@graphql-tools/documents": "^1.0.0", + "@graphql-tools/utils": "^10.0.0", + "@graphql-typed-document-node/core": "3.2.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/core": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-4.0.2.tgz", + "integrity": "sha512-IZbpkhwVqgizcjNiaVzNAzm/xbWT6YnGgeOLwVjm4KbJn3V2jchVtuzHH09G5/WkkLSk2wgbXNdwjM41JxO6Eg==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/schema": "^10.0.0", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/gql-tag-operations": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/gql-tag-operations/-/gql-tag-operations-4.0.6.tgz", + "integrity": "sha512-y6iXEDpDNjwNxJw3WZqX1/Znj0QHW7+y8O+t2V8qvbTT+3kb2lr9ntc8By7vCr6ctw9tXI4XKaJgpTstJDOwFA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/plugin-helpers": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.3.tgz", + "integrity": "sha512-yZ1rpULIWKBZqCDlvGIJRSyj1B2utkEdGmXZTBT/GVayP4hyRYlkd36AJV/LfEsVD8dnsKL5rLz2VTYmRNlJ5Q==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.0", + "change-case-all": "1.0.15", + "common-tags": "1.8.2", + "import-from": "4.0.0", + "lodash": "~4.17.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/schema-ast": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-4.0.2.tgz", + "integrity": "sha512-5mVAOQQK3Oz7EtMl/l3vOQdc2aYClUzVDHHkMvZlunc+KlGgl81j8TLa+X7ANIllqU4fUEsQU3lJmk4hXP6K7Q==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 
|| ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typed-document-node": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typed-document-node/-/typed-document-node-5.0.6.tgz", + "integrity": "sha512-US0J95hOE2/W/h42w4oiY+DFKG7IetEN1mQMgXXeat1w6FAR5PlIz4JrRrEkiVfVetZ1g7K78SOwBD8/IJnDiA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typescript": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-4.0.6.tgz", + "integrity": "sha512-IBG4N+Blv7KAL27bseruIoLTjORFCT3r+QYyMC3g11uY3/9TPpaUyjSdF70yBe5GIQ6dAgDU+ENUC1v7EPi0rw==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/schema-ast": "^4.0.2", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/typescript-operations": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript-operations/-/typescript-operations-4.2.0.tgz", + "integrity": "sha512-lmuwYb03XC7LNRS8oo9M4/vlOrq/wOKmTLBHlltK2YJ1BO/4K/Q9Jdv/jDmJpNydHVR1fmeF4wAfsIp1f9JibA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-codegen/visitor-plugin-common": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-5.1.0.tgz", + "integrity": "sha512-eamQxtA9bjJqI2lU5eYoA1GbdMIRT2X8m8vhWYsVQVWD3qM7sx/IqJU0kx0J3Vd4/CSd36BzL6RKwksibytDIg==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/optimize": "^2.0.0", + "@graphql-tools/relay-operation-optimizer": "^7.0.0", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "dependency-graph": "^0.11.0", + "graphql-tag": "^2.11.0", + "parse-filepath": "^1.0.2", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.1.tgz", + "integrity": "sha512-NaPeVjtrfbPXcl+MLQCJLWtqe2/E4bbAqcauEOQ+3sizw1Fc2CNmhHRF8a6W4D0ekvTRRXAMptXYgA2uConbrA==", + "dev": true, + "dependencies": { + "@ardatan/sync-fetch": "^0.0.1", + "@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + 
"node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.11.tgz", + "integrity": "sha512-LS8tSomZa3YHnntpWt3PP43iFEEl6YeIsvDakczHBKlay5LdkXFr8w7v8H6akpG5nRrzydyB0k1iE2eoL6aKIQ==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/batch-execute": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-9.0.4.tgz", + "integrity": "sha512-kkebDLXgDrep5Y0gK1RN3DMUlLqNhg60OAz0lTCqrYeja6DshxLtLkj+zV4mVbBA4mQOEoBmw6g1LZs3dA84/w==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "dataloader": "^2.2.2", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/code-file-loader": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-8.1.1.tgz", + "integrity": "sha512-q4KN25EPSUztc8rA8YUU3ufh721Yk12xXDbtUA+YstczWS7a1RJlghYMFEfR1HsHSYbF7cUqkbnTKSGM3o52bQ==", + "dev": true, + "dependencies": { + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/delegate": { + "version": "10.0.9", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-10.0.9.tgz", + "integrity": "sha512-H+jGPLB0X23wlslw1JuB3y5j35NwZLUGhmjgaLYKkquAI/rtcs4+UwoW3hZ4SCN7h2LAKDa6HhsYYCRXyhdePA==", + "dev": true, + "dependencies": { + "@graphql-tools/batch-execute": "^9.0.4", + "@graphql-tools/executor": "^1.2.1", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", + "dataloader": "^2.2.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || 
^17.0.0" + } + }, + "node_modules/@graphql-tools/documents": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/documents/-/documents-1.0.0.tgz", + "integrity": "sha512-rHGjX1vg/nZ2DKqRGfDPNC55CWZBMldEVcH+91BThRa6JeT80NqXknffLLEZLRUxyikCfkwMsk6xR3UNMqG0Rg==", + "dev": true, + "dependencies": { + "lodash.sortby": "^4.7.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-1.2.6.tgz", + "integrity": "sha512-+1kjfqzM5T2R+dCw7F4vdJ3CqG+fY/LYJyhNiWEFtq0ToLwYzR/KKyD8YuzTirEjSxWTVlcBh7endkx5n5F6ew==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.1.1", + "@graphql-typed-document-node/core": "3.2.0", + "@repeaterjs/repeater": "^3.0.4", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-graphql-ws": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.2.tgz", + "integrity": "sha512-+9ZK0rychTH1LUv4iZqJ4ESbmULJMTsv3XlFooPUngpxZkk00q6LqHKJRrsLErmQrVaC7cwQCaRBJa0teK17Lg==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "@types/ws": "^8.0.0", + "graphql-ws": "^5.14.0", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.4.0", + "ws": "^8.13.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-http": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-1.0.9.tgz", + "integrity": "sha512-+NXaZd2MWbbrWHqU4EhXcrDbogeiCDmEbrAN+rMn4Nu2okDjn2MTFDbTIab87oEubQCH4Te1wDkWPKrzXup7+Q==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "@repeaterjs/repeater": "^3.0.4", + "@whatwg-node/fetch": "^0.9.0", + "extract-files": "^11.0.0", + "meros": "^1.2.1", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.11.tgz", + "integrity": "sha512-LS8tSomZa3YHnntpWt3PP43iFEEl6YeIsvDakczHBKlay5LdkXFr8w7v8H6akpG5nRrzydyB0k1iE2eoL6aKIQ==", + 
"dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/executor-legacy-ws": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.6.tgz", + "integrity": "sha512-lDSxz9VyyquOrvSuCCnld3256Hmd+QI2lkmkEv7d4mdzkxkK4ddAWW1geQiWrQvWmdsmcnGGlZ7gDGbhEExwqg==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "@types/ws": "^8.0.0", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.4.0", + "ws": "^8.15.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/git-loader": { + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-8.0.5.tgz", + "integrity": "sha512-P97/1mhruDiA6D5WUmx3n/aeGPLWj2+4dpzDOxFGGU+z9NcI/JdygMkeFpGZNHeJfw+kHfxgPcMPnxHcyhAoVA==", + "dev": true, + "dependencies": { + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", + "is-glob": "4.0.3", + "micromatch": "^4.0.4", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/github-loader": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-8.0.1.tgz", + "integrity": "sha512-W4dFLQJ5GtKGltvh/u1apWRFKBQOsDzFxO9cJkOYZj1VzHCpRF43uLST4VbCfWve+AwBqOuKr7YgkHoxpRMkcg==", + "dev": true, + "dependencies": { + "@ardatan/sync-fetch": "^0.0.1", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/graphql-tag-pluck": "^8.0.0", + "@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.11.tgz", + "integrity": 
"sha512-LS8tSomZa3YHnntpWt3PP43iFEEl6YeIsvDakczHBKlay5LdkXFr8w7v8H6akpG5nRrzydyB0k1iE2eoL6aKIQ==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/graphql-file-loader": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.1.tgz", + "integrity": "sha512-7gswMqWBabTSmqbaNyWSmRRpStWlcCkBc73E6NZNlh4YNuiyKOwbvSkOUYFOqFMfEL+cFsXgAvr87Vz4XrYSbA==", + "dev": true, + "dependencies": { + "@graphql-tools/import": "7.0.1", + "@graphql-tools/utils": "^10.0.13", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/graphql-tag-pluck": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.3.0.tgz", + "integrity": "sha512-gNqukC+s7iHC7vQZmx1SEJQmLnOguBq+aqE2zV2+o1hxkExvKqyFli1SY/9gmukFIKpKutCIj+8yLOM+jARutw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.22.9", + "@babel/parser": "^7.16.8", + "@babel/plugin-syntax-import-assertions": "^7.20.0", + "@babel/traverse": "^7.16.8", + "@babel/types": "^7.16.8", + "@graphql-tools/utils": "^10.0.13", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/import": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-7.0.1.tgz", + "integrity": "sha512-935uAjAS8UAeXThqHfYVr4HEAp6nHJ2sximZKO1RzUTq5WoALMAhhGARl0+ecm6X+cqNUwIChJbjtaa6P/ML0w==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "resolve-from": "5.0.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/import/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@graphql-tools/json-file-loader": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-8.0.1.tgz", + "integrity": "sha512-lAy2VqxDAHjVyqeJonCP6TUemrpYdDuKt25a10X6zY2Yn3iFYGnuIDQ64cv3ytyGY6KPyPB+Kp+ZfOkNDG3FQA==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "globby": "^11.0.3", + "tslib": "^2.4.0", + "unixify": "^1.0.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/load": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-8.0.2.tgz", + 
"integrity": "sha512-S+E/cmyVmJ3CuCNfDuNF2EyovTwdWfQScXv/2gmvJOti2rGD8jTt9GYVzXaxhblLivQR9sBUCNZu/w7j7aXUCA==", + "dev": true, + "dependencies": { + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", + "p-limit": "3.1.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/merge": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-9.0.4.tgz", + "integrity": "sha512-MivbDLUQ+4Q8G/Hp/9V72hbn810IJDEZQ57F01sHnlrrijyadibfVhaQfW/pNH+9T/l8ySZpaR/DpL5i+ruZ+g==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/optimize": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-2.0.0.tgz", + "integrity": "sha512-nhdT+CRGDZ+bk68ic+Jw1OZ99YCDIKYA5AlVAnBHJvMawSx9YQqQAIj4refNc1/LRieGiuWvhbG3jvPVYho0Dg==", + "dev": true, + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/prisma-loader/-/prisma-loader-8.0.4.tgz", + "integrity": "sha512-hqKPlw8bOu/GRqtYr0+dINAI13HinTVYBDqhwGAPIFmLr5s+qKskzgCiwbsckdrb5LWVFmVZc+UXn80OGiyBzg==", + "dev": true, + "dependencies": { + "@graphql-tools/url-loader": "^8.0.2", + "@graphql-tools/utils": "^10.0.13", + "@types/js-yaml": "^4.0.0", + "@whatwg-node/fetch": "^0.9.0", + "chalk": "^4.1.0", + "debug": "^4.3.1", + "dotenv": "^16.0.0", + "graphql-request": "^6.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "jose": "^5.0.0", + "js-yaml": "^4.0.0", + "lodash": "^4.17.20", + "scuid": "^1.1.0", + "tslib": "^2.4.0", + "yaml-ast-parser": "^0.0.43" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.11.tgz", + "integrity": "sha512-LS8tSomZa3YHnntpWt3PP43iFEEl6YeIsvDakczHBKlay5LdkXFr8w7v8H6akpG5nRrzydyB0k1iE2eoL6aKIQ==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + 
"tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/relay-operation-optimizer": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-7.0.1.tgz", + "integrity": "sha512-y0ZrQ/iyqWZlsS/xrJfSir3TbVYJTYmMOu4TaSz6F4FRDTQ3ie43BlKkhf04rC28pnUOS4BO9pDcAo1D30l5+A==", + "dev": true, + "dependencies": { + "@ardatan/relay-compiler": "12.0.0", + "@graphql-tools/utils": "^10.0.13", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/schema": { + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-10.0.3.tgz", + "integrity": "sha512-p28Oh9EcOna6i0yLaCFOnkcBDQECVf3SCexT6ktb86QNj9idnkhI+tCxnwZDh58Qvjd2nURdkbevvoZkvxzCog==", + "dev": true, + "dependencies": { + "@graphql-tools/merge": "^9.0.3", + "@graphql-tools/utils": "^10.0.13", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/url-loader": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-8.0.2.tgz", + "integrity": "sha512-1dKp2K8UuFn7DFo1qX5c1cyazQv2h2ICwA9esHblEqCYrgf69Nk8N7SODmsfWg94OEaI74IqMoM12t7eIGwFzQ==", + "dev": true, + "dependencies": { + "@ardatan/sync-fetch": "^0.0.1", + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/executor-graphql-ws": "^1.1.2", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/executor-legacy-ws": "^1.0.6", + "@graphql-tools/utils": "^10.0.13", + "@graphql-tools/wrap": "^10.0.2", + "@types/ws": "^8.0.0", + "@whatwg-node/fetch": "^0.9.0", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.11", + "ws": "^8.12.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.11.tgz", + "integrity": 
"sha512-LS8tSomZa3YHnntpWt3PP43iFEEl6YeIsvDakczHBKlay5LdkXFr8w7v8H6akpG5nRrzydyB0k1iE2eoL6aKIQ==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/utils": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-10.2.0.tgz", + "integrity": "sha512-HYV7dO6pNA2nGKawygaBpk8y+vXOUjjzzO43W/Kb7EPRmXUEQKjHxPYRvQbiF72u1N3XxwGK5jnnFk9WVhUwYw==", + "dev": true, + "dependencies": { + "@graphql-typed-document-node/core": "^3.1.1", + "cross-inspect": "1.0.0", + "dset": "^3.1.2", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/wrap": { + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-10.0.5.tgz", + "integrity": "sha512-Cbr5aYjr3HkwdPvetZp1cpDWTGdD1Owgsb3z/ClzhmrboiK86EnQDxDvOJiQkDCPWE9lNBwj8Y4HfxroY0D9DQ==", + "dev": true, + "dependencies": { + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.1.1", + "tslib": "^2.4.0", + "value-or-promise": "^1.0.12" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-typed-document-node/core": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@graphql-typed-document-node/core/-/core-3.2.0.tgz", + "integrity": "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==", + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.14", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { 
+ "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@kamilkisiela/fast-url-parser": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@kamilkisiela/fast-url-parser/-/fast-url-parser-1.1.4.tgz", + "integrity": "sha512-gbkePEBupNydxCelHCESvFSFM8XPh1Zs/OAVRW/rKpEqPAl5PbOM90Si8mv9bvnR53uPD2s/FiRxdvSejpRJew==", + "dev": true + }, + "node_modules/@next/env": { + "version": "14.2.3", + "license": "MIT" + }, + "node_modules/@next/eslint-plugin-next": { + "version": "14.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "glob": "10.3.10" + } + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "14.2.3", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.3.tgz", + "integrity": "sha512-6adp7waE6P1TYFSXpY366xwsOnEXM+y1kgRpjSRVI2CBDOcbRjsJ67Z6EgKIqWIue52d2q/Mx8g9MszARj8IEA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.3.tgz", + "integrity": "sha512-cuzCE/1G0ZSnTAHJPUT1rPgQx1w5tzSX7POXSLaS7w2nIUJUD+e25QoXD/hMfxbsT9rslEXugWypJMILBj/QsA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + 
"engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.3.tgz", + "integrity": "sha512-0D4/oMM2Y9Ta3nGuCcQN8jjJjmDPYpHX9OJzqk42NZGJocU2MqhBq5tWkJrUQOQY9N+In9xOdymzapM09GeiZw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.3.tgz", + "integrity": "sha512-ENPiNnBNDInBLyUU5ii8PMQh+4XLr4pG51tOp6aJ9xqFQ2iRI6IH0Ds2yJkAzNV1CfyagcyzPfROMViS2wOZ9w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.3.tgz", + "integrity": "sha512-BTAbq0LnCbF5MtoM7I/9UeUu/8ZBY0i8SFjUMCbPDOLv+un67e2JgyN4pmgfXBwy/I+RHu8q+k+MCkDN6P9ViQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.3.tgz", + "integrity": "sha512-AEHIw/dhAMLNFJFJIJIyOFDzrzI5bAjI9J26gbO5xhAKHYTZ9Or04BesFPXiAYXDNdrwTP2dQceYA4dL1geu8A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-ia32-msvc": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.3.tgz", + "integrity": "sha512-vga40n1q6aYb0CLrM+eEmisfKCR45ixQYXuBXxOOmmoV8sYST9k7E3US32FsY+CkkF7NtzdcebiFT4CHuMSyZw==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.3.tgz", + "integrity": "sha512-Q1/zm43RWynxrO7lW4ehciQVj+5ePBhOK+/K2P7pLFX3JaJ/IZVC69SHidrmZSOkqz7ECIOhhy7XhAFG4JYyHA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@peculiar/asn1-schema": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz", + "integrity": "sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA==", + "dev": true, + "dependencies": { + "asn1js": "^3.0.5", + "pvtsutils": "^1.3.5", + "tslib": "^2.6.2" + } + }, + "node_modules/@peculiar/json-schema": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/@peculiar/json-schema/-/json-schema-1.1.12.tgz", + "integrity": 
"sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w==", + "dev": true, + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@peculiar/webcrypto": { + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.6.tgz", + "integrity": "sha512-YBcMfqNSwn3SujUJvAaySy5tlYbYm6tVt9SKoXu8BaTdKGROiJDgPR3TXpZdAKUfklzm3lRapJEAltiMQtBgZg==", + "dev": true, + "dependencies": { + "@peculiar/asn1-schema": "^2.3.8", + "@peculiar/json-schema": "^1.1.12", + "pvtsutils": "^1.3.5", + "tslib": "^2.6.2", + "webcrypto-core": "^1.7.9" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@rc-component/async-validator": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@rc-component/async-validator/-/async-validator-5.0.2.tgz", + "integrity": "sha512-ukdSere8wCNa5MpymNOYvOuHDODHlwBeaYEm0TtG91M7i72T7/cOVZhmDg9QcNe5+VP1YHJQEvquqA1bN5pTsg==", + "dependencies": { + "@babel/runtime": "^7.24.4" + }, + "engines": { + "node": ">=14.x" + } + }, + "node_modules/@rc-component/color-picker": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@rc-component/color-picker/-/color-picker-1.5.3.tgz", + "integrity": "sha512-+tGGH3nLmYXTalVe0L8hSZNs73VTP5ueSHwUlDC77KKRaN7G4DS4wcpG5DTDzdcV/Yas+rzA6UGgIyzd8fS4cw==", + "dependencies": { + "@babel/runtime": "^7.23.6", + "@ctrl/tinycolor": "^3.6.1", + "classnames": "^2.2.6", + "rc-util": "^5.38.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/context": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@rc-component/context/-/context-1.4.0.tgz", + "integrity": "sha512-kFcNxg9oLRMoL3qki0OMxK+7g5mypjgaaJp/pkOis/6rVxma9nJBF/8kCIuTYHUQNr0ii7MxqE33wirPZLJQ2w==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/mini-decimal": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mini-decimal/-/mini-decimal-1.1.0.tgz", + "integrity": "sha512-jS4E7T9Li2GuYwI6PyiVXmxTiM6b07rlD9Ge8uGZSCz3WlzcG5ZK7g5bbuKNeZ9pgUuPK/5guV781ujdVpm4HQ==", + "dependencies": { + "@babel/runtime": "^7.18.0" + }, + "engines": { + "node": ">=8.x" + } + }, + "node_modules/@rc-component/mutate-observer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rc-component/mutate-observer/-/mutate-observer-1.1.0.tgz", + "integrity": "sha512-QjrOsDXQusNwGZPf4/qRQasg7UFEj06XiCJ8iuiq/Io7CrHrgVi6Uuetw60WAMG1799v+aM8kyc+1L/GBbHSlw==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/portal": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@rc-component/portal/-/portal-1.1.2.tgz", + "integrity": "sha512-6f813C0IsasTZms08kfA8kPAGxbbkYToa8ALaiDIGGECU4i9hj8Plgbx0sNJDrey3EtHO30hmdaxtT0138xZcg==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + 
"node_modules/@rc-component/tour": { + "version": "1.14.2", + "resolved": "https://registry.npmjs.org/@rc-component/tour/-/tour-1.14.2.tgz", + "integrity": "sha512-A75DZ8LVvahBIvxooj3Gvf2sxe+CGOkmzPNX7ek0i0AJHyKZ1HXe5ieIGo3m0FMdZfVOlbCJ952Duq8VKAHk6g==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "@rc-component/portal": "^1.0.0-9", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.2", + "rc-util": "^5.24.4" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@rc-component/trigger": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@rc-component/trigger/-/trigger-2.1.1.tgz", + "integrity": "sha512-UjHkedkgtEcgQu87w1VuWug1idoDJV7VUt0swxHXRcmei2uu1AuUzGBPEUlmOmXGJ+YtTgZfVLi7kuAUKoZTMA==", + "dependencies": { + "@babel/runtime": "^7.23.2", + "@rc-component/portal": "^1.1.0", + "classnames": "^2.3.2", + "rc-motion": "^2.0.0", + "rc-resize-observer": "^1.3.1", + "rc-util": "^5.38.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@repeaterjs/repeater": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@repeaterjs/repeater/-/repeater-3.0.5.tgz", + "integrity": "sha512-l3YHBLAol6d/IKnB9LhpD0cEZWAoe3eFKUyTYWmFmCO2Q/WOckxLQAUyMZWwZV2M/m3+4vgRoaolFqaII82/TA==", + "dev": true + }, + "node_modules/@rushstack/eslint-patch": { + "version": "1.10.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "license": "Apache-2.0" + }, + "node_modules/@swc/helpers": { + "version": "0.5.5", + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3", + "tslib": "^2.4.0" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", + "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + 
"node_modules/@types/d3-shape": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.6.tgz", + "integrity": "sha512-5KKk5aKGu2I+O6SONMYSNflgiP0WfZIQvVUMan50wHsLG1G94JlxEVnCpQARfTtzytuY0p/9PXXZb3I7giofIA==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/js-yaml": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", + "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", + "dev": true + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/lodash": { + "version": "4.17.4", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.4.tgz", + "integrity": "sha512-wYCP26ZLxaT3R39kiN2+HcJ4kTd3U1waI/cY7ivWYqFP6pW3ZNpvi6Wd6PHZx7T/t8z0vlkXMg3QYLa7DZ/IJQ==", + "dev": true + }, + "node_modules/@types/node": { + "version": "20.12.8", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/prop-types": { + "version": "15.7.12", + "devOptional": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.1", + "devOptional": true, + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "dev": true + }, + "node_modules/@types/ws": { + "version": "8.5.10", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", + "integrity": "sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "7.2.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/scope-manager": "7.2.0", + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/typescript-estree": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "7.2.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "@typescript-eslint/visitor-keys": "7.2.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/lru-cache": { + "version": "6.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.6.0", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "7.2.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "dev": true, + "license": "ISC" + }, + "node_modules/@whatwg-node/events": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.0.3.tgz", + "integrity": "sha512-IqnKIDWfXBJkvy/k6tzskWTc2NK3LcqHlb+KHGCrjOCH4jfQckRX0NAiIcC/vIqQkzLYw2r2CTSwAxcrtcD6lA==", + "dev": true + }, + "node_modules/@whatwg-node/fetch": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.8.8.tgz", + "integrity": "sha512-CdcjGC2vdKhc13KKxgsc6/616BQ7ooDIgPeTuAiE8qfCnS0mGzcfCOoZXypQSz73nxI+GWc7ZReIAVhxoE1KCg==", + "dev": true, + "dependencies": { + "@peculiar/webcrypto": "^1.4.0", + "@whatwg-node/node-fetch": "^0.3.6", + "busboy": "^1.6.0", + "urlpattern-polyfill": "^8.0.0", + "web-streams-polyfill": "^3.2.1" + } + }, + "node_modules/@whatwg-node/node-fetch": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.3.6.tgz", + "integrity": "sha512-w9wKgDO4C95qnXZRwZTfCmLWqyRnooGjcIwG0wADWjw9/HN0p7dtvtgSvItZtUyNteEvgTrd8QojNEqV6DAGTA==", + "dev": true, + "dependencies": { + "@whatwg-node/events": "^0.0.3", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "fast-url-parser": "^1.1.3", + "tslib": "^2.3.1" + } + }, + "node_modules/@wry/caches": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/@wry/caches/-/caches-1.0.1.tgz", + "integrity": "sha512-bXuaUNLVVkD20wcGBWRyo7j9N3TxePEWFZj2Y+r9OoUzfqmavM84+mFykRicNsBqatba5JLay1t48wxaXaWnlA==", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/context": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@wry/context/-/context-0.7.4.tgz", + "integrity": "sha512-jmT7Sb4ZQWI5iyu3lobQxICu2nC/vbUhP0vIdd6tHC9PTfenmRmuIFqktc6GH9cgi+ZHnsLWPvfSvc4DrYmKiQ==", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/equality": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@wry/equality/-/equality-0.5.7.tgz", + "integrity": "sha512-BRFORjsTuQv5gxcXsuDXx6oGRhuVsEGwZy6LOzRRfgu+eSfxbhUQ9L9YtSEIuIjY/o7g3iWFjrc5eSY1GXP2Dw==", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@wry/trie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@wry/trie/-/trie-0.5.0.tgz", + "integrity": "sha512-FNoYzHawTMk/6KMQoEG5O4PuioX19UbwdQKF44yw0nLfOypfQdjtfZzo/UIJWAJ23sNIFbD1Ug9lbaDGMwbqQA==", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/acorn": { + "version": "8.11.3", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi_up": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/ansi_up/-/ansi_up-6.0.2.tgz", + "integrity": "sha512-3G3vKvl1ilEp7J1u6BmULpMA0xVoW/f4Ekqhl8RTrJrhEBkonKn5k3bUc5Xt+qDayA6iDX0jyUh3AbZjB/l0tw==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": 
"sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/antd": { + "version": "5.17.0", + "resolved": "https://registry.npmjs.org/antd/-/antd-5.17.0.tgz", + "integrity": "sha512-jrzMIcaTJIy12/GJ2PfgchgZGuAlDodlaOKd05/TxEtFilRHnv8oaf0qfqNGG3slvvuy4J/57xn21jM4cLl7Hw==", + "dependencies": { + "@ant-design/colors": "^7.0.2", + "@ant-design/cssinjs": "^1.19.1", + "@ant-design/icons": "^5.3.6", + "@ant-design/react-slick": "~1.1.2", + "@babel/runtime": "^7.24.5", + "@ctrl/tinycolor": "^3.6.1", + "@rc-component/color-picker": "~1.5.3", + "@rc-component/mutate-observer": "^1.1.0", + "@rc-component/tour": "~1.14.2", + "@rc-component/trigger": "^2.1.1", + "classnames": "^2.5.1", + "copy-to-clipboard": "^3.3.3", + "dayjs": "^1.11.10", + "qrcode.react": "^3.1.0", + "rc-cascader": "~3.25.0", + "rc-checkbox": "~3.2.0", + "rc-collapse": "~3.7.3", + "rc-dialog": "~9.4.0", + "rc-drawer": "~7.1.0", + "rc-dropdown": "~4.2.0", + "rc-field-form": "~2.0.0", + "rc-image": "~7.6.0", + "rc-input": "~1.4.5", + "rc-input-number": "~9.0.0", + "rc-mentions": "~2.11.1", + "rc-menu": "~9.13.0", + "rc-motion": "^2.9.0", + "rc-notification": "~5.4.0", + "rc-pagination": "~4.0.4", + "rc-picker": "~4.5.0", + "rc-progress": "~4.0.0", + "rc-rate": "~2.12.0", + "rc-resize-observer": "^1.4.0", + "rc-segmented": "~2.3.0", + "rc-select": "~14.13.1", + "rc-slider": "~10.6.2", + "rc-steps": "~6.0.1", + "rc-switch": "~4.1.0", + "rc-table": "~7.45.5", + "rc-tabs": "~15.0.0 ", + "rc-textarea": "~1.6.3", + "rc-tooltip": "~6.2.0", + "rc-tree": "~5.8.5", + "rc-tree-select": "~5.20.0", + "rc-upload": "~4.5.2", + "rc-util": "^5.39.1", + "scroll-into-view-if-needed": "^3.1.0", + "throttle-debounce": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/ant-design" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/aria-query": { + "version": "5.3.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-tree-filter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-tree-filter/-/array-tree-filter-2.1.0.tgz", + "integrity": 
"sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw==" + }, + "node_modules/array-union": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.toreversed": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "es-shim-unscopables": "^1.0.0" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.1.0", + "es-shim-unscopables": "^1.0.2" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", + "dev": true + }, + "node_modules/asn1js": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz", + "integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==", + "dev": true, + "dependencies": { + "pvtsutils": "^1.3.2", + "pvutils": "^1.1.3", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/ast-types-flow": { + "version": "0.0.8", + "dev": true, + "license": "MIT" + }, + "node_modules/astral-regex": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/auto-bind": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/auto-bind/-/auto-bind-4.0.0.tgz", + "integrity": "sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/axe-core": { + "version": "4.7.0", + "dev": true, + "license": "MPL-2.0", + "engines": { + "node": ">=4" + } + }, + "node_modules/axobject-query": { + "version": "3.2.1", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/babel-plugin-syntax-trailing-function-commas": { + "version": "7.0.0-beta.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz", + "integrity": "sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ==", + "dev": true + }, + "node_modules/babel-preset-fbjs": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/babel-preset-fbjs/-/babel-preset-fbjs-3.4.0.tgz", + "integrity": "sha512-9ywCsCvo1ojrw0b+XYk7aFvTH6D9064t0RIL1rtMf3nsa02Xw41MS7sZw216Im35xj/UY0PDBQsa1brUDDF1Ow==", + "dev": true, + "dependencies": { + "@babel/plugin-proposal-class-properties": "^7.0.0", + "@babel/plugin-proposal-object-rest-spread": "^7.0.0", + "@babel/plugin-syntax-class-properties": "^7.0.0", + "@babel/plugin-syntax-flow": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.0.0", + "@babel/plugin-syntax-object-rest-spread": "^7.0.0", + "@babel/plugin-transform-arrow-functions": "^7.0.0", + "@babel/plugin-transform-block-scoped-functions": "^7.0.0", + "@babel/plugin-transform-block-scoping": "^7.0.0", + "@babel/plugin-transform-classes": "^7.0.0", + "@babel/plugin-transform-computed-properties": "^7.0.0", + "@babel/plugin-transform-destructuring": "^7.0.0", + "@babel/plugin-transform-flow-strip-types": "^7.0.0", + "@babel/plugin-transform-for-of": "^7.0.0", + "@babel/plugin-transform-function-name": "^7.0.0", + "@babel/plugin-transform-literals": "^7.0.0", + "@babel/plugin-transform-member-expression-literals": "^7.0.0", + "@babel/plugin-transform-modules-commonjs": "^7.0.0", + "@babel/plugin-transform-object-super": "^7.0.0", + "@babel/plugin-transform-parameters": "^7.0.0", + "@babel/plugin-transform-property-literals": "^7.0.0", + "@babel/plugin-transform-react-display-name": "^7.0.0", + "@babel/plugin-transform-react-jsx": "^7.0.0", + "@babel/plugin-transform-shorthand-properties": "^7.0.0", + "@babel/plugin-transform-spread": "^7.0.0", + "@babel/plugin-transform-template-literals": "^7.0.0", + "babel-plugin-syntax-trailing-function-commas": "^7.0.0-beta.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": 
"https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", + "node-releases": "^2.0.14", + "update-browserslist-db": "^1.0.13" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + 
"dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "dev": true, + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001615", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/capital-case": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/capital-case/-/capital-case-1.0.4.tgz", + "integrity": "sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/change-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-4.1.2.tgz", + "integrity": "sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==", + "dev": true, + "dependencies": { + "camel-case": "^4.1.2", + "capital-case": "^1.0.4", + "constant-case": "^3.0.4", + "dot-case": "^3.0.4", + "header-case": "^2.0.4", + "no-case": "^3.0.4", + "param-case": "^3.0.4", + "pascal-case": "^3.1.2", + "path-case": "^3.0.4", + "sentence-case": "^3.0.4", + "snake-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/change-case-all": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/change-case-all/-/change-case-all-1.0.15.tgz", + "integrity": "sha512-3+GIFhk3sNuvFAJKU46o26OdzudQlPNBCu1ZQi3cMeMHhty1bhDxu2WrEilVNYaGvqUtR1VSigFcJOiS13dRhQ==", + "dev": true, + "dependencies": { + "change-case": "^4.1.2", + "is-lower-case": "^2.0.2", + "is-upper-case": "^2.0.2", + "lower-case": "^2.0.2", + "lower-case-first": "^2.0.2", + "sponge-case": "^1.0.1", + "swap-case": "^2.0.2", + "title-case": "^3.0.3", + "upper-case": "^2.0.2", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + 
"integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-width": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/client-only": { + "version": "0.0.1", + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/clone": { + "version": 
"1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "dev": true + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/compute-scroll-into-view": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.1.0.tgz", + "integrity": "sha512-rj8l8pD4bJ1nx+dAkMhV1xB5RuZEyVysfxJqB1pRchh1KVvwOv9b7CGB8ZfjTImVv2oF+sYMUkMZq6Na5Ftmbg==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/constant-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-3.0.4.tgz", + "integrity": "sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case": "^2.0.2" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/copy-anything": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-3.0.5.tgz", + "integrity": "sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==", + "dependencies": { + "is-what": "^4.1.8" + }, + "engines": { + "node": ">=12.13" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/copy-to-clipboard": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", + "dependencies": { + "toggle-selection": "^1.0.6" + } + }, + "node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": 
"https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/countup.js": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/countup.js/-/countup.js-2.8.0.tgz", + "integrity": "sha512-f7xEhX0awl4NOElHulrl4XRfKoNH3rB+qfNSZZyjSZhaAoUk6elvhH+MNxMmlmuUJ2/QNTWPSA7U4mNtIAKljQ==", + "license": "MIT" + }, + "node_modules/cross-fetch": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.8.tgz", + "integrity": "sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==", + "dev": true, + "dependencies": { + "node-fetch": "^2.6.12" + } + }, + "node_modules/cross-inspect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/cross-inspect/-/cross-inspect-1.0.0.tgz", + "integrity": "sha512-4PFfn4b5ZN6FMNGSZlyb7wUhuN8wvj8t/VQHZdM4JsDcruGJ8L2kf9zao98QIrBPFCpdk27qst/AGTl7pL3ypQ==", + "dev": true, + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "license": "MIT" + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", 
+ "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/damerau-levenshtein": { + "version": "1.0.8", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dataloader": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/dataloader/-/dataloader-2.2.2.tgz", + "integrity": "sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g==", + "dev": true + }, + "node_modules/dayjs": { + "version": "1.11.11", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.11.tgz", + "integrity": "sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==" + }, + 
"node_modules/debounce": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", + "integrity": "sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.4", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decimal.js-light": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", + "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", + "license": "MIT" + }, + "node_modules/deep-is": { + "version": "0.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "dev": true, + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dependency-graph": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-0.11.0.tgz", + "integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/detect-indent": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz", + "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "2.1.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": 
"sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, + "node_modules/dset": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/dset/-/dset-3.1.3.tgz", + "integrity": "sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.4.757", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.757.tgz", + "integrity": "sha512-jftDaCknYSSt/+KKeXzH3LX5E2CvRLm75P3Hj+J/dv3CL0qUYcOt13d5FN1NiL5IJbbhzHrb3BomeG2tkSlZmw==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/enhanced-resolve": { + "version": "5.16.0", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/env-cmd": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/env-cmd/-/env-cmd-10.1.0.tgz", + "integrity": "sha512-mMdWTT9XKN7yNth/6N6g2GuKuJTsKMDHlQFUDacb/heQRRWOTIZ42t1rMHnQu4jYxU1ajdTeJM+9eEETlqToMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "commander": "^4.0.0", + "cross-spawn": "^7.0.0" + }, + "bin": { + "env-cmd": "bin/env-cmd.js" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-abstract": { + "version": "1.23.3", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.1.4", 
+ "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", + "object-keys": "^1.1.1", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-iterator-helpers": { + "version": "1.0.19", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.7", + "iterator.prototype": "^1.1.2", + "safe-array-concat": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.0" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.57.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + 
"doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-next": { + "version": "14.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@next/eslint-plugin-next": "14.2.3", + "@rushstack/eslint-patch": "^1.3.3", + "@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || 7.0.0 - 7.2.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^3.5.2", + "eslint-plugin-import": "^2.28.1", + "eslint-plugin-jsx-a11y": "^6.7.1", + "eslint-plugin-react": "^7.33.2", + "eslint-plugin-react-hooks": "^4.5.0 || 5.0.0-canary-7118f5dd7-20230705" + }, + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0", + "typescript": ">=3.3.1" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint-import-resolver-typescript": { + "version": "3.6.1", + "dev": true, + "license": "ISC", + "dependencies": { + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "eslint-module-utils": "^2.7.4", + "fast-glob": "^3.3.1", + "get-tsconfig": "^4.5.0", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts/projects/eslint-import-resolver-ts" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.8.1", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils/node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint-plugin-import": { + "version": "2.29.1", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": 
"^0.3.9", + "eslint-module-utils": "^2.8.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", + "semver": "^6.3.1", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import/node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint-plugin-jsx-a11y": { + "version": "6.8.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.23.2", + "aria-query": "^5.3.0", + "array-includes": "^3.1.7", + "array.prototype.flatmap": "^1.3.2", + "ast-types-flow": "^0.0.8", + "axe-core": "=4.7.0", + "axobject-query": "^3.2.1", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "es-iterator-helpers": "^1.0.15", + "hasown": "^2.0.0", + "jsx-ast-utils": "^3.3.5", + "language-tags": "^1.0.9", + "minimatch": "^3.1.2", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-jsx-a11y/node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint-plugin-react": { + "version": "7.34.1", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.7", + "array.prototype.findlast": "^1.2.4", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.toreversed": "^1.1.2", + "array.prototype.tosorted": "^1.1.3", + "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.0.17", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7", + "object.hasown": "^1.1.3", + "object.values": "^1.1.7", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.10" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "4.6.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + 
"node_modules/eslint/node_modules/doctrine": { + "version": "3.0.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "license": "MIT" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "dependencies": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/extract-files": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/extract-files/-/extract-files-11.0.0.tgz", + "integrity": "sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ==", + "dev": true, + "engines": { + "node": "^12.20 || >= 14.13" + }, + "funding": { + "url": "https://github.com/sponsors/jaydenseric" + } + }, + "node_modules/fast-decode-uri-component": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz", + "integrity": "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==", + "dev": true + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-equals": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz", + "integrity": "sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + 
"dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-querystring": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", + "dev": true, + "dependencies": { + "fast-decode-uri-component": "^1.0.1" + } + }, + "node_modules/fast-url-parser": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", + "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", + "dev": true, + "dependencies": { + "punycode": "^1.3.2" + } + }, + "node_modules/fast-url-parser/node_modules/punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.17.1", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fbjs": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.5.tgz", + "integrity": "sha512-ztsSx77JBtkuMrEypfhgc3cI0+0h+svqeie7xHbh1k/IKdcydnvadp/mUaGgjAOXQmQSxsqgaRhS3q9fy+1kxg==", + "dev": true, + "dependencies": { + "cross-fetch": "^3.1.5", + "fbjs-css-vars": "^1.0.0", + "loose-envify": "^1.0.0", + "object-assign": "^4.1.0", + "promise": "^7.1.1", + "setimmediate": "^1.0.5", + "ua-parser-js": "^1.0.35" + } + }, + "node_modules/fbjs-css-vars": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz", + "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==", + "dev": true + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": 
{ + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "dev": true, + "license": "ISC" + }, + "node_modules/for-each": { + "version": "0.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/foreground-child": { + "version": "3.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/framer-motion": { + "version": "11.1.9", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.1.9.tgz", + "integrity": "sha512-flECDIPV4QDNcOrDafVFiIazp8X01HFpzc01eDKJsdNH/wrATcYydJSH9JbPWMS8UD5lZlw+J1sK8LG2kICgqw==", + "dependencies": { + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/function-bind": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + 
"dependencies": { + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.7.3", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "10.3.10", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.5", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "9.0.4", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/graphql": { + "version": "16.8.1", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", + "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", + "engines": { + "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" + } + }, + "node_modules/graphql-config": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-5.0.3.tgz", + "integrity": "sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ==", + "dev": true, + "dependencies": { + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/merge": "^9.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", + "cosmiconfig": "^8.1.0", + "jiti": "^1.18.2", + "minimatch": "^4.2.3", + 
"string-env-interpolation": "^1.0.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">= 16.0.0" + }, + "peerDependencies": { + "cosmiconfig-toml-loader": "^1.0.0", + "graphql": "^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "peerDependenciesMeta": { + "cosmiconfig-toml-loader": { + "optional": true + } + } + }, + "node_modules/graphql-config/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/graphql-config/node_modules/minimatch": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-4.2.3.tgz", + "integrity": "sha512-lIUdtK5hdofgCTu3aT0sOaHsYR37viUuIc0rwnnDXImbwFRcumyLMeZaM0t0I/fgxS6s6JMfu0rLD1Wz9pv1ng==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/graphql-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/graphql-request/-/graphql-request-6.1.0.tgz", + "integrity": "sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw==", + "dev": true, + "dependencies": { + "@graphql-typed-document-node/core": "^3.2.0", + "cross-fetch": "^3.1.5" + }, + "peerDependencies": { + "graphql": "14 - 16" + } + }, + "node_modules/graphql-tag": { + "version": "2.12.6", + "resolved": "https://registry.npmjs.org/graphql-tag/-/graphql-tag-2.12.6.tgz", + "integrity": "sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } + }, + "node_modules/graphql-ws": { + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.16.0.tgz", + "integrity": "sha512-Ju2RCU2dQMgSKtArPbEtsK5gNLnsQyTNIo/T7cZNp96niC1x0KdJNZV0TIoilceBPQwfb5itrGl8pkFeOUMl4A==", + "devOptional": true, + "workspaces": [ + "website" + ], + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "graphql": ">=0.11 <=16" + } + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/header-case": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/header-case/-/header-case-2.0.4.tgz", + "integrity": "sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==", + "dev": true, + "dependencies": { + "capital-case": "^1.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "dependencies": { + "react-is": "^16.7.0" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dev": true, + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/immutable": { + "version": "3.7.6", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.7.6.tgz", + "integrity": "sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/import-from/-/import-from-4.0.0.tgz", + "integrity": "sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ==", + "dev": true, + "engines": { + "node": ">=12.2" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "license": "ISC" + }, + "node_modules/inquirer": { + "version": "8.2.6", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.6.tgz", + "integrity": "sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^6.0.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/inquirer/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/inquirer/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/internal-slot": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/invariant": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "dev": true, + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/is-absolute": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz", + "integrity": "sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==", + "dev": true, + "dependencies": { + "is-relative": "^1.0.0", + "is-windows": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/is-array-buffer": { + "version": "3.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-async-function": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "dev": true, + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", + "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lower-case": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/is-lower-case/-/is-lower-case-2.0.2.tgz", + "integrity": "sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-relative": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", + "integrity": "sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", + "dev": true, + "dependencies": { + "is-unc-path": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.13", + "dev": true, + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-unc-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", + "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", + "dev": true, + "dependencies": { + "unc-path-regex": "^0.1.2" + }, + "engines": { + "node": ">=0.10.0" + 
} + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-upper-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-upper-case/-/is-upper-case-2.0.2.tgz", + "integrity": "sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-what": { + "version": "4.1.16", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-4.1.16.tgz", + "integrity": "sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==", + "engines": { + "node": ">=12.13" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "dev": true, + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/isomorphic-ws": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", + "integrity": "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==", + "dev": true, + "peerDependencies": { + "ws": "*" + } + }, + "node_modules/iterator.prototype": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "get-intrinsic": "^1.2.1", + "has-symbols": "^1.0.3", + "reflect.getprototypeof": "^1.0.4", + "set-function-name": "^2.0.1" + } + }, + "node_modules/jackspeak": { + "version": "2.3.6", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jiti": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.0.tgz", + "integrity": "sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/jose": { + "version": "5.2.4", + "resolved": 
"https://registry.npmjs.org/jose/-/jose-5.2.4.tgz", + "integrity": "sha512-6ScbIk2WWCeXkmzF6bRPmEuaqy1m8SbsRFMa/FLrSCkGIhj8OLVG/IH+XHVmNMx/KUo8cVWEE6oKR4dJ+S0Rkg==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/json-to-pretty-yaml": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/json-to-pretty-yaml/-/json-to-pretty-yaml-1.2.2.tgz", + "integrity": "sha512-rvm6hunfCcqegwYaG5T4yKJWxc9FXFgBVrcTZ4XfSVRwa5HA/Xs+vB/Eo9treYYHCeNM0nrSUr82V/M31Urc7A==", + "dev": true, + "dependencies": { + "remedial": "^1.0.7", + "remove-trailing-spaces": "^1.0.6" + }, + "engines": { + "node": ">= 0.2.0" + } + }, + "node_modules/json2mq": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/json2mq/-/json2mq-0.2.0.tgz", + "integrity": "sha512-SzoRg7ux5DWTII9J2qkrZrqV1gt+rTaoufMxEzXbS26Uid0NwaJd123HcoB80TgubEppxxIGdNxCx50fEoEWQA==", + "dependencies": { + "string-convert": "^0.2.0" + } + }, + "node_modules/json5": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.5", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/language-subtag-registry": { + "version": "0.3.22", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/language-tags": { + "version": "1.0.9", + "dev": true, + "license": "MIT", + "dependencies": { + "language-subtag-registry": "^0.3.20" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + 
"dev": true + }, + "node_modules/linkify-html": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/linkify-html/-/linkify-html-4.1.3.tgz", + "integrity": "sha512-Ejb8X/pOxB4IVqG1U37tnF85UW3JtX+eHudH3zlZ2pODz2e/J7zQ/vj+VDWffwhTecJqdRehhluwrRmKoJz+iQ==", + "peerDependencies": { + "linkifyjs": "^4.0.0" + } + }, + "node_modules/linkifyjs": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/linkifyjs/-/linkifyjs-4.1.3.tgz", + "integrity": "sha512-auMesunaJ8yfkHvK4gfg1K0SaKX/6Wn9g2Aac/NwX+l5VdmFZzo/hdPGxEOETj+ryRa4/fiOPjeeKURSAJx1sg==", + "peer": true + }, + "node_modules/listr2": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-4.0.5.tgz", + "integrity": "sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA==", + "dev": true, + "dependencies": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.5", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "enquirer": ">= 2.3.0 < 3" + }, + "peerDependenciesMeta": { + "enquirer": { + "optional": true + } + } + }, + "node_modules/listr2/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/listr2/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/log-update/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lower-case-first": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case-first/-/lower-case-first-2.0.2.tgz", + "integrity": "sha512-EVm/rR94FJTZi3zefZ82fLWab+GX14LJN4HrWBcuo6Evmsl9hEfnqxgcHCKb9q+mNf6EVdsjx/qucYFIIB84pg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "dev": true, + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": "sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/meros": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/meros/-/meros-1.3.0.tgz", + "integrity": "sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w==", + "dev": true, + "engines": { + "node": ">=13" + }, + "peerDependencies": { + "@types/node": ">=13" + }, + 
"peerDependenciesMeta": { + "@types/node": { + "optional": true + } + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimatch/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.0.4", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.7", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "dev": true, + "license": "MIT" + }, + "node_modules/next": { + "version": "14.2.3", + "license": "MIT", + "dependencies": { + "@next/env": "14.2.3", + "@swc/helpers": "0.5.5", + "busboy": "1.6.0", + "caniuse-lite": "^1.0.30001579", + "graceful-fs": "^4.2.11", + "postcss": "8.4.31", + "styled-jsx": "5.1.1" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": ">=18.17.0" + }, + "optionalDependencies": { + "@next/swc-darwin-arm64": "14.2.3", + "@next/swc-darwin-x64": "14.2.3", + "@next/swc-linux-arm64-gnu": "14.2.3", + "@next/swc-linux-arm64-musl": "14.2.3", + "@next/swc-linux-x64-gnu": "14.2.3", + "@next/swc-linux-x64-musl": "14.2.3", + "@next/swc-win32-arm64-msvc": "14.2.3", + "@next/swc-win32-ia32-msvc": "14.2.3", + "@next/swc-win32-x64-msvc": "14.2.3" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0", + "@playwright/test": "^1.41.2", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@playwright/test": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dev": true, + "dependencies": { + "lower-case": "^2.0.2", + 
"tslib": "^2.0.3" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", + "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==", + "dev": true, + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nullthrows": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", + "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==", + "dev": true + }, + "node_modules/object-assign": { + "version": "4.1.1", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.hasown": { + "version": "1.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.values": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optimism": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/optimism/-/optimism-0.18.0.tgz", + "integrity": "sha512-tGn8+REwLRNFnb9WmcY5IfpOqeX2kpaYJ1s6Ae3mn12AeydLkR3j+jSCmVQFoXqU8D41PAJ1RG1rCRNWmNZVmQ==", + "dependencies": { + "@wry/caches": "^1.0.0", + "@wry/context": "^0.7.0", + "@wry/trie": "^0.4.3", + "tslib": "^2.3.0" + } + }, + "node_modules/optimism/node_modules/@wry/trie": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@wry/trie/-/trie-0.4.3.tgz", + "integrity": "sha512-I6bHwH0fSf6RqQcnnXLJKhkSXG45MFral3GxPaY4uAl0LYDZM+YDVDAiU9bYwjTuysy1S0IeecWtmq1SZA3M1w==", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", + "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", + "dev": true, + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": 
"sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/param-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", + "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", + "dev": true, + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-filepath": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/parse-filepath/-/parse-filepath-1.0.2.tgz", + "integrity": "sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q==", + "dev": true, + "dependencies": { + "is-absolute": "^1.0.0", + "map-cache": "^0.2.0", + "path-root": "^0.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/path-case/-/path-case-3.0.4.tgz", + "integrity": "sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==", + "dev": true, + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/path-root": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/path-root/-/path-root-0.1.1.tgz", + "integrity": "sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg==", + "dev": true, + "dependencies": { + "path-root-regex": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/path-root-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/path-root-regex/-/path-root-regex-0.1.2.tgz", + "integrity": "sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-scurry": { + "version": "1.10.2", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.4.31", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "dev": true, + "dependencies": { + "asap": "~2.0.3" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/pvtsutils": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz", + "integrity": "sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA==", + "dev": true, + "dependencies": { + "tslib": "^2.6.1" + } + }, + "node_modules/pvutils": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.3.tgz", + "integrity": "sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/qrcode.react": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/qrcode.react/-/qrcode.react-3.1.0.tgz", + "integrity": "sha512-oyF+Urr3oAMUG/OiOuONL3HXM+53wvuH3mtIWQrYmsXoAq0DkvZp2RYUWFSMFtbdOpuS++9v+WAkzNVkMlNW6Q==", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": 
"https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/rc-cascader": { + "version": "3.25.0", + "resolved": "https://registry.npmjs.org/rc-cascader/-/rc-cascader-3.25.0.tgz", + "integrity": "sha512-mBY6/CykOvzAYnIye0rpt5JkMAXJaX8zZawOwSndbKuFakYE+leqBQWIZoN9HIgAptPpTi2Aty3RvbaBmk8SKQ==", + "dependencies": { + "@babel/runtime": "^7.12.5", + "array-tree-filter": "^2.1.0", + "classnames": "^2.3.1", + "rc-select": "~14.13.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.37.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-checkbox": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/rc-checkbox/-/rc-checkbox-3.2.0.tgz", + "integrity": "sha512-8inzw4y9dAhZmv/Ydl59Qdy5tdp9CKg4oPVcRigi+ga/yKPZS5m5SyyQPtYSgbcqHRYOdUhiPSeKfktc76du1A==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.25.2" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-collapse": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/rc-collapse/-/rc-collapse-3.7.3.tgz", + "integrity": "sha512-60FJcdTRn0X5sELF18TANwtVi7FtModq649H11mYF1jh83DniMoM4MqY627sEKRCTm4+WXfGDcB7hY5oW6xhyw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.3.4", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-dialog": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/rc-dialog/-/rc-dialog-9.4.0.tgz", + "integrity": "sha512-AScCexaLACvf8KZRqCPz12BJ8olszXOS4lKlkMyzDQHS1m0zj1KZMYgmMCh39ee0Dcv8kyrj8mTqxuLyhH+QuQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/portal": "^1.0.0-8", + "classnames": "^2.2.6", + "rc-motion": "^2.3.0", + "rc-util": "^5.21.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-drawer": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/rc-drawer/-/rc-drawer-7.1.0.tgz", + "integrity": "sha512-nBE1rF5iZvpavoyqhSSz2mk/yANltA7g3aF0U45xkx381n3we/RKs9cJfNKp9mSWCedOKWt9FLEwZDaAaOGn2w==", + "dependencies": { + "@babel/runtime": "^7.23.9", + "@rc-component/portal": "^1.1.1", + "classnames": "^2.2.6", + "rc-motion": "^2.6.1", + "rc-util": "^5.38.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-dropdown": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/rc-dropdown/-/rc-dropdown-4.2.0.tgz", + "integrity": "sha512-odM8Ove+gSh0zU27DUj5cG1gNKg7mLWBYzB5E4nNLrLwBmYEgYP43vHKDGOVZcJSVElQBI0+jTQgjnq0NfLjng==", + "dependencies": { + "@babel/runtime": "^7.18.3", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-util": "^5.17.0" + }, + "peerDependencies": { + "react": ">=16.11.0", + "react-dom": ">=16.11.0" + } + }, + "node_modules/rc-field-form": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/rc-field-form/-/rc-field-form-2.0.0.tgz", + "integrity": "sha512-Xwp8SoSbWQOsz3s7IwEkDj/fF73Wa+vVPtP+gnaq6wVvPX8aPhx1vrHpFcgmiIOKdKIdezGLFsTyU3kNXn+IEA==", + "dependencies": { + "@babel/runtime": "^7.18.0", + "@rc-component/async-validator": "^5.0.1", + "rc-util": "^5.32.2" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-image": { + "version": "7.6.0", 
+ "resolved": "https://registry.npmjs.org/rc-image/-/rc-image-7.6.0.tgz", + "integrity": "sha512-tL3Rvd1sS+frZQ01i+tkeUPaOeFz2iG9/scAt/Cfs0hyCRVA/w0Pu1J/JxIX8blalvmHE0bZQRYdOmRAzWu4Hg==", + "dependencies": { + "@babel/runtime": "^7.11.2", + "@rc-component/portal": "^1.0.2", + "classnames": "^2.2.6", + "rc-dialog": "~9.4.0", + "rc-motion": "^2.6.2", + "rc-util": "^5.34.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-input": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/rc-input/-/rc-input-1.4.5.tgz", + "integrity": "sha512-AjzykhwnwYTRSwwgCu70CGKBIAv6bP2nqnFptnNTprph/TF1BAs0Qxl91mie/BR6n827WIJB6ZjaRf9iiMwAfw==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-util": "^5.18.1" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" + } + }, + "node_modules/rc-input-number": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/rc-input-number/-/rc-input-number-9.0.0.tgz", + "integrity": "sha512-RfcDBDdWFFetouWFXBA+WPEC8LzBXyngr9b+yTLVIygfFu7HiLRGn/s/v9wwno94X7KFvnb28FNynMGj9XJlDQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/mini-decimal": "^1.0.1", + "classnames": "^2.2.5", + "rc-input": "~1.4.0", + "rc-util": "^5.28.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-mentions": { + "version": "2.11.1", + "resolved": "https://registry.npmjs.org/rc-mentions/-/rc-mentions-2.11.1.tgz", + "integrity": "sha512-upb4AK1SRFql7qGnbLEvJqLMugVVIyjmwBJW9L0eLoN9po4JmJZaBzmKA4089fNtsU8k6l/tdZiVafyooeKnLw==", + "dependencies": { + "@babel/runtime": "^7.22.5", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.6", + "rc-input": "~1.4.0", + "rc-menu": "~9.13.0", + "rc-textarea": "~1.6.1", + "rc-util": "^5.34.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-menu": { + "version": "9.13.0", + "resolved": "https://registry.npmjs.org/rc-menu/-/rc-menu-9.13.0.tgz", + "integrity": "sha512-1l8ooCB3HcYJKCltC/s7OxRKRjgymdl9htrCeGZcXNaMct0RxZRK6OPV3lPhVksIvAGMgzPd54ClpZ5J4b8cZA==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + "classnames": "2.x", + "rc-motion": "^2.4.3", + "rc-overflow": "^1.3.1", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-motion": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/rc-motion/-/rc-motion-2.9.0.tgz", + "integrity": "sha512-XIU2+xLkdIr1/h6ohPZXyPBMvOmuyFZQ/T0xnawz+Rh+gh4FINcnZmMT5UTIj6hgI0VLDjTaPeRd+smJeSPqiQ==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-util": "^5.21.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-notification": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/rc-notification/-/rc-notification-5.4.0.tgz", + "integrity": "sha512-li19y9RoYJciF3WRFvD+DvWS70jdL8Fr+Gfb/OshK+iY6iTkwzoigmSIp76/kWh5tF5i/i9im12X3nsF85GYdA==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.9.0", + "rc-util": "^5.20.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-overflow": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/rc-overflow/-/rc-overflow-1.3.2.tgz", + "integrity": 
"sha512-nsUm78jkYAoPygDAcGZeC2VwIg/IBGSodtOY3pMof4W3M9qRJgqaDYm03ZayHlde3I6ipliAxbN0RUcGf5KOzw==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.37.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-pagination": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/rc-pagination/-/rc-pagination-4.0.4.tgz", + "integrity": "sha512-GGrLT4NgG6wgJpT/hHIpL9nELv27A1XbSZzECIuQBQTVSf4xGKxWr6I/jhpRPauYEWEbWVw22ObG6tJQqwJqWQ==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.3.2", + "rc-util": "^5.38.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-picker": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-4.5.0.tgz", + "integrity": "sha512-suqz9bzuhBQlf7u+bZd1bJLPzhXpk12w6AjQ9BTPTiFwexVZgUKViG1KNLyfFvW6tCUZZK0HmCCX7JAyM+JnCg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.2.1", + "rc-overflow": "^1.3.2", + "rc-resize-observer": "^1.4.0", + "rc-util": "^5.38.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "date-fns": ">= 2.x", + "dayjs": ">= 1.x", + "luxon": ">= 3.x", + "moment": ">= 2.x", + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + }, + "peerDependenciesMeta": { + "date-fns": { + "optional": true + }, + "dayjs": { + "optional": true + }, + "luxon": { + "optional": true + }, + "moment": { + "optional": true + } + } + }, + "node_modules/rc-progress": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/rc-progress/-/rc-progress-4.0.0.tgz", + "integrity": "sha512-oofVMMafOCokIUIBnZLNcOZFsABaUw8PPrf1/y0ZBvKZNpOiu5h4AO9vv11Sw0p4Hb3D0yGWuEattcQGtNJ/aw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.6", + "rc-util": "^5.16.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-rate": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/rc-rate/-/rc-rate-2.12.0.tgz", + "integrity": "sha512-g092v5iZCdVzbjdn28FzvWebK2IutoVoiTeqoLTj9WM7SjA/gOJIw5/JFZMRyJYYVe1jLAU2UhAfstIpCNRozg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.0.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-resize-observer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/rc-resize-observer/-/rc-resize-observer-1.4.0.tgz", + "integrity": "sha512-PnMVyRid9JLxFavTjeDXEXo65HCRqbmLBw9xX9gfC4BZiSzbLXKzW3jPz+J0P71pLbD5tBMTT+mkstV5gD0c9Q==", + "dependencies": { + "@babel/runtime": "^7.20.7", + "classnames": "^2.2.1", + "rc-util": "^5.38.0", + "resize-observer-polyfill": "^1.5.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-segmented": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/rc-segmented/-/rc-segmented-2.3.0.tgz", + "integrity": "sha512-I3FtM5Smua/ESXutFfb8gJ8ZPcvFR+qUgeeGFQHBOvRiRKyAk4aBE5nfqrxXx+h8/vn60DQjOt6i4RNtrbOobg==", + "dependencies": { + "@babel/runtime": "^7.11.1", + "classnames": "^2.2.1", + "rc-motion": "^2.4.4", + "rc-util": "^5.17.0" + }, + "peerDependencies": { + "react": ">=16.0.0", + "react-dom": ">=16.0.0" + } + }, + "node_modules/rc-select": { + "version": "14.13.2", + "resolved": 
"https://registry.npmjs.org/rc-select/-/rc-select-14.13.2.tgz", + "integrity": "sha512-Xwt5ZcS5PKGR6bJL/dBRH6AFtC8FgVu2a+2T8NuyldhppKZlmZREK3nc5gONf+VlN+IbCxbr6vivgkbdPZJYng==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/trigger": "^2.1.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-overflow": "^1.3.1", + "rc-util": "^5.16.1", + "rc-virtual-list": "^3.5.2" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-slider": { + "version": "10.6.2", + "resolved": "https://registry.npmjs.org/rc-slider/-/rc-slider-10.6.2.tgz", + "integrity": "sha512-FjkoFjyvUQWcBo1F3RgSglky3ar0+qHLM41PlFVYB4Bj3RD8E/Mv7kqMouLFBU+3aFglMzzctAIWRwajEuueSw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.5", + "rc-util": "^5.36.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-steps": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/rc-steps/-/rc-steps-6.0.1.tgz", + "integrity": "sha512-lKHL+Sny0SeHkQKKDJlAjV5oZ8DwCdS2hFhAkIjuQt1/pB81M0cA0ErVFdHq9+jmPmFw1vJB2F5NBzFXLJxV+g==", + "dependencies": { + "@babel/runtime": "^7.16.7", + "classnames": "^2.2.3", + "rc-util": "^5.16.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-switch": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/rc-switch/-/rc-switch-4.1.0.tgz", + "integrity": "sha512-TI8ufP2Az9oEbvyCeVE4+90PDSljGyuwix3fV58p7HV2o4wBnVToEyomJRVyTaZeqNPAp+vqeo4Wnj5u0ZZQBg==", + "dependencies": { + "@babel/runtime": "^7.21.0", + "classnames": "^2.2.1", + "rc-util": "^5.30.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-table": { + "version": "7.45.5", + "resolved": "https://registry.npmjs.org/rc-table/-/rc-table-7.45.5.tgz", + "integrity": "sha512-R5sOfToOk7CalSkebZpqM8lkKWOJR7uXPGEhjjTSoj5egyHBwMxaACoPj2oI+6qLSll9yZrG5K+8HTN57b2Ahg==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "@rc-component/context": "^1.4.0", + "classnames": "^2.2.5", + "rc-resize-observer": "^1.1.0", + "rc-util": "^5.37.0", + "rc-virtual-list": "^3.11.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-tabs": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/rc-tabs/-/rc-tabs-15.0.0.tgz", + "integrity": "sha512-7m541VcEiJSpHZmosMZNMIhemxtIN+f0WDhZNyXQ1/cZ40aaWsknlbj0FH6HryLoKEQvBnCI89hgQuT7MBSOBA==", + "dependencies": { + "@babel/runtime": "^7.11.2", + "classnames": "2.x", + "rc-dropdown": "~4.2.0", + "rc-menu": "~9.13.0", + "rc-motion": "^2.6.2", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.34.1" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-textarea": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/rc-textarea/-/rc-textarea-1.6.3.tgz", + "integrity": "sha512-8k7+8Y2GJ/cQLiClFMg8kUXOOdvcFQrnGeSchOvI2ZMIVvX5a3zQpLxoODL0HTrvU63fPkRmMuqaEcOF9dQemA==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "^2.2.1", + "rc-input": "~1.4.0", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.27.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-tooltip": { + "version": "6.2.0", + 
"resolved": "https://registry.npmjs.org/rc-tooltip/-/rc-tooltip-6.2.0.tgz", + "integrity": "sha512-iS/3iOAvtDh9GIx1ulY7EFUXUtktFccNLsARo3NPgLf0QW9oT0w3dA9cYWlhqAKmD+uriEwdWz1kH0Qs4zk2Aw==", + "dependencies": { + "@babel/runtime": "^7.11.2", + "@rc-component/trigger": "^2.0.0", + "classnames": "^2.3.1" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-tree": { + "version": "5.8.7", + "resolved": "https://registry.npmjs.org/rc-tree/-/rc-tree-5.8.7.tgz", + "integrity": "sha512-cpsIQZ4nNYwpj6cqPRt52e/69URuNdgQF9wZ10InmEf8W3+i0A41OVmZWwHuX9gegQSqj+DPmaDkZFKQZ+ZV1w==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-motion": "^2.0.1", + "rc-util": "^5.16.1", + "rc-virtual-list": "^3.5.1" + }, + "engines": { + "node": ">=10.x" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-tree-select": { + "version": "5.20.0", + "resolved": "https://registry.npmjs.org/rc-tree-select/-/rc-tree-select-5.20.0.tgz", + "integrity": "sha512-zFtkHx5/6PnXSi3oSbBSFbIPiJJQdpSU3qz/joLe75URgvxmTHi989O8MtMgpwyZwrCMOJpGi6L1uy+13uzZPw==", + "dependencies": { + "@babel/runtime": "^7.10.1", + "classnames": "2.x", + "rc-select": "~14.13.0", + "rc-tree": "~5.8.1", + "rc-util": "^5.16.1" + }, + "peerDependencies": { + "react": "*", + "react-dom": "*" + } + }, + "node_modules/rc-upload": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/rc-upload/-/rc-upload-4.5.2.tgz", + "integrity": "sha512-QO3ne77DwnAPKFn0bA5qJM81QBjQi0e0NHdkvpFyY73Bea2NfITiotqJqVjHgeYPOJu5lLVR32TNGP084aSoXA==", + "dependencies": { + "@babel/runtime": "^7.18.3", + "classnames": "^2.2.5", + "rc-util": "^5.2.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-util": { + "version": "5.39.3", + "resolved": "https://registry.npmjs.org/rc-util/-/rc-util-5.39.3.tgz", + "integrity": "sha512-j9wOELkLQ8gC/NkUg3qg9mHZcJf+5mYYv40JrDHqnaf8VSycji4pCf7kJ5fdTXQPDIF0vr5zpb/T2HdrMs9rWA==", + "dependencies": { + "@babel/runtime": "^7.18.3", + "react-is": "^18.2.0" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/rc-util/node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==" + }, + "node_modules/rc-virtual-list": { + "version": "3.11.5", + "resolved": "https://registry.npmjs.org/rc-virtual-list/-/rc-virtual-list-3.11.5.tgz", + "integrity": "sha512-iZRW99m5jAxtwKNPLwUrPryurcnKpXBdTyhuBp6ythf7kg/otKO5cCiIvL55GQwU0QGSlouQS0tnkciRMJUwRQ==", + "dependencies": { + "@babel/runtime": "^7.20.0", + "classnames": "^2.2.6", + "rc-resize-observer": "^1.0.0", + "rc-util": "^5.36.0" + }, + "engines": { + "node": ">=8.x" + }, + "peerDependencies": { + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/react": { + "version": "18.3.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-countup": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/react-countup/-/react-countup-6.5.3.tgz", + "integrity": "sha512-udnqVQitxC7QWADSPDOxVWULkLvKUWrDapn5i53HE4DPRVgs+Y5rr4bo25qEl8jSh+0l2cToJgGMx+clxPM3+w==", + "license": "MIT", + "dependencies": { + "countup.js": "^2.8.0" + }, + "peerDependencies": { + "react": ">= 16.3.0" + } + }, + 
"node_modules/react-dom": { + "version": "18.3.1", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-is": { + "version": "16.13.1", + "license": "MIT" + }, + "node_modules/react-smooth": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.1.tgz", + "integrity": "sha512-OE4hm7XqR0jNOq3Qmk9mFLyd6p2+j6bvbPJ7qlB7+oo0eNcL2l7WQzG6MBnT3EXY6xzkLMUBec3AfewJdA0J8w==", + "license": "MIT", + "dependencies": { + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "license": "BSD-3-Clause", + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/recharts": { + "version": "2.12.7", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.7.tgz", + "integrity": "sha512-hlLJMhPQfv4/3NBSAyq3gzGg4h2v69RJh6KU7b3pXYNNAELs9kEoXOjbkxdXpALqKBoVmVptGfLpxdaVYqjmXQ==", + "license": "MIT", + "dependencies": { + "clsx": "^2.0.0", + "eventemitter3": "^4.0.1", + "lodash": "^4.17.21", + "react-is": "^16.10.2", + "react-smooth": "^4.0.0", + "recharts-scale": "^0.4.4", + "tiny-invariant": "^1.3.1", + "victory-vendor": "^36.6.8" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": "^16.0.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/recharts-scale": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz", + "integrity": "sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==", + "license": "MIT", + "dependencies": { + "decimal.js-light": "^2.4.1" + } + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "which-builtin-type": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "license": "MIT" + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/rehackt": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/rehackt/-/rehackt-0.1.0.tgz", + "integrity": "sha512-7kRDOuLHB87D/JESKxQoRwv4DzbIdwkAGQ7p6QKGdVlY1IZheUnVhlk/4UZlNUVxdAXpyxikE3URsG067ybVzw==", + "peerDependencies": { + "@types/react": "*", + "react": "*" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react": { + "optional": true + } + } + }, + "node_modules/relay-runtime": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/relay-runtime/-/relay-runtime-12.0.0.tgz", + "integrity": "sha512-QU6JKr1tMsry22DXNy9Whsq5rmvwr3LSZiiWV/9+DFpuTWvp+WFhobWMc8TC4OjKFfNhEZy7mOiqUAn5atQtug==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.0.0", + "fbjs": "^3.0.0", + "invariant": "^2.2.4" + } + }, + "node_modules/remedial": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/remedial/-/remedial-1.0.8.tgz", + "integrity": "sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", + "dev": true + }, + "node_modules/remove-trailing-spaces": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/remove-trailing-spaces/-/remove-trailing-spaces-1.0.8.tgz", + "integrity": "sha512-O3vsMYfWighyFbTd8hk8VaSj9UAGENxAtX+//ugIst2RMk5e03h6RoIS+0ylsFxY1gvmPuAY/PO4It+gPEeySA==", + "dev": true + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/resize-observer-polyfill": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", + "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" + }, + "node_modules/resolve": { + "version": "1.22.8", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/response-iterator": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/response-iterator/-/response-iterator-0.2.6.tgz", + "integrity": 
"sha512-pVzEEzrsg23Sh053rmDUvLSkGXluZio0qu8VT6ukrYuvtjVfCbDZH9d6PGXb8HZfzdNZt8feXv/jvUzlhRgLnw==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/reusify": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", + "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "dev": true + }, + "node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-async": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": 
"https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-regex-test": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-regex": "^1.1.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/scheduler": { + "version": "0.23.2", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/scroll-into-view-if-needed": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.1.0.tgz", + "integrity": "sha512-49oNpRjWRvnU8NyGVmUaYG4jtTkNonFZI86MmGRDqBphEK2EXT9gdEUoQPZhuBM8yWHxCWbobltqYO5M4XrUvQ==", + "dependencies": { + "compute-scroll-into-view": "^3.0.2" + } + }, + "node_modules/scuid": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/scuid/-/scuid-1.1.0.tgz", + "integrity": "sha512-MuCAyrGZcTLfQoH2XoBlQ8C6bzwN88XT/0slOGz0pn8+gIP85BOAfYa44ZXQUTOwRwPU0QvgU+V+OSajl/59Xg==", + "dev": true + }, + "node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/sentence-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/sentence-case/-/sentence-case-3.0.4.tgz", + "integrity": "sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3", + "upper-case-first": "^2.0.2" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.8.1", + "resolved": 
"https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz", + "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel": { + "version": "1.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/signedsource": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/signedsource/-/signedsource-1.0.0.tgz", + "integrity": "sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/snake-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", + "integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", + "dev": true, + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sponge-case": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sponge-case/-/sponge-case-1.0.1.tgz", + "integrity": "sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-convert": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/string-convert/-/string-convert-0.2.1.tgz", + "integrity": "sha512-u/1tdPl4yQnPBjnVrmdLo9gtuLvELKsAoRapekWggdiQNvvvum+jYF329d84NAa660KQw7pB2n36KrIKVoXa3A==" + }, + "node_modules/string-env-interpolation": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz", + "integrity": "sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg==", + "dev": true + }, + "node_modules/string-width": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + 
"strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/string-width/node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "license": "MIT" + }, + "node_modules/string-width/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.11", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.7", + "regexp.prototype.flags": "^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.9", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/styled-jsx": { + "version": "5.1.1", + "license": "MIT", + "dependencies": { + "client-only": "0.0.1" + }, + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" + }, + 
"peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/stylis": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.2.tgz", + "integrity": "sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==" + }, + "node_modules/superjson": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/superjson/-/superjson-2.2.1.tgz", + "integrity": "sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==", + "dependencies": { + "copy-anything": "^3.0.2" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/swap-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/swap-case/-/swap-case-2.0.2.tgz", + "integrity": "sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/symbol-observable": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-4.0.0.tgz", + "integrity": "sha512-b19dMThMV4HVFynSAM1++gBHAbk2Tc/osgLIBZMKsyqh34jb2e8Os7T6ZW/Bt3pJFdBTd2JwAnAAEQV7rSNvcQ==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/tapable": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/throttle-debounce": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-5.0.0.tgz", + "integrity": "sha512-2iQTSgkkc1Zyk0MeVrt/3BvuOXYPl/R8Z0U2xxo9rjwNciaHDG3R+Lm6dh4EeUci49DanvBnuqI6jshoQQRGEg==", + "engines": { + "node": ">=12.22" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, + "node_modules/title-case": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/title-case/-/title-case-3.0.3.tgz", + "integrity": "sha512-e1zGYRvbffpcHIrnuqT0Dh+gEJtDaxDSoG4JAIpq4oDFyooziLBIiYQv0GBT4FUAnUop5uZ1hiIAj7oAF6sOCA==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toggle-selection": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz", + "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "dev": true + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-invariant": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/ts-invariant/-/ts-invariant-0.10.3.tgz", + "integrity": "sha512-uivwYcQaxAucv1CzRp2n/QdYPo4ILf9VXgH19zEIjFx2EJufV16P0JtJVpYHy89DItG6Kwj2oIUjrcK5au+4tQ==", + "dependencies": { + "tslib": "^2.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ts-log": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/ts-log/-/ts-log-2.2.5.tgz", + "integrity": "sha512-PGcnJoTBnVGy6yYNFxWVNkdcAuAMstvutN9MgDJIV6L0oG8fB+ZNNy1T+wJzah8RPGor1mZuPQkVfXNDpy9eHA==", + "dev": true + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tslib": { + "version": "2.6.2", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": 
{ + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.4.5", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ua-parser-js": { + "version": "1.0.37", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.37.tgz", + "integrity": "sha512-bhTyI94tZofjo+Dn8SN6Zv8nBDvyXTymAdM3LDI/0IboIUwTu1rEhW7v2TfiVsoYWgkQ4kOVqnI8APUFbIQIFQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "engines": { + "node": "*" + } + }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/unc-path-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", + "integrity": "sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "dev": true, + "license": "MIT" + }, + "node_modules/unixify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unixify/-/unixify-1.0.0.tgz", + "integrity": "sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg==", + "dev": true, + "dependencies": { + "normalize-path": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.15.tgz", + "integrity": "sha512-K9HWH62x3/EalU1U6sjSZiylm9C8tgq2mSvshZpqc7QE69RaA2qjhkW2HlNA0tFpEbtyFz7HTqbSdN4MSwUodA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/upper-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/upper-case/-/upper-case-2.0.2.tgz", + "integrity": "sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/upper-case-first": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/upper-case-first/-/upper-case-first-2.0.2.tgz", + "integrity": 
"sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/urlpattern-polyfill": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz", + "integrity": "sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==", + "dev": true + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/value-or-promise": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/value-or-promise/-/value-or-promise-1.0.12.tgz", + "integrity": "sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/victory-vendor": { + "version": "36.9.2", + "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.9.2.tgz", + "integrity": "sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==", + "license": "MIT AND ISC", + "dependencies": { + "@types/d3-array": "^3.0.3", + "@types/d3-ease": "^3.0.0", + "@types/d3-interpolate": "^3.0.1", + "@types/d3-scale": "^4.0.2", + "@types/d3-shape": "^3.1.0", + "@types/d3-time": "^3.0.0", + "@types/d3-timer": "^3.0.0", + "d3-array": "^3.1.6", + "d3-ease": "^3.0.1", + "d3-interpolate": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-shape": "^3.1.0", + "d3-time": "^3.0.0", + "d3-timer": "^3.0.1" + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", + "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", + "dev": true, + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/webcrypto-core": { + "version": "1.7.9", + "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.9.tgz", + "integrity": "sha512-FE+a4PPkOmBbgNDIyRmcHhgXn+2ClRl3JzJdDu/P4+B8y81LqKe6RAsI9b3lAOHe1T1BMkSjsRHTYRikImZnVA==", + "dev": true, + "dependencies": { + "@peculiar/asn1-schema": "^2.3.8", + "@peculiar/json-schema": "^1.1.12", + "asn1js": "^3.0.1", + "pvtsutils": "^1.3.5", + "tslib": "^2.6.2" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": 
"sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "dev": true + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dev": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "function.prototype.name": "^1.1.5", + "has-tostringtag": "^1.0.0", + "is-async-function": "^2.0.0", + "is-date-object": "^1.0.5", + "is-finalizationregistry": "^1.0.2", + "is-generator-function": "^1.0.10", + "is-regex": "^1.1.4", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-module": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", + "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==", + "dev": true + }, + "node_modules/which-typed-array": { + "version": "1.1.15", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": 
"^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.2.tgz", + "integrity": "sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA==", + "dev": true, + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yaml-ast-parser": { + "version": "0.0.43", + "resolved": "https://registry.npmjs.org/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz", + "integrity": "sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + 
"is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zen-observable": { + "version": "0.8.15", + "resolved": "https://registry.npmjs.org/zen-observable/-/zen-observable-0.8.15.tgz", + "integrity": "sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ==" + }, + "node_modules/zen-observable-ts": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-1.2.5.tgz", + "integrity": "sha512-QZWQekv6iB72Naeake9hS1KxHlotfRpe+WGNbNx5/ta+R3DNjVO2bswf63gXlWDcs+EMd7XY8HfVQyP1X6T4Zg==", + "dependencies": { + "zen-observable": "0.8.15" + } + }, + "node_modules/zod": { + "version": "3.23.8", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json index 910e731..a0d093b 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -8,7 +8,8 @@ "start": "next start", "lint": "next lint", "generate": "graphql-codegen --config ./src/graphql/codegen.ts", - "predev": "npm run generate" + "predev": "npm run generate", + "build:test": "NODE_ENV=test next build" }, "dependencies": { "@ant-design/colors": "^7.0.2", @@ -26,7 +27,6 @@ "next": "14.2.3", "rc-menu": "^9.13.0", "react": "^18", - "react-collapsible": "^2.10.0", "react-countup": "^6.5.3", "react-dom": "^18", "recharts": "^2.12.7", @@ -42,6 +42,7 @@ "@types/react": "^18", "@types/react-dom": "^18", "@types/uuid": "^9.0.8", + "env-cmd": "^10.1.0", "eslint": "^8", "eslint-config-next": "14.2.3", "typescript": "^5" diff --git a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx index 3d3f6d2..0e25b43 100644 --- a/frontend/src/app/layout.tsx +++ b/frontend/src/app/layout.tsx @@ -65,24 +65,26 @@ export default function RootLayout({ children }: { children: React.ReactNode }) Buildbarn Portal - - - - - - - - {children} - - - - - - +
+ + + + + + + + {children} + + + + + + +
diff --git a/frontend/src/app/page.tsx b/frontend/src/app/page.tsx index 19935c2..333b594 100644 --- a/frontend/src/app/page.tsx +++ b/frontend/src/app/page.tsx @@ -15,36 +15,36 @@ export default function Home() {
- Welcome to Buildbarn Portal + Welcome to the {process.env.NEXT_PUBLIC_COMPANY_NAME} Buildbarn Portal Providing insights into Bazel build outputs - + Upload one or more{' '} *.bep.ndjson{' '} - file(s) produced with Bazel’s{' '} + file(s) produced with Bazel's{' '} --build_event_json_file{' '} flag to analyze } action="/api/v1/bep/upload" /> - + Alternatively, add the following lines to your{' '} .bazelrc{' '} to start sending build events to the service: - -
{bazelrcLines}
+ +
{bazelrcLines}
+
-
} /> diff --git a/frontend/src/app/tests/[slug]/graphql.ts b/frontend/src/app/tests/[slug]/graphql.ts new file mode 100644 index 0000000..e69de29 diff --git a/frontend/src/app/tests/[slug]/page.tsx b/frontend/src/app/tests/[slug]/page.tsx new file mode 100644 index 0000000..f1aff41 --- /dev/null +++ b/frontend/src/app/tests/[slug]/page.tsx @@ -0,0 +1,34 @@ +'use client'; + +import React from 'react'; +import Content from '@/components/Content'; +import PortalCard from '@/components/PortalCard'; +import { Space } from 'antd'; +import { ExperimentFilled } from '@ant-design/icons'; +import TestDetails from '@/components/TestDetails'; + +interface PageParams { + params: { + slug: string + } +} + +const Page: React.FC = ({ params }) => { + const label = decodeURIComponent(params.slug) + return ( + + } + titleBits={[Test Details]} + > + + + + } + /> + ); +} + +export default Page; diff --git a/frontend/src/app/tests/index.graphql.ts b/frontend/src/app/tests/index.graphql.ts new file mode 100644 index 0000000..86b0fcf --- /dev/null +++ b/frontend/src/app/tests/index.graphql.ts @@ -0,0 +1,92 @@ +import { gql } from "@/graphql/__generated__"; + +export const GET_TEST_GRID_DATA = gql(/* GraphQl */` +query GetTestsWithOffset( + $label: String, + $offset: Int, + $limit: Int, + $sortBy: String, + $direction: String) { + getTestsWithOffset( + label: $label + offset: $offset + limit: $limit + sortBy: $sortBy + direction: $direction + ) { + total + result { + label + sum + min + max + avg + count + passRate + } + } + } +`); + +export const GET_UNIQUE_TEST_LABELS = gql(/* GraphQL */ ` + query GetUniqueTestLabels{ + getUniqueTestLabels + } +`); + +export const GET_AVERAGE_PASS_PERCENTAGE_FOR_LABEL = gql(/* GraphQL */ ` + + query GetAveragePassPercentageForLabel( + $label: String! + ) { + getAveragePassPercentageForLabel(label:$label) + } + +`); + +export const GET_TEST_DURATION_AGGREGATION = gql(/* GraphQL */ ` + query GetTestDurationAggregation( + $label: String + ) { + getTestDurationAggregation(label:$label) { + label + count + sum + min + max + } + } +`); + +export const FIND_TESTS = gql(/* GraphQL */ ` + query FindTests( + $first: Int! + $where: TestCollectionWhereInput + $orderBy: TestCollectionOrder + $after: Cursor + ){ + findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){ + totalCount + pageInfo{ + startCursor + endCursor + hasNextPage + hasPreviousPage + } + edges { + node { + id + durationMs + firstSeen + label + overallStatus + bazelInvocation { + invocationID + } + } + } + } +} +`); + +export default FIND_TESTS; diff --git a/frontend/src/app/tests/page.tsx b/frontend/src/app/tests/page.tsx new file mode 100644 index 0000000..089b33e --- /dev/null +++ b/frontend/src/app/tests/page.tsx @@ -0,0 +1,26 @@ +'use client'; + +import React from 'react'; +import Content from '@/components/Content'; +import PortalCard from '@/components/PortalCard'; +import { Space } from 'antd'; +import { ExperimentFilled } from '@ant-design/icons'; +import TestGrid from '@/components/TestGrid'; + +const Page: React.FC = () => { + return ( + + } + titleBits={[Tests Overview]}> + + + + } + /> + ); +} + +export default Page; diff --git a/frontend/src/app/trends/index.graphql.ts b/frontend/src/app/trends/index.graphql.ts new file mode 100644 index 0000000..08c73e5 --- /dev/null +++ b/frontend/src/app/trends/index.graphql.ts @@ -0,0 +1,28 @@ +import { gql } from "@/graphql/__generated__"; + +const FIND_BUILD_DURATIONS = gql(/* GraphQL */ ` + query FindBuildTimes( + $first: Int! 
+ $where: BazelInvocationWhereInput + ) { + findBazelInvocations(first: $first, where: $where ) { + pageInfo{ + startCursor + endCursor + hasNextPage + hasPreviousPage + + } + totalCount + edges { + node { + invocationID + startedAt + endedAt + } + } + } + } +`); + +export default FIND_BUILD_DURATIONS; diff --git a/frontend/src/app/trends/page.tsx b/frontend/src/app/trends/page.tsx new file mode 100644 index 0000000..dde3120 --- /dev/null +++ b/frontend/src/app/trends/page.tsx @@ -0,0 +1,121 @@ +'use client'; + +import React, { useState } from 'react'; +import Content from '@/components/Content'; +import PortalCard from '@/components/PortalCard'; +import { Space, Statistic, Row, Badge } from 'antd'; +import { ClockCircleFilled, LineChartOutlined } from '@ant-design/icons'; +import { FindBuildTimesQueryVariables, BazelInvocationNodeFragment } from '@/graphql/__generated__/graphql'; +import { useQuery } from '@apollo/client'; +import FIND_BUILD_DURATIONS from './index.graphql'; +import { AreaChart, XAxis, YAxis, CartesianGrid, Tooltip, Area } from 'recharts'; +import type { StatisticProps } from "antd/lib"; +import CountUp from 'react-countup'; + +const Page: React.FC = () => { + + const [variables, setVariables] = useState({ + first: 10000, + }); + + const { loading, data, previousData, error } = useQuery(FIND_BUILD_DURATIONS, { + variables, + pollInterval: 120000, + fetchPolicy: 'cache-and-network', + }); + + const activeData = loading ? previousData : data; + let emptyText = 'No builds match the specified search criteria'; + let dataSource: BazelInvocationNodeFragment[] = [] + + if (error) { + emptyText = error.message; + dataSource = []; + } else { + const buildTimes = activeData?.findBazelInvocations.edges?.flatMap(edge => edge?.node) ?? []; + dataSource = buildTimes.filter((x): x is BazelInvocationNodeFragment => !!x); + } + + interface graphPoint { + invocationId: string + from: string + to: string + duration: number + } + + let dataPoints: graphPoint[] = [] + + dataSource.map(x => { + var point: graphPoint = { + invocationId: x.invocationID, + from: x.startedAt, + to: x.endedAt, + duration: (new Date(x.endedAt).getTime() - new Date(x.startedAt).getTime()) + } + dataPoints.push(point) + }); + + const formatter: StatisticProps['formatter'] = (value) => ( + + ); + + var avg: number = dataPoints.reduce((sum, item) => sum + item.duration, 0) / dataPoints.length; + var medianVals = dataPoints.map(x => x.duration).sort((a, b) => a - b); + var medianMid = Math.floor(dataPoints.length / 2); + var median: number; + + if (medianVals.length % 2 === 0) { + median = (medianVals[medianMid - 1] + medianVals[medianMid]) / 2 + } + else { + median = medianVals[medianMid]; + } + + var max: number = Math.max(...dataPoints.map(x => x.duration)) + var min: number = Math.min(...dataPoints.map(x => x.duration)) + + return ( + + } + titleBits={[Trends]}> + } + titleBits={[Invocation Durations ]}> + + + + + + + + + + + + + + + + + + + + + + + + + + + + } + /> + ); +} + +export default Page; diff --git a/frontend/src/components/ActionCacheMetrics/index.tsx b/frontend/src/components/ActionCacheMetrics/index.tsx index 258b637..82808b1 100644 --- a/frontend/src/components/ActionCacheMetrics/index.tsx +++ b/frontend/src/components/ActionCacheMetrics/index.tsx @@ -1,6 +1,6 @@ import React, { useCallback, useState } from "react"; import { PieChart, Pie, Cell, Legend, BarChart, Bar, LabelList } from 'recharts'; -import { Table, Row, Col, Statistic, Tooltip, Space, Typography } from 'antd'; +import { Table, Row, Col, 
Statistic, Tooltip, Space } from 'antd'; import type { StatisticProps, TableColumnsType } from "antd/lib"; import CountUp from 'react-countup'; import { ActionCacheStatistics, ActionSummary, MissDetail } from "@/graphql/__generated__/graphql"; @@ -10,6 +10,7 @@ import { renderActiveShape } from "../Utilities/renderShape" import { nullPercent } from "../Utilities/nullPercent"; import "./index.module.css" import MissDetailTag, { MissDetailEnum } from "./ActionCacheMissTag"; +import styles from "../../theme/theme.module.css" interface MissDetailDisplayDataType { key: React.Key; name: string; @@ -23,13 +24,13 @@ const formatter: StatisticProps['formatter'] = (value) => ( var ac_colors = [ - "grey", //unknown - "blue", //different action key - "pink", //different deps - "purple", //different env - "cyan", //diff files - "orange", //corrupted cache entry - "red"] //not cached + "grey", //unknown + "blue", //different action key + "pink", //different deps + "purple", //different env + "cyan", //diff files + "orange", //corrupted cache entry + "red"] //not cached const ac_columns: TableColumnsType = [ { @@ -41,11 +42,16 @@ const ac_columns: TableColumnsType = [ { title: "Count", dataIndex: "value", + align: "right", + defaultSortOrder: "descend", + render: (_, record) => {record.value}, sorter: (a, b) => a.value - b.value, }, { title: "Rate (%)", dataIndex: "rate", + align: "right", + render: (_, record) => {record.rate}, sorter: (a, b) => parseFloat(a.rate) - parseFloat(b.rate), } ] @@ -53,7 +59,7 @@ const ac_columns: TableColumnsType = [ const AcMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ({ acMetrics }) => { - const acMetricsData: ActionCacheStatistics | undefined = acMetrics?.actionCacheStatistics?.at(0) + const acMetricsData: ActionCacheStatistics | undefined = acMetrics?.actionCacheStatistics ?? undefined var hitMissTotal: number = (acMetricsData?.misses ?? 0) + (acMetricsData?.hits ?? 0); @@ -94,7 +100,7 @@ const AcMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ({ acMetr return ( - } titleBits={acTitle} > + } titleBits={acTitle} > @@ -117,7 +123,7 @@ const AcMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ({ acMetr - } titleBits={["Miss Detail Breakdown"]}> + } titleBits={["Miss Detail Breakdown"]}> = ({ acMetr - } titleBits={["Miss Detail Data"]}> + } titleBits={["Miss Detail Data"]}> ( ); -const ad_columns: TableColumnsType = [ +interface ActionDataColumnType { + key: React.Key; + mnemonic: string; + actionsExecuted: number; + actionsCreated: number; + firstStartedMs: number; + lastEndedMs: number; + systemTime: number; + userTime: number; +} + +const ad_columns: TableColumnsType = [ { title: "Mnemonic", dataIndex: "mnemonic" @@ -26,41 +39,48 @@ const ad_columns: TableColumnsType = [ { title: "Actions Executed", dataIndex: "actionsExecuted", + align: "right", + render: (_, record) => {record.actionsExecuted}, sorter: (a, b) => (a.actionsExecuted ?? 0) - (b.actionsExecuted ?? 0), }, { title: "Actions Created", dataIndex: "actionsCreated", + align: "right", + render: (_, record) => {record.actionsCreated}, sorter: (a, b) => (a.actionsCreated ?? 0) - (b.actionsCreated ?? 0), }, { - title: "First Started(ms)", - dataIndex: "firstStartedMs", - sorter: (a, b) => (a.firstStartedMs ?? 0) - (b.firstStartedMs ?? 0), - }, - { - title: "Last Ended(ms)", - dataIndex: "lastEndedMs", - sorter: (a, b) => (a.lastEndedMs ?? 0) - (b.lastEndedMs ?? 
0), - }, - { - title: "System Time(ms)", + title: "System Time", dataIndex: "systemTime", + align: "right", + render: (_, record) => {millisecondsToTime(record.systemTime)}, sorter: (a, b) => (a.systemTime ?? 0) - (b.systemTime ?? 0), }, { - title: "User Time(ms)", + title: "User Time", dataIndex: "userTime", + align: "right", + render: (_, record) => {millisecondsToTime(record.userTime)}, sorter: (a, b) => (a.userTime ?? 0) - (b.userTime ?? 0), }, ] const ActionDataMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ({ acMetrics }) => { - const actions_data: ActionData[] = []; + const actions_data: ActionDataColumnType[] = []; const actions_graph_data: ActionDataGraphDisplayType[] = []; acMetrics?.actionData?.map((ad: ActionData, idx) => { - actions_data.push(ad) + actions_data.push({ + key: "action_data_key" + ad.id, + mnemonic: ad.mnemonic ?? "", + actionsExecuted: ad.actionsExecuted ?? 0, + actionsCreated: ad.actionsCreated ?? 0, + firstStartedMs: ad.firstStartedMs ?? 0, + lastEndedMs: ad.lastEndedMs ?? 0, + systemTime: ad.systemTime ?? 0, + userTime: ad.userTime ?? 0 + }) var agd: ActionDataGraphDisplayType = { key: "actiondatagraphdisplaytype-" + String(idx), name: ad.mnemonic ?? "", @@ -84,7 +104,7 @@ const ActionDataMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ( return ( - } titleBits={["Actions"]}> + } type="inner" titleBits={["Actions"]}> @@ -95,16 +115,16 @@ const ActionDataMetrics: React.FC<{ acMetrics: ActionSummary | undefined; }> = ( - } titleBits={["Actions Data"]}> -
- +
+ {/* } titleBits={["Actions Data"]}> + */} - } titleBits={["User Time(ms)"]}> + } titleBits={["User Time Breakdown"]}> { return new NextSSRApolloClient({ cache: new NextSSRInMemoryCache({ - possibleTypes + possibleTypes }), connectToDevTools: true, link: typeof window === 'undefined' ? ApolloLink.from([ - new SSRMultipartLink({ - stripDefer: true, - }), - httpLink, - ]) + new SSRMultipartLink({ + stripDefer: true, + }), + httpLink, + ]) : httpLink, }); } diff --git a/frontend/src/components/AppBar/AppBarMenu.tsx b/frontend/src/components/AppBar/AppBarMenu.tsx index fd7bad9..0ddfd6f 100644 --- a/frontend/src/components/AppBar/AppBarMenu.tsx +++ b/frontend/src/components/AppBar/AppBarMenu.tsx @@ -21,7 +21,7 @@ const AppBarMenu: React.FC = ({ mode, items, className }) => { if (className) { classNames.push(className); } - return ; + return ; }; export default AppBarMenu; diff --git a/frontend/src/components/AppBar/AppBarTitle.tsx b/frontend/src/components/AppBar/AppBarTitle.tsx index ad6bf73..1d58a0a 100644 --- a/frontend/src/components/AppBar/AppBarTitle.tsx +++ b/frontend/src/components/AppBar/AppBarTitle.tsx @@ -9,7 +9,7 @@ const AppBarTitle = () => { return (
- Buildbarn Portal + {process.env.NEXT_PUBLIC_COMPANY_NAME} Buildbarn Portal
); diff --git a/frontend/src/components/AppBar/index.module.css b/frontend/src/components/AppBar/index.module.css index 8c4a119..4538905 100644 --- a/frontend/src/components/AppBar/index.module.css +++ b/frontend/src/components/AppBar/index.module.css @@ -26,7 +26,7 @@ } .menu { - width: 177px; + width: 400px; background-color: inherit !important; } @@ -35,7 +35,8 @@ } .appBarMenu { - min-width: 177px; /* Hack to get the menu to not disappear on resize */ + min-width: 177px; + /* Hack to get the menu to not disappear on resize */ } .buttons { @@ -63,4 +64,4 @@ .linkItem { /* empty style will override default behavior of hiding on smaller screen. */ -} +} \ No newline at end of file diff --git a/frontend/src/components/AppBar/index.tsx b/frontend/src/components/AppBar/index.tsx index c35ffc5..fb44df3 100644 --- a/frontend/src/components/AppBar/index.tsx +++ b/frontend/src/components/AppBar/index.tsx @@ -20,6 +20,8 @@ export const SetExtraAppBarMenuItemsContext = createContext< const APP_BAR_MENU_ITEMS: ItemType[] = [ getItem({ depth: 0, href: '/builds', title: 'Builds' }), getItem({ depth: 0, href: '/bazel-invocations', title: 'Invocations' }), + getItem({ depth: 0, href: '/trends', title: "Trends" }), + getItem({ depth: 0, href: '/tests', title: "Tests" }), ]; type Props = { diff --git a/frontend/src/components/Artifacts/index.tsx b/frontend/src/components/Artifacts/index.tsx index 70c3b68..26b6eed 100644 --- a/frontend/src/components/Artifacts/index.tsx +++ b/frontend/src/components/Artifacts/index.tsx @@ -3,10 +3,9 @@ import { Table, Row, Col, Space } from 'antd'; import type { TableColumnsType } from "antd/lib"; import { ArtifactMetrics } from "@/graphql/__generated__/graphql"; import PortalCard from "../PortalCard"; -import { - RadiusUprightOutlined - -} from "@ant-design/icons"; +import { RadiusUprightOutlined } from "@ant-design/icons"; +import styles from "../../theme/theme.module.css" +import { record } from "zod"; const artifacts_columns: TableColumnsType = [ @@ -17,11 +16,15 @@ const artifacts_columns: TableColumnsType = [ { title: "Size (bytes)", dataIndex: "sizeInBytes", + align: "right", + render: (_, record) => {record.sizeInBytes}, sorter: (a, b) => (a.sizeInBytes ?? 0) - (b.sizeInBytes ?? 0), }, { title: "Count", dataIndex: "count", + align: "right", + render: (_, record) => {record.count}, sorter: (a, b) => (a.count ?? 0) - (b.count ?? 0), }, ] @@ -39,23 +42,23 @@ const ArtifactsDataMetrics: React.FC<{ artifactMetrics: ArtifactMetrics | undefi artifacts_data.push( { name: "Source Artifacts Read", - sizeInBytes: artifactMetrics?.sourceArtifactsRead?.at(0)?.sizeInBytes ?? 0, - count: artifactMetrics?.sourceArtifactsRead?.at(0)?.count ?? 0 + sizeInBytes: artifactMetrics?.sourceArtifactsRead?.sizeInBytes ?? 0, + count: artifactMetrics?.sourceArtifactsRead?.count ?? 0 }, { name: "Output Artifacts From Action Cache", - sizeInBytes: artifactMetrics?.outputArtifactsFromActionCache?.at(0)?.sizeInBytes ?? 0, - count: artifactMetrics?.outputArtifactsFromActionCache?.at(0)?.count ?? 0 + sizeInBytes: artifactMetrics?.outputArtifactsFromActionCache?.sizeInBytes ?? 0, + count: artifactMetrics?.outputArtifactsFromActionCache?.count ?? 0 }, { name: "Output Artifacts Seen", - sizeInBytes: artifactMetrics?.outputArtifactsSeen?.at(0)?.sizeInBytes ?? 0, - count: artifactMetrics?.outputArtifactsSeen?.at(0)?.count ?? 0 + sizeInBytes: artifactMetrics?.outputArtifactsSeen?.sizeInBytes ?? 0, + count: artifactMetrics?.outputArtifactsSeen?.count ?? 
0 }, { name: "Top Level Artifacts", - sizeInBytes: artifactMetrics?.topLevelArtifacts?.at(0)?.sizeInBytes ?? 0, - count: artifactMetrics?.topLevelArtifacts?.at(0)?.count ?? 0 + sizeInBytes: artifactMetrics?.topLevelArtifacts?.sizeInBytes ?? 0, + count: artifactMetrics?.topLevelArtifacts?.count ?? 0 }, ) @@ -64,7 +67,7 @@ const ArtifactsDataMetrics: React.FC<{ artifactMetrics: ArtifactMetrics | undefi return ( - } titleBits={actionsTitle}> + } titleBits={actionsTitle}>
runnerMetrics.push(item)); + metrics?.actionSummary?.runnerCount?.map((item: RunnerCount) => runnerMetrics.push(item)); //data for ac metrics - var acMetrics: ActionSummary | undefined = metrics?.actionSummary?.at(0); + var acMetrics: ActionSummary | undefined = metrics?.actionSummary ?? undefined; //artifact metrics - var artifactMetrics: ArtifactMetrics | undefined = metrics?.artifactMetrics?.at(0); + var artifactMetrics: ArtifactMetrics | undefined = metrics?.artifactMetrics ?? undefined; //data for target metrics - var targetMetrics: TargetMetrics | undefined | null = metrics?.targetMetrics?.at(0) + var targetMetrics: TargetMetrics | undefined | null = metrics?.targetMetrics ?? undefined //memory metrics - var memoryMetrics: MemoryMetrics | undefined = metrics?.memoryMetrics?.at(0) + var memoryMetrics: MemoryMetrics | undefined = metrics?.memoryMetrics ?? undefined //build graph metrics - var buildGraphMetrics: BuildGraphMetrics | undefined = metrics?.buildGraphMetrics?.at(0) + var buildGraphMetrics: BuildGraphMetrics | undefined = metrics?.buildGraphMetrics ?? undefined //timing metrics - var timingMetrics: TimingMetrics | undefined = metrics?.timingMetrics?.at(0) + var timingMetrics: TimingMetrics | undefined = metrics?.timingMetrics ?? undefined //netowrk metrics - var networkMetrics: NetworkMetrics | undefined = metrics?.networkMetrics?.at(0) - const bytesRecv = networkMetrics?.systemNetworkStats?.at(0)?.bytesRecv ?? 0 - const bytesSent = networkMetrics?.systemNetworkStats?.at(0)?.bytesSent ?? 0 + var networkMetrics: NetworkMetrics | undefined = metrics?.networkMetrics ?? undefined + const bytesRecv = networkMetrics?.systemNetworkStats?.bytesRecv ?? 0 + const bytesSent = networkMetrics?.systemNetworkStats?.bytesSent ?? 0 const hideNetworkMetricsTab: boolean = bytesRecv == 0 && bytesSent == 0 //test data @@ -128,7 +126,7 @@ const BazelInvocation: React.FC<{ var items: TabsProps['items'] = [ { - key: 'BazelInvocationTabs-1', + key: 'BazelInvocationTabs-Problems', label: 'Problems', icon: , children: @@ -147,7 +145,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-2', + key: 'BazelInvocationTabs-Logs', label: 'Logs', icon: , children: @@ -157,7 +155,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-3', + key: 'BazelInvocationTabs-Runners', label: 'Runners', icon: , children: @@ -165,7 +163,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-4', + key: 'BazelInvocationTabs-ActionCache', label: 'Action Cache', icon: , children: @@ -173,7 +171,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-5', + key: 'BazelInvocationTabs-ActionsData', label: 'Actions Data', icon: , children: @@ -182,7 +180,7 @@ const BazelInvocation: React.FC<{ }, { - key: 'BazelInvocationTabs-8', + key: 'BazelInvocationTabs-Artifacts', label: 'Artifacts', icon: , children: @@ -190,7 +188,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-9', + key: 'BazelInvocationTabs-Memory', label: 'Memory', icon: , children: @@ -198,7 +196,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-10', + key: 'BazelInvocationTabs-Timing', label: 'Timing', icon: , children: @@ -207,7 +205,7 @@ const BazelInvocation: React.FC<{ }, { - key: 'BazelInvocationTabs-6', + key: 'BazelInvocationTabs-Targets', label: 'Targets', icon: , children: @@ -215,7 +213,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-7', + key: 'BazelInvocationTabs-Tests', label: 'Tests', icon: , children: @@ 
-223,7 +221,7 @@ const BazelInvocation: React.FC<{ , }, { - key: 'BazelInvocationTabs-11', + key: 'BazelInvocationTabs-Network', label: 'Network', icon: , children: @@ -232,21 +230,28 @@ const BazelInvocation: React.FC<{ }, ]; + const hideLogs = true //hide the logs tab for now + if (hideLogs == true) { + var idx = items.findIndex((x, _) => x.key == "BazelInvocationTabs-Logs") + if (idx > -1) { + items.splice(idx, 1); + } + } + if (hideTestsTab == true) { - var idx = items.findIndex((x, _) => x.key == "BazelInvocationTabs-7") + var idx = items.findIndex((x, _) => x.key == "BazelInvocationTabs-Tests") if (idx > -1) { items.splice(idx, 1); } } if (hideNetworkMetricsTab == true) { - var idx = items.findIndex((x, _) => x.key == "BazelInvocationTabs-11") + var idx = items.findIndex((x, _) => x.key == "BazelInvocationTabs-Network") if (idx > -1) { items.splice(idx, 1); } } - const extraBits: React.ReactNode[] = [ , ]; diff --git a/frontend/src/components/BazelInvocationsTable/Columns.tsx b/frontend/src/components/BazelInvocationsTable/Columns.tsx index c75ca31..390a28b 100644 --- a/frontend/src/components/BazelInvocationsTable/Columns.tsx +++ b/frontend/src/components/BazelInvocationsTable/Columns.tsx @@ -28,6 +28,8 @@ const startedAtColumn: ColumnType = { key: 'startedAt', width: 165, title: 'Start Time', + sorter: (a, b) => dayjs(a.startedAt).isBefore(dayjs(b.startedAt)) == true ? 0 : 1, + defaultSortOrder: "descend", render: (_, record) => ( {dayjs(record.startedAt).format('YYYY-MM-DD hh:mm:ss A')} diff --git a/frontend/src/components/Breadcrumbs/index.tsx b/frontend/src/components/Breadcrumbs/index.tsx index dff80a5..6f900c7 100644 --- a/frontend/src/components/Breadcrumbs/index.tsx +++ b/frontend/src/components/Breadcrumbs/index.tsx @@ -52,7 +52,7 @@ const Breadcrumbs: React.FC = ({ segmentTitles }) => { // Use the segment as the title to be displayed in the browser // Convert to upper case to avoid implementing a title case converter - const title = segmentTitles && segmentTitles.length > index ? segmentTitles[index] : segment.toUpperCase(); + const title = segmentTitles && segmentTitles.length > index ? segmentTitles[index] : decodeURIComponent(segment.toUpperCase()); // Return the path and title for the breadcrumb item return { path, title }; diff --git a/frontend/src/components/FooterBar/index.tsx b/frontend/src/components/FooterBar/index.tsx index 9be4069..2ea4c5a 100644 --- a/frontend/src/components/FooterBar/index.tsx +++ b/frontend/src/components/FooterBar/index.tsx @@ -11,18 +11,29 @@ interface Props { linkItemClassName?: string; } + const FooterBar: React.FC = ({ className, linkItemClassName }) => { const linkClassName = linkItemClassName ? 
linkItemClassName : styles.footerLink; return ( - - - - - Buildbarn Slack Channel - - - + + + + + + {process.env.NEXT_PUBLIC_COMPANY_SLACK_CHANNEL_NAME} + + + + + + + + Buildteam + + + + ); }; diff --git a/frontend/src/components/MemoryMetrics/index.tsx b/frontend/src/components/MemoryMetrics/index.tsx index cda8e52..5789214 100644 --- a/frontend/src/components/MemoryMetrics/index.tsx +++ b/frontend/src/components/MemoryMetrics/index.tsx @@ -7,6 +7,7 @@ import { MemoryMetrics, GarbageMetrics } from "@/graphql/__generated__/graphql"; import PortalCard from "../PortalCard"; import { PieChartOutlined, HddOutlined } from "@ant-design/icons"; import { renderActiveShape, newColorFind } from "../Utilities/renderShape"; +import styles from "../../theme/theme.module.css" interface GarbageMetricDetailDisplayType { key: React.Key; @@ -29,6 +30,8 @@ const garbage_columns: TableColumnsType = [ title: "Garbage Collected", dataIndex: "value", sorter: (a, b) => a.value - b.value, + align: "right", + render: (_, record) => {record.value} }, ] @@ -55,7 +58,7 @@ const MemoryMetricsDisplay: React.FC<{ memoryMetrics: MemoryMetrics | undefined; return ( - } titleBits={["Memory Metrics"]} > + } titleBits={["Memory Metrics"]} > @@ -65,7 +68,7 @@ const MemoryMetricsDisplay: React.FC<{ memoryMetrics: MemoryMetrics | undefined; - } titleBits={["Garbage Collection Breakdown"]}> + } titleBits={["Garbage Collection Breakdown"]}> - } titleBits={["Gargage Collection Data"]}> + } titleBits={["Garbage Collection Data"]}>
( const NetworkMetricsDisplay: React.FC<{ networkMetrics: NetworkMetrics | undefined }> = ({ networkMetrics: networkMetrics }) => { - const systemNetworkStats: SystemNetworkStats | undefined = networkMetrics?.systemNetworkStats?.at(0) + const systemNetworkStats: SystemNetworkStats | undefined = networkMetrics?.systemNetworkStats ?? undefined return ( }> diff --git a/frontend/src/components/RunnerMetrics/index.tsx b/frontend/src/components/RunnerMetrics/index.tsx index 9733388..2646d58 100644 --- a/frontend/src/components/RunnerMetrics/index.tsx +++ b/frontend/src/components/RunnerMetrics/index.tsx @@ -8,6 +8,7 @@ import { renderActiveShape } from "../Utilities/renderShape"; import { nullPercent } from "../Utilities/nullPercent"; import PortalCard from "../PortalCard"; import { BaseType } from "antd/es/typography/Base"; +import styles from "../../theme/theme.module.css" interface RunnerDataType { key: React.Key; @@ -45,11 +46,15 @@ const runner_columns: TableColumnsType = [ { title: 'Count', dataIndex: 'value', + align: "right", + render: (_, record) => {record.value}, sorter: (a, b) => a.value - b.value, }, { title: 'Rate (%)', dataIndex: 'rate', + align: "right", + render: (_, record) => {record.rate}, sorter: (a, b) => parseFloat(a.rate) - parseFloat(b.rate), }, ]; @@ -116,7 +121,7 @@ const RunnerMetrics: React.FC<{ runnerMetrics: RunnerCount[]; }> = ({ runnerMetr - } titleBits={["Action Runners Breakdown"]}> + } titleBits={["Action Runners Breakdown"]}> = ({ runnerMetr - } titleBits={["Action Runner Data"]}> + } titleBits={["Action Runner Data"]}>
( ); - const TargetMetricsDisplay: React.FC<{ targetMetrics: TargetMetrics | undefined | null, targetData: TargetPair[] | undefined | null, @@ -36,6 +36,7 @@ const TargetMetricsDisplay: React.FC<{ var all_types: string[] = [] var targets_skipped: number = 0; var targets_built_successfully: number = 0; + targetData?.map(x => { count++; var targetKind = x.targetKind ?? "" @@ -78,8 +79,10 @@ const TargetMetricsDisplay: React.FC<{ onFilter: (value, record) => (record.name.includes(value.toString()) ? true : false) }, { - title: "Duration(ms)", + title: "Duration", dataIndex: "value", + align: "right", + render: (_, record) => {millisecondsToTime(record.value)}, sorter: (a, b) => a.value - b.value, }, { @@ -166,34 +169,26 @@ const TargetMetricsDisplay: React.FC<{ }, ] - return ( - } titleBits={["Targets"]}> + } titleBits={["Targets"]}> - - - - -
- - +
- ) } diff --git a/frontend/src/components/TestDetails/graphql.ts b/frontend/src/components/TestDetails/graphql.ts new file mode 100644 index 0000000..ed206e7 --- /dev/null +++ b/frontend/src/components/TestDetails/graphql.ts @@ -0,0 +1,34 @@ +import { gql } from '@/graphql/__generated__'; + +export const FIND_TESTS_WITH_CACHE = gql(/* GraphQL */ ` + query FindTestsWithCache( + $first: Int! + $where: TestCollectionWhereInput + $orderBy: TestCollectionOrder + $after: Cursor + ){ + findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){ + totalCount + pageInfo{ + startCursor + endCursor + hasNextPage + hasPreviousPage + } + edges { + node { + id + durationMs + firstSeen + label + overallStatus + cachedLocally + cachedRemotely + bazelInvocation { + invocationID + } + } + } + } + } + `); \ No newline at end of file diff --git a/frontend/src/components/TestDetails/index.tsx b/frontend/src/components/TestDetails/index.tsx new file mode 100644 index 0000000..52d0d1b --- /dev/null +++ b/frontend/src/components/TestDetails/index.tsx @@ -0,0 +1,111 @@ +import React, { useState } from 'react'; +import { Space, Row, Statistic } from 'antd'; +import { TestStatusEnum } from '../TestStatusTag'; +import type { StatisticProps } from "antd/lib"; +import CountUp from 'react-countup'; +import { useQuery } from '@apollo/client'; +import { FindTestsQueryVariables } from '@/graphql/__generated__/graphql'; +import TestGridRow from '../TestGridRow'; +import PortalAlert from '../PortalAlert'; +import { AreaChart, Area, CartesianGrid, XAxis, YAxis, Tooltip } from 'recharts'; +import { FIND_TESTS_WITH_CACHE } from './graphql'; +import PortalCard from '../PortalCard'; +import { FieldTimeOutlined, BorderInnerOutlined } from '@ant-design/icons/lib/icons'; + +interface Props { + label: string +} + +const formatter: StatisticProps['formatter'] = (value) => ( + +); + +export interface TestStatusType { + label: string + invocationId: string, + status: TestStatusEnum +} +const PAGE_SIZE = 10 +interface GraphDataPoint { + name: string + duration: number + local: boolean + remote: boolean +} + + +const TestDetails: React.FC = ({ label }) => { + + const [variables, setVariables] = useState({ first: 1000, where: { label: label } }) + const { loading: labelLoading, data: labelData, previousData: labelPreviousData, error: labelError } = useQuery(FIND_TESTS_WITH_CACHE, { + variables: variables, + fetchPolicy: 'cache-and-network', + //pollInterval: 120000, + }); + + + const data = labelLoading ? labelPreviousData : labelData; + var result: GraphDataPoint[] = [] + var totalCnt: number = 0 + var local_cached: number = 0 + var remote_cached: number = 0 + var total_duration: number = 0 + + if (labelError) { + + } else { + totalCnt = data?.findTests.totalCount ?? 0 + data?.findTests.edges?.map(edge => { + var row = edge?.node + result.push({ + name: row?.bazelInvocation?.invocationID ?? "", + duration: row?.durationMs ?? 0, + local: row?.cachedLocally ?? false, + remote: row?.cachedRemotely ?? false + }) + if (row?.cachedLocally) { + local_cached++ + } + if (row?.cachedRemotely) { + remote_cached++ + } + total_duration += row?.durationMs ?? 0 + }); + } + + return ( + +

{label}

+ + + + + + + + + } titleBits={["Test Duration Over Time"]} > + + + + + + + + + + + + + + + + } titleBits={["Test Pass/Fail Grid"]}> + + + +
+ ); +} +export default TestDetails \ No newline at end of file diff --git a/frontend/src/components/TestGrid/index.tsx b/frontend/src/components/TestGrid/index.tsx new file mode 100644 index 0000000..2416e79 --- /dev/null +++ b/frontend/src/components/TestGrid/index.tsx @@ -0,0 +1,169 @@ +import React, { useCallback, useState } from 'react'; +import { TableColumnsType } from "antd/lib" +import { Space, Row, Statistic, Table, TableProps, TablePaginationConfig, Pagination } from 'antd'; +import { TestStatusEnum } from '../TestStatusTag'; +import type { StatisticProps } from "antd/lib"; +import CountUp from 'react-countup'; +import { SearchFilterIcon, SearchWidget } from '@/components/SearchWidgets'; +import { SearchOutlined } from '@ant-design/icons'; +import { useQuery } from '@apollo/client'; +import { GET_TEST_GRID_DATA } from '@/app/tests/index.graphql'; +import { FilterValue } from 'antd/es/table/interface'; +import { uniqueId } from 'lodash'; +import { GetTestsWithOffsetQueryVariables } from '@/graphql/__generated__/graphql'; +import TestGridRow from '../TestGridRow'; +import PortalAlert from '../PortalAlert'; +import Link from 'next/link'; +import styles from "../../theme/theme.module.css" +import { millisecondsToTime } from '../Utilities/time'; +interface Props { + //labelData: GetTestsWithOffsetQuery | undefined +} + +const formatter: StatisticProps['formatter'] = (value) => ( + +); +export interface TestStatusType { + label: string + invocationId: string, + status: TestStatusEnum +} + +interface TestGridRowDataType { + key: React.Key; + label: string; + average_duration: number; + min_duration: number; + max_duration: number; + total_count: number; + pass_rate: number; + status: TestStatusType[]; +} + +const PAGE_SIZE = 10 +const columns: TableColumnsType = [ + { + title: "Label", + dataIndex: "label", + filterSearch: true, + render: (_, record) => {record.label}, + filterDropdown: filterProps => ( + + ), + filterIcon: filtered => } filtered={filtered} />, + onFilter: (value, record) => (record.label.includes(value.toString()) ? true : false) + }, + { + title: "Average Duration", + dataIndex: "average_duration", + //sorter: (a, b) => a.average_duration - b.average_duration, + render: (_, record) => {millisecondsToTime(record.average_duration)} + }, + { + title: "Min Duration", + dataIndex: "min_duration", + //sorter: (a, b) => a.average_duration - b.average_duration, + render: (_, record) => {millisecondsToTime(record.min_duration)} + }, + { + title: "Max Duration", + dataIndex: "max_duration", + //sorter: (a, b) => a.average_duration - b.average_duration, + render: (_, record) => {millisecondsToTime(record.max_duration)} + }, + { + title: "# Runs", + dataIndex: "total_count", + align: "right", + render: (_, record) => {record.total_count}, + //sorter: (a, b) => a.total_count - b.total_count, + }, + { + title: "Pass Rate", + dataIndex: "pass_rate", + //sorter: (a, b) => a.pass_rate - b.pass_rate, + render: (_, record) => {(record.pass_rate * 100).toFixed(2)}% + } +] + +const TestGrid: React.FC = () => { + + const [variables, setVariables] = useState({}) + + const { loading: labelLoading, data: labelData, previousData: labelPreviousData, error: labelError } = useQuery(GET_TEST_GRID_DATA, { + variables: variables, + fetchPolicy: 'cache-and-network', + }); + + const data = labelLoading ? labelPreviousData : labelData; + var result: TestGridRowDataType[] = [] + var totalCnt: number = 0 + + if (labelError) { + + } else { + totalCnt = data?.getTestsWithOffset?.total ?? 
0 + data?.getTestsWithOffset?.result?.map(dataRow => { + var row: TestGridRowDataType = { + key: "test-grid-row-data-" + uniqueId(), + label: dataRow?.label ?? "", + status: [], + average_duration: dataRow?.avg ?? 0, + min_duration: dataRow?.min ?? 0, + max_duration: dataRow?.max ?? 0, + total_count: dataRow?.count ?? 0, + pass_rate: dataRow?.passRate ?? 0 + } + result.push(row) + }) + } + const onChange: TableProps['onChange'] = useCallback( + (pagination: TablePaginationConfig, + filters: Record, extra: any) => { + var vars: GetTestsWithOffsetQueryVariables = {} + if (filters['label']?.length) { + var label = filters['label']?.[0]?.toString() ?? "" + console.log(label) + vars.label = label + } else { + vars.label = "" + } + console.log("pagination.current", pagination.current) + vars.offset = ((pagination.current ?? 1) - 1) * PAGE_SIZE; + console.log(vars.offset) + setVariables(vars) + }, + [variables], + ); + return ( + + + + + + + + + + columns={columns} + rowKey="key" + onChange={onChange} + expandable={{ + indentSize: 100, + expandedRowRender: (record) => ( + //TODO: dynamically determine number of buttons to display based on page width and pass that as first + + ), + rowExpandable: (_) => true, + }} + pagination={{ + total: totalCnt, + showSizeChanger: false, + }} + dataSource={result} /> + + + ); }; + +export default TestGrid; \ No newline at end of file diff --git a/frontend/src/components/TestGridBtn/index.tsx b/frontend/src/components/TestGridBtn/index.tsx new file mode 100644 index 0000000..37454fc --- /dev/null +++ b/frontend/src/components/TestGridBtn/index.tsx @@ -0,0 +1,78 @@ +import React from 'react'; +import { + CheckCircleFilled, + CloseCircleFilled, + InfoCircleFilled, + MinusCircleFilled, + QuestionCircleFilled, + StopOutlined, +} from '@ant-design/icons'; +import { Button, Tag } from 'antd'; +import themeStyles from '@/theme/theme.module.css'; + +export const ALL_STATUS_VALUES = [ + 'NO_STATUS', + 'PASSED', + 'FLAKY', + 'TIMEOUT', + 'FAILED', + 'INCOMPLETE', + 'REMOTE_FAILURE', + 'FAILED_TO_BUILD', + 'TOOL_HALTED_BEFORE_TESTING' +] as const; + +export type StatusTuple = typeof ALL_STATUS_VALUES; +export type TestStatusEnum = StatusTuple[number]; + +interface Props { + status: TestStatusEnum; + invocationId: string; +} + + +const TestGridBtn: React.FC = ({ status, invocationId }) => { + + const ICON_BTNS: { [key in TestStatusEnum]: React.ReactNode } = { + NO_STATUS: (
- -
- - +
diff --git a/frontend/src/components/TimingMetrics/index.tsx b/frontend/src/components/TimingMetrics/index.tsx index 2e6070a..84d7bc3 100644 --- a/frontend/src/components/TimingMetrics/index.tsx +++ b/frontend/src/components/TimingMetrics/index.tsx @@ -21,7 +21,7 @@ const TimingMetricsDisplay: React.FC<{ return ( - }> + }> @@ -29,11 +29,10 @@ const TimingMetricsDisplay: React.FC<{ - - }> + }> @@ -47,7 +46,6 @@ const TimingMetricsDisplay: React.FC<{ - ) } diff --git a/frontend/src/components/Utilities/time.ts b/frontend/src/components/Utilities/time.ts index edec5d1..fdd1ccf 100644 --- a/frontend/src/components/Utilities/time.ts +++ b/frontend/src/components/Utilities/time.ts @@ -10,4 +10,19 @@ export const humanFriendlyAgo = (timestamp: string) => { return duration.humanize(true); }; + + +export function millisecondsToTime(milliseconds: number): string { + const totalSeconds = Math.floor(milliseconds / 1000); + const hours = Math.floor(totalSeconds / 3600); + const minutes = Math.floor((totalSeconds % 3600) / 60); + const seconds = totalSeconds % 60; + const remainingMilliseconds = Math.floor(milliseconds % 1000); + + return `${pad(hours)}:${pad(minutes)}:${pad(seconds)}:${pad(remainingMilliseconds, 3)}`; +} + +function pad(num: number, size: number = 2): string { + return num.toString().padStart(size, '0'); +} export default preciseTo; diff --git a/frontend/src/graphql/__generated__/gql.ts b/frontend/src/graphql/__generated__/gql.ts index ba2af53..ecb7b19 100644 --- a/frontend/src/graphql/__generated__/gql.ts +++ b/frontend/src/graphql/__generated__/gql.ts @@ -21,10 +21,17 @@ const documents = { "\n query GetActionProblem($id: ID!) {\n node(id: $id) {\n id\n ... on ActionProblem {\n label\n stdout {\n ...BlobReferenceInfo\n }\n stderr {\n ...BlobReferenceInfo\n }\n }\n }\n }\n": types.GetActionProblemDocument, "\nfragment TestResultInfo on TestResult {\n actionLogOutput {\n ...BlobReferenceInfo\n }\n attempt\n run\n shard\n status\n undeclaredTestOutputs {\n ...BlobReferenceInfo\n }\n}": types.TestResultInfoFragmentDoc, "\n query FindBuildByUUID($url: String, $uuid: UUID) {\n getBuild(buildURL: $url, buildUUID: $uuid) {\n id\n buildURL\n buildUUID\n invocations {\n ...FullBazelInvocationDetails\n }\n env {\n key\n value\n }\n }\n }\n": types.FindBuildByUuidDocument, + "\nquery GetTestsWithOffset(\n $label: String,\n $offset: Int,\n $limit: Int,\n $sortBy: String,\n $direction: String) {\n getTestsWithOffset(\n label: $label\n offset: $offset\n limit: $limit\n sortBy: $sortBy\n direction: $direction\n ) {\n total\n result {\n label\n sum\n min\n max\n avg\n count\n passRate\n }\n }\n }\n": types.GetTestsWithOffsetDocument, + "\n query GetUniqueTestLabels{\n getUniqueTestLabels\n }\n": types.GetUniqueTestLabelsDocument, + "\n\n query GetAveragePassPercentageForLabel(\n $label: String!\n ) {\n getAveragePassPercentageForLabel(label:$label)\n }\n\n": types.GetAveragePassPercentageForLabelDocument, + "\n query GetTestDurationAggregation(\n $label: String\n ) {\n getTestDurationAggregation(label:$label) {\n label\n count\n sum\n min\n max\n }\n }\n": types.GetTestDurationAggregationDocument, + "\n query FindTests(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n bazelInvocation {\n invocationID\n }\n }\n 
}\n }\n}\n": types.FindTestsDocument, + "\n query FindBuildTimes(\n $first: Int!\n \t$where: BazelInvocationWhereInput\n ) {\n findBazelInvocations(first: $first, where: $where ) {\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n\n }\n totalCount\n edges {\n node {\n invocationID\n startedAt\n endedAt\n }\n }\n }\n }\n": types.FindBuildTimesDocument, "\n query FindBazelInvocations(\n $first: Int!\n $where: BazelInvocationWhereInput\n ) {\n findBazelInvocations(first: $first, where: $where) {\n edges {\n node {\n ...BazelInvocationNode\n }\n }\n }\n }\n": types.FindBazelInvocationsDocument, "\n fragment BazelInvocationNode on BazelInvocation {\n id\n invocationID\n startedAt\n user {\n Email\n LDAP\n }\n endedAt\n state {\n bepCompleted\n exitCode {\n code\n name\n }\n }\n build {\n buildUUID\n }\n }\n": types.BazelInvocationNodeFragmentDoc, "\n query FindBuilds(\n $first: Int!\n $where: BuildWhereInput\n ) {\n findBuilds(first: $first, where: $where) {\n edges {\n node {\n ...BuildNode\n }\n }\n }\n }\n": types.FindBuildsDocument, "\n fragment BuildNode on Build {\n id\n buildUUID\n buildURL\n }\n": types.BuildNodeFragmentDoc, + "\n query FindTestsWithCache(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n cachedLocally\n cachedRemotely\n bazelInvocation {\n invocationID\n }\n }\n }\n }\n }\n ": types.FindTestsWithCacheDocument, }; /** @@ -73,6 +80,30 @@ export function gql(source: "\nfragment TestResultInfo on TestResult {\n ac * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. */ export function gql(source: "\n query FindBuildByUUID($url: String, $uuid: UUID) {\n getBuild(buildURL: $url, buildUUID: $uuid) {\n id\n buildURL\n buildUUID\n invocations {\n ...FullBazelInvocationDetails\n }\n env {\n key\n value\n }\n }\n }\n"): (typeof documents)["\n query FindBuildByUUID($url: String, $uuid: UUID) {\n getBuild(buildURL: $url, buildUUID: $uuid) {\n id\n buildURL\n buildUUID\n invocations {\n ...FullBazelInvocationDetails\n }\n env {\n key\n value\n }\n }\n }\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function gql(source: "\nquery GetTestsWithOffset(\n $label: String,\n $offset: Int,\n $limit: Int,\n $sortBy: String,\n $direction: String) {\n getTestsWithOffset(\n label: $label\n offset: $offset\n limit: $limit\n sortBy: $sortBy\n direction: $direction\n ) {\n total\n result {\n label\n sum\n min\n max\n avg\n count\n passRate\n }\n }\n }\n"): (typeof documents)["\nquery GetTestsWithOffset(\n $label: String,\n $offset: Int,\n $limit: Int,\n $sortBy: String,\n $direction: String) {\n getTestsWithOffset(\n label: $label\n offset: $offset\n limit: $limit\n sortBy: $sortBy\n direction: $direction\n ) {\n total\n result {\n label\n sum\n min\n max\n avg\n count\n passRate\n }\n }\n }\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
+ */ +export function gql(source: "\n query GetUniqueTestLabels{\n getUniqueTestLabels\n }\n"): (typeof documents)["\n query GetUniqueTestLabels{\n getUniqueTestLabels\n }\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function gql(source: "\n\n query GetAveragePassPercentageForLabel(\n $label: String!\n ) {\n getAveragePassPercentageForLabel(label:$label)\n }\n\n"): (typeof documents)["\n\n query GetAveragePassPercentageForLabel(\n $label: String!\n ) {\n getAveragePassPercentageForLabel(label:$label)\n }\n\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function gql(source: "\n query GetTestDurationAggregation(\n $label: String\n ) {\n getTestDurationAggregation(label:$label) {\n label\n count\n sum\n min\n max\n }\n }\n"): (typeof documents)["\n query GetTestDurationAggregation(\n $label: String\n ) {\n getTestDurationAggregation(label:$label) {\n label\n count\n sum\n min\n max\n }\n }\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function gql(source: "\n query FindTests(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n bazelInvocation {\n invocationID\n }\n }\n }\n }\n}\n"): (typeof documents)["\n query FindTests(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n bazelInvocation {\n invocationID\n }\n }\n }\n }\n}\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function gql(source: "\n query FindBuildTimes(\n $first: Int!\n \t$where: BazelInvocationWhereInput\n ) {\n findBazelInvocations(first: $first, where: $where ) {\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n\n }\n totalCount\n edges {\n node {\n invocationID\n startedAt\n endedAt\n }\n }\n }\n }\n"): (typeof documents)["\n query FindBuildTimes(\n $first: Int!\n \t$where: BazelInvocationWhereInput\n ) {\n findBazelInvocations(first: $first, where: $where ) {\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n\n }\n totalCount\n edges {\n node {\n invocationID\n startedAt\n endedAt\n }\n }\n }\n }\n"]; /** * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. */ @@ -89,6 +120,10 @@ export function gql(source: "\n query FindBuilds(\n $first: Int!\n $where * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. */ export function gql(source: "\n fragment BuildNode on Build {\n id\n buildUUID\n buildURL\n }\n"): (typeof documents)["\n fragment BuildNode on Build {\n id\n buildUUID\n buildURL\n }\n"]; +/** + * The gql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
+ */ +export function gql(source: "\n query FindTestsWithCache(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n cachedLocally\n cachedRemotely\n bazelInvocation {\n invocationID\n }\n }\n }\n }\n }\n "): (typeof documents)["\n query FindTestsWithCache(\n $first: Int!\n $where: TestCollectionWhereInput\n $orderBy: TestCollectionOrder\n $after: Cursor\n ){\n findTests (first: $first, where: $where, orderBy: $orderBy, after: $after){\n totalCount\n pageInfo{\n startCursor\n endCursor\n hasNextPage\n hasPreviousPage\n }\n edges {\n node {\n id\n durationMs\n firstSeen\n label\n overallStatus\n cachedLocally\n cachedRemotely\n bazelInvocation {\n invocationID\n }\n }\n }\n }\n }\n "]; export function gql(source: string) { return (documents as any)[source] ?? {}; diff --git a/frontend/src/graphql/__generated__/graphql.ts b/frontend/src/graphql/__generated__/graphql.ts index b89da1e..2d41cb5 100644 --- a/frontend/src/graphql/__generated__/graphql.ts +++ b/frontend/src/graphql/__generated__/graphql.ts @@ -29,7 +29,7 @@ export type Scalars = { export type ActionCacheStatistics = Node & { __typename?: 'ActionCacheStatistics'; - actionSummary?: Maybe>; + actionSummary?: Maybe; hits?: Maybe; id: Scalars['ID']['output']; loadTimeInMs?: Maybe; @@ -121,7 +121,7 @@ export type ActionCacheStatisticsWhereInput = { export type ActionData = Node & { __typename?: 'ActionData'; - actionSummary?: Maybe>; + actionSummary?: Maybe; actionsCreated?: Maybe; actionsExecuted?: Maybe; firstStartedMs?: Maybe; @@ -253,7 +253,7 @@ export type ActionProblem = Node & Problem & { export type ActionSummary = Node & { __typename?: 'ActionSummary'; - actionCacheStatistics?: Maybe>; + actionCacheStatistics?: Maybe; actionData?: Maybe>; actionsCreated?: Maybe; actionsCreatedNotIncludingAspects?: Maybe; @@ -342,11 +342,11 @@ export type ActionSummaryWhereInput = { export type ArtifactMetrics = Node & { __typename?: 'ArtifactMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; - outputArtifactsFromActionCache?: Maybe>; - outputArtifactsSeen?: Maybe>; - sourceArtifactsRead?: Maybe>; - topLevelArtifacts?: Maybe>; + metrics?: Maybe; + outputArtifactsFromActionCache?: Maybe; + outputArtifactsSeen?: Maybe; + sourceArtifactsRead?: Maybe; + topLevelArtifacts?: Maybe; }; /** @@ -442,6 +442,20 @@ export type BazelInvocationEdge = { node?: Maybe; }; +/** Ordering options for BazelInvocation connections */ +export type BazelInvocationOrder = { + /** The ordering direction. */ + direction?: OrderDirection; + /** The field by which to order BazelInvocations. */ + field: BazelInvocationOrderField; +}; + +/** Properties by which BazelInvocation connections can be ordered. 
*/ +export enum BazelInvocationOrderField { + StartedAt = 'STARTED_AT', + UserLdap = 'USER_LDAP' +} + export type BazelInvocationProblem = Node & { __typename?: 'BazelInvocationProblem'; bazelInvocation?: Maybe; @@ -879,14 +893,14 @@ export type BuildGraphMetrics = Node & { actionCountNotIncludingAspects?: Maybe; actionLookupValueCount?: Maybe; actionLookupValueCountNotIncludingAspects?: Maybe; - builtValues?: Maybe>; - changedValues?: Maybe>; - cleanedValues?: Maybe>; - dirtiedValues?: Maybe>; - evaluatedValues?: Maybe>; + builtValues?: Maybe; + changedValues?: Maybe; + cleanedValues?: Maybe; + dirtiedValues?: Maybe; + evaluatedValues?: Maybe; id: Scalars['ID']['output']; inputFileConfiguredTargetCount?: Maybe; - metrics?: Maybe>; + metrics?: Maybe; otherConfiguredTargetCount?: Maybe; outputArtifactCount?: Maybe; outputFileConfiguredTargetCount?: Maybe; @@ -1090,7 +1104,7 @@ export type BuildWhereInput = { export type CumulativeMetrics = Node & { __typename?: 'CumulativeMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; numAnalyses?: Maybe; numBuilds?: Maybe; }; @@ -1142,7 +1156,7 @@ export type CumulativeMetricsWhereInput = { export type DynamicExecutionMetrics = Node & { __typename?: 'DynamicExecutionMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; raceStatistics?: Maybe>; }; @@ -1179,7 +1193,7 @@ export type EnvVar = { export type EvaluationStat = Node & { __typename?: 'EvaluationStat'; - buildGraphMetrics?: Maybe>; + buildGraphMetrics?: Maybe; count?: Maybe; id: Scalars['ID']['output']; skyfunctionName?: Maybe; @@ -1357,7 +1371,7 @@ export type ExectionInfo = Node & { id: Scalars['ID']['output']; resourceUsage?: Maybe>; strategy?: Maybe; - testResult?: Maybe>; + testResult?: Maybe; timeoutSeconds?: Maybe; timingBreakdown?: Maybe; }; @@ -1458,7 +1472,7 @@ export type ExitCode = { export type FilesMetric = Node & { __typename?: 'FilesMetric'; - artifactMetrics?: Maybe>; + artifactMetrics?: Maybe; count?: Maybe; id: Scalars['ID']['output']; sizeInBytes?: Maybe; @@ -1512,7 +1526,7 @@ export type GarbageMetrics = Node & { __typename?: 'GarbageMetrics'; garbageCollected?: Maybe; id: Scalars['ID']['output']; - memoryMetrics?: Maybe>; + memoryMetrics?: Maybe; type?: Maybe; }; @@ -1569,7 +1583,7 @@ export type MemoryMetrics = Node & { __typename?: 'MemoryMetrics'; garbageMetrics?: Maybe>; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; peakPostGcHeapSize?: Maybe; peakPostGcTenuredSpaceHeapSize?: Maybe; usedHeapSizePostBuild?: Maybe; @@ -1635,18 +1649,18 @@ export type MemoryMetricsWhereInput = { export type Metrics = Node & { __typename?: 'Metrics'; - actionSummary?: Maybe>; - artifactMetrics?: Maybe>; + actionSummary?: Maybe; + artifactMetrics?: Maybe; bazelInvocation?: Maybe; - buildGraphMetrics?: Maybe>; - cumulativeMetrics?: Maybe>; - dynamicExecutionMetrics?: Maybe>; + buildGraphMetrics?: Maybe; + cumulativeMetrics?: Maybe; + dynamicExecutionMetrics?: Maybe; id: Scalars['ID']['output']; - memoryMetrics?: Maybe>; - networkMetrics?: Maybe>; - packageMetrics?: Maybe>; - targetMetrics?: Maybe>; - timingMetrics?: Maybe>; + memoryMetrics?: Maybe; + networkMetrics?: Maybe; + packageMetrics?: Maybe; + targetMetrics?: Maybe; + timingMetrics?: Maybe; }; /** A connection to a list of items. 
*/ @@ -1723,7 +1737,7 @@ export type MetricsWhereInput = { export type MissDetail = Node & { __typename?: 'MissDetail'; - actionCacheStatistics?: Maybe>; + actionCacheStatistics?: Maybe; count?: Maybe; id: Scalars['ID']['output']; reason?: Maybe; @@ -1792,7 +1806,7 @@ export type NamedSetOfFiles = Node & { fileSets?: Maybe; files?: Maybe>; id: Scalars['ID']['output']; - outputGroup?: Maybe>; + outputGroup?: Maybe; }; /** @@ -1826,8 +1840,8 @@ export type NamedSetOfFilesWhereInput = { export type NetworkMetrics = Node & { __typename?: 'NetworkMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; - systemNetworkStats?: Maybe>; + metrics?: Maybe; + systemNetworkStats?: Maybe; }; /** @@ -1879,7 +1893,7 @@ export type OutputGroup = Node & { incomplete?: Maybe; inlineFiles?: Maybe>; name?: Maybe; - targetComplete?: Maybe>; + targetComplete?: Maybe; }; /** @@ -1939,7 +1953,7 @@ export type PackageLoadMetrics = Node & { name?: Maybe; numTargets?: Maybe; numTransitiveLoads?: Maybe; - packageMetrics?: Maybe>; + packageMetrics?: Maybe; packageOverhead?: Maybe; }; @@ -2039,7 +2053,7 @@ export type PackageLoadMetricsWhereInput = { export type PackageMetrics = Node & { __typename?: 'PackageMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; packageLoadMetrics?: Maybe>; packagesLoaded?: Maybe; }; @@ -2123,7 +2137,16 @@ export type Query = { findBuilds: BuildConnection; findMetrics: MetricsConnection; findRunnerCounts: RunnerCountConnection; + findTests: TestCollectionConnection; + getAveragePassPercentageForLabel?: Maybe; getBuild?: Maybe; + getTargetDurationAggregation?: Maybe>>; + getTargetPassAggregation?: Maybe>>; + getTestDurationAggregation?: Maybe>>; + getTestPassAggregation?: Maybe>>; + getTestsWithOffset?: Maybe; + getUniqueTargetLabels?: Maybe>>; + getUniqueTestLabels?: Maybe>>; /** Fetches an object given its ID. */ node?: Maybe; /** Lookup nodes by a list of IDs. 
*/ @@ -2141,6 +2164,7 @@ export type QueryFindBazelInvocationsArgs = { before?: InputMaybe; first?: InputMaybe; last?: InputMaybe; + orderBy?: InputMaybe; where?: InputMaybe; }; @@ -2172,12 +2196,66 @@ export type QueryFindRunnerCountsArgs = { }; +export type QueryFindTestsArgs = { + after?: InputMaybe; + before?: InputMaybe; + first?: InputMaybe; + last?: InputMaybe; + orderBy?: InputMaybe; + where?: InputMaybe; +}; + + +export type QueryGetAveragePassPercentageForLabelArgs = { + label: Scalars['String']['input']; +}; + + export type QueryGetBuildArgs = { buildURL?: InputMaybe; buildUUID?: InputMaybe; }; +export type QueryGetTargetDurationAggregationArgs = { + label?: InputMaybe; +}; + + +export type QueryGetTargetPassAggregationArgs = { + label?: InputMaybe; +}; + + +export type QueryGetTestDurationAggregationArgs = { + label?: InputMaybe; +}; + + +export type QueryGetTestPassAggregationArgs = { + label?: InputMaybe; +}; + + +export type QueryGetTestsWithOffsetArgs = { + direction?: InputMaybe; + label?: InputMaybe; + limit?: InputMaybe; + offset?: InputMaybe; + sortBy?: InputMaybe; +}; + + +export type QueryGetUniqueTargetLabelsArgs = { + param?: InputMaybe; +}; + + +export type QueryGetUniqueTestLabelsArgs = { + param?: InputMaybe; +}; + + export type QueryNodeArgs = { id: Scalars['ID']['input']; }; @@ -2189,7 +2267,7 @@ export type QueryNodesArgs = { export type RaceStatistics = Node & { __typename?: 'RaceStatistics'; - dynamicExecutionMetrics?: Maybe>; + dynamicExecutionMetrics?: Maybe; id: Scalars['ID']['output']; localRunner?: Maybe; localWins?: Maybe; @@ -2292,7 +2370,7 @@ export type RaceStatisticsWhereInput = { export type ResourceUsage = Node & { __typename?: 'ResourceUsage'; - executionInfo?: Maybe>; + executionInfo?: Maybe; id: Scalars['ID']['output']; name?: Maybe; value?: Maybe; @@ -2354,7 +2432,7 @@ export type ResourceUsageWhereInput = { export type RunnerCount = Node & { __typename?: 'RunnerCount'; - actionSummary?: Maybe>; + actionSummary?: Maybe; actionsExecuted?: Maybe; execKind?: Maybe; id: Scalars['ID']['output']; @@ -2570,6 +2648,17 @@ export type SystemNetworkStatsWhereInput = { peakPacketsSentPerSecNotNil?: InputMaybe; }; +export type TargetAggregate = { + __typename?: 'TargetAggregate'; + avg?: Maybe; + count?: Maybe; + label?: Maybe; + max?: Maybe; + min?: Maybe; + pass?: Maybe; + sum?: Maybe; +}; + export type TargetComplete = Node & { __typename?: 'TargetComplete'; directoryOutput?: Maybe>; @@ -2580,7 +2669,7 @@ export type TargetComplete = Node & { success?: Maybe; tag?: Maybe>; targetKind?: Maybe; - targetPair?: Maybe>; + targetPair?: Maybe; testSize?: Maybe; testTimeout?: Maybe; testTimeoutSeconds?: Maybe; @@ -2693,7 +2782,7 @@ export type TargetConfigured = Node & { startTimeInMs?: Maybe; tag?: Maybe>; targetKind?: Maybe; - targetPair?: Maybe>; + targetPair?: Maybe; testSize?: Maybe; }; @@ -2765,7 +2854,7 @@ export type TargetConfiguredWhereInput = { export type TargetMetrics = Node & { __typename?: 'TargetMetrics'; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; targetsConfigured?: Maybe; targetsConfiguredNotIncludingAspects?: Maybe; targetsLoaded?: Maybe; @@ -2829,7 +2918,7 @@ export type TargetMetricsWhereInput = { export type TargetPair = Node & { __typename?: 'TargetPair'; abortReason?: Maybe; - bazelInvocation?: Maybe>; + bazelInvocation?: Maybe; completion?: Maybe; configuration?: Maybe; durationInMs?: Maybe; @@ -2963,10 +3052,11 @@ export type TargetProblem = Node & Problem & { export type TestCollection = Node & { __typename?: 
'TestCollection'; - bazelInvocation?: Maybe>; + bazelInvocation?: Maybe; cachedLocally?: Maybe; cachedRemotely?: Maybe; durationMs?: Maybe; + firstSeen?: Maybe; id: Scalars['ID']['output']; label?: Maybe; overallStatus?: Maybe; @@ -2975,6 +3065,40 @@ export type TestCollection = Node & { testSummary?: Maybe; }; +/** A connection to a list of items. */ +export type TestCollectionConnection = { + __typename?: 'TestCollectionConnection'; + /** A list of edges. */ + edges?: Maybe>>; + /** Information to aid in pagination. */ + pageInfo: PageInfo; + /** Identifies the total count of items in the connection. */ + totalCount: Scalars['Int']['output']; +}; + +/** An edge in a connection. */ +export type TestCollectionEdge = { + __typename?: 'TestCollectionEdge'; + /** A cursor for use in pagination. */ + cursor: Scalars['Cursor']['output']; + /** The item at the end of the edge. */ + node?: Maybe; +}; + +/** Ordering options for TestCollection connections */ +export type TestCollectionOrder = { + /** The ordering direction. */ + direction?: OrderDirection; + /** The field by which to order TestCollections. */ + field: TestCollectionOrderField; +}; + +/** Properties by which TestCollection connections can be ordered. */ +export enum TestCollectionOrderField { + Duration = 'DURATION', + FirstSeen = 'FIRST_SEEN' +} + /** TestCollectionOverallStatus is enum for the field overall_status */ export enum TestCollectionOverallStatus { Failed = 'FAILED', @@ -3015,6 +3139,17 @@ export type TestCollectionWhereInput = { durationMsNEQ?: InputMaybe; durationMsNotIn?: InputMaybe>; durationMsNotNil?: InputMaybe; + /** first_seen field predicates */ + firstSeen?: InputMaybe; + firstSeenGT?: InputMaybe; + firstSeenGTE?: InputMaybe; + firstSeenIn?: InputMaybe>; + firstSeenIsNil?: InputMaybe; + firstSeenLT?: InputMaybe; + firstSeenLTE?: InputMaybe; + firstSeenNEQ?: InputMaybe; + firstSeenNotIn?: InputMaybe>; + firstSeenNotNil?: InputMaybe; /** bazel_invocation edge predicates */ hasBazelInvocation?: InputMaybe; hasBazelInvocationWith?: InputMaybe>; @@ -3084,7 +3219,7 @@ export type TestFile = Node & { length?: Maybe; name?: Maybe; prefix?: Maybe>; - testResult?: Maybe>; + testResult?: Maybe; }; /** @@ -3168,6 +3303,30 @@ export type TestFileWhereInput = { or?: InputMaybe>; }; +export type TestGridCell = { + __typename?: 'TestGridCell'; + invocationId?: Maybe; + status?: Maybe; +}; + +export type TestGridResult = { + __typename?: 'TestGridResult'; + result?: Maybe>>; + total?: Maybe; +}; + +export type TestGridRow = { + __typename?: 'TestGridRow'; + avg?: Maybe; + cells?: Maybe>>; + count?: Maybe; + label?: Maybe; + max?: Maybe; + min?: Maybe; + passRate?: Maybe; + sum?: Maybe; +}; + export type TestProblem = Node & Problem & { __typename?: 'TestProblem'; id: Scalars['ID']['output']; @@ -3350,7 +3509,7 @@ export type TestSummary = Node & { passed?: Maybe>; runCount?: Maybe; shardCount?: Maybe; - testCollection?: Maybe>; + testCollection?: Maybe; totalNumCached?: Maybe; totalRunCount?: Maybe; totalRunDuration?: Maybe; @@ -3511,7 +3670,7 @@ export type TestSummaryWhereInput = { export type TimingBreakdown = Node & { __typename?: 'TimingBreakdown'; child?: Maybe>; - executionInfo?: Maybe>; + executionInfo?: Maybe; id: Scalars['ID']['output']; name?: Maybe; time?: Maybe; @@ -3579,7 +3738,7 @@ export type TimingChild = Node & { id: Scalars['ID']['output']; name?: Maybe; time?: Maybe; - timingBreakdown?: Maybe>; + timingBreakdown?: Maybe; }; /** @@ -3643,7 +3802,7 @@ export type TimingMetrics = Node & { cpuTimeInMs?: Maybe; 
executionPhaseTimeInMs?: Maybe; id: Scalars['ID']['output']; - metrics?: Maybe>; + metrics?: Maybe; wallTimeInMs?: Maybe; }; @@ -3741,7 +3900,7 @@ export type LoadFullBazelInvocationDetailsQuery = { __typename?: 'Query', bazelI & { ' $fragmentRefs'?: { 'FullBazelInvocationDetailsFragment': FullBazelInvocationDetailsFragment } } ) }; -export type BazelInvocationInfoFragment = { __typename?: 'BazelInvocation', id: string, invocationID: any, startedAt: any, endedAt?: any | null, stepLabel: string, metrics?: { __typename?: 'Metrics', id: string, actionSummary?: Array<{ __typename?: 'ActionSummary', id: string, actionsCreated?: number | null, actionsExecuted?: number | null, actionsCreatedNotIncludingAspects?: number | null, remoteCacheHits?: number | null, actionCacheStatistics?: Array<{ __typename?: 'ActionCacheStatistics', id: string, loadTimeInMs?: number | null, saveTimeInMs?: number | null, hits?: number | null, misses?: number | null, sizeInBytes?: number | null, missDetails?: Array<{ __typename?: 'MissDetail', id: string, count?: number | null, reason?: MissDetailReason | null }> | null }> | null, runnerCount?: Array<{ __typename?: 'RunnerCount', id: string, actionsExecuted?: number | null, name?: string | null, execKind?: string | null }> | null, actionData?: Array<{ __typename?: 'ActionData', id: string, mnemonic?: string | null, userTime?: number | null, systemTime?: number | null, lastEndedMs?: number | null, actionsCreated?: number | null, actionsExecuted?: number | null, firstStartedMs?: number | null }> | null }> | null, artifactMetrics?: Array<{ __typename?: 'ArtifactMetrics', id: string, sourceArtifactsRead?: Array<{ __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null }> | null, outputArtifactsSeen?: Array<{ __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null }> | null, outputArtifactsFromActionCache?: Array<{ __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null }> | null, topLevelArtifacts?: Array<{ __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null }> | null }> | null, cumulativeMetrics?: Array<{ __typename?: 'CumulativeMetrics', id: string, numBuilds?: number | null, numAnalyses?: number | null }> | null, dynamicExecutionMetrics?: Array<{ __typename?: 'DynamicExecutionMetrics', id: string, raceStatistics?: Array<{ __typename?: 'RaceStatistics', id: string, localWins?: number | null, mnemonic?: string | null, renoteWins?: number | null, localRunner?: string | null, remoteRunner?: string | null }> | null }> | null, buildGraphMetrics?: Array<{ __typename?: 'BuildGraphMetrics', id: string, actionLookupValueCount?: number | null, actionLookupValueCountNotIncludingAspects?: number | null, actionCount?: number | null, inputFileConfiguredTargetCount?: number | null, outputFileConfiguredTargetCount?: number | null, otherConfiguredTargetCount?: number | null, outputArtifactCount?: number | null, postInvocationSkyframeNodeCount?: number | null }> | null, memoryMetrics?: Array<{ __typename?: 'MemoryMetrics', id: string, usedHeapSizePostBuild?: number | null, peakPostGcHeapSize?: number | null, peakPostGcTenuredSpaceHeapSize?: number | null, garbageMetrics?: Array<{ __typename?: 'GarbageMetrics', id: string, garbageCollected?: number | null, type?: string | null }> | null }> | null, targetMetrics?: Array<{ __typename?: 'TargetMetrics', id: string, targetsLoaded?: number | null, targetsConfigured?: number | null, 
targetsConfiguredNotIncludingAspects?: number | null }> | null, timingMetrics?: Array<{ __typename?: 'TimingMetrics', id: string, cpuTimeInMs?: number | null, wallTimeInMs?: number | null, analysisPhaseTimeInMs?: number | null, executionPhaseTimeInMs?: number | null, actionsExecutionStartInMs?: number | null }> | null, networkMetrics?: Array<{ __typename?: 'NetworkMetrics', id: string, systemNetworkStats?: Array<{ __typename?: 'SystemNetworkStats', id: string, bytesSent?: number | null, bytesRecv?: number | null, packetsSent?: number | null, packetsRecv?: number | null, peakBytesSentPerSec?: number | null, peakBytesRecvPerSec?: number | null, peakPacketsSentPerSec?: number | null, peakPacketsRecvPerSec?: number | null }> | null }> | null, packageMetrics?: Array<{ __typename?: 'PackageMetrics', id: string, packagesLoaded?: number | null, packageLoadMetrics?: Array<{ __typename?: 'PackageLoadMetrics', id: string, name?: string | null, numTargets?: number | null, loadDuration?: number | null, packageOverhead?: number | null, computationSteps?: number | null, numTransitiveLoads?: number | null }> | null }> | null } | null, bazelCommand: { __typename?: 'BazelCommand', command: string, executable: string, id: string, residual: string, buildOptions: string }, build?: { __typename?: 'Build', id: string, buildUUID: any } | null, profile?: { __typename?: 'Profile', id: string, name: string, digest: string, sizeInBytes: number } | null, targets?: Array<{ __typename?: 'TargetPair', id: string, label?: string | null, success?: boolean | null, testSize?: TargetPairTestSize | null, targetKind?: string | null, durationInMs?: number | null, abortReason?: TargetPairAbortReason | null }> | null, testCollection?: Array<{ __typename?: 'TestCollection', id: string, label?: string | null, strategy?: string | null, durationMs?: number | null, overallStatus?: TestCollectionOverallStatus | null, cachedLocally?: boolean | null, cachedRemotely?: boolean | null }> | null, relatedFiles: Array<{ __typename?: 'NamedFile', name: string, url: string }>, user?: { __typename?: 'User', Email: string, LDAP: string } | null, state: { __typename?: 'BazelInvocationState', bepCompleted: boolean, buildEndTime: any, buildStartTime: any, id: string, exitCode?: { __typename?: 'ExitCode', code: number, id: string, name: string } | null } } & { ' $fragmentName'?: 'BazelInvocationInfoFragment' }; +export type BazelInvocationInfoFragment = { __typename?: 'BazelInvocation', id: string, invocationID: any, startedAt: any, endedAt?: any | null, stepLabel: string, metrics?: { __typename?: 'Metrics', id: string, actionSummary?: { __typename?: 'ActionSummary', id: string, actionsCreated?: number | null, actionsExecuted?: number | null, actionsCreatedNotIncludingAspects?: number | null, remoteCacheHits?: number | null, actionCacheStatistics?: { __typename?: 'ActionCacheStatistics', id: string, loadTimeInMs?: number | null, saveTimeInMs?: number | null, hits?: number | null, misses?: number | null, sizeInBytes?: number | null, missDetails?: Array<{ __typename?: 'MissDetail', id: string, count?: number | null, reason?: MissDetailReason | null }> | null } | null, runnerCount?: Array<{ __typename?: 'RunnerCount', id: string, actionsExecuted?: number | null, name?: string | null, execKind?: string | null }> | null, actionData?: Array<{ __typename?: 'ActionData', id: string, mnemonic?: string | null, userTime?: number | null, systemTime?: number | null, lastEndedMs?: number | null, actionsCreated?: number | null, actionsExecuted?: number | null, 
firstStartedMs?: number | null }> | null } | null, artifactMetrics?: { __typename?: 'ArtifactMetrics', id: string, sourceArtifactsRead?: { __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null } | null, outputArtifactsSeen?: { __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null } | null, outputArtifactsFromActionCache?: { __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null } | null, topLevelArtifacts?: { __typename?: 'FilesMetric', id: string, sizeInBytes?: number | null, count?: number | null } | null } | null, cumulativeMetrics?: { __typename?: 'CumulativeMetrics', id: string, numBuilds?: number | null, numAnalyses?: number | null } | null, dynamicExecutionMetrics?: { __typename?: 'DynamicExecutionMetrics', id: string, raceStatistics?: Array<{ __typename?: 'RaceStatistics', id: string, localWins?: number | null, mnemonic?: string | null, renoteWins?: number | null, localRunner?: string | null, remoteRunner?: string | null }> | null } | null, buildGraphMetrics?: { __typename?: 'BuildGraphMetrics', id: string, actionLookupValueCount?: number | null, actionLookupValueCountNotIncludingAspects?: number | null, actionCount?: number | null, inputFileConfiguredTargetCount?: number | null, outputFileConfiguredTargetCount?: number | null, otherConfiguredTargetCount?: number | null, outputArtifactCount?: number | null, postInvocationSkyframeNodeCount?: number | null } | null, memoryMetrics?: { __typename?: 'MemoryMetrics', id: string, usedHeapSizePostBuild?: number | null, peakPostGcHeapSize?: number | null, peakPostGcTenuredSpaceHeapSize?: number | null, garbageMetrics?: Array<{ __typename?: 'GarbageMetrics', id: string, garbageCollected?: number | null, type?: string | null }> | null } | null, targetMetrics?: { __typename?: 'TargetMetrics', id: string, targetsLoaded?: number | null, targetsConfigured?: number | null, targetsConfiguredNotIncludingAspects?: number | null } | null, timingMetrics?: { __typename?: 'TimingMetrics', id: string, cpuTimeInMs?: number | null, wallTimeInMs?: number | null, analysisPhaseTimeInMs?: number | null, executionPhaseTimeInMs?: number | null, actionsExecutionStartInMs?: number | null } | null, networkMetrics?: { __typename?: 'NetworkMetrics', id: string, systemNetworkStats?: { __typename?: 'SystemNetworkStats', id: string, bytesSent?: number | null, bytesRecv?: number | null, packetsSent?: number | null, packetsRecv?: number | null, peakBytesSentPerSec?: number | null, peakBytesRecvPerSec?: number | null, peakPacketsSentPerSec?: number | null, peakPacketsRecvPerSec?: number | null } | null } | null, packageMetrics?: { __typename?: 'PackageMetrics', id: string, packagesLoaded?: number | null, packageLoadMetrics?: Array<{ __typename?: 'PackageLoadMetrics', id: string, name?: string | null, numTargets?: number | null, loadDuration?: number | null, packageOverhead?: number | null, computationSteps?: number | null, numTransitiveLoads?: number | null }> | null } | null } | null, bazelCommand: { __typename?: 'BazelCommand', command: string, executable: string, id: string, residual: string, buildOptions: string }, build?: { __typename?: 'Build', id: string, buildUUID: any } | null, profile?: { __typename?: 'Profile', id: string, name: string, digest: string, sizeInBytes: number } | null, targets?: Array<{ __typename?: 'TargetPair', id: string, label?: string | null, success?: boolean | null, testSize?: TargetPairTestSize | null, targetKind?: string | null, 
durationInMs?: number | null, abortReason?: TargetPairAbortReason | null }> | null, testCollection?: Array<{ __typename?: 'TestCollection', id: string, label?: string | null, strategy?: string | null, durationMs?: number | null, overallStatus?: TestCollectionOverallStatus | null, cachedLocally?: boolean | null, cachedRemotely?: boolean | null }> | null, relatedFiles: Array<{ __typename?: 'NamedFile', name: string, url: string }>, user?: { __typename?: 'User', Email: string, LDAP: string } | null, state: { __typename?: 'BazelInvocationState', bepCompleted: boolean, buildEndTime: any, buildStartTime: any, id: string, exitCode?: { __typename?: 'ExitCode', code: number, id: string, name: string } | null } } & { ' $fragmentName'?: 'BazelInvocationInfoFragment' }; type ProblemInfo_ActionProblem_Fragment = { __typename: 'ActionProblem', id: string, label: string, type: string, stdout?: ( { __typename?: 'BlobReference' } @@ -3816,6 +3975,54 @@ export type FindBuildByUuidQuery = { __typename?: 'Query', getBuild?: { __typena & { ' $fragmentRefs'?: { 'FullBazelInvocationDetailsFragment': FullBazelInvocationDetailsFragment } } )> | null, env: Array<{ __typename?: 'EnvVar', key: string, value: string }> } | null }; +export type GetTestsWithOffsetQueryVariables = Exact<{ + label?: InputMaybe; + offset?: InputMaybe; + limit?: InputMaybe; + sortBy?: InputMaybe; + direction?: InputMaybe; +}>; + + +export type GetTestsWithOffsetQuery = { __typename?: 'Query', getTestsWithOffset?: { __typename?: 'TestGridResult', total?: number | null, result?: Array<{ __typename?: 'TestGridRow', label?: string | null, sum?: number | null, min?: number | null, max?: number | null, avg?: number | null, count?: number | null, passRate?: number | null } | null> | null } | null }; + +export type GetUniqueTestLabelsQueryVariables = Exact<{ [key: string]: never; }>; + + +export type GetUniqueTestLabelsQuery = { __typename?: 'Query', getUniqueTestLabels?: Array | null }; + +export type GetAveragePassPercentageForLabelQueryVariables = Exact<{ + label: Scalars['String']['input']; +}>; + + +export type GetAveragePassPercentageForLabelQuery = { __typename?: 'Query', getAveragePassPercentageForLabel?: number | null }; + +export type GetTestDurationAggregationQueryVariables = Exact<{ + label?: InputMaybe; +}>; + + +export type GetTestDurationAggregationQuery = { __typename?: 'Query', getTestDurationAggregation?: Array<{ __typename?: 'TargetAggregate', label?: string | null, count?: number | null, sum?: number | null, min?: number | null, max?: number | null } | null> | null }; + +export type FindTestsQueryVariables = Exact<{ + first: Scalars['Int']['input']; + where?: InputMaybe; + orderBy?: InputMaybe; + after?: InputMaybe; +}>; + + +export type FindTestsQuery = { __typename?: 'Query', findTests: { __typename?: 'TestCollectionConnection', totalCount: number, pageInfo: { __typename?: 'PageInfo', startCursor?: any | null, endCursor?: any | null, hasNextPage: boolean, hasPreviousPage: boolean }, edges?: Array<{ __typename?: 'TestCollectionEdge', node?: { __typename?: 'TestCollection', id: string, durationMs?: number | null, firstSeen?: any | null, label?: string | null, overallStatus?: TestCollectionOverallStatus | null, bazelInvocation?: { __typename?: 'BazelInvocation', invocationID: any } | null } | null } | null> | null } }; + +export type FindBuildTimesQueryVariables = Exact<{ + first: Scalars['Int']['input']; + where?: InputMaybe; +}>; + + +export type FindBuildTimesQuery = { __typename?: 'Query', findBazelInvocations: { __typename?: 
'BazelInvocationConnection', totalCount: number, pageInfo: { __typename?: 'PageInfo', startCursor?: any | null, endCursor?: any | null, hasNextPage: boolean, hasPreviousPage: boolean }, edges?: Array<{ __typename?: 'BazelInvocationEdge', node?: { __typename?: 'BazelInvocation', invocationID: any, startedAt: any, endedAt?: any | null } | null } | null> | null } }; + export type FindBazelInvocationsQueryVariables = Exact<{ first: Scalars['Int']['input']; where?: InputMaybe; @@ -3842,6 +4049,16 @@ export type FindBuildsQuery = { __typename?: 'Query', findBuilds: { __typename?: export type BuildNodeFragment = { __typename?: 'Build', id: string, buildUUID: any, buildURL: string } & { ' $fragmentName'?: 'BuildNodeFragment' }; +export type FindTestsWithCacheQueryVariables = Exact<{ + first: Scalars['Int']['input']; + where?: InputMaybe; + orderBy?: InputMaybe; + after?: InputMaybe; +}>; + + +export type FindTestsWithCacheQuery = { __typename?: 'Query', findTests: { __typename?: 'TestCollectionConnection', totalCount: number, pageInfo: { __typename?: 'PageInfo', startCursor?: any | null, endCursor?: any | null, hasNextPage: boolean, hasPreviousPage: boolean }, edges?: Array<{ __typename?: 'TestCollectionEdge', node?: { __typename?: 'TestCollection', id: string, durationMs?: number | null, firstSeen?: any | null, label?: string | null, overallStatus?: TestCollectionOverallStatus | null, cachedLocally?: boolean | null, cachedRemotely?: boolean | null, bazelInvocation?: { __typename?: 'BazelInvocation', invocationID: any } | null } | null } | null> | null } }; + export const BlobReferenceInfoFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BlobReferenceInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BlobReference"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"availabilityStatus"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"downloadURL"}}]}}]} as unknown as DocumentNode; export const ProblemInfoFragmentDoc = 
{"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"ProblemInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Problem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ActionProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"stdout"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stderr"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TestProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"results"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"run"}},{"kind":"Field","name":{"kind":"Name","value":"shard"}},{"kind":"Field","name":{"kind":"Name","value":"attempt"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"actionLogOutput"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"undeclaredTestOutputs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TargetProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ProgressProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"output"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BlobReferenceInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BlobReference"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"availabilityStatus"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"F
ield","name":{"kind":"Name","value":"downloadURL"}}]}}]} as unknown as DocumentNode; export const BazelInvocationInfoFragmentDoc = {"kind":"Document","definitions":[{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BazelInvocationInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"metrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionSummary"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreatedNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"remoteCacheHits"}},{"kind":"Field","name":{"kind":"Name","value":"actionCacheStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"loadTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"saveTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"hits"}},{"kind":"Field","name":{"kind":"Name","value":"misses"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"missDetails"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"count"}},{"kind":"Field","name":{"kind":"Name","value":"reason"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"runnerCount"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"execKind"}}]}},{"kind":"Field","name":{"kind":"Name","value":"actionData"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"userTime"}},{"kind":"Field","name":{"kind":"Name","value":"systemTime"}},{"kind":"Field","name":{"kind":"Name","value":"lastEndedMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"firstStartedMs"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"artifactMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sourceArtifactsRead"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsSeen"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsFromActionCache"},"
selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"topLevelArtifacts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"cumulativeMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"numBuilds"}},{"kind":"Field","name":{"kind":"Name","value":"numAnalyses"}}]}},{"kind":"Field","name":{"kind":"Name","value":"dynamicExecutionMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"raceStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"localWins"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"renoteWins"}},{"kind":"Field","name":{"kind":"Name","value":"localRunner"}},{"kind":"Field","name":{"kind":"Name","value":"remoteRunner"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"buildGraphMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCount"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCountNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"actionCount"}},{"kind":"Field","name":{"kind":"Name","value":"inputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"outputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"otherConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactCount"}},{"kind":"Field","name":{"kind":"Name","value":"postInvocationSkyframeNodeCount"}}]}},{"kind":"Field","name":{"kind":"Name","value":"memoryMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"usedHeapSizePostBuild"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcTenuredSpaceHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"garbageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"garbageCollected"}},{"kind":"Field","name":{"kind":"Name","value":"type"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"targetMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"targetsLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfigured"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfiguredNotIncludingAspects"}}]}},{"kind":"Field","name":{"kind":"Name","value":"timingMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","v
alue":"cpuTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"wallTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"analysisPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"executionPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecutionStartInMs"}}]}},{"kind":"Field","name":{"kind":"Name","value":"networkMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"systemNetworkStats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"bytesSent"}},{"kind":"Field","name":{"kind":"Name","value":"bytesRecv"}},{"kind":"Field","name":{"kind":"Name","value":"packetsSent"}},{"kind":"Field","name":{"kind":"Name","value":"packetsRecv"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesRecvPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsRecvPerSec"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"packageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"packagesLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"packageLoadMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"numTargets"}},{"kind":"Field","name":{"kind":"Name","value":"loadDuration"}},{"kind":"Field","name":{"kind":"Name","value":"packageOverhead"}},{"kind":"Field","name":{"kind":"Name","value":"computationSteps"}},{"kind":"Field","name":{"kind":"Name","value":"numTransitiveLoads"}}]}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"bazelCommand"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"command"}},{"kind":"Field","name":{"kind":"Name","value":"executable"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","alias":{"kind":"Name","value":"buildOptions"},"name":{"kind":"Name","value":"options"}},{"kind":"Field","name":{"kind":"Name","value":"residual"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"invocationID"}},{"kind":"Field","name":{"kind":"Name","value":"build"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}}]}},{"kind":"Field","name":{"kind":"Name","value":"profile"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"digest"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}}]}},{"kind":"Field","name":{"kind":"Name","value":"targets"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"testSize"}},{"kind":"Field","name":{"kind":"Name","value":"targetKind"}},{"kind":"Field","name":{"kind":"Name","value":
"durationInMs"}},{"kind":"Field","name":{"kind":"Name","value":"abortReason"}}]}},{"kind":"Field","name":{"kind":"Name","value":"testCollection"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"strategy"}},{"kind":"Field","name":{"kind":"Name","value":"durationMs"}},{"kind":"Field","name":{"kind":"Name","value":"overallStatus"}},{"kind":"Field","name":{"kind":"Name","value":"cachedLocally"}},{"kind":"Field","name":{"kind":"Name","value":"cachedRemotely"}}]}},{"kind":"Field","name":{"kind":"Name","value":"relatedFiles"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"url"}}]}},{"kind":"Field","name":{"kind":"Name","value":"user"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"Email"}},{"kind":"Field","name":{"kind":"Name","value":"LDAP"}}]}},{"kind":"Field","name":{"kind":"Name","value":"startedAt"}},{"kind":"Field","name":{"kind":"Name","value":"endedAt"}},{"kind":"Field","name":{"kind":"Name","value":"state"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bepCompleted"}},{"kind":"Field","name":{"kind":"Name","value":"buildEndTime"}},{"kind":"Field","name":{"kind":"Name","value":"buildStartTime"}},{"kind":"Field","name":{"kind":"Name","value":"exitCode"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"code"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stepLabel"}}]}}]} as unknown as DocumentNode; @@ -3852,5 +4069,12 @@ export const BuildNodeFragmentDoc = {"kind":"Document","definitions":[{"kind":"F export const LoadFullBazelInvocationDetailsDocument = 
{"__meta__":{"hash":"6afd8375434b1cf5b89da8a90f7e496c0f6485fe"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"LoadFullBazelInvocationDetails"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"invocationID"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bazelInvocation"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"invocationId"},"value":{"kind":"Variable","name":{"kind":"Name","value":"invocationID"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"FullBazelInvocationDetails"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BlobReferenceInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BlobReference"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"availabilityStatus"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"downloadURL"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"ProblemInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Problem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ActionProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"stdout"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stderr"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TestProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"results"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"run"}},{"kind":"Field","name":{"kind":"Name","value":"shard"}},{"kind":"Field","name":{"kind":"Name","value":"attempt"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"actionLogOutput"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"undeclaredTestOu
tputs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TargetProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ProgressProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"output"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BazelInvocationInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"metrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionSummary"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreatedNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"remoteCacheHits"}},{"kind":"Field","name":{"kind":"Name","value":"actionCacheStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"loadTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"saveTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"hits"}},{"kind":"Field","name":{"kind":"Name","value":"misses"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"missDetails"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"count"}},{"kind":"Field","name":{"kind":"Name","value":"reason"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"runnerCount"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"execKind"}}]}},{"kind":"Field","name":{"kind":"Name","value":"actionData"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"userTime"}},{"kind":"Field","name":{"kind":"Name","value":"systemTime"}},{"kind":"Field","name":{"kind":"Name","value":"lastEndedMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"firstStartedMs"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"artifactMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Fiel
d","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sourceArtifactsRead"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsSeen"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsFromActionCache"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"topLevelArtifacts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"cumulativeMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"numBuilds"}},{"kind":"Field","name":{"kind":"Name","value":"numAnalyses"}}]}},{"kind":"Field","name":{"kind":"Name","value":"dynamicExecutionMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"raceStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"localWins"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"renoteWins"}},{"kind":"Field","name":{"kind":"Name","value":"localRunner"}},{"kind":"Field","name":{"kind":"Name","value":"remoteRunner"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"buildGraphMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCount"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCountNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"actionCount"}},{"kind":"Field","name":{"kind":"Name","value":"inputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"outputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"otherConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactCount"}},{"kind":"Field","name":{"kind":"Name","value":"postInvocationSkyframeNodeCount"}}]}},{"kind":"Field","name":{"kind":"Name","value":"memoryMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"usedHeapSizePostBuild"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcTenuredSpaceHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"garbageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field"
,"name":{"kind":"Name","value":"garbageCollected"}},{"kind":"Field","name":{"kind":"Name","value":"type"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"targetMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"targetsLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfigured"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfiguredNotIncludingAspects"}}]}},{"kind":"Field","name":{"kind":"Name","value":"timingMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cpuTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"wallTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"analysisPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"executionPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecutionStartInMs"}}]}},{"kind":"Field","name":{"kind":"Name","value":"networkMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"systemNetworkStats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"bytesSent"}},{"kind":"Field","name":{"kind":"Name","value":"bytesRecv"}},{"kind":"Field","name":{"kind":"Name","value":"packetsSent"}},{"kind":"Field","name":{"kind":"Name","value":"packetsRecv"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesRecvPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsRecvPerSec"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"packageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"packagesLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"packageLoadMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"numTargets"}},{"kind":"Field","name":{"kind":"Name","value":"loadDuration"}},{"kind":"Field","name":{"kind":"Name","value":"packageOverhead"}},{"kind":"Field","name":{"kind":"Name","value":"computationSteps"}},{"kind":"Field","name":{"kind":"Name","value":"numTransitiveLoads"}}]}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"bazelCommand"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"command"}},{"kind":"Field","name":{"kind":"Name","value":"executable"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","alias":{"kind":"Name","value":"buildOptions"},"name":{"kind":"Name","value":"options"}},{"kind":"Field","name":{"kind":"Name","value":"residual"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"invocationID"}},{"kind":"Field","name":{"kind":"Name","value":"build"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}}]}},{"kind":"Field","name":{"kind":"Name","value":"profile"},"selectionSet
":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"digest"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}}]}},{"kind":"Field","name":{"kind":"Name","value":"targets"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"testSize"}},{"kind":"Field","name":{"kind":"Name","value":"targetKind"}},{"kind":"Field","name":{"kind":"Name","value":"durationInMs"}},{"kind":"Field","name":{"kind":"Name","value":"abortReason"}}]}},{"kind":"Field","name":{"kind":"Name","value":"testCollection"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"strategy"}},{"kind":"Field","name":{"kind":"Name","value":"durationMs"}},{"kind":"Field","name":{"kind":"Name","value":"overallStatus"}},{"kind":"Field","name":{"kind":"Name","value":"cachedLocally"}},{"kind":"Field","name":{"kind":"Name","value":"cachedRemotely"}}]}},{"kind":"Field","name":{"kind":"Name","value":"relatedFiles"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"url"}}]}},{"kind":"Field","name":{"kind":"Name","value":"user"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"Email"}},{"kind":"Field","name":{"kind":"Name","value":"LDAP"}}]}},{"kind":"Field","name":{"kind":"Name","value":"startedAt"}},{"kind":"Field","name":{"kind":"Name","value":"endedAt"}},{"kind":"Field","name":{"kind":"Name","value":"state"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bepCompleted"}},{"kind":"Field","name":{"kind":"Name","value":"buildEndTime"}},{"kind":"Field","name":{"kind":"Name","value":"buildStartTime"}},{"kind":"Field","name":{"kind":"Name","value":"exitCode"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"code"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stepLabel"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"FullBazelInvocationDetails"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"problems"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"ProblemInfo"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"BazelInvocationInfo"}}]}}]} as unknown as DocumentNode; export const GetActionProblemDocument = 
{"__meta__":{"hash":"3368384a6bed2b74e78f79b5dc6df1bdd1de7bcc"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetActionProblem"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"id"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"ID"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"id"},"value":{"kind":"Variable","name":{"kind":"Name","value":"id"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ActionProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"stdout"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stderr"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BlobReferenceInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BlobReference"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"availabilityStatus"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"downloadURL"}}]}}]} as unknown as DocumentNode; export const FindBuildByUuidDocument = 
{"__meta__":{"hash":"69ef798b19a03284bc228ef60c02492fcb8b1c44"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindBuildByUUID"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"url"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"uuid"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"UUID"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"getBuild"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"buildURL"},"value":{"kind":"Variable","name":{"kind":"Name","value":"url"}}},{"kind":"Argument","name":{"kind":"Name","value":"buildUUID"},"value":{"kind":"Variable","name":{"kind":"Name","value":"uuid"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildURL"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}},{"kind":"Field","name":{"kind":"Name","value":"invocations"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"FullBazelInvocationDetails"}}]}},{"kind":"Field","name":{"kind":"Name","value":"env"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"key"}},{"kind":"Field","name":{"kind":"Name","value":"value"}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BlobReferenceInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BlobReference"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"availabilityStatus"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"downloadURL"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"ProblemInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Problem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ActionProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"type"}},{"kind":"Field","name":{"kind":"Name","value":"stdout"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stderr"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TestProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","
value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"results"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"run"}},{"kind":"Field","name":{"kind":"Name","value":"shard"}},{"kind":"Field","name":{"kind":"Name","value":"attempt"}},{"kind":"Field","name":{"kind":"Name","value":"status"}},{"kind":"Field","name":{"kind":"Name","value":"actionLogOutput"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}},{"kind":"Field","name":{"kind":"Name","value":"undeclaredTestOutputs"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BlobReferenceInfo"}}]}}]}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"TargetProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}},{"kind":"InlineFragment","typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"ProgressProblem"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"output"}},{"kind":"Field","name":{"kind":"Name","value":"label"}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BazelInvocationInfo"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"metrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionSummary"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreatedNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"remoteCacheHits"}},{"kind":"Field","name":{"kind":"Name","value":"actionCacheStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"loadTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"saveTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"hits"}},{"kind":"Field","name":{"kind":"Name","value":"misses"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"missDetails"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"count"}},{"kind":"Field","name":{"kind":"Name","value":"reason"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"runnerCount"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"execKind"}}]}},{"kind":"Fie
ld","name":{"kind":"Name","value":"actionData"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"userTime"}},{"kind":"Field","name":{"kind":"Name","value":"systemTime"}},{"kind":"Field","name":{"kind":"Name","value":"lastEndedMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsCreated"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecuted"}},{"kind":"Field","name":{"kind":"Name","value":"firstStartedMs"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"artifactMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sourceArtifactsRead"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsSeen"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"outputArtifactsFromActionCache"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}},{"kind":"Field","name":{"kind":"Name","value":"topLevelArtifacts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}},{"kind":"Field","name":{"kind":"Name","value":"count"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"cumulativeMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"numBuilds"}},{"kind":"Field","name":{"kind":"Name","value":"numAnalyses"}}]}},{"kind":"Field","name":{"kind":"Name","value":"dynamicExecutionMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"raceStatistics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"localWins"}},{"kind":"Field","name":{"kind":"Name","value":"mnemonic"}},{"kind":"Field","name":{"kind":"Name","value":"renoteWins"}},{"kind":"Field","name":{"kind":"Name","value":"localRunner"}},{"kind":"Field","name":{"kind":"Name","value":"remoteRunner"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"buildGraphMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCount"}},{"kind":"Field","name":{"kind":"Name","value":"actionLookupValueCountNotIncludingAspects"}},{"kind":"Field","name":{"kind":"Name","value":"actionCount"}},{"kind":"Field","name":{"kind":"Name","value":"inputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"outputFileConfiguredTargetCount"}},{"kind":"Field","name":{"kind":"Name","value":"otherConfiguredTargetCount"}},{"k
ind":"Field","name":{"kind":"Name","value":"outputArtifactCount"}},{"kind":"Field","name":{"kind":"Name","value":"postInvocationSkyframeNodeCount"}}]}},{"kind":"Field","name":{"kind":"Name","value":"memoryMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"usedHeapSizePostBuild"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"peakPostGcTenuredSpaceHeapSize"}},{"kind":"Field","name":{"kind":"Name","value":"garbageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"garbageCollected"}},{"kind":"Field","name":{"kind":"Name","value":"type"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"targetMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"targetsLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfigured"}},{"kind":"Field","name":{"kind":"Name","value":"targetsConfiguredNotIncludingAspects"}}]}},{"kind":"Field","name":{"kind":"Name","value":"timingMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"cpuTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"wallTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"analysisPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"executionPhaseTimeInMs"}},{"kind":"Field","name":{"kind":"Name","value":"actionsExecutionStartInMs"}}]}},{"kind":"Field","name":{"kind":"Name","value":"networkMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"systemNetworkStats"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"bytesSent"}},{"kind":"Field","name":{"kind":"Name","value":"bytesRecv"}},{"kind":"Field","name":{"kind":"Name","value":"packetsSent"}},{"kind":"Field","name":{"kind":"Name","value":"packetsRecv"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakBytesRecvPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsSentPerSec"}},{"kind":"Field","name":{"kind":"Name","value":"peakPacketsRecvPerSec"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"packageMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"packagesLoaded"}},{"kind":"Field","name":{"kind":"Name","value":"packageLoadMetrics"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"numTargets"}},{"kind":"Field","name":{"kind":"Name","value":"loadDuration"}},{"kind":"Field","name":{"kind":"Name","value":"packageOverhead"}},{"kind":"Field","name":{"kind":"Name","value":"computationSteps"}},{"kind":"Field","name":{"kind":"Name","value":"numTransitiveLoads"}}]}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"bazelCommand"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name"
:{"kind":"Name","value":"command"}},{"kind":"Field","name":{"kind":"Name","value":"executable"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","alias":{"kind":"Name","value":"buildOptions"},"name":{"kind":"Name","value":"options"}},{"kind":"Field","name":{"kind":"Name","value":"residual"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"invocationID"}},{"kind":"Field","name":{"kind":"Name","value":"build"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}}]}},{"kind":"Field","name":{"kind":"Name","value":"profile"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"digest"}},{"kind":"Field","name":{"kind":"Name","value":"sizeInBytes"}}]}},{"kind":"Field","name":{"kind":"Name","value":"targets"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"success"}},{"kind":"Field","name":{"kind":"Name","value":"testSize"}},{"kind":"Field","name":{"kind":"Name","value":"targetKind"}},{"kind":"Field","name":{"kind":"Name","value":"durationInMs"}},{"kind":"Field","name":{"kind":"Name","value":"abortReason"}}]}},{"kind":"Field","name":{"kind":"Name","value":"testCollection"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"strategy"}},{"kind":"Field","name":{"kind":"Name","value":"durationMs"}},{"kind":"Field","name":{"kind":"Name","value":"overallStatus"}},{"kind":"Field","name":{"kind":"Name","value":"cachedLocally"}},{"kind":"Field","name":{"kind":"Name","value":"cachedRemotely"}}]}},{"kind":"Field","name":{"kind":"Name","value":"relatedFiles"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"name"}},{"kind":"Field","name":{"kind":"Name","value":"url"}}]}},{"kind":"Field","name":{"kind":"Name","value":"user"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"Email"}},{"kind":"Field","name":{"kind":"Name","value":"LDAP"}}]}},{"kind":"Field","name":{"kind":"Name","value":"startedAt"}},{"kind":"Field","name":{"kind":"Name","value":"endedAt"}},{"kind":"Field","name":{"kind":"Name","value":"state"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bepCompleted"}},{"kind":"Field","name":{"kind":"Name","value":"buildEndTime"}},{"kind":"Field","name":{"kind":"Name","value":"buildStartTime"}},{"kind":"Field","name":{"kind":"Name","value":"exitCode"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"code"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}},{"kind":"Field","name":{"kind":"Name","value":"id"}}]}},{"kind":"Field","name":{"kind":"Name","value":"stepLabel"}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"FullBazelInvocationDetails"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Fi
eld","name":{"kind":"Name","value":"problems"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"ProblemInfo"}}]}},{"kind":"FragmentSpread","name":{"kind":"Name","value":"BazelInvocationInfo"}}]}}]} as unknown as DocumentNode; +export const GetTestsWithOffsetDocument = {"__meta__":{"hash":"a58c22b2db5f30d1dba201fe125838c7f14d5e6d"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetTestsWithOffset"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"label"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"offset"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"limit"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"sortBy"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"direction"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"getTestsWithOffset"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"label"},"value":{"kind":"Variable","name":{"kind":"Name","value":"label"}}},{"kind":"Argument","name":{"kind":"Name","value":"offset"},"value":{"kind":"Variable","name":{"kind":"Name","value":"offset"}}},{"kind":"Argument","name":{"kind":"Name","value":"limit"},"value":{"kind":"Variable","name":{"kind":"Name","value":"limit"}}},{"kind":"Argument","name":{"kind":"Name","value":"sortBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"sortBy"}}},{"kind":"Argument","name":{"kind":"Name","value":"direction"},"value":{"kind":"Variable","name":{"kind":"Name","value":"direction"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"total"}},{"kind":"Field","name":{"kind":"Name","value":"result"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"sum"}},{"kind":"Field","name":{"kind":"Name","value":"min"}},{"kind":"Field","name":{"kind":"Name","value":"max"}},{"kind":"Field","name":{"kind":"Name","value":"avg"}},{"kind":"Field","name":{"kind":"Name","value":"count"}},{"kind":"Field","name":{"kind":"Name","value":"passRate"}}]}}]}}]}}]} as unknown as DocumentNode; +export const GetUniqueTestLabelsDocument = {"__meta__":{"hash":"0125ec580cc0dba1b90f091f83f0aba0b1549311"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetUniqueTestLabels"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"getUniqueTestLabels"}}]}}]} as unknown as DocumentNode; +export const GetAveragePassPercentageForLabelDocument = 
{"__meta__":{"hash":"1849a9864fcfcecf29b462f0b9c9275fa2dd49ef"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetAveragePassPercentageForLabel"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"label"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"getAveragePassPercentageForLabel"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"label"},"value":{"kind":"Variable","name":{"kind":"Name","value":"label"}}}]}]}}]} as unknown as DocumentNode; +export const GetTestDurationAggregationDocument = {"__meta__":{"hash":"4b5a5a8301688612cd07d393bc301156ffba05c7"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"GetTestDurationAggregation"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"label"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"String"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"getTestDurationAggregation"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"label"},"value":{"kind":"Variable","name":{"kind":"Name","value":"label"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"count"}},{"kind":"Field","name":{"kind":"Name","value":"sum"}},{"kind":"Field","name":{"kind":"Name","value":"min"}},{"kind":"Field","name":{"kind":"Name","value":"max"}}]}}]}}]} as unknown as DocumentNode; +export const FindTestsDocument = 
{"__meta__":{"hash":"17231025ad382237c09f082d9e94e39118d8502d"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindTests"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"TestCollectionWhereInput"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"TestCollectionOrder"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"after"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Cursor"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findTests"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}},{"kind":"Argument","name":{"kind":"Name","value":"after"},"value":{"kind":"Variable","name":{"kind":"Name","value":"after"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"totalCount"}},{"kind":"Field","name":{"kind":"Name","value":"pageInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"startCursor"}},{"kind":"Field","name":{"kind":"Name","value":"endCursor"}},{"kind":"Field","name":{"kind":"Name","value":"hasNextPage"}},{"kind":"Field","name":{"kind":"Name","value":"hasPreviousPage"}}]}},{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"durationMs"}},{"kind":"Field","name":{"kind":"Name","value":"firstSeen"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"overallStatus"}},{"kind":"Field","name":{"kind":"Name","value":"bazelInvocation"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"invocationID"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode; +export const FindBuildTimesDocument = 
{"__meta__":{"hash":"707420fe8ea691631ecc9c431896b8e8d6b62692"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindBuildTimes"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocationWhereInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findBazelInvocations"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"pageInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"startCursor"}},{"kind":"Field","name":{"kind":"Name","value":"endCursor"}},{"kind":"Field","name":{"kind":"Name","value":"hasNextPage"}},{"kind":"Field","name":{"kind":"Name","value":"hasPreviousPage"}}]}},{"kind":"Field","name":{"kind":"Name","value":"totalCount"}},{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"invocationID"}},{"kind":"Field","name":{"kind":"Name","value":"startedAt"}},{"kind":"Field","name":{"kind":"Name","value":"endedAt"}}]}}]}}]}}]}}]} as unknown as DocumentNode; export const FindBazelInvocationsDocument = 
{"__meta__":{"hash":"7151b14bba5e7ea3e0f6e50882cc2f429591feb9"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindBazelInvocations"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocationWhereInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findBazelInvocations"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BazelInvocationNode"}}]}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BazelInvocationNode"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"BazelInvocation"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"invocationID"}},{"kind":"Field","name":{"kind":"Name","value":"startedAt"}},{"kind":"Field","name":{"kind":"Name","value":"user"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"Email"}},{"kind":"Field","name":{"kind":"Name","value":"LDAP"}}]}},{"kind":"Field","name":{"kind":"Name","value":"endedAt"}},{"kind":"Field","name":{"kind":"Name","value":"state"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"bepCompleted"}},{"kind":"Field","name":{"kind":"Name","value":"exitCode"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"code"}},{"kind":"Field","name":{"kind":"Name","value":"name"}}]}}]}},{"kind":"Field","name":{"kind":"Name","value":"build"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}}]}}]}}]} as unknown as DocumentNode; -export const FindBuildsDocument = 
{"__meta__":{"hash":"8edb3e7557d6c22033afe132a5975af6d3ffb001"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindBuilds"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"BuildWhereInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findBuilds"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BuildNode"}}]}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BuildNode"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Build"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}},{"kind":"Field","name":{"kind":"Name","value":"buildURL"}}]}}]} as unknown as DocumentNode; \ No newline at end of file +export const FindBuildsDocument = {"__meta__":{"hash":"8edb3e7557d6c22033afe132a5975af6d3ffb001"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindBuilds"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"BuildWhereInput"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findBuilds"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"FragmentSpread","name":{"kind":"Name","value":"BuildNode"}}]}}]}}]}}]}},{"kind":"FragmentDefinition","name":{"kind":"Name","value":"BuildNode"},"typeCondition":{"kind":"NamedType","name":{"kind":"Name","value":"Build"}},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"buildUUID"}},{"kind":"Field","name":{"kind":"Name","value":"buildURL"}}]}}]} as unknown as DocumentNode; +export const FindTestsWithCacheDocument = 
{"__meta__":{"hash":"add15b38babd589a6f929f238d8f8f03392ad43b"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindTestsWithCache"},"variableDefinitions":[{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"first"}},"type":{"kind":"NonNullType","type":{"kind":"NamedType","name":{"kind":"Name","value":"Int"}}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"where"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"TestCollectionWhereInput"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"TestCollectionOrder"}}},{"kind":"VariableDefinition","variable":{"kind":"Variable","name":{"kind":"Name","value":"after"}},"type":{"kind":"NamedType","name":{"kind":"Name","value":"Cursor"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"findTests"},"arguments":[{"kind":"Argument","name":{"kind":"Name","value":"first"},"value":{"kind":"Variable","name":{"kind":"Name","value":"first"}}},{"kind":"Argument","name":{"kind":"Name","value":"where"},"value":{"kind":"Variable","name":{"kind":"Name","value":"where"}}},{"kind":"Argument","name":{"kind":"Name","value":"orderBy"},"value":{"kind":"Variable","name":{"kind":"Name","value":"orderBy"}}},{"kind":"Argument","name":{"kind":"Name","value":"after"},"value":{"kind":"Variable","name":{"kind":"Name","value":"after"}}}],"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"totalCount"}},{"kind":"Field","name":{"kind":"Name","value":"pageInfo"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"startCursor"}},{"kind":"Field","name":{"kind":"Name","value":"endCursor"}},{"kind":"Field","name":{"kind":"Name","value":"hasNextPage"}},{"kind":"Field","name":{"kind":"Name","value":"hasPreviousPage"}}]}},{"kind":"Field","name":{"kind":"Name","value":"edges"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"node"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"durationMs"}},{"kind":"Field","name":{"kind":"Name","value":"firstSeen"}},{"kind":"Field","name":{"kind":"Name","value":"label"}},{"kind":"Field","name":{"kind":"Name","value":"overallStatus"}},{"kind":"Field","name":{"kind":"Name","value":"cachedLocally"}},{"kind":"Field","name":{"kind":"Name","value":"cachedRemotely"}},{"kind":"Field","name":{"kind":"Name","value":"bazelInvocation"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"invocationID"}}]}}]}}]}}]}}]}}]} as unknown as DocumentNode; \ No newline at end of file diff --git a/frontend/src/graphql/__generated__/persisted-documents.json b/frontend/src/graphql/__generated__/persisted-documents.json index cb8e734..e35c020 100644 --- a/frontend/src/graphql/__generated__/persisted-documents.json +++ b/frontend/src/graphql/__generated__/persisted-documents.json @@ -2,6 +2,13 @@ "6afd8375434b1cf5b89da8a90f7e496c0f6485fe": "fragment BazelInvocationInfo on BazelInvocation { bazelCommand { command executable id buildOptions: options residual } build { buildUUID id } endedAt id invocationID metrics { actionSummary { actionCacheStatistics { hits id loadTimeInMs 
missDetails { count id reason } misses saveTimeInMs sizeInBytes } actionData { actionsCreated actionsExecuted firstStartedMs id lastEndedMs mnemonic systemTime userTime } actionsCreated actionsCreatedNotIncludingAspects actionsExecuted id remoteCacheHits runnerCount { actionsExecuted execKind id name } } artifactMetrics { id outputArtifactsFromActionCache { count id sizeInBytes } outputArtifactsSeen { count id sizeInBytes } sourceArtifactsRead { count id sizeInBytes } topLevelArtifacts { count id sizeInBytes } } buildGraphMetrics { actionCount actionLookupValueCount actionLookupValueCountNotIncludingAspects id inputFileConfiguredTargetCount otherConfiguredTargetCount outputArtifactCount outputFileConfiguredTargetCount postInvocationSkyframeNodeCount } cumulativeMetrics { id numAnalyses numBuilds } dynamicExecutionMetrics { id raceStatistics { id localRunner localWins mnemonic remoteRunner renoteWins } } id memoryMetrics { garbageMetrics { garbageCollected id type } id peakPostGcHeapSize peakPostGcTenuredSpaceHeapSize usedHeapSizePostBuild } networkMetrics { id systemNetworkStats { bytesRecv bytesSent id packetsRecv packetsSent peakBytesRecvPerSec peakBytesSentPerSec peakPacketsRecvPerSec peakPacketsSentPerSec } } packageMetrics { id packageLoadMetrics { computationSteps id loadDuration name numTargets numTransitiveLoads packageOverhead } packagesLoaded } targetMetrics { id targetsConfigured targetsConfiguredNotIncludingAspects targetsLoaded } timingMetrics { actionsExecutionStartInMs analysisPhaseTimeInMs cpuTimeInMs executionPhaseTimeInMs id wallTimeInMs } } profile { digest id name sizeInBytes } relatedFiles { name url } startedAt state { bepCompleted buildEndTime buildStartTime exitCode { code id name } id } stepLabel targets { abortReason durationInMs id label success targetKind testSize } testCollection { cachedLocally cachedRemotely durationMs id label overallStatus strategy } user { Email LDAP } } fragment BlobReferenceInfo on BlobReference { availabilityStatus downloadURL name sizeInBytes } fragment FullBazelInvocationDetails on BazelInvocation { problems { ...ProblemInfo } ...BazelInvocationInfo } fragment ProblemInfo on Problem { __typename id label ... on ActionProblem { __typename id label stderr { ...BlobReferenceInfo } stdout { ...BlobReferenceInfo } type } ... on ProgressProblem { __typename id label output } ... on TargetProblem { __typename id label } ... on TestProblem { __typename id label results { __typename actionLogOutput { ...BlobReferenceInfo } attempt id run shard status undeclaredTestOutputs { ...BlobReferenceInfo } } status } } query LoadFullBazelInvocationDetails($invocationID: String!) { bazelInvocation(invocationId: $invocationID) { ...FullBazelInvocationDetails } }", "3368384a6bed2b74e78f79b5dc6df1bdd1de7bcc": "fragment BlobReferenceInfo on BlobReference { availabilityStatus downloadURL name sizeInBytes } query GetActionProblem($id: ID!) { node(id: $id) { id ... 
on ActionProblem { label stderr { ...BlobReferenceInfo } stdout { ...BlobReferenceInfo } } } }", "69ef798b19a03284bc228ef60c02492fcb8b1c44": "fragment BazelInvocationInfo on BazelInvocation { bazelCommand { command executable id buildOptions: options residual } build { buildUUID id } endedAt id invocationID metrics { actionSummary { actionCacheStatistics { hits id loadTimeInMs missDetails { count id reason } misses saveTimeInMs sizeInBytes } actionData { actionsCreated actionsExecuted firstStartedMs id lastEndedMs mnemonic systemTime userTime } actionsCreated actionsCreatedNotIncludingAspects actionsExecuted id remoteCacheHits runnerCount { actionsExecuted execKind id name } } artifactMetrics { id outputArtifactsFromActionCache { count id sizeInBytes } outputArtifactsSeen { count id sizeInBytes } sourceArtifactsRead { count id sizeInBytes } topLevelArtifacts { count id sizeInBytes } } buildGraphMetrics { actionCount actionLookupValueCount actionLookupValueCountNotIncludingAspects id inputFileConfiguredTargetCount otherConfiguredTargetCount outputArtifactCount outputFileConfiguredTargetCount postInvocationSkyframeNodeCount } cumulativeMetrics { id numAnalyses numBuilds } dynamicExecutionMetrics { id raceStatistics { id localRunner localWins mnemonic remoteRunner renoteWins } } id memoryMetrics { garbageMetrics { garbageCollected id type } id peakPostGcHeapSize peakPostGcTenuredSpaceHeapSize usedHeapSizePostBuild } networkMetrics { id systemNetworkStats { bytesRecv bytesSent id packetsRecv packetsSent peakBytesRecvPerSec peakBytesSentPerSec peakPacketsRecvPerSec peakPacketsSentPerSec } } packageMetrics { id packageLoadMetrics { computationSteps id loadDuration name numTargets numTransitiveLoads packageOverhead } packagesLoaded } targetMetrics { id targetsConfigured targetsConfiguredNotIncludingAspects targetsLoaded } timingMetrics { actionsExecutionStartInMs analysisPhaseTimeInMs cpuTimeInMs executionPhaseTimeInMs id wallTimeInMs } } profile { digest id name sizeInBytes } relatedFiles { name url } startedAt state { bepCompleted buildEndTime buildStartTime exitCode { code id name } id } stepLabel targets { abortReason durationInMs id label success targetKind testSize } testCollection { cachedLocally cachedRemotely durationMs id label overallStatus strategy } user { Email LDAP } } fragment BlobReferenceInfo on BlobReference { availabilityStatus downloadURL name sizeInBytes } fragment FullBazelInvocationDetails on BazelInvocation { problems { ...ProblemInfo } ...BazelInvocationInfo } fragment ProblemInfo on Problem { __typename id label ... on ActionProblem { __typename id label stderr { ...BlobReferenceInfo } stdout { ...BlobReferenceInfo } type } ... on ProgressProblem { __typename id label output } ... on TargetProblem { __typename id label } ... 
on TestProblem { __typename id label results { __typename actionLogOutput { ...BlobReferenceInfo } attempt id run shard status undeclaredTestOutputs { ...BlobReferenceInfo } } status } } query FindBuildByUUID($url: String, $uuid: UUID) { getBuild(buildURL: $url, buildUUID: $uuid) { buildURL buildUUID env { key value } id invocations { ...FullBazelInvocationDetails } } }", + "a58c22b2db5f30d1dba201fe125838c7f14d5e6d": "query GetTestsWithOffset($direction: String, $label: String, $limit: Int, $offset: Int, $sortBy: String) { getTestsWithOffset( label: $label offset: $offset limit: $limit sortBy: $sortBy direction: $direction ) { result { avg count label max min passRate sum } total } }", + "0125ec580cc0dba1b90f091f83f0aba0b1549311": "query GetUniqueTestLabels { getUniqueTestLabels }", + "1849a9864fcfcecf29b462f0b9c9275fa2dd49ef": "query GetAveragePassPercentageForLabel($label: String!) { getAveragePassPercentageForLabel(label: $label) }", + "4b5a5a8301688612cd07d393bc301156ffba05c7": "query GetTestDurationAggregation($label: String) { getTestDurationAggregation(label: $label) { count label max min sum } }", + "17231025ad382237c09f082d9e94e39118d8502d": "query FindTests($after: Cursor, $first: Int!, $orderBy: TestCollectionOrder, $where: TestCollectionWhereInput) { findTests(first: $first, where: $where, orderBy: $orderBy, after: $after) { edges { node { bazelInvocation { invocationID } durationMs firstSeen id label overallStatus } } pageInfo { endCursor hasNextPage hasPreviousPage startCursor } totalCount } }", + "707420fe8ea691631ecc9c431896b8e8d6b62692": "query FindBuildTimes($first: Int!, $where: BazelInvocationWhereInput) { findBazelInvocations(first: $first, where: $where) { edges { node { endedAt invocationID startedAt } } pageInfo { endCursor hasNextPage hasPreviousPage startCursor } totalCount } }", "7151b14bba5e7ea3e0f6e50882cc2f429591feb9": "fragment BazelInvocationNode on BazelInvocation { build { buildUUID } endedAt id invocationID startedAt state { bepCompleted exitCode { code name } } user { Email LDAP } } query FindBazelInvocations($first: Int!, $where: BazelInvocationWhereInput) { findBazelInvocations(first: $first, where: $where) { edges { node { ...BazelInvocationNode } } } }", - "8edb3e7557d6c22033afe132a5975af6d3ffb001": "fragment BuildNode on Build { buildURL buildUUID id } query FindBuilds($first: Int!, $where: BuildWhereInput) { findBuilds(first: $first, where: $where) { edges { node { ...BuildNode } } } }" + "8edb3e7557d6c22033afe132a5975af6d3ffb001": "fragment BuildNode on Build { buildURL buildUUID id } query FindBuilds($first: Int!, $where: BuildWhereInput) { findBuilds(first: $first, where: $where) { edges { node { ...BuildNode } } } }", + "add15b38babd589a6f929f238d8f8f03392ad43b": "query FindTestsWithCache($after: Cursor, $first: Int!, $orderBy: TestCollectionOrder, $where: TestCollectionWhereInput) { findTests(first: $first, where: $where, orderBy: $orderBy, after: $after) { edges { node { bazelInvocation { invocationID } cachedLocally cachedRemotely durationMs firstSeen id label overallStatus } } pageInfo { endCursor hasNextPage hasPreviousPage startCursor } totalCount } }" } \ No newline at end of file diff --git a/frontend/src/theme/theme.module.css b/frontend/src/theme/theme.module.css index 5213b98..1bf02ec 100644 --- a/frontend/src/theme/theme.module.css +++ b/frontend/src/theme/theme.module.css @@ -95,3 +95,9 @@ .tooltipSecondaryContent { font-size: 80% !important; } + +.numberFormat { + font-family: var(--font-mono); + font-size: 90% !important; + 
display: inline-block !important; +} \ No newline at end of file diff --git a/internal/api/grpc/bes/bes.go b/internal/api/grpc/bes/bes.go index e3d8c3f..b428b72 100644 --- a/internal/api/grpc/bes/bes.go +++ b/internal/api/grpc/bes/bes.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "log/slog" + "time" build "google.golang.org/genproto/googleapis/devtools/build/v1" "google.golang.org/protobuf/encoding/protojson" @@ -57,14 +58,13 @@ func (b BES) PublishBuildToolEventStream(stream build.PublishBuildEvent_PublishB for { req, err := stream.Recv() if err == io.EOF { - slog.InfoContext(stream.Context(), "stream finished") + slog.InfoContext(stream.Context(), "Stream finished", "event", stream.Context()) break } if err != nil { slog.ErrorContext(stream.Context(), "Recv failed", "err", err) return err } - // slog.InfoContext(stream.Context(), "Received ordered build event", "event", protojson.Format(req)) if streamID == nil { streamID = req.GetOrderedBuildEvent().GetStreamId() @@ -89,13 +89,18 @@ func (b BES) PublishBuildToolEventStream(stream build.PublishBuildEvent_PublishB "grpc://localhost:8082/google.devtools.build.v1/PublishLifecycleEvent?streamID=%s", streamID.String(), ) + workflow := processing.New(b.db, b.blobArchiver) + slog.InfoContext(stream.Context(), "Saving invocation", "id", streamID.String()) + startTime := time.Now() invocation, err := workflow.SaveSummary(stream.Context(), summaryReport) if err != nil { slog.ErrorContext(stream.Context(), "SaveSummary failed", "err", err) return err } - slog.InfoContext(stream.Context(), "saved invocation", "id", invocation.InvocationID) + endTime := time.Now() + elapsedTime := endTime.Sub(startTime) + slog.InfoContext(stream.Context(), fmt.Sprintf("Saved invocation in %v", elapsedTime.String()), "id", invocation.InvocationID) return nil } diff --git a/internal/graphql/BUILD.bazel b/internal/graphql/BUILD.bazel index 599850b..0132795 100644 --- a/internal/graphql/BUILD.bazel +++ b/internal/graphql/BUILD.bazel @@ -48,7 +48,6 @@ go_test( "graphql_helpers_test.go", "graphql_service_test.go", ], - cgo = True, # keep data = glob(["testdata/**"]) + ["//frontend/src/graphql:__generated__"], deps = [ ":graphql", diff --git a/internal/graphql/custom.resolvers.go b/internal/graphql/custom.resolvers.go index 61ddf62..c661a2d 100644 --- a/internal/graphql/custom.resolvers.go +++ b/internal/graphql/custom.resolvers.go @@ -17,6 +17,8 @@ import ( "github.com/buildbarn/bb-portal/ent/gen/ent/bazelinvocation" "github.com/buildbarn/bb-portal/ent/gen/ent/blob" "github.com/buildbarn/bb-portal/ent/gen/ent/build" + "github.com/buildbarn/bb-portal/ent/gen/ent/targetpair" + "github.com/buildbarn/bb-portal/ent/gen/ent/testcollection" "github.com/buildbarn/bb-portal/internal/graphql/helpers" "github.com/buildbarn/bb-portal/internal/graphql/model" "github.com/buildbarn/bb-portal/third_party/bazel/gen/bes" @@ -210,6 +212,192 @@ func (r *queryResolver) GetBuild(ctx context.Context, buildURL *string, buildUUI return r.client.Build.Query().Where(build.BuildUUID(*buildUUID)).First(ctx) } +// GetUniqueTestLabels is the resolver for the getUniqueTestLabels field. +func (r *queryResolver) GetUniqueTestLabels(ctx context.Context, param *string) ([]*string, error) { + if param == nil { + res, err := r.client.TestCollection.Query(). + GroupBy(testcollection.FieldLabel). + Strings(ctx) + if err != nil { + return nil, err + } + return helpers.StringSliceArrayToPointerArray(res), nil + } + res, err := r.client.TestCollection.Query(). + Where(testcollection.LabelContains(*param)). 
+ GroupBy(testcollection.FieldLabel). + Strings(ctx) + if err != nil { + return nil, err + } + return helpers.StringSliceArrayToPointerArray(res), nil +} + +// GetUniqueTargetLabels is the resolver for the getUniqueTargetLabels field. +func (r *queryResolver) GetUniqueTargetLabels(ctx context.Context, param *string) ([]*string, error) { + if param == nil { + res, err := r.client.TargetPair.Query(). + GroupBy(targetpair.FieldLabel). + Strings(ctx) + if err != nil { + return nil, err + } + return helpers.StringSliceArrayToPointerArray(res), nil + } + res, err := r.client.TargetPair.Query(). + Where(targetpair.LabelContains(*param)). + GroupBy(targetpair.FieldLabel). + Strings(ctx) + if err != nil { + return nil, err + } + + return helpers.StringSliceArrayToPointerArray(res), nil +} + +// GetTestDurationAggregation is the resolver for the getTestDurationAggregation field. +func (r *queryResolver) GetTestDurationAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) { + var result []*model.TargetAggregate + err := r.client.TestCollection.Query(). + Where(testcollection.LabelContains(*label)). + GroupBy(testcollection.FieldLabel). + Aggregate(ent.Count(), + ent.Sum(testcollection.FieldDurationMs), + ent.Min(testcollection.FieldDurationMs), + ent.Max(testcollection.FieldDurationMs)). + Scan(ctx, &result) + if err != nil { + return nil, err + } + return result, nil +} + +// GetTestPassAggregation is the resolver for the getTestPassAggregation field. +func (r *queryResolver) GetTestPassAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) { + var result []*model.TargetAggregate + err := r.client.TestCollection.Query(). + Where(testcollection.And( + testcollection.LabelContains(*label), + testcollection.OverallStatusEQ(testcollection.OverallStatusPASSED), + )). + GroupBy(testcollection.FieldLabel). + Aggregate(ent.Count()). + Scan(ctx, &result) + if err != nil { + return nil, err + } + return result, nil +} + +// GetTargetDurationAggregation is the resolver for the getTargetDurationAggregation field. +func (r *queryResolver) GetTargetDurationAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) { + panic(fmt.Errorf("not implemented: GetTargetDurationAggregation - getTargetDurationAggregation")) +} + +// GetTargetPassAggregation is the resolver for the getTargetPassAggregation field. +func (r *queryResolver) GetTargetPassAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) { + panic(fmt.Errorf("not implemented: GetTargetPassAggregation - getTargetPassAggregation")) +} + +// GetTestsWithOffset is the resolver for the getTestsWithOffset field. +func (r *queryResolver) GetTestsWithOffset(ctx context.Context, label *string, offset, limit *int, sortBy, direction *string) (*model.TestGridResult, error) { + maxLimit := 10000 + take := 10 + skip := 0 + if limit != nil { + if *limit > maxLimit { + return nil, fmt.Errorf("limit cannot exceed %d", maxLimit) + } + take = *limit + } + if offset != nil { + skip = *offset + } + orderBy := "first_seen" + if sortBy != nil { + orderBy = *sortBy + } + + var result []*model.TestGridRow + query := r.client.TestCollection.Query() + + switch orderBy { + case "first_seen": + query = query.Order(testcollection.ByFirstSeen()) + } + + if label != nil && *label != "" { + query = query.Where(testcollection.LabelContains(*label)) + } + + err := query. + Limit(take). + Offset(skip). + GroupBy(testcollection.FieldLabel). 
+ Aggregate(ent.Count(), + ent.As(ent.Mean(testcollection.FieldDurationMs), "avg"), + ent.Sum(testcollection.FieldDurationMs), + ent.Min(testcollection.FieldDurationMs), + ent.Max(testcollection.FieldDurationMs)). + Scan(ctx, &result) + if err != nil { + return nil, err + } + for _, item := range result { + lbl := *item.Label + cnt := *item.Count + passes, err := r.client.TestCollection.Query(). + Where(testcollection. + And(testcollection.LabelEQ(lbl), + testcollection.OverallStatusEQ(testcollection.OverallStatusPASSED))). + Count(ctx) + if err != nil { + return nil, err + } + passRate := float64(passes) / float64(cnt) + item.PassRate = &passRate + } + l := r.client.TestCollection.Query() + if label != nil && *label != "" { + l = l.Where(testcollection.LabelContains(*label)) + } + labels, err := l.GroupBy(testcollection.FieldLabel).Strings(ctx) + if err != nil { + return nil, err + } + totalCount := len(labels) + response := &model.TestGridResult{ + Result: result, + Total: &totalCount, + } + return response, nil +} + +// GetAveragePassPercentageForLabel is the resolver for the getAveragePassPercentageForLabel field. +func (r *queryResolver) GetAveragePassPercentageForLabel(ctx context.Context, label string) (*float64, error) { + // TODO: maybe there is a more elegant/faster way to do this with aggregate + passCount, err := r.client.TestCollection.Query(). + Where(testcollection.And( + testcollection.LabelEQ(label), + testcollection.OverallStatusEQ(testcollection.OverallStatusPASSED), + )).Count(ctx) + if err != nil { + return nil, err + } + totalCount, err := r.client.TestCollection.Query(). + Where(testcollection.LabelEQ(label)). + Count(ctx) + if err != nil { + return nil, err + } + if totalCount == 0 { + result := 0.0 + return helpers.GetFloatPointer(&result), nil + } + result := float64(passCount) / float64(totalCount) * 100.0 + return helpers.GetFloatPointer(&result), nil +} + // ActionLogOutput is the resolver for the actionLogOutput field. func (r *testResultResolver) ActionLogOutput(ctx context.Context, obj *model.TestResult) (*model.BlobReference, error) { return helpers.GetTestResultActionLogOutput(ctx, r.client, obj) diff --git a/internal/graphql/ent.resolvers.go b/internal/graphql/ent.resolvers.go index 2f35bcd..134632f 100644 --- a/internal/graphql/ent.resolvers.go +++ b/internal/graphql/ent.resolvers.go @@ -191,8 +191,8 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []string) ([]ent.Noder, e } // FindBazelInvocations is the resolver for the findBazelInvocations field. -func (r *queryResolver) FindBazelInvocations(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BazelInvocationWhereInput) (*ent.BazelInvocationConnection, error) { - return r.client.BazelInvocation.Query().Paginate(ctx, after, first, before, last, ent.WithBazelInvocationFilter(where.Filter)) +func (r *queryResolver) FindBazelInvocations(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.BazelInvocationOrder, where *ent.BazelInvocationWhereInput) (*ent.BazelInvocationConnection, error) { + return r.client.BazelInvocation.Query().Paginate(ctx, after, first, before, last, ent.WithBazelInvocationFilter(where.Filter), ent.WithBazelInvocationOrder(orderBy)) } // FindBuilds is the resolver for the findBuilds field.
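The pass-rate and pass-percentage calculations in custom.resolvers.go above both divide two integer counts, and the conversions to float64 must happen before the division or the ratio truncates to 0 or 1. A minimal standalone sketch of that arithmetic (plain Go, no ent dependencies; the helper name is illustrative only, not part of this patch):

// passPercentage mirrors the calculation done in GetAveragePassPercentageForLabel.
func passPercentage(passed, total int) float64 {
	if total == 0 {
		return 0.0
	}
	// Convert before dividing: float64(passed/total) would use integer
	// division and collapse every partial pass rate to 0%.
	return float64(passed) / float64(total) * 100.0
}

// Example: passPercentage(7, 9) ≈ 77.78, whereas float64(7/9)*100.0 == 0.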
@@ -210,6 +210,11 @@ func (r *queryResolver) FindRunnerCounts(ctx context.Context, after *entgql.Curs return r.client.RunnerCount.Query().Paginate(ctx, after, first, before, last, ent.WithRunnerCountFilter(where.Filter)) } +// FindTests is the resolver for the findTests field. +func (r *queryResolver) FindTests(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.TestCollectionOrder, where *ent.TestCollectionWhereInput) (*ent.TestCollectionConnection, error) { + return r.client.TestCollection.Query().Paginate(ctx, after, first, before, last, ent.WithTestCollectionFilter(where.Filter), ent.WithTestCollectionOrder(orderBy)) +} + // ID is the resolver for the id field. func (r *raceStatisticsResolver) ID(ctx context.Context, obj *ent.RaceStatistics) (string, error) { return helpers.GraphQLIDFromTypeAndID("RaceStatistics", obj.ID), nil diff --git a/internal/graphql/helpers/resolver.helpers.go b/internal/graphql/helpers/resolver.helpers.go index c467e74..106e8b6 100644 --- a/internal/graphql/helpers/resolver.helpers.go +++ b/internal/graphql/helpers/resolver.helpers.go @@ -23,6 +23,25 @@ var ( errStatusNotFound = errors.New("status not found") ) +// StringSliceArrayToPointerArray takes an array of strings and returns an array of string pointers +func StringSliceArrayToPointerArray(strings []string) []*string { + result := make([]*string, len(strings)) + for i, str := range strings { + result[i] = &str + } + return result +} + +// GetFloatPointer returns a pointer to a float +func GetFloatPointer(f *float64) *float64 { + return f +} + +// GetInt64Pointer returns a pointer to an int64 +func GetInt64Pointer(i *int64) *int64 { + return i +} + // Helper A Helper struct. type Helper struct { *problemHelper diff --git a/internal/graphql/model/BUILD.bazel b/internal/graphql/model/BUILD.bazel index 61b19da..105277e 100644 --- a/internal/graphql/model/BUILD.bazel +++ b/internal/graphql/model/BUILD.bazel @@ -11,6 +11,8 @@ go_library( visibility = ["//:__subpackages__"], deps = [ "//ent/gen/ent", + "//ent/gen/ent/testcollection", "//third_party/bazel/gen/bes", + "@com_github_google_uuid//:uuid", ], ) diff --git a/internal/graphql/model/models_gen.go b/internal/graphql/model/models_gen.go index 597bb7f..e0aade5 100644 --- a/internal/graphql/model/models_gen.go +++ b/internal/graphql/model/models_gen.go @@ -9,7 +9,9 @@ import ( "time" "github.com/buildbarn/bb-portal/ent/gen/ent" + "github.com/buildbarn/bb-portal/ent/gen/ent/testcollection" "github.com/buildbarn/bb-portal/third_party/bazel/gen/bes" + "github.com/google/uuid" ) type BuildStep interface { @@ -102,6 +104,16 @@ func (ProgressProblem) IsProblem() {} func (this ProgressProblem) GetID() string { return this.ID } func (this ProgressProblem) GetLabel() string { return this.Label } +type TargetAggregate struct { + Label *string `json:"label,omitempty"` + Count *int `json:"count,omitempty"` + Sum *int `json:"sum,omitempty"` + Min *int `json:"min,omitempty"` + Max *int `json:"max,omitempty"` + Avg *int `json:"avg,omitempty"` + Pass *int `json:"pass,omitempty"` +} + type TargetProblem struct { ID string `json:"id"` Label string `json:"label"` @@ -113,6 +125,27 @@ func (TargetProblem) IsProblem() {} func (this TargetProblem) GetID() string { return this.ID } func (this TargetProblem) GetLabel() string { return this.Label } +type TestGridCell struct { + InvocationID *uuid.UUID `json:"invocationId,omitempty"` + Status *testcollection.OverallStatus `json:"status,omitempty"` +} + +type TestGridResult 
struct { + Total *int `json:"total,omitempty"` + Result []*TestGridRow `json:"result,omitempty"` +} + +type TestGridRow struct { + Label *string `json:"label,omitempty"` + Count *int `json:"count,omitempty"` + Sum *int `json:"sum,omitempty"` + Min *int `json:"min,omitempty"` + Max *int `json:"max,omitempty"` + Avg *float64 `json:"avg,omitempty"` + PassRate *float64 `json:"passRate,omitempty"` + Cells []*TestGridCell `json:"cells,omitempty"` +} + type TestProblem struct { ID string `json:"id"` Label string `json:"label"` diff --git a/internal/graphql/schema/custom.graphql b/internal/graphql/schema/custom.graphql index 9148740..89a2185 100644 --- a/internal/graphql/schema/custom.graphql +++ b/internal/graphql/schema/custom.graphql @@ -1,6 +1,45 @@ extend type Query { bazelInvocation(invocationId: String!): BazelInvocation! getBuild(buildURL: String, buildUUID: UUID): Build + getUniqueTestLabels(param: String): [String] + getUniqueTargetLabels(param: String): [String] + getTestDurationAggregation(label: String): [TargetAggregate] + getTestPassAggregation(label: String): [TargetAggregate] + getTargetDurationAggregation(label: String): [TargetAggregate] + getTargetPassAggregation(label: String):[TargetAggregate] + getTestsWithOffset(label: String, offset: Int, limit: Int, sortBy: String, direction: String):TestGridResult + getAveragePassPercentageForLabel(label: String!):Float +} + +type TargetAggregate { + label: String + count: Int + sum: Int + min: Int + max: Int + avg: Int + pass: Int +} + +type TestGridResult { + total: Int + result: [TestGridRow] +} + +type TestGridRow { + label: String + count: Int + sum: Int + min: Int + max: Int + avg: Float + passRate: Float + cells: [TestGridCell] +} + +type TestGridCell { + invocationId: UUID + status: TestCollectionOverallStatus } type BazelCommand { diff --git a/internal/graphql/schema/ent.graphql b/internal/graphql/schema/ent.graphql index 4fe2059..cece08f 100644 --- a/internal/graphql/schema/ent.graphql +++ b/internal/graphql/schema/ent.graphql @@ -7,7 +7,7 @@ type ActionCacheStatistics implements Node { loadTimeInMs: Int hits: Int misses: Int - actionSummary: [ActionSummary!] + actionSummary: ActionSummary missDetails: [MissDetail!] } """ @@ -114,7 +114,7 @@ type ActionData implements Node { lastEndedMs: Int systemTime: Int userTime: Int - actionSummary: [ActionSummary!] + actionSummary: ActionSummary } """ ActionDataWhereInput is used for filtering ActionData objects. @@ -246,7 +246,7 @@ type ActionSummary implements Node { metrics: Metrics actionData: [ActionData!] runnerCount: [RunnerCount!] - actionCacheStatistics: [ActionCacheStatistics!] + actionCacheStatistics: ActionCacheStatistics } """ ActionSummaryWhereInput is used for filtering ActionSummary objects. @@ -342,11 +342,11 @@ input ActionSummaryWhereInput { } type ArtifactMetrics implements Node { id: ID! - metrics: [Metrics!] - sourceArtifactsRead: [FilesMetric!] - outputArtifactsSeen: [FilesMetric!] - outputArtifactsFromActionCache: [FilesMetric!] - topLevelArtifacts: [FilesMetric!] + metrics: Metrics + sourceArtifactsRead: FilesMetric + outputArtifactsSeen: FilesMetric + outputArtifactsFromActionCache: FilesMetric + topLevelArtifacts: FilesMetric } """ ArtifactMetricsWhereInput is used for filtering ArtifactMetrics objects. @@ -445,6 +445,26 @@ type BazelInvocationEdge { """ cursor: Cursor! } +""" +Ordering options for BazelInvocation connections +""" +input BazelInvocationOrder { + """ + The ordering direction. + """ + direction: OrderDirection! 
= ASC + """ + The field by which to order BazelInvocations. + """ + field: BazelInvocationOrderField! +} +""" +Properties by which BazelInvocation connections can be ordered. +""" +enum BazelInvocationOrderField { + STARTED_AT + USER_LDAP +} type BazelInvocationProblem implements Node { id: ID! problemType: String! @@ -935,12 +955,12 @@ type BuildGraphMetrics implements Node { otherConfiguredTargetCount: Int outputArtifactCount: Int postInvocationSkyframeNodeCount: Int - metrics: [Metrics!] - dirtiedValues: [EvaluationStat!] - changedValues: [EvaluationStat!] - builtValues: [EvaluationStat!] - cleanedValues: [EvaluationStat!] - evaluatedValues: [EvaluationStat!] + metrics: Metrics + dirtiedValues: EvaluationStat + changedValues: EvaluationStat + builtValues: EvaluationStat + cleanedValues: EvaluationStat + evaluatedValues: EvaluationStat } """ BuildGraphMetricsWhereInput is used for filtering BuildGraphMetrics objects. @@ -1165,7 +1185,7 @@ type CumulativeMetrics implements Node { id: ID! numAnalyses: Int numBuilds: Int - metrics: [Metrics!] + metrics: Metrics } """ CumulativeMetricsWhereInput is used for filtering CumulativeMetrics objects. @@ -1225,7 +1245,7 @@ https://relay.dev/graphql/connections.htm#sec-Cursor scalar Cursor type DynamicExecutionMetrics implements Node { id: ID! - metrics: [Metrics!] + metrics: Metrics raceStatistics: [RaceStatistics!] } """ @@ -1262,7 +1282,7 @@ type EvaluationStat implements Node { id: ID! skyfunctionName: String count: Int - buildGraphMetrics: [BuildGraphMetrics!] + buildGraphMetrics: BuildGraphMetrics } """ EvaluationStatWhereInput is used for filtering EvaluationStat objects. @@ -1455,7 +1475,7 @@ type ExectionInfo implements Node { cachedRemotely: Boolean exitCode: Int hostname: String - testResult: [TestResultBES!] + testResult: TestResultBES timingBreakdown: TimingBreakdown resourceUsage: [ResourceUsage!] } @@ -1567,7 +1587,7 @@ type FilesMetric implements Node { id: ID! sizeInBytes: Int count: Int - artifactMetrics: [ArtifactMetrics!] + artifactMetrics: ArtifactMetrics } """ FilesMetricWhereInput is used for filtering FilesMetric objects. @@ -1624,7 +1644,7 @@ type GarbageMetrics implements Node { id: ID! type: String garbageCollected: Int - memoryMetrics: [MemoryMetrics!] + memoryMetrics: MemoryMetrics } """ GarbageMetricsWhereInput is used for filtering GarbageMetrics objects. @@ -1687,7 +1707,7 @@ type MemoryMetrics implements Node { peakPostGcHeapSize: Int usedHeapSizePostBuild: Int peakPostGcTenuredSpaceHeapSize: Int - metrics: [Metrics!] + metrics: Metrics garbageMetrics: [GarbageMetrics!] } """ @@ -1762,16 +1782,16 @@ input MemoryMetricsWhereInput { type Metrics implements Node { id: ID! bazelInvocation: BazelInvocation - actionSummary: [ActionSummary!] - memoryMetrics: [MemoryMetrics!] - targetMetrics: [TargetMetrics!] - packageMetrics: [PackageMetrics!] - timingMetrics: [TimingMetrics!] - cumulativeMetrics: [CumulativeMetrics!] - artifactMetrics: [ArtifactMetrics!] - networkMetrics: [NetworkMetrics!] - dynamicExecutionMetrics: [DynamicExecutionMetrics!] - buildGraphMetrics: [BuildGraphMetrics!] + actionSummary: ActionSummary + memoryMetrics: MemoryMetrics + targetMetrics: TargetMetrics + packageMetrics: PackageMetrics + timingMetrics: TimingMetrics + cumulativeMetrics: CumulativeMetrics + artifactMetrics: ArtifactMetrics + networkMetrics: NetworkMetrics + dynamicExecutionMetrics: DynamicExecutionMetrics + buildGraphMetrics: BuildGraphMetrics } """ A connection to a list of items. 
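The ent.graphql changes in this hunk (and the ones that follow) turn many back-reference fields from lists (e.g. metrics: [Metrics!]) into single nullable objects (metrics: Metrics). The ent schema files that drive this regeneration are not part of the diff; as a rough sketch of the kind of edge declaration that entgql renders as a singular field, assuming the usual ent/entgql layout and with the edge/ref names inferred rather than confirmed:

// Sketch only: the real definitions live under ent/schema and may differ.
package schema

import (
	"entgo.io/ent"
	"entgo.io/ent/schema/edge"
)

// ActionSummary holds the schema definition for the ActionSummary entity.
type ActionSummary struct {
	ent.Schema
}

// Edges of the ActionSummary.
func (ActionSummary) Edges() []ent.Edge {
	return []ent.Edge{
		// A back-reference marked Unique() is one-to-one from this side,
		// which entgql exposes as `metrics: Metrics` rather than a list type.
		edge.From("metrics", Metrics.Type).
			Ref("action_summary").
			Unique(),
	}
}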
@@ -1882,7 +1902,7 @@ type MissDetail implements Node { id: ID! reason: MissDetailReason count: Int - actionCacheStatistics: [ActionCacheStatistics!] + actionCacheStatistics: ActionCacheStatistics } """ MissDetailReason is enum for the field reason @@ -1946,7 +1966,7 @@ input MissDetailWhereInput { } type NamedSetOfFiles implements Node { id: ID! - outputGroup: [OutputGroup!] + outputGroup: OutputGroup files: [TestFile!] fileSets: NamedSetOfFiles } @@ -1987,8 +2007,8 @@ input NamedSetOfFilesWhereInput { } type NetworkMetrics implements Node { id: ID! - metrics: [Metrics!] - systemNetworkStats: [SystemNetworkStats!] + metrics: Metrics + systemNetworkStats: SystemNetworkStats } """ NetworkMetricsWhereInput is used for filtering NetworkMetrics objects. @@ -2047,7 +2067,7 @@ type OutputGroup implements Node { id: ID! name: String incomplete: Boolean - targetComplete: [TargetComplete!] + targetComplete: TargetComplete inlineFiles: [TestFile!] fileSets: NamedSetOfFiles } @@ -2119,7 +2139,7 @@ type PackageLoadMetrics implements Node { computationSteps: Int numTransitiveLoads: Int packageOverhead: Int - packageMetrics: [PackageMetrics!] + packageMetrics: PackageMetrics } """ PackageLoadMetricsWhereInput is used for filtering PackageLoadMetrics objects. @@ -2232,7 +2252,7 @@ input PackageLoadMetricsWhereInput { type PackageMetrics implements Node { id: ID! packagesLoaded: Int - metrics: [Metrics!] + metrics: Metrics packageLoadMetrics: [PackageLoadMetrics!] } """ @@ -2340,6 +2360,11 @@ type Query { """ last: Int + """ + Ordering options for BazelInvocations returned from the connection. + """ + orderBy: BazelInvocationOrder + """ Filtering options for BazelInvocations returned from the connection. """ @@ -2423,6 +2448,37 @@ type Query { """ where: RunnerCountWhereInput ): RunnerCountConnection! + findTests( + """ + Returns the elements in the list that come after the specified cursor. + """ + after: Cursor + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: Cursor + + """ + Returns the last _n_ elements from the list. + """ + last: Int + + """ + Ordering options for TestCollections returned from the connection. + """ + orderBy: TestCollectionOrder + + """ + Filtering options for TestCollections returned from the connection. + """ + where: TestCollectionWhereInput + ): TestCollectionConnection! } type RaceStatistics implements Node { id: ID! @@ -2431,7 +2487,7 @@ type RaceStatistics implements Node { remoteRunner: String localWins: Int renoteWins: Int - dynamicExecutionMetrics: [DynamicExecutionMetrics!] + dynamicExecutionMetrics: DynamicExecutionMetrics } """ RaceStatisticsWhereInput is used for filtering RaceStatistics objects. @@ -2542,7 +2598,7 @@ type ResourceUsage implements Node { id: ID! name: String value: String - executionInfo: [ExectionInfo!] + executionInfo: ExectionInfo } """ ResourceUsageWhereInput is used for filtering ResourceUsage objects. @@ -2610,7 +2666,7 @@ type RunnerCount implements Node { name: String execKind: String actionsExecuted: Int - actionSummary: [ActionSummary!] + actionSummary: ActionSummary } """ A connection to a list of items. @@ -2866,7 +2922,7 @@ type TargetComplete implements Node { testTimeoutSeconds: Int testTimeout: Int testSize: TargetCompleteTestSize - targetPair: [TargetPair!] + targetPair: TargetPair importantOutput: [TestFile!] directoryOutput: [TestFile!] 
outputGroup: OutputGroup @@ -3000,7 +3056,7 @@ type TargetConfigured implements Node { targetKind: String startTimeInMs: Int testSize: TargetConfiguredTestSize - targetPair: [TargetPair!] + targetPair: TargetPair } """ TargetConfiguredTestSize is enum for the field test_size @@ -3082,7 +3138,7 @@ type TargetMetrics implements Node { targetsLoaded: Int targetsConfigured: Int targetsConfiguredNotIncludingAspects: Int - metrics: [Metrics!] + metrics: Metrics } """ TargetMetricsWhereInput is used for filtering TargetMetrics objects. @@ -3156,7 +3212,7 @@ type TargetPair implements Node { targetKind: String testSize: TargetPairTestSize abortReason: TargetPairAbortReason - bazelInvocation: [BazelInvocation!] + bazelInvocation: BazelInvocation configuration: TargetConfigured completion: TargetComplete } @@ -3303,12 +3359,63 @@ type TestCollection implements Node { strategy: String cachedLocally: Boolean cachedRemotely: Boolean + firstSeen: Time durationMs: Int - bazelInvocation: [BazelInvocation!] + bazelInvocation: BazelInvocation testSummary: TestSummary testResults: [TestResultBES!] } """ +A connection to a list of items. +""" +type TestCollectionConnection { + """ + A list of edges. + """ + edges: [TestCollectionEdge] + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + """ + Identifies the total count of items in the connection. + """ + totalCount: Int! +} +""" +An edge in a connection. +""" +type TestCollectionEdge { + """ + The item at the end of the edge. + """ + node: TestCollection + """ + A cursor for use in pagination. + """ + cursor: Cursor! +} +""" +Ordering options for TestCollection connections +""" +input TestCollectionOrder { + """ + The ordering direction. + """ + direction: OrderDirection! = ASC + """ + The field by which to order TestCollections. + """ + field: TestCollectionOrderField! +} +""" +Properties by which TestCollection connections can be ordered. +""" +enum TestCollectionOrderField { + FIRST_SEEN + DURATION +} +""" TestCollectionOverallStatus is enum for the field overall_status """ enum TestCollectionOverallStatus @goModel(model: "github.com/buildbarn/bb-portal/ent/gen/ent/testcollection.OverallStatus") { @@ -3401,6 +3508,19 @@ input TestCollectionWhereInput { cachedRemotelyIsNil: Boolean cachedRemotelyNotNil: Boolean """ + first_seen field predicates + """ + firstSeen: Time + firstSeenNEQ: Time + firstSeenIn: [Time!] + firstSeenNotIn: [Time!] + firstSeenGT: Time + firstSeenGTE: Time + firstSeenLT: Time + firstSeenLTE: Time + firstSeenIsNil: Boolean + firstSeenNotNil: Boolean + """ duration_ms field predicates """ durationMs: Int @@ -3436,7 +3556,7 @@ type TestFile implements Node { length: Int name: String prefix: [String!] - testResult: [TestResultBES!] + testResult: TestResultBES } """ TestFileWhereInput is used for filtering TestFile objects. @@ -3715,7 +3835,7 @@ type TestSummary implements Node { lastStopTime: Int totalRunDuration: Int label: String - testCollection: [TestCollection!] + testCollection: TestCollection passed: [TestFile!] failed: [TestFile!] } @@ -3903,7 +4023,7 @@ type TimingBreakdown implements Node { id: ID! name: String time: String - executionInfo: [ExectionInfo!] + executionInfo: ExectionInfo child: [TimingChild!] } """ @@ -3976,7 +4096,7 @@ type TimingChild implements Node { id: ID! name: String time: String - timingBreakdown: [TimingBreakdown!] + timingBreakdown: TimingBreakdown } """ TimingChildWhereInput is used for filtering TimingChild objects. 
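The new firstSeen field, its where-predicates, and the FIRST_SEEN / DURATION values of TestCollectionOrderField all come from additions to the TestCollection ent schema, which is outside this diff. Under entgql's usual annotations, a sketch of those additions could look like the following (field types and names are inferred from the generated GraphQL, not confirmed):

// Sketch only: ent/schema/testcollection.go is not shown in this patch.
package schema

import (
	"entgo.io/contrib/entgql"
	"entgo.io/ent"
	"entgo.io/ent/schema/field"
)

// TestCollection holds the schema definition for the TestCollection entity.
type TestCollection struct {
	ent.Schema
}

// Fields of the TestCollection.
func (TestCollection) Fields() []ent.Field {
	return []ent.Field{
		// Backs `durationMs: Int`; the OrderField annotation yields the
		// DURATION value of TestCollectionOrderField.
		field.Int64("duration_ms").
			Optional().
			Annotations(entgql.OrderField("DURATION")),
		// New field backing `firstSeen: Time` and the firstSeen* predicates;
		// orderable as FIRST_SEEN.
		field.Time("first_seen").
			Optional().
			Annotations(entgql.OrderField("FIRST_SEEN")),
	}
}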
@@ -4046,7 +4166,7 @@ type TimingMetrics implements Node { analysisPhaseTimeInMs: Int executionPhaseTimeInMs: Int actionsExecutionStartInMs: Int - metrics: [Metrics!] + metrics: Metrics } """ TimingMetricsWhereInput is used for filtering TimingMetrics objects. diff --git a/internal/graphql/server_gen.go b/internal/graphql/server_gen.go index 333103f..0f1ef87 100644 --- a/internal/graphql/server_gen.go +++ b/internal/graphql/server_gen.go @@ -485,14 +485,23 @@ type ComplexityRoot struct { } Query struct { - BazelInvocation func(childComplexity int, invocationID string) int - FindBazelInvocations func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BazelInvocationWhereInput) int - FindBuilds func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BuildWhereInput) int - FindMetrics func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.MetricsWhereInput) int - FindRunnerCounts func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.RunnerCountWhereInput) int - GetBuild func(childComplexity int, buildURL *string, buildUUID *uuid.UUID) int - Node func(childComplexity int, id string) int - Nodes func(childComplexity int, ids []string) int + BazelInvocation func(childComplexity int, invocationID string) int + FindBazelInvocations func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.BazelInvocationOrder, where *ent.BazelInvocationWhereInput) int + FindBuilds func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BuildWhereInput) int + FindMetrics func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.MetricsWhereInput) int + FindRunnerCounts func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.RunnerCountWhereInput) int + FindTests func(childComplexity int, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.TestCollectionOrder, where *ent.TestCollectionWhereInput) int + GetAveragePassPercentageForLabel func(childComplexity int, label string) int + GetBuild func(childComplexity int, buildURL *string, buildUUID *uuid.UUID) int + GetTargetDurationAggregation func(childComplexity int, label *string) int + GetTargetPassAggregation func(childComplexity int, label *string) int + GetTestDurationAggregation func(childComplexity int, label *string) int + GetTestPassAggregation func(childComplexity int, label *string) int + GetTestsWithOffset func(childComplexity int, label *string, offset *int, limit *int, sortBy *string, direction *string) int + GetUniqueTargetLabels func(childComplexity int, param *string) int + GetUniqueTestLabels func(childComplexity int, param *string) int + Node func(childComplexity int, id string) int + Nodes func(childComplexity int, ids []string) int } RaceStatistics struct { @@ -544,6 +553,16 @@ type ComplexityRoot struct { PeakPacketsSentPerSec func(childComplexity int) int } + TargetAggregate struct { + Avg func(childComplexity int) int + Count func(childComplexity int) int + Label func(childComplexity int) int + Max func(childComplexity int) int + Min func(childComplexity int) int + Pass func(childComplexity int) int + Sum func(childComplexity int) int + } + 
TargetComplete struct { DirectoryOutput func(childComplexity int) int EndTimeInMs func(childComplexity int) int @@ -599,6 +618,7 @@ type ComplexityRoot struct { CachedLocally func(childComplexity int) int CachedRemotely func(childComplexity int) int DurationMs func(childComplexity int) int + FirstSeen func(childComplexity int) int ID func(childComplexity int) int Label func(childComplexity int) int OverallStatus func(childComplexity int) int @@ -607,6 +627,17 @@ type ComplexityRoot struct { TestSummary func(childComplexity int) int } + TestCollectionConnection struct { + Edges func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + TestCollectionEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + TestFile struct { Digest func(childComplexity int) int File func(childComplexity int) int @@ -617,6 +648,27 @@ type ComplexityRoot struct { TestResult func(childComplexity int) int } + TestGridCell struct { + InvocationID func(childComplexity int) int + Status func(childComplexity int) int + } + + TestGridResult struct { + Result func(childComplexity int) int + Total func(childComplexity int) int + } + + TestGridRow struct { + Avg func(childComplexity int) int + Cells func(childComplexity int) int + Count func(childComplexity int) int + Label func(childComplexity int) int + Max func(childComplexity int) int + Min func(childComplexity int) int + PassRate func(childComplexity int) int + Sum func(childComplexity int) int + } + TestProblem struct { ID func(childComplexity int) int Label func(childComplexity int) int @@ -792,12 +844,21 @@ type PackageMetricsResolver interface { type QueryResolver interface { Node(ctx context.Context, id string) (ent.Noder, error) Nodes(ctx context.Context, ids []string) ([]ent.Noder, error) - FindBazelInvocations(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BazelInvocationWhereInput) (*ent.BazelInvocationConnection, error) + FindBazelInvocations(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.BazelInvocationOrder, where *ent.BazelInvocationWhereInput) (*ent.BazelInvocationConnection, error) FindBuilds(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.BuildWhereInput) (*ent.BuildConnection, error) FindMetrics(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.MetricsWhereInput) (*ent.MetricsConnection, error) FindRunnerCounts(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, where *ent.RunnerCountWhereInput) (*ent.RunnerCountConnection, error) + FindTests(ctx context.Context, after *entgql.Cursor[int], first *int, before *entgql.Cursor[int], last *int, orderBy *ent.TestCollectionOrder, where *ent.TestCollectionWhereInput) (*ent.TestCollectionConnection, error) BazelInvocation(ctx context.Context, invocationID string) (*ent.BazelInvocation, error) GetBuild(ctx context.Context, buildURL *string, buildUUID *uuid.UUID) (*ent.Build, error) + GetUniqueTestLabels(ctx context.Context, param *string) ([]*string, error) + GetUniqueTargetLabels(ctx context.Context, param *string) ([]*string, error) + GetTestDurationAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) + GetTestPassAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, 
error) + GetTargetDurationAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) + GetTargetPassAggregation(ctx context.Context, label *string) ([]*model.TargetAggregate, error) + GetTestsWithOffset(ctx context.Context, label *string, offset *int, limit *int, sortBy *string, direction *string) (*model.TestGridResult, error) + GetAveragePassPercentageForLabel(ctx context.Context, label string) (*float64, error) } type RaceStatisticsResolver interface { ID(ctx context.Context, obj *ent.RaceStatistics) (string, error) @@ -2849,7 +2910,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.FindBazelInvocations(childComplexity, args["after"].(*entgql.Cursor[int]), args["first"].(*int), args["before"].(*entgql.Cursor[int]), args["last"].(*int), args["where"].(*ent.BazelInvocationWhereInput)), true + return e.complexity.Query.FindBazelInvocations(childComplexity, args["after"].(*entgql.Cursor[int]), args["first"].(*int), args["before"].(*entgql.Cursor[int]), args["last"].(*int), args["orderBy"].(*ent.BazelInvocationOrder), args["where"].(*ent.BazelInvocationWhereInput)), true case "Query.findBuilds": if e.complexity.Query.FindBuilds == nil { @@ -2887,6 +2948,30 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Query.FindRunnerCounts(childComplexity, args["after"].(*entgql.Cursor[int]), args["first"].(*int), args["before"].(*entgql.Cursor[int]), args["last"].(*int), args["where"].(*ent.RunnerCountWhereInput)), true + case "Query.findTests": + if e.complexity.Query.FindTests == nil { + break + } + + args, err := ec.field_Query_findTests_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.FindTests(childComplexity, args["after"].(*entgql.Cursor[int]), args["first"].(*int), args["before"].(*entgql.Cursor[int]), args["last"].(*int), args["orderBy"].(*ent.TestCollectionOrder), args["where"].(*ent.TestCollectionWhereInput)), true + + case "Query.getAveragePassPercentageForLabel": + if e.complexity.Query.GetAveragePassPercentageForLabel == nil { + break + } + + args, err := ec.field_Query_getAveragePassPercentageForLabel_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetAveragePassPercentageForLabel(childComplexity, args["label"].(string)), true + case "Query.getBuild": if e.complexity.Query.GetBuild == nil { break @@ -2899,6 +2984,90 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Query.GetBuild(childComplexity, args["buildURL"].(*string), args["buildUUID"].(*uuid.UUID)), true + case "Query.getTargetDurationAggregation": + if e.complexity.Query.GetTargetDurationAggregation == nil { + break + } + + args, err := ec.field_Query_getTargetDurationAggregation_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetTargetDurationAggregation(childComplexity, args["label"].(*string)), true + + case "Query.getTargetPassAggregation": + if e.complexity.Query.GetTargetPassAggregation == nil { + break + } + + args, err := ec.field_Query_getTargetPassAggregation_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetTargetPassAggregation(childComplexity, args["label"].(*string)), true + + case "Query.getTestDurationAggregation": + if e.complexity.Query.GetTestDurationAggregation == nil { + break + } + + args, err := 
ec.field_Query_getTestDurationAggregation_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetTestDurationAggregation(childComplexity, args["label"].(*string)), true + + case "Query.getTestPassAggregation": + if e.complexity.Query.GetTestPassAggregation == nil { + break + } + + args, err := ec.field_Query_getTestPassAggregation_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetTestPassAggregation(childComplexity, args["label"].(*string)), true + + case "Query.getTestsWithOffset": + if e.complexity.Query.GetTestsWithOffset == nil { + break + } + + args, err := ec.field_Query_getTestsWithOffset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetTestsWithOffset(childComplexity, args["label"].(*string), args["offset"].(*int), args["limit"].(*int), args["sortBy"].(*string), args["direction"].(*string)), true + + case "Query.getUniqueTargetLabels": + if e.complexity.Query.GetUniqueTargetLabels == nil { + break + } + + args, err := ec.field_Query_getUniqueTargetLabels_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetUniqueTargetLabels(childComplexity, args["param"].(*string)), true + + case "Query.getUniqueTestLabels": + if e.complexity.Query.GetUniqueTestLabels == nil { + break + } + + args, err := ec.field_Query_getUniqueTestLabels_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.GetUniqueTestLabels(childComplexity, args["param"].(*string)), true + case "Query.node": if e.complexity.Query.Node == nil { break @@ -3140,6 +3309,55 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.SystemNetworkStats.PeakPacketsSentPerSec(childComplexity), true + case "TargetAggregate.avg": + if e.complexity.TargetAggregate.Avg == nil { + break + } + + return e.complexity.TargetAggregate.Avg(childComplexity), true + + case "TargetAggregate.count": + if e.complexity.TargetAggregate.Count == nil { + break + } + + return e.complexity.TargetAggregate.Count(childComplexity), true + + case "TargetAggregate.label": + if e.complexity.TargetAggregate.Label == nil { + break + } + + return e.complexity.TargetAggregate.Label(childComplexity), true + + case "TargetAggregate.max": + if e.complexity.TargetAggregate.Max == nil { + break + } + + return e.complexity.TargetAggregate.Max(childComplexity), true + + case "TargetAggregate.min": + if e.complexity.TargetAggregate.Min == nil { + break + } + + return e.complexity.TargetAggregate.Min(childComplexity), true + + case "TargetAggregate.pass": + if e.complexity.TargetAggregate.Pass == nil { + break + } + + return e.complexity.TargetAggregate.Pass(childComplexity), true + + case "TargetAggregate.sum": + if e.complexity.TargetAggregate.Sum == nil { + break + } + + return e.complexity.TargetAggregate.Sum(childComplexity), true + case "TargetComplete.directoryOutput": if e.complexity.TargetComplete.DirectoryOutput == nil { break @@ -3413,6 +3631,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TestCollection.DurationMs(childComplexity), true + case "TestCollection.firstSeen": + if e.complexity.TestCollection.FirstSeen == nil { + break + } + + return e.complexity.TestCollection.FirstSeen(childComplexity), true + case "TestCollection.id": if e.complexity.TestCollection.ID == nil { break @@ -3455,6 +3680,41 @@ func (e 
*executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TestCollection.TestSummary(childComplexity), true + case "TestCollectionConnection.edges": + if e.complexity.TestCollectionConnection.Edges == nil { + break + } + + return e.complexity.TestCollectionConnection.Edges(childComplexity), true + + case "TestCollectionConnection.pageInfo": + if e.complexity.TestCollectionConnection.PageInfo == nil { + break + } + + return e.complexity.TestCollectionConnection.PageInfo(childComplexity), true + + case "TestCollectionConnection.totalCount": + if e.complexity.TestCollectionConnection.TotalCount == nil { + break + } + + return e.complexity.TestCollectionConnection.TotalCount(childComplexity), true + + case "TestCollectionEdge.cursor": + if e.complexity.TestCollectionEdge.Cursor == nil { + break + } + + return e.complexity.TestCollectionEdge.Cursor(childComplexity), true + + case "TestCollectionEdge.node": + if e.complexity.TestCollectionEdge.Node == nil { + break + } + + return e.complexity.TestCollectionEdge.Node(childComplexity), true + case "TestFile.digest": if e.complexity.TestFile.Digest == nil { break @@ -3504,6 +3764,90 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TestFile.TestResult(childComplexity), true + case "TestGridCell.invocationId": + if e.complexity.TestGridCell.InvocationID == nil { + break + } + + return e.complexity.TestGridCell.InvocationID(childComplexity), true + + case "TestGridCell.status": + if e.complexity.TestGridCell.Status == nil { + break + } + + return e.complexity.TestGridCell.Status(childComplexity), true + + case "TestGridResult.result": + if e.complexity.TestGridResult.Result == nil { + break + } + + return e.complexity.TestGridResult.Result(childComplexity), true + + case "TestGridResult.total": + if e.complexity.TestGridResult.Total == nil { + break + } + + return e.complexity.TestGridResult.Total(childComplexity), true + + case "TestGridRow.avg": + if e.complexity.TestGridRow.Avg == nil { + break + } + + return e.complexity.TestGridRow.Avg(childComplexity), true + + case "TestGridRow.cells": + if e.complexity.TestGridRow.Cells == nil { + break + } + + return e.complexity.TestGridRow.Cells(childComplexity), true + + case "TestGridRow.count": + if e.complexity.TestGridRow.Count == nil { + break + } + + return e.complexity.TestGridRow.Count(childComplexity), true + + case "TestGridRow.label": + if e.complexity.TestGridRow.Label == nil { + break + } + + return e.complexity.TestGridRow.Label(childComplexity), true + + case "TestGridRow.max": + if e.complexity.TestGridRow.Max == nil { + break + } + + return e.complexity.TestGridRow.Max(childComplexity), true + + case "TestGridRow.min": + if e.complexity.TestGridRow.Min == nil { + break + } + + return e.complexity.TestGridRow.Min(childComplexity), true + + case "TestGridRow.passRate": + if e.complexity.TestGridRow.PassRate == nil { + break + } + + return e.complexity.TestGridRow.PassRate(childComplexity), true + + case "TestGridRow.sum": + if e.complexity.TestGridRow.Sum == nil { + break + } + + return e.complexity.TestGridRow.Sum(childComplexity), true + case "TestProblem.id": if e.complexity.TestProblem.ID == nil { break @@ -3915,6 +4259,7 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { ec.unmarshalInputActionDataWhereInput, ec.unmarshalInputActionSummaryWhereInput, ec.unmarshalInputArtifactMetricsWhereInput, + ec.unmarshalInputBazelInvocationOrder, 
ec.unmarshalInputBazelInvocationProblemWhereInput, ec.unmarshalInputBazelInvocationWhereInput, ec.unmarshalInputBlobWhereInput, @@ -3943,6 +4288,7 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { ec.unmarshalInputTargetConfiguredWhereInput, ec.unmarshalInputTargetMetricsWhereInput, ec.unmarshalInputTargetPairWhereInput, + ec.unmarshalInputTestCollectionOrder, ec.unmarshalInputTestCollectionWhereInput, ec.unmarshalInputTestFileWhereInput, ec.unmarshalInputTestResultBESWhereInput, @@ -4122,15 +4468,24 @@ func (ec *executionContext) field_Query_findBazelInvocations_args(ctx context.Co } } args["last"] = arg3 - var arg4 *ent.BazelInvocationWhereInput + var arg4 *ent.BazelInvocationOrder + if tmp, ok := rawArgs["orderBy"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("orderBy")) + arg4, err = ec.unmarshalOBazelInvocationOrder2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationOrder(ctx, tmp) + if err != nil { + return nil, err + } + } + args["orderBy"] = arg4 + var arg5 *ent.BazelInvocationWhereInput if tmp, ok := rawArgs["where"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("where")) - arg4, err = ec.unmarshalOBazelInvocationWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationWhereInput(ctx, tmp) + arg5, err = ec.unmarshalOBazelInvocationWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationWhereInput(ctx, tmp) if err != nil { return nil, err } } - args["where"] = arg4 + args["where"] = arg5 return args, nil } @@ -4287,6 +4642,81 @@ func (ec *executionContext) field_Query_findRunnerCounts_args(ctx context.Contex return args, nil } +func (ec *executionContext) field_Query_findTests_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *entgql.Cursor[int] + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg0, err = ec.unmarshalOCursor2ᚖentgoᚗioᚋcontribᚋentgqlᚐCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2ᚖint(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *entgql.Cursor[int] + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg2, err = ec.unmarshalOCursor2ᚖentgoᚗioᚋcontribᚋentgqlᚐCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg2 + var arg3 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg3 + var arg4 *ent.TestCollectionOrder + if tmp, ok := rawArgs["orderBy"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("orderBy")) + arg4, err = ec.unmarshalOTestCollectionOrder2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionOrder(ctx, tmp) + if err != nil { + return nil, err + } + } + args["orderBy"] = arg4 + var arg5 *ent.TestCollectionWhereInput + if tmp, ok := rawArgs["where"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("where")) + arg5, err = ec.unmarshalOTestCollectionWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInput(ctx, tmp) + if 
err != nil { + return nil, err + } + } + args["where"] = arg5 + return args, nil +} + +func (ec *executionContext) field_Query_getAveragePassPercentageForLabel_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + return args, nil +} + func (ec *executionContext) field_Query_getBuild_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -4311,6 +4741,147 @@ func (ec *executionContext) field_Query_getBuild_args(ctx context.Context, rawAr return args, nil } +func (ec *executionContext) field_Query_getTargetDurationAggregation_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_getTargetPassAggregation_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_getTestDurationAggregation_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_getTestPassAggregation_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_getTestsWithOffset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["label"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["label"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["offset"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("offset")) + arg1, err = ec.unmarshalOInt2ᚖint(ctx, tmp) + if err != nil { + return nil, err + } + } + args["offset"] = arg1 + var arg2 *int + if tmp, ok := 
rawArgs["limit"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("limit")) + arg2, err = ec.unmarshalOInt2ᚖint(ctx, tmp) + if err != nil { + return nil, err + } + } + args["limit"] = arg2 + var arg3 *string + if tmp, ok := rawArgs["sortBy"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sortBy")) + arg3, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sortBy"] = arg3 + var arg4 *string + if tmp, ok := rawArgs["direction"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("direction")) + arg4, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["direction"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_getUniqueTargetLabels_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["param"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("param")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["param"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_getUniqueTestLabels_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["param"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("param")) + arg0, err = ec.unmarshalOString2ᚖstring(ctx, tmp) + if err != nil { + return nil, err + } + } + args["param"] = arg0 + return args, nil +} + func (ec *executionContext) field_Query_node_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -4651,9 +5222,9 @@ func (ec *executionContext) _ActionCacheStatistics_actionSummary(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionSummary) + res := resTmp.(*ent.ActionSummary) fc.Result = res - return ec.marshalOActionSummary2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryᚄ(ctx, field.Selections, res) + return ec.marshalOActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ActionCacheStatistics_actionSummary(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -5094,9 +5665,9 @@ func (ec *executionContext) _ActionData_actionSummary(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionSummary) + res := resTmp.(*ent.ActionSummary) fc.Result = res - return ec.marshalOActionSummary2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryᚄ(ctx, field.Selections, res) + return ec.marshalOActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ActionData_actionSummary(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -5778,9 +6349,9 @@ func (ec *executionContext) _ActionSummary_actionCacheStatistics(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionCacheStatistics) + res := resTmp.(*ent.ActionCacheStatistics) fc.Result = res - return ec.marshalOActionCacheStatistics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatisticsᚄ(ctx, 
field.Selections, res) + return ec.marshalOActionCacheStatistics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatistics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ActionSummary_actionCacheStatistics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -5881,9 +6452,9 @@ func (ec *executionContext) _ArtifactMetrics_metrics(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ArtifactMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -5948,9 +6519,9 @@ func (ec *executionContext) _ArtifactMetrics_sourceArtifactsRead(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.FilesMetric) + res := resTmp.(*ent.FilesMetric) fc.Result = res - return ec.marshalOFilesMetric2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricᚄ(ctx, field.Selections, res) + return ec.marshalOFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ArtifactMetrics_sourceArtifactsRead(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -5999,9 +6570,9 @@ func (ec *executionContext) _ArtifactMetrics_outputArtifactsSeen(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.FilesMetric) + res := resTmp.(*ent.FilesMetric) fc.Result = res - return ec.marshalOFilesMetric2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricᚄ(ctx, field.Selections, res) + return ec.marshalOFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ArtifactMetrics_outputArtifactsSeen(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -6050,9 +6621,9 @@ func (ec *executionContext) _ArtifactMetrics_outputArtifactsFromActionCache(ctx if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.FilesMetric) + res := resTmp.(*ent.FilesMetric) fc.Result = res - return ec.marshalOFilesMetric2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricᚄ(ctx, field.Selections, res) + return ec.marshalOFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ArtifactMetrics_outputArtifactsFromActionCache(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -6101,9 +6672,9 @@ func (ec *executionContext) _ArtifactMetrics_topLevelArtifacts(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.FilesMetric) + res := resTmp.(*ent.FilesMetric) fc.Result = res - return ec.marshalOFilesMetric2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricᚄ(ctx, field.Selections, res) + return ec.marshalOFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ArtifactMetrics_topLevelArtifacts(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -7206,6 +7777,8 @@ func (ec *executionContext) 
fieldContext_BazelInvocation_testCollection(ctx cont return ec.fieldContext_TestCollection_cachedLocally(ctx, field) case "cachedRemotely": return ec.fieldContext_TestCollection_cachedRemotely(ctx, field) + case "firstSeen": + return ec.fieldContext_TestCollection_firstSeen(ctx, field) case "durationMs": return ec.fieldContext_TestCollection_durationMs(ctx, field) case "bazelInvocation": @@ -9709,9 +10282,9 @@ func (ec *executionContext) _BuildGraphMetrics_metrics(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -9776,9 +10349,9 @@ func (ec *executionContext) _BuildGraphMetrics_dirtiedValues(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.EvaluationStat) + res := resTmp.(*ent.EvaluationStat) fc.Result = res - return ec.marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx, field.Selections, res) + return ec.marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_dirtiedValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -9827,9 +10400,9 @@ func (ec *executionContext) _BuildGraphMetrics_changedValues(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.EvaluationStat) + res := resTmp.(*ent.EvaluationStat) fc.Result = res - return ec.marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx, field.Selections, res) + return ec.marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_changedValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -9878,9 +10451,9 @@ func (ec *executionContext) _BuildGraphMetrics_builtValues(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.EvaluationStat) + res := resTmp.(*ent.EvaluationStat) fc.Result = res - return ec.marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx, field.Selections, res) + return ec.marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_builtValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -9929,9 +10502,9 @@ func (ec *executionContext) _BuildGraphMetrics_cleanedValues(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.EvaluationStat) + res := resTmp.(*ent.EvaluationStat) fc.Result = res - return ec.marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx, field.Selections, res) + return ec.marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_cleanedValues(ctx context.Context, field graphql.CollectedField) 
(fc *graphql.FieldContext, err error) { @@ -9980,9 +10553,9 @@ func (ec *executionContext) _BuildGraphMetrics_evaluatedValues(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.EvaluationStat) + res := resTmp.(*ent.EvaluationStat) fc.Result = res - return ec.marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx, field.Selections, res) + return ec.marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_BuildGraphMetrics_evaluatedValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -10157,9 +10730,9 @@ func (ec *executionContext) _CumulativeMetrics_metrics(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_CumulativeMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -10268,9 +10841,9 @@ func (ec *executionContext) _DynamicExecutionMetrics_metrics(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_DynamicExecutionMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -10606,9 +11179,9 @@ func (ec *executionContext) _EvaluationStat_buildGraphMetrics(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.BuildGraphMetrics) + res := resTmp.(*ent.BuildGraphMetrics) fc.Result = res - return ec.marshalOBuildGraphMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOBuildGraphMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_EvaluationStat_buildGraphMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -11330,9 +11903,9 @@ func (ec *executionContext) _ExectionInfo_testResult(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TestResultBES) + res := resTmp.(*ent.TestResultBES) fc.Result = res - return ec.marshalOTestResultBES2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBESᚄ(ctx, field.Selections, res) + return ec.marshalOTestResultBES2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBES(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ExectionInfo_testResult(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -11761,9 +12334,9 @@ func (ec *executionContext) _FilesMetric_artifactMetrics(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ArtifactMetrics) + res := resTmp.(*ent.ArtifactMetrics) fc.Result = res - return 
ec.marshalOArtifactMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOArtifactMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_FilesMetric_artifactMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -11942,9 +12515,9 @@ func (ec *executionContext) _GarbageMetrics_memoryMetrics(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.MemoryMetrics) + res := resTmp.(*ent.MemoryMetrics) fc.Result = res - return ec.marshalOMemoryMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMemoryMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_GarbageMetrics_memoryMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12164,9 +12737,9 @@ func (ec *executionContext) _MemoryMetrics_metrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_MemoryMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12421,9 +12994,9 @@ func (ec *executionContext) _Metrics_actionSummary(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionSummary) + res := resTmp.(*ent.ActionSummary) fc.Result = res - return ec.marshalOActionSummary2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryᚄ(ctx, field.Selections, res) + return ec.marshalOActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_actionSummary(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12482,9 +13055,9 @@ func (ec *executionContext) _Metrics_memoryMetrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.MemoryMetrics) + res := resTmp.(*ent.MemoryMetrics) fc.Result = res - return ec.marshalOMemoryMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMemoryMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_memoryMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12537,9 +13110,9 @@ func (ec *executionContext) _Metrics_targetMetrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TargetMetrics) + res := resTmp.(*ent.TargetMetrics) fc.Result = res - return ec.marshalOTargetMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOTargetMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_targetMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { 
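Note on the recurring marshaler change in the hunks above and below: edges that previously resolved to slices ([]*ent.Metrics, []*ent.FilesMetric, []*ent.EvaluationStat, …) now resolve to single nullable objects, so the generated marshalers switch from the list form (marshalO…2ᚕᚖ…ᚄ) to the pointer form (marshalO…2ᚖ…). The sketch below shows the kind of ent schema change that makes entgql/gqlgen regenerate the resolvers this way; it is an illustrative assumption, with hypothetical edge names, not the actual bb-portal schema files touched by this patch.

    package schema

    import (
        "entgo.io/ent"
        "entgo.io/ent/schema/edge"
    )

    // Metrics and ArtifactMetrics are pared-down stand-ins for the real
    // bb-portal schemas; fields and other edges are omitted.
    type Metrics struct {
        ent.Schema
    }

    // Edges of Metrics: forward edge to ArtifactMetrics, declared Unique()
    // so the relation is one-to-one rather than one-to-many.
    func (Metrics) Edges() []ent.Edge {
        return []ent.Edge{
            edge.To("artifact_metrics", ArtifactMetrics.Type).Unique(),
        }
    }

    type ArtifactMetrics struct {
        ent.Schema
    }

    // Edges of ArtifactMetrics: the Unique() back-reference is what makes
    // entgql expose a single nullable object (*ent.Metrics) instead of a
    // list, producing the marshalOMetrics2ᚖ… marshaler seen in the "+"
    // lines in place of the slice marshaler removed in the "-" lines.
    func (ArtifactMetrics) Edges() []ent.Edge {
        return []ent.Edge{
            edge.From("metrics", Metrics.Type).
                Ref("artifact_metrics").
                Unique(),
        }
    }
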
@@ -12590,9 +13163,9 @@ func (ec *executionContext) _Metrics_packageMetrics(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.PackageMetrics) + res := resTmp.(*ent.PackageMetrics) fc.Result = res - return ec.marshalOPackageMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOPackageMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_packageMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12641,9 +13214,9 @@ func (ec *executionContext) _Metrics_timingMetrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TimingMetrics) + res := resTmp.(*ent.TimingMetrics) fc.Result = res - return ec.marshalOTimingMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOTimingMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_timingMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12698,9 +13271,9 @@ func (ec *executionContext) _Metrics_cumulativeMetrics(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.CumulativeMetrics) + res := resTmp.(*ent.CumulativeMetrics) fc.Result = res - return ec.marshalOCumulativeMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOCumulativeMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_cumulativeMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12749,9 +13322,9 @@ func (ec *executionContext) _Metrics_artifactMetrics(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ArtifactMetrics) + res := resTmp.(*ent.ArtifactMetrics) fc.Result = res - return ec.marshalOArtifactMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOArtifactMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_artifactMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12804,9 +13377,9 @@ func (ec *executionContext) _Metrics_networkMetrics(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.NetworkMetrics) + res := resTmp.(*ent.NetworkMetrics) fc.Result = res - return ec.marshalONetworkMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsᚄ(ctx, field.Selections, res) + return ec.marshalONetworkMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_networkMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12853,9 +13426,9 @@ func (ec *executionContext) _Metrics_dynamicExecutionMetrics(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.DynamicExecutionMetrics) + res := resTmp.(*ent.DynamicExecutionMetrics) fc.Result = res - return 
ec.marshalODynamicExecutionMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetricsᚄ(ctx, field.Selections, res) + return ec.marshalODynamicExecutionMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_dynamicExecutionMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -12902,9 +13475,9 @@ func (ec *executionContext) _Metrics_buildGraphMetrics(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.BuildGraphMetrics) + res := resTmp.(*ent.BuildGraphMetrics) fc.Result = res - return ec.marshalOBuildGraphMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOBuildGraphMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_Metrics_buildGraphMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -13359,9 +13932,9 @@ func (ec *executionContext) _MissDetail_actionCacheStatistics(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionCacheStatistics) + res := resTmp.(*ent.ActionCacheStatistics) fc.Result = res - return ec.marshalOActionCacheStatistics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatisticsᚄ(ctx, field.Selections, res) + return ec.marshalOActionCacheStatistics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatistics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_MissDetail_actionCacheStatistics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -13550,9 +14123,9 @@ func (ec *executionContext) _NamedSetOfFiles_outputGroup(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.OutputGroup) + res := resTmp.(*ent.OutputGroup) fc.Result = res - return ec.marshalOOutputGroup2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroupᚄ(ctx, field.Selections, res) + return ec.marshalOOutputGroup2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroup(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_NamedSetOfFiles_outputGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -13757,9 +14330,9 @@ func (ec *executionContext) _NetworkMetrics_metrics(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_NetworkMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -13824,9 +14397,9 @@ func (ec *executionContext) _NetworkMetrics_systemNetworkStats(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.SystemNetworkStats) + res := resTmp.(*ent.SystemNetworkStats) fc.Result = res - return ec.marshalOSystemNetworkStats2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsᚄ(ctx, field.Selections, res) + return ec.marshalOSystemNetworkStats2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStats(ctx, field.Selections, res) } func (ec 
*executionContext) fieldContext_NetworkMetrics_systemNetworkStats(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -14013,9 +14586,9 @@ func (ec *executionContext) _OutputGroup_targetComplete(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TargetComplete) + res := resTmp.(*ent.TargetComplete) fc.Result = res - return ec.marshalOTargetComplete2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteᚄ(ctx, field.Selections, res) + return ec.marshalOTargetComplete2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetComplete(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_OutputGroup_targetComplete(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -14478,9 +15051,9 @@ func (ec *executionContext) _PackageLoadMetrics_packageMetrics(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.PackageMetrics) + res := resTmp.(*ent.PackageMetrics) fc.Result = res - return ec.marshalOPackageMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOPackageMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_PackageLoadMetrics_packageMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -14614,9 +15187,9 @@ func (ec *executionContext) _PackageMetrics_metrics(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_PackageMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -15316,7 +15889,7 @@ func (ec *executionContext) _Query_findBazelInvocations(ctx context.Context, fie }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().FindBazelInvocations(rctx, fc.Args["after"].(*entgql.Cursor[int]), fc.Args["first"].(*int), fc.Args["before"].(*entgql.Cursor[int]), fc.Args["last"].(*int), fc.Args["where"].(*ent.BazelInvocationWhereInput)) + return ec.resolvers.Query().FindBazelInvocations(rctx, fc.Args["after"].(*entgql.Cursor[int]), fc.Args["first"].(*int), fc.Args["before"].(*entgql.Cursor[int]), fc.Args["last"].(*int), fc.Args["orderBy"].(*ent.BazelInvocationOrder), fc.Args["where"].(*ent.BazelInvocationWhereInput)) }) if err != nil { ec.Error(ctx, err) @@ -15554,6 +16127,69 @@ func (ec *executionContext) fieldContext_Query_findRunnerCounts(ctx context.Cont return fc, nil } +func (ec *executionContext) _Query_findTests(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_findTests(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.Query().FindTests(rctx, fc.Args["after"].(*entgql.Cursor[int]), fc.Args["first"].(*int), fc.Args["before"].(*entgql.Cursor[int]), fc.Args["last"].(*int), fc.Args["orderBy"].(*ent.TestCollectionOrder), fc.Args["where"].(*ent.TestCollectionWhereInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*ent.TestCollectionConnection) + fc.Result = res + return ec.marshalNTestCollectionConnection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_findTests(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_TestCollectionConnection_edges(ctx, field) + case "pageInfo": + return ec.fieldContext_TestCollectionConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_TestCollectionConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestCollectionConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_findTests_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + func (ec *executionContext) _Query_bazelInvocation(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { fc, err := ec.fieldContext_Query_bazelInvocation(ctx, field) if err != nil { @@ -15727,8 +16363,8 @@ func (ec *executionContext) fieldContext_Query_getBuild(ctx context.Context, fie return fc, nil } -func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_Query___type(ctx, field) +func (ec *executionContext) _Query_getUniqueTestLabels(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getUniqueTestLabels(ctx, field) if err != nil { return graphql.Null } @@ -15741,7 +16377,7 @@ func (ec *executionContext) _Query___type(ctx context.Context, field graphql.Col }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.introspectType(fc.Args["name"].(string)) + return ec.resolvers.Query().GetUniqueTestLabels(rctx, fc.Args["param"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -15750,41 +16386,19 @@ func (ec *executionContext) _Query___type(ctx context.Context, field graphql.Col if resTmp == nil { return graphql.Null } - res := resTmp.(*introspection.Type) + res := resTmp.([]*string) fc.Result = res - return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) + return ec.marshalOString2ᚕᚖstring(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getUniqueTestLabels(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ Object: "Query", Field: field, IsMethod: true, - IsResolver: false, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - switch field.Name { - case "kind": - return ec.fieldContext___Type_kind(ctx, field) - case "name": - return ec.fieldContext___Type_name(ctx, field) - case "description": - return ec.fieldContext___Type_description(ctx, field) - case "fields": - return ec.fieldContext___Type_fields(ctx, field) - case "interfaces": - return ec.fieldContext___Type_interfaces(ctx, field) - case "possibleTypes": - return ec.fieldContext___Type_possibleTypes(ctx, field) - case "enumValues": - return ec.fieldContext___Type_enumValues(ctx, field) - case "inputFields": - return ec.fieldContext___Type_inputFields(ctx, field) - case "ofType": - return ec.fieldContext___Type_ofType(ctx, field) - case "specifiedByURL": - return ec.fieldContext___Type_specifiedByURL(ctx, field) - } - return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + return nil, errors.New("field of type String does not have child fields") }, } defer func() { @@ -15794,15 +16408,15 @@ func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field } }() ctx = graphql.WithFieldContext(ctx, fc) - if fc.Args, err = ec.field_Query___type_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + if fc.Args, err = ec.field_Query_getUniqueTestLabels_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) return fc, err } return fc, nil } -func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_Query___schema(ctx, field) +func (ec *executionContext) _Query_getUniqueTargetLabels(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getUniqueTargetLabels(ctx, field) if err != nil { return graphql.Null } @@ -15815,7 +16429,7 @@ func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.C }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.introspectSchema() + return ec.resolvers.Query().GetUniqueTargetLabels(rctx, fc.Args["param"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -15824,40 +16438,37 @@ func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*introspection.Schema) + res := resTmp.([]*string) fc.Result = res - return ec.marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx, field.Selections, res) + return ec.marshalOString2ᚕᚖstring(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_Query___schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getUniqueTargetLabels(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ Object: "Query", Field: field, IsMethod: true, - IsResolver: false, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - switch field.Name { - case "description": - return ec.fieldContext___Schema_description(ctx, field) - case "types": - 
return ec.fieldContext___Schema_types(ctx, field) - case "queryType": - return ec.fieldContext___Schema_queryType(ctx, field) - case "mutationType": - return ec.fieldContext___Schema_mutationType(ctx, field) - case "subscriptionType": - return ec.fieldContext___Schema_subscriptionType(ctx, field) - case "directives": - return ec.fieldContext___Schema_directives(ctx, field) - } - return nil, fmt.Errorf("no field named %q was found under type __Schema", field.Name) + return nil, errors.New("field of type String does not have child fields") }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getUniqueTargetLabels_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_id(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_id(ctx, field) +func (ec *executionContext) _Query_getTestDurationAggregation(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getTestDurationAggregation(ctx, field) if err != nil { return graphql.Null } @@ -15870,38 +16481,62 @@ func (ec *executionContext) _RaceStatistics_id(ctx context.Context, field graphq }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.RaceStatistics().ID(rctx, obj) + return ec.resolvers.Query().GetTestDurationAggregation(rctx, fc.Args["label"].(*string)) }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*model.TargetAggregate) fc.Result = res - return ec.marshalNID2string(ctx, field.Selections, res) + return ec.marshalOTargetAggregate2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getTestDurationAggregation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, IsMethod: true, IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type ID does not have child fields") + switch field.Name { + case "label": + return ec.fieldContext_TargetAggregate_label(ctx, field) + case "count": + return ec.fieldContext_TargetAggregate_count(ctx, field) + case "sum": + return ec.fieldContext_TargetAggregate_sum(ctx, field) + case "min": + return ec.fieldContext_TargetAggregate_min(ctx, field) + case "max": + return ec.fieldContext_TargetAggregate_max(ctx, field) + case "avg": + return ec.fieldContext_TargetAggregate_avg(ctx, field) + case "pass": + return ec.fieldContext_TargetAggregate_pass(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TargetAggregate", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx 
= graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getTestDurationAggregation_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_mnemonic(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_mnemonic(ctx, field) +func (ec *executionContext) _Query_getTestPassAggregation(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getTestPassAggregation(ctx, field) if err != nil { return graphql.Null } @@ -15914,7 +16549,7 @@ func (ec *executionContext) _RaceStatistics_mnemonic(ctx context.Context, field }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Mnemonic, nil + return ec.resolvers.Query().GetTestPassAggregation(rctx, fc.Args["label"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -15923,26 +16558,53 @@ func (ec *executionContext) _RaceStatistics_mnemonic(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*model.TargetAggregate) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOTargetAggregate2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_mnemonic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getTestPassAggregation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + switch field.Name { + case "label": + return ec.fieldContext_TargetAggregate_label(ctx, field) + case "count": + return ec.fieldContext_TargetAggregate_count(ctx, field) + case "sum": + return ec.fieldContext_TargetAggregate_sum(ctx, field) + case "min": + return ec.fieldContext_TargetAggregate_min(ctx, field) + case "max": + return ec.fieldContext_TargetAggregate_max(ctx, field) + case "avg": + return ec.fieldContext_TargetAggregate_avg(ctx, field) + case "pass": + return ec.fieldContext_TargetAggregate_pass(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TargetAggregate", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getTestPassAggregation_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_localRunner(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_localRunner(ctx, field) +func (ec *executionContext) _Query_getTargetDurationAggregation(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Query_getTargetDurationAggregation(ctx, field) if err != nil { return graphql.Null } @@ -15955,7 +16617,7 @@ func (ec *executionContext) _RaceStatistics_localRunner(ctx context.Context, fie }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.LocalRunner, nil + return ec.resolvers.Query().GetTargetDurationAggregation(rctx, fc.Args["label"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -15964,26 +16626,53 @@ func (ec *executionContext) _RaceStatistics_localRunner(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*model.TargetAggregate) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOTargetAggregate2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_localRunner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getTargetDurationAggregation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + switch field.Name { + case "label": + return ec.fieldContext_TargetAggregate_label(ctx, field) + case "count": + return ec.fieldContext_TargetAggregate_count(ctx, field) + case "sum": + return ec.fieldContext_TargetAggregate_sum(ctx, field) + case "min": + return ec.fieldContext_TargetAggregate_min(ctx, field) + case "max": + return ec.fieldContext_TargetAggregate_max(ctx, field) + case "avg": + return ec.fieldContext_TargetAggregate_avg(ctx, field) + case "pass": + return ec.fieldContext_TargetAggregate_pass(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TargetAggregate", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getTargetDurationAggregation_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_remoteRunner(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_remoteRunner(ctx, field) +func (ec *executionContext) _Query_getTargetPassAggregation(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getTargetPassAggregation(ctx, field) if err != nil { return graphql.Null } @@ -15996,7 +16685,7 @@ func (ec *executionContext) _RaceStatistics_remoteRunner(ctx context.Context, fi }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.RemoteRunner, nil + return ec.resolvers.Query().GetTargetPassAggregation(rctx, fc.Args["label"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -16005,26 +16694,53 @@ func (ec *executionContext) 
_RaceStatistics_remoteRunner(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*model.TargetAggregate) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOTargetAggregate2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_remoteRunner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getTargetPassAggregation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + switch field.Name { + case "label": + return ec.fieldContext_TargetAggregate_label(ctx, field) + case "count": + return ec.fieldContext_TargetAggregate_count(ctx, field) + case "sum": + return ec.fieldContext_TargetAggregate_sum(ctx, field) + case "min": + return ec.fieldContext_TargetAggregate_min(ctx, field) + case "max": + return ec.fieldContext_TargetAggregate_max(ctx, field) + case "avg": + return ec.fieldContext_TargetAggregate_avg(ctx, field) + case "pass": + return ec.fieldContext_TargetAggregate_pass(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TargetAggregate", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getTargetPassAggregation_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_localWins(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_localWins(ctx, field) +func (ec *executionContext) _Query_getTestsWithOffset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getTestsWithOffset(ctx, field) if err != nil { return graphql.Null } @@ -16037,7 +16753,7 @@ func (ec *executionContext) _RaceStatistics_localWins(ctx context.Context, field }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.LocalWins, nil + return ec.resolvers.Query().GetTestsWithOffset(rctx, fc.Args["label"].(*string), fc.Args["offset"].(*int), fc.Args["limit"].(*int), fc.Args["sortBy"].(*string), fc.Args["direction"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -16046,26 +16762,43 @@ func (ec *executionContext) _RaceStatistics_localWins(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(int64) + res := resTmp.(*model.TestGridResult) fc.Result = res - return ec.marshalOInt2int64(ctx, field.Selections, res) + return ec.marshalOTestGridResult2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridResult(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_localWins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err 
error) { +func (ec *executionContext) fieldContext_Query_getTestsWithOffset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type Int does not have child fields") + switch field.Name { + case "total": + return ec.fieldContext_TestGridResult_total(ctx, field) + case "result": + return ec.fieldContext_TestGridResult_result(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestGridResult", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getTestsWithOffset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_renoteWins(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_renoteWins(ctx, field) +func (ec *executionContext) _Query_getAveragePassPercentageForLabel(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_getAveragePassPercentageForLabel(ctx, field) if err != nil { return graphql.Null } @@ -16078,7 +16811,7 @@ func (ec *executionContext) _RaceStatistics_renoteWins(ctx context.Context, fiel }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.RenoteWins, nil + return ec.resolvers.Query().GetAveragePassPercentageForLabel(rctx, fc.Args["label"].(string)) }) if err != nil { ec.Error(ctx, err) @@ -16087,26 +16820,37 @@ func (ec *executionContext) _RaceStatistics_renoteWins(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(int64) + res := resTmp.(*float64) fc.Result = res - return ec.marshalOInt2int64(ctx, field.Selections, res) + return ec.marshalOFloat2ᚖfloat64(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_renoteWins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query_getAveragePassPercentageForLabel(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type Int does not have child fields") + return nil, errors.New("field of type Float does not have child fields") }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_getAveragePassPercentageForLabel_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _RaceStatistics_dynamicExecutionMetrics(ctx context.Context, field graphql.CollectedField, obj 
*ent.RaceStatistics) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_RaceStatistics_dynamicExecutionMetrics(ctx, field) +func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___type(ctx, field) if err != nil { return graphql.Null } @@ -16119,7 +16863,7 @@ func (ec *executionContext) _RaceStatistics_dynamicExecutionMetrics(ctx context. }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.DynamicExecutionMetrics(ctx) + return ec.introspectType(fc.Args["name"].(string)) }) if err != nil { ec.Error(ctx, err) @@ -16128,34 +16872,59 @@ func (ec *executionContext) _RaceStatistics_dynamicExecutionMetrics(ctx context. if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.DynamicExecutionMetrics) + res := resTmp.(*introspection.Type) fc.Result = res - return ec.marshalODynamicExecutionMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetricsᚄ(ctx, field.Selections, res) + return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_RaceStatistics_dynamicExecutionMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "RaceStatistics", + Object: "Query", Field: field, IsMethod: true, IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { switch field.Name { - case "id": - return ec.fieldContext_DynamicExecutionMetrics_id(ctx, field) - case "metrics": - return ec.fieldContext_DynamicExecutionMetrics_metrics(ctx, field) - case "raceStatistics": - return ec.fieldContext_DynamicExecutionMetrics_raceStatistics(ctx, field) + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) } - return nil, fmt.Errorf("no field named %q was found under type DynamicExecutionMetrics", field.Name) + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query___type_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } return fc, nil } -func (ec *executionContext) _ResourceUsage_id(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_ResourceUsage_id(ctx, field) +func (ec *executionContext) 
_Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___schema(ctx, field) if err != nil { return graphql.Null } @@ -16168,38 +16937,49 @@ func (ec *executionContext) _ResourceUsage_id(ctx context.Context, field graphql }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.ResourceUsage().ID(rctx, obj) + return ec.introspectSchema() }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(string) + res := resTmp.(*introspection.Schema) fc.Result = res - return ec.marshalNID2string(ctx, field.Selections, res) + return ec.marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_ResourceUsage_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_Query___schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "ResourceUsage", + Object: "Query", Field: field, IsMethod: true, - IsResolver: true, + IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type ID does not have child fields") + switch field.Name { + case "description": + return ec.fieldContext___Schema_description(ctx, field) + case "types": + return ec.fieldContext___Schema_types(ctx, field) + case "queryType": + return ec.fieldContext___Schema_queryType(ctx, field) + case "mutationType": + return ec.fieldContext___Schema_mutationType(ctx, field) + case "subscriptionType": + return ec.fieldContext___Schema_subscriptionType(ctx, field) + case "directives": + return ec.fieldContext___Schema_directives(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Schema", field.Name) }, } return fc, nil } -func (ec *executionContext) _ResourceUsage_name(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_ResourceUsage_name(ctx, field) +func (ec *executionContext) _RaceStatistics_id(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_id(ctx, field) if err != nil { return graphql.Null } @@ -16212,35 +16992,38 @@ func (ec *executionContext) _ResourceUsage_name(ctx context.Context, field graph }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Name, nil + return ec.resolvers.RaceStatistics().ID(rctx, obj) }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } res := resTmp.(string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalNID2string(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_ResourceUsage_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_RaceStatistics_id(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "ResourceUsage", + Object: "RaceStatistics", Field: field, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + return nil, errors.New("field of type ID does not have child fields") }, } return fc, nil } -func (ec *executionContext) _ResourceUsage_value(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_ResourceUsage_value(ctx, field) +func (ec *executionContext) _RaceStatistics_mnemonic(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_mnemonic(ctx, field) if err != nil { return graphql.Null } @@ -16253,7 +17036,7 @@ func (ec *executionContext) _ResourceUsage_value(ctx context.Context, field grap }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Value, nil + return obj.Mnemonic, nil }) if err != nil { ec.Error(ctx, err) @@ -16267,9 +17050,9 @@ func (ec *executionContext) _ResourceUsage_value(ctx context.Context, field grap return ec.marshalOString2string(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_ResourceUsage_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_RaceStatistics_mnemonic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "ResourceUsage", + Object: "RaceStatistics", Field: field, IsMethod: false, IsResolver: false, @@ -16280,8 +17063,347 @@ func (ec *executionContext) fieldContext_ResourceUsage_value(ctx context.Context return fc, nil } -func (ec *executionContext) _ResourceUsage_executionInfo(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_ResourceUsage_executionInfo(ctx, field) +func (ec *executionContext) _RaceStatistics_localRunner(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_localRunner(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LocalRunner, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RaceStatistics_localRunner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RaceStatistics", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RaceStatistics_remoteRunner(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_remoteRunner(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RemoteRunner, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RaceStatistics_remoteRunner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RaceStatistics", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RaceStatistics_localWins(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_localWins(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LocalWins, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(int64) + fc.Result = res + return ec.marshalOInt2int64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RaceStatistics_localWins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RaceStatistics", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RaceStatistics_renoteWins(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_renoteWins(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RenoteWins, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(int64) + fc.Result = res + return ec.marshalOInt2int64(ctx, field.Selections, res) +} 
+ +func (ec *executionContext) fieldContext_RaceStatistics_renoteWins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RaceStatistics", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _RaceStatistics_dynamicExecutionMetrics(ctx context.Context, field graphql.CollectedField, obj *ent.RaceStatistics) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RaceStatistics_dynamicExecutionMetrics(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DynamicExecutionMetrics(ctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*ent.DynamicExecutionMetrics) + fc.Result = res + return ec.marshalODynamicExecutionMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetrics(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_RaceStatistics_dynamicExecutionMetrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RaceStatistics", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DynamicExecutionMetrics_id(ctx, field) + case "metrics": + return ec.fieldContext_DynamicExecutionMetrics_metrics(ctx, field) + case "raceStatistics": + return ec.fieldContext_DynamicExecutionMetrics_raceStatistics(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DynamicExecutionMetrics", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _ResourceUsage_id(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ResourceUsage_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.ResourceUsage().ID(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNID2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ResourceUsage_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ResourceUsage", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ResourceUsage_name(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ResourceUsage_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ResourceUsage_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ResourceUsage", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ResourceUsage_value(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ResourceUsage_value(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_ResourceUsage_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ResourceUsage", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _ResourceUsage_executionInfo(ctx context.Context, field graphql.CollectedField, obj *ent.ResourceUsage) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ResourceUsage_executionInfo(ctx, field) if err != nil { return graphql.Null } @@ -16303,9 +17425,9 @@ func (ec *executionContext) _ResourceUsage_executionInfo(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ExectionInfo) + res := resTmp.(*ent.ExectionInfo) fc.Result = res - return ec.marshalOExectionInfo2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfoᚄ(ctx, field.Selections, res) + return ec.marshalOExectionInfo2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfo(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_ResourceUsage_executionInfo(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { @@ -16531,9 +17653,9 @@ func (ec *executionContext) _RunnerCount_actionSummary(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ActionSummary) + res := resTmp.(*ent.ActionSummary) fc.Result = res - return ec.marshalOActionSummary2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryᚄ(ctx, field.Selections, res) + return ec.marshalOActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_RunnerCount_actionSummary(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -17232,6 +18354,293 @@ func (ec *executionContext) fieldContext_SystemNetworkStats_networkMetrics(ctx c return fc, nil } +func (ec *executionContext) _TargetAggregate_label(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_label(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_label(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_count(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_count(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Count, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_count(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_sum(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { 
+ fc, err := ec.fieldContext_TargetAggregate_sum(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Sum, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_sum(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_min(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_min(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Min, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_max(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_max(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Max, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_max(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_avg(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_avg(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Avg, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_avg(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TargetAggregate_pass(ctx context.Context, field graphql.CollectedField, obj *model.TargetAggregate) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TargetAggregate_pass(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Pass, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TargetAggregate_pass(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TargetAggregate", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _TargetComplete_id(ctx context.Context, field graphql.CollectedField, obj *ent.TargetComplete) (ret graphql.Marshaler) { fc, err := ec.fieldContext_TargetComplete_id(ctx, field) if err != nil { @@ -17586,9 +18995,9 @@ func (ec *executionContext) _TargetComplete_targetPair(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TargetPair) + res := resTmp.(*ent.TargetPair) fc.Result = res - return ec.marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairᚄ(ctx, field.Selections, res) + return ec.marshalOTargetPair2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPair(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TargetComplete_targetPair(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { @@ -18026,9 +19435,9 @@ func (ec *executionContext) _TargetConfigured_targetPair(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TargetPair) + res := resTmp.(*ent.TargetPair) fc.Result = res - return ec.marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairᚄ(ctx, field.Selections, res) + return ec.marshalOTargetPair2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPair(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TargetConfigured_targetPair(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -18256,9 +19665,9 @@ func (ec *executionContext) _TargetMetrics_metrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TargetMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -18613,9 +20022,9 @@ func (ec *executionContext) _TargetPair_bazelInvocation(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.BazelInvocation) + res := resTmp.(*ent.BazelInvocation) fc.Result = res - return ec.marshalOBazelInvocation2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationᚄ(ctx, field.Selections, res) + return ec.marshalOBazelInvocation2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocation(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TargetPair_bazelInvocation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -19144,6 +20553,47 @@ func (ec *executionContext) fieldContext_TestCollection_cachedRemotely(ctx conte return fc, nil } +func (ec *executionContext) _TestCollection_firstSeen(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollection_firstSeen(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FirstSeen, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*time.Time) + fc.Result = res + return ec.marshalOTime2ᚖtimeᚐTime(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollection_firstSeen(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Time does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _TestCollection_durationMs(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollection) (ret graphql.Marshaler) { fc, err := 
ec.fieldContext_TestCollection_durationMs(ctx, field) if err != nil { @@ -19208,9 +20658,9 @@ func (ec *executionContext) _TestCollection_bazelInvocation(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.BazelInvocation) + res := resTmp.(*ent.BazelInvocation) fc.Result = res - return ec.marshalOBazelInvocation2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationᚄ(ctx, field.Selections, res) + return ec.marshalOBazelInvocation2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocation(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TestCollection_bazelInvocation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -19420,8 +20870,580 @@ func (ec *executionContext) fieldContext_TestCollection_testResults(ctx context. return fc, nil } -func (ec *executionContext) _TestFile_id(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_id(ctx, field) +func (ec *executionContext) _TestCollectionConnection_edges(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollectionConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollectionConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*ent.TestCollectionEdge) + fc.Result = res + return ec.marshalOTestCollectionEdge2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionEdge(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollectionConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollectionConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "node": + return ec.fieldContext_TestCollectionEdge_node(ctx, field) + case "cursor": + return ec.fieldContext_TestCollectionEdge_cursor(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestCollectionEdge", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TestCollectionConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollectionConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollectionConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(entgql.PageInfo[int]) + 
fc.Result = res + return ec.marshalNPageInfo2entgoᚗioᚋcontribᚋentgqlᚐPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollectionConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollectionConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TestCollectionConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollectionConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollectionConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollectionConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollectionConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestCollectionEdge_node(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollectionEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollectionEdge_node(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*ent.TestCollection) + fc.Result = res + return ec.marshalOTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollectionEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollectionEdge", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TestCollection_id(ctx, field) + case "label": + return ec.fieldContext_TestCollection_label(ctx, field) + case "overallStatus": + return ec.fieldContext_TestCollection_overallStatus(ctx, field) + case "strategy": + return ec.fieldContext_TestCollection_strategy(ctx, field) + case "cachedLocally": + return ec.fieldContext_TestCollection_cachedLocally(ctx, field) + case "cachedRemotely": + return ec.fieldContext_TestCollection_cachedRemotely(ctx, field) + case "firstSeen": + return ec.fieldContext_TestCollection_firstSeen(ctx, field) + case "durationMs": + return ec.fieldContext_TestCollection_durationMs(ctx, field) + case "bazelInvocation": + return ec.fieldContext_TestCollection_bazelInvocation(ctx, field) + case "testSummary": + return ec.fieldContext_TestCollection_testSummary(ctx, field) + case "testResults": + return ec.fieldContext_TestCollection_testResults(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestCollection", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TestCollectionEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *ent.TestCollectionEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestCollectionEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(entgql.Cursor[int]) + fc.Result = res + return ec.marshalNCursor2entgoᚗioᚋcontribᚋentgqlᚐCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestCollectionEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestCollectionEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_id(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TestFile().ID(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNID2string(ctx, field.Selections, res) +} + +func 
(ec *executionContext) fieldContext_TestFile_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_digest(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_digest(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Digest, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_digest(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_file(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_file(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.File, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_file(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_length(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_length(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.Length, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(int64) + fc.Result = res + return ec.marshalOInt2int64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_length(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_name(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_prefix(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_prefix(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Prefix, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalOString2ᚕstringᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_prefix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestFile_testResult(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestFile_testResult(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TestResult(ctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*ent.TestResultBES) + fc.Result = res + return ec.marshalOTestResultBES2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBES(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestFile_testResult(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestFile", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TestResultBES_id(ctx, field) + case "testStatus": + return ec.fieldContext_TestResultBES_testStatus(ctx, field) + case "statusDetails": + return ec.fieldContext_TestResultBES_statusDetails(ctx, field) + case "label": + return ec.fieldContext_TestResultBES_label(ctx, field) + case "warning": + return ec.fieldContext_TestResultBES_warning(ctx, field) + case "cachedLocally": + return ec.fieldContext_TestResultBES_cachedLocally(ctx, field) + case "testAttemptStartMillisEpoch": + return ec.fieldContext_TestResultBES_testAttemptStartMillisEpoch(ctx, field) + case "testAttemptStart": + return ec.fieldContext_TestResultBES_testAttemptStart(ctx, field) + case "testAttemptDurationMillis": + return ec.fieldContext_TestResultBES_testAttemptDurationMillis(ctx, field) + case "testAttemptDuration": + return ec.fieldContext_TestResultBES_testAttemptDuration(ctx, field) + case "testCollection": + return ec.fieldContext_TestResultBES_testCollection(ctx, field) + case "testActionOutput": + return ec.fieldContext_TestResultBES_testActionOutput(ctx, field) + case "executionInfo": + return ec.fieldContext_TestResultBES_executionInfo(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestResultBES", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridCell_invocationId(ctx context.Context, field graphql.CollectedField, obj *model.TestGridCell) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridCell_invocationId(ctx, field) if err != nil { return graphql.Null } @@ -19434,38 +21456,76 @@ func (ec *executionContext) _TestFile_id(ctx context.Context, field graphql.Coll }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.TestFile().ID(rctx, obj) + return obj.InvocationID, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") + return graphql.Null + } + res := resTmp.(*uuid.UUID) + fc.Result = res + return ec.marshalOUUID2ᚖgithubᚗcomᚋgoogleᚋuuidᚐUUID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestGridCell_invocationId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestGridCell", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return 
nil, errors.New("field of type UUID does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridCell_status(ctx context.Context, field graphql.CollectedField, obj *model.TestGridCell) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridCell_status(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Status, nil + }) + if err != nil { + ec.Error(ctx, err) return graphql.Null } - res := resTmp.(string) + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*testcollection.OverallStatus) fc.Result = res - return ec.marshalNID2string(ctx, field.Selections, res) + return ec.marshalOTestCollectionOverallStatus2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridCell_status(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridCell", Field: field, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type ID does not have child fields") + return nil, errors.New("field of type TestCollectionOverallStatus does not have child fields") }, } return fc, nil } -func (ec *executionContext) _TestFile_digest(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_digest(ctx, field) +func (ec *executionContext) _TestGridResult_total(ctx context.Context, field graphql.CollectedField, obj *model.TestGridResult) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridResult_total(ctx, field) if err != nil { return graphql.Null } @@ -19478,7 +21538,7 @@ func (ec *executionContext) _TestFile_digest(ctx context.Context, field graphql. }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Digest, nil + return obj.Total, nil }) if err != nil { ec.Error(ctx, err) @@ -19487,26 +21547,26 @@ func (ec *executionContext) _TestFile_digest(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*int) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_digest(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridResult_total(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridResult", Field: field, IsMethod: false, IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + return nil, errors.New("field of type Int does not have child fields") }, } return fc, nil } -func (ec *executionContext) _TestFile_file(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_file(ctx, field) +func (ec *executionContext) _TestGridResult_result(ctx context.Context, field graphql.CollectedField, obj *model.TestGridResult) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridResult_result(ctx, field) if err != nil { return graphql.Null } @@ -19519,7 +21579,7 @@ func (ec *executionContext) _TestFile_file(ctx context.Context, field graphql.Co }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.File, nil + return obj.Result, nil }) if err != nil { ec.Error(ctx, err) @@ -19528,14 +21588,73 @@ func (ec *executionContext) _TestFile_file(ctx context.Context, field graphql.Co if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*model.TestGridRow) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOTestGridRow2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridRow(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_file(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridResult_result(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridResult", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "label": + return ec.fieldContext_TestGridRow_label(ctx, field) + case "count": + return ec.fieldContext_TestGridRow_count(ctx, field) + case "sum": + return ec.fieldContext_TestGridRow_sum(ctx, field) + case "min": + return ec.fieldContext_TestGridRow_min(ctx, field) + case "max": + return ec.fieldContext_TestGridRow_max(ctx, field) + case "avg": + return ec.fieldContext_TestGridRow_avg(ctx, field) + case "passRate": + return ec.fieldContext_TestGridRow_passRate(ctx, field) + case "cells": + return ec.fieldContext_TestGridRow_cells(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TestGridRow", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridRow_label(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_TestGridRow_label(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestGridRow_label(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestGridRow", Field: field, IsMethod: false, IsResolver: false, @@ -19546,8 +21665,8 @@ func (ec *executionContext) fieldContext_TestFile_file(ctx context.Context, fiel return fc, nil } -func (ec *executionContext) _TestFile_length(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_length(ctx, field) +func (ec *executionContext) _TestGridRow_count(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_count(ctx, field) if err != nil { return graphql.Null } @@ -19560,7 +21679,7 @@ func (ec *executionContext) _TestFile_length(ctx context.Context, field graphql. }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Length, nil + return obj.Count, nil }) if err != nil { ec.Error(ctx, err) @@ -19569,14 +21688,14 @@ func (ec *executionContext) _TestFile_length(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.(int64) + res := resTmp.(*int) fc.Result = res - return ec.marshalOInt2int64(ctx, field.Selections, res) + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_length(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridRow_count(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridRow", Field: field, IsMethod: false, IsResolver: false, @@ -19587,8 +21706,8 @@ func (ec *executionContext) fieldContext_TestFile_length(ctx context.Context, fi return fc, nil } -func (ec *executionContext) _TestFile_name(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_name(ctx, field) +func (ec *executionContext) _TestGridRow_sum(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_sum(ctx, field) if err != nil { return graphql.Null } @@ -19601,7 +21720,7 @@ func (ec *executionContext) _TestFile_name(ctx context.Context, field graphql.Co }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Name, nil + return obj.Sum, nil }) if err != nil { ec.Error(ctx, err) @@ -19610,26 +21729,26 @@ func (ec *executionContext) _TestFile_name(ctx context.Context, field graphql.Co if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*int) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridRow_sum(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridRow", Field: field, IsMethod: false, IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + return nil, errors.New("field of type Int does not have child fields") }, } return fc, nil } -func (ec *executionContext) _TestFile_prefix(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_prefix(ctx, field) +func (ec *executionContext) _TestGridRow_min(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_min(ctx, field) if err != nil { return graphql.Null } @@ -19642,7 +21761,7 @@ func (ec *executionContext) _TestFile_prefix(ctx context.Context, field graphql. }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Prefix, nil + return obj.Min, nil }) if err != nil { ec.Error(ctx, err) @@ -19651,26 +21770,26 @@ func (ec *executionContext) _TestFile_prefix(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.([]string) + res := resTmp.(*int) fc.Result = res - return ec.marshalOString2ᚕstringᚄ(ctx, field.Selections, res) + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_prefix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridRow_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridRow", Field: field, IsMethod: false, IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") + return nil, errors.New("field of type Int does not have child fields") }, } return fc, nil } -func (ec *executionContext) _TestFile_testResult(ctx context.Context, field graphql.CollectedField, obj *ent.TestFile) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_TestFile_testResult(ctx, field) +func (ec *executionContext) _TestGridRow_max(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_max(ctx, field) if err != nil { return graphql.Null } @@ -19683,7 +21802,7 @@ func (ec *executionContext) _TestFile_testResult(ctx context.Context, field grap }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.TestResult(ctx) + return obj.Max, nil }) if err != nil { ec.Error(ctx, err) @@ -19692,47 +21811,148 @@ func (ec *executionContext) _TestFile_testResult(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TestResultBES) + res := resTmp.(*int) fc.Result = res - return ec.marshalOTestResultBES2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBESᚄ(ctx, field.Selections, res) + return ec.marshalOInt2ᚖint(ctx, field.Selections, res) } -func (ec *executionContext) fieldContext_TestFile_testResult(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { +func (ec *executionContext) fieldContext_TestGridRow_max(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { fc = &graphql.FieldContext{ - Object: "TestFile", + Object: "TestGridRow", Field: field, - IsMethod: true, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridRow_avg(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_avg(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Avg, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return 
ec.marshalOFloat2ᚖfloat64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestGridRow_avg(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestGridRow", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridRow_passRate(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_passRate(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PassRate, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return ec.marshalOFloat2ᚖfloat64(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestGridRow_passRate(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestGridRow", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _TestGridRow_cells(ctx context.Context, field graphql.CollectedField, obj *model.TestGridRow) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TestGridRow_cells(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cells, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*model.TestGridCell) + fc.Result = res + return ec.marshalOTestGridCell2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridCell(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_TestGridRow_cells(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TestGridRow", + Field: field, + IsMethod: false, IsResolver: false, Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { switch field.Name { - case "id": - return ec.fieldContext_TestResultBES_id(ctx, field) - case "testStatus": - return ec.fieldContext_TestResultBES_testStatus(ctx, field) - case "statusDetails": - return ec.fieldContext_TestResultBES_statusDetails(ctx, field) - case "label": - return ec.fieldContext_TestResultBES_label(ctx, field) - case "warning": - return ec.fieldContext_TestResultBES_warning(ctx, field) - case "cachedLocally": - return 
ec.fieldContext_TestResultBES_cachedLocally(ctx, field) - case "testAttemptStartMillisEpoch": - return ec.fieldContext_TestResultBES_testAttemptStartMillisEpoch(ctx, field) - case "testAttemptStart": - return ec.fieldContext_TestResultBES_testAttemptStart(ctx, field) - case "testAttemptDurationMillis": - return ec.fieldContext_TestResultBES_testAttemptDurationMillis(ctx, field) - case "testAttemptDuration": - return ec.fieldContext_TestResultBES_testAttemptDuration(ctx, field) - case "testCollection": - return ec.fieldContext_TestResultBES_testCollection(ctx, field) - case "testActionOutput": - return ec.fieldContext_TestResultBES_testActionOutput(ctx, field) - case "executionInfo": - return ec.fieldContext_TestResultBES_executionInfo(ctx, field) + case "invocationId": + return ec.fieldContext_TestGridCell_invocationId(ctx, field) + case "status": + return ec.fieldContext_TestGridCell_status(ctx, field) } - return nil, fmt.Errorf("no field named %q was found under type TestResultBES", field.Name) + return nil, fmt.Errorf("no field named %q was found under type TestGridCell", field.Name) }, } return fc, nil @@ -20716,6 +22936,8 @@ func (ec *executionContext) fieldContext_TestResultBES_testCollection(ctx contex return ec.fieldContext_TestCollection_cachedLocally(ctx, field) case "cachedRemotely": return ec.fieldContext_TestCollection_cachedRemotely(ctx, field) + case "firstSeen": + return ec.fieldContext_TestCollection_firstSeen(ctx, field) case "durationMs": return ec.fieldContext_TestCollection_durationMs(ctx, field) case "bazelInvocation": @@ -21326,9 +23548,9 @@ func (ec *executionContext) _TestSummary_testCollection(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TestCollection) + res := resTmp.(*ent.TestCollection) fc.Result = res - return ec.marshalOTestCollection2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionᚄ(ctx, field.Selections, res) + return ec.marshalOTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TestSummary_testCollection(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -21351,6 +23573,8 @@ func (ec *executionContext) fieldContext_TestSummary_testCollection(ctx context. 
return ec.fieldContext_TestCollection_cachedLocally(ctx, field) case "cachedRemotely": return ec.fieldContext_TestCollection_cachedRemotely(ctx, field) + case "firstSeen": + return ec.fieldContext_TestCollection_firstSeen(ctx, field) case "durationMs": return ec.fieldContext_TestCollection_durationMs(ctx, field) case "bazelInvocation": @@ -21629,9 +23853,9 @@ func (ec *executionContext) _TimingBreakdown_executionInfo(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.ExectionInfo) + res := resTmp.(*ent.ExectionInfo) fc.Result = res - return ec.marshalOExectionInfo2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfoᚄ(ctx, field.Selections, res) + return ec.marshalOExectionInfo2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfo(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TimingBreakdown_executionInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -21867,9 +24091,9 @@ func (ec *executionContext) _TimingChild_timingBreakdown(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.TimingBreakdown) + res := resTmp.(*ent.TimingBreakdown) fc.Result = res - return ec.marshalOTimingBreakdown2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdownᚄ(ctx, field.Selections, res) + return ec.marshalOTimingBreakdown2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdown(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TimingChild_timingBreakdown(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -22169,9 +24393,9 @@ func (ec *executionContext) _TimingMetrics_metrics(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.([]*ent.Metrics) + res := resTmp.(*ent.Metrics) fc.Result = res - return ec.marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx, field.Selections, res) + return ec.marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, field.Selections, res) } func (ec *executionContext) fieldContext_TimingMetrics_metrics(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { @@ -25893,6 +28117,44 @@ func (ec *executionContext) unmarshalInputArtifactMetricsWhereInput(ctx context. 
return it, nil } +func (ec *executionContext) unmarshalInputBazelInvocationOrder(ctx context.Context, obj interface{}) (ent.BazelInvocationOrder, error) { + var it ent.BazelInvocationOrder + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + if _, present := asMap["direction"]; !present { + asMap["direction"] = "ASC" + } + + fieldsInOrder := [...]string{"direction", "field"} + for _, k := range fieldsInOrder { + v, ok := asMap[k] + if !ok { + continue + } + switch k { + case "direction": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("direction")) + data, err := ec.unmarshalNOrderDirection2entgoᚗioᚋcontribᚋentgqlᚐOrderDirection(ctx, v) + if err != nil { + return it, err + } + it.Direction = data + case "field": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("field")) + data, err := ec.unmarshalNBazelInvocationOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationOrderField(ctx, v) + if err != nil { + return it, err + } + it.Field = data + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputBazelInvocationProblemWhereInput(ctx context.Context, obj interface{}) (ent.BazelInvocationProblemWhereInput, error) { var it ent.BazelInvocationProblemWhereInput asMap := map[string]interface{}{} @@ -37737,6 +39999,44 @@ func (ec *executionContext) unmarshalInputTargetPairWhereInput(ctx context.Conte return it, nil } +func (ec *executionContext) unmarshalInputTestCollectionOrder(ctx context.Context, obj interface{}) (ent.TestCollectionOrder, error) { + var it ent.TestCollectionOrder + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + if _, present := asMap["direction"]; !present { + asMap["direction"] = "ASC" + } + + fieldsInOrder := [...]string{"direction", "field"} + for _, k := range fieldsInOrder { + v, ok := asMap[k] + if !ok { + continue + } + switch k { + case "direction": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("direction")) + data, err := ec.unmarshalNOrderDirection2entgoᚗioᚋcontribᚋentgqlᚐOrderDirection(ctx, v) + if err != nil { + return it, err + } + it.Direction = data + case "field": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("field")) + data, err := ec.unmarshalNTestCollectionOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionOrderField(ctx, v) + if err != nil { + return it, err + } + it.Field = data + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputTestCollectionWhereInput(ctx context.Context, obj interface{}) (ent.TestCollectionWhereInput, error) { var it ent.TestCollectionWhereInput asMap := map[string]interface{}{} @@ -37744,7 +40044,7 @@ func (ec *executionContext) unmarshalInputTestCollectionWhereInput(ctx context.C asMap[k] = v } - fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "label", "labelNEQ", "labelIn", "labelNotIn", "labelGT", "labelGTE", "labelLT", "labelLTE", "labelContains", "labelHasPrefix", "labelHasSuffix", "labelIsNil", "labelNotNil", "labelEqualFold", "labelContainsFold", "overallStatus", "overallStatusNEQ", "overallStatusIn", "overallStatusNotIn", "overallStatusIsNil", "overallStatusNotNil", "strategy", "strategyNEQ", "strategyIn", "strategyNotIn", "strategyGT", "strategyGTE", "strategyLT", "strategyLTE", "strategyContains", "strategyHasPrefix", "strategyHasSuffix", "strategyIsNil", "strategyNotNil", "strategyEqualFold", 
"strategyContainsFold", "cachedLocally", "cachedLocallyNEQ", "cachedLocallyIsNil", "cachedLocallyNotNil", "cachedRemotely", "cachedRemotelyNEQ", "cachedRemotelyIsNil", "cachedRemotelyNotNil", "durationMs", "durationMsNEQ", "durationMsIn", "durationMsNotIn", "durationMsGT", "durationMsGTE", "durationMsLT", "durationMsLTE", "durationMsIsNil", "durationMsNotNil", "hasBazelInvocation", "hasBazelInvocationWith", "hasTestSummary", "hasTestSummaryWith", "hasTestResults", "hasTestResultsWith"} + fieldsInOrder := [...]string{"not", "and", "or", "id", "idNEQ", "idIn", "idNotIn", "idGT", "idGTE", "idLT", "idLTE", "label", "labelNEQ", "labelIn", "labelNotIn", "labelGT", "labelGTE", "labelLT", "labelLTE", "labelContains", "labelHasPrefix", "labelHasSuffix", "labelIsNil", "labelNotNil", "labelEqualFold", "labelContainsFold", "overallStatus", "overallStatusNEQ", "overallStatusIn", "overallStatusNotIn", "overallStatusIsNil", "overallStatusNotNil", "strategy", "strategyNEQ", "strategyIn", "strategyNotIn", "strategyGT", "strategyGTE", "strategyLT", "strategyLTE", "strategyContains", "strategyHasPrefix", "strategyHasSuffix", "strategyIsNil", "strategyNotNil", "strategyEqualFold", "strategyContainsFold", "cachedLocally", "cachedLocallyNEQ", "cachedLocallyIsNil", "cachedLocallyNotNil", "cachedRemotely", "cachedRemotelyNEQ", "cachedRemotelyIsNil", "cachedRemotelyNotNil", "firstSeen", "firstSeenNEQ", "firstSeenIn", "firstSeenNotIn", "firstSeenGT", "firstSeenGTE", "firstSeenLT", "firstSeenLTE", "firstSeenIsNil", "firstSeenNotNil", "durationMs", "durationMsNEQ", "durationMsIn", "durationMsNotIn", "durationMsGT", "durationMsGTE", "durationMsLT", "durationMsLTE", "durationMsIsNil", "durationMsNotNil", "hasBazelInvocation", "hasBazelInvocationWith", "hasTestSummary", "hasTestSummaryWith", "hasTestResults", "hasTestResultsWith"} for _, k := range fieldsInOrder { v, ok := asMap[k] if !ok { @@ -38152,6 +40452,76 @@ func (ec *executionContext) unmarshalInputTestCollectionWhereInput(ctx context.C return it, err } it.CachedRemotelyNotNil = data + case "firstSeen": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeen")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + it.FirstSeen = data + case "firstSeenNEQ": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenNEQ")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenNEQ = data + case "firstSeenIn": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenIn")) + data, err := ec.unmarshalOTime2ᚕtimeᚐTimeᚄ(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenIn = data + case "firstSeenNotIn": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenNotIn")) + data, err := ec.unmarshalOTime2ᚕtimeᚐTimeᚄ(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenNotIn = data + case "firstSeenGT": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenGT")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenGT = data + case "firstSeenGTE": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenGTE")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenGTE = data + case "firstSeenLT": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenLT")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + 
it.FirstSeenLT = data + case "firstSeenLTE": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenLTE")) + data, err := ec.unmarshalOTime2ᚖtimeᚐTime(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenLTE = data + case "firstSeenIsNil": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenIsNil")) + data, err := ec.unmarshalOBoolean2bool(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenIsNil = data + case "firstSeenNotNil": + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("firstSeenNotNil")) + data, err := ec.unmarshalOBoolean2bool(ctx, v) + if err != nil { + return it, err + } + it.FirstSeenNotNil = data case "durationMs": ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("durationMs")) data, err := ec.unmarshalOInt2ᚖint64(ctx, v) @@ -47040,6 +49410,28 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) } + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "findTests": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_findTests(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) case "bazelInvocation": field := field @@ -47081,6 +49473,158 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) } + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getUniqueTestLabels": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getUniqueTestLabels(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getUniqueTargetLabels": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getUniqueTargetLabels(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getTestDurationAggregation": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getTestDurationAggregation(ctx, field) + return res + } + 
+ rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getTestPassAggregation": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getTestPassAggregation(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getTargetDurationAggregation": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getTargetDurationAggregation(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getTargetPassAggregation": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getTargetPassAggregation(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getTestsWithOffset": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getTestsWithOffset(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "getAveragePassPercentageForLabel": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_getAveragePassPercentageForLabel(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return rrm(innerCtx) }) case "__type": out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { @@ -47529,102 +50073,150 @@ func (ec *executionContext) _RunnerCountEdge(ctx context.Context, sel ast.Select return out } 
-var systemNetworkStatsImplementors = []string{"SystemNetworkStats", "Node"} +var systemNetworkStatsImplementors = []string{"SystemNetworkStats", "Node"} + +func (ec *executionContext) _SystemNetworkStats(ctx context.Context, sel ast.SelectionSet, obj *ent.SystemNetworkStats) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, systemNetworkStatsImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SystemNetworkStats") + case "id": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SystemNetworkStats_id(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + if field.Deferrable != nil { + dfs, ok := deferred[field.Deferrable.Label] + di := 0 + if ok { + dfs.AddField(field) + di = len(dfs.Values) - 1 + } else { + dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) + deferred[field.Deferrable.Label] = dfs + } + dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { + return innerFunc(ctx, dfs) + }) + + // don't run the out.Concurrently() call below + out.Values[i] = graphql.Null + continue + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + case "bytesSent": + out.Values[i] = ec._SystemNetworkStats_bytesSent(ctx, field, obj) + case "bytesRecv": + out.Values[i] = ec._SystemNetworkStats_bytesRecv(ctx, field, obj) + case "packetsSent": + out.Values[i] = ec._SystemNetworkStats_packetsSent(ctx, field, obj) + case "packetsRecv": + out.Values[i] = ec._SystemNetworkStats_packetsRecv(ctx, field, obj) + case "peakBytesSentPerSec": + out.Values[i] = ec._SystemNetworkStats_peakBytesSentPerSec(ctx, field, obj) + case "peakBytesRecvPerSec": + out.Values[i] = ec._SystemNetworkStats_peakBytesRecvPerSec(ctx, field, obj) + case "peakPacketsSentPerSec": + out.Values[i] = ec._SystemNetworkStats_peakPacketsSentPerSec(ctx, field, obj) + case "peakPacketsRecvPerSec": + out.Values[i] = ec._SystemNetworkStats_peakPacketsRecvPerSec(ctx, field, obj) + case "networkMetrics": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SystemNetworkStats_networkMetrics(ctx, field, obj) + return res + } + + if field.Deferrable != nil { + dfs, ok := deferred[field.Deferrable.Label] + di := 0 + if ok { + dfs.AddField(field) + di = len(dfs.Values) - 1 + } else { + dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) + deferred[field.Deferrable.Label] = dfs + } + dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { + return innerFunc(ctx, dfs) + }) + + // don't run the out.Concurrently() call below + out.Values[i] = graphql.Null + continue + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, 
+ Context: ctx, + }) + } + + return out +} + +var targetAggregateImplementors = []string{"TargetAggregate"} -func (ec *executionContext) _SystemNetworkStats(ctx context.Context, sel ast.SelectionSet, obj *ent.SystemNetworkStats) graphql.Marshaler { - fields := graphql.CollectFields(ec.OperationContext, sel, systemNetworkStatsImplementors) +func (ec *executionContext) _TargetAggregate(ctx context.Context, sel ast.SelectionSet, obj *model.TargetAggregate) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, targetAggregateImplementors) out := graphql.NewFieldSet(fields) deferred := make(map[string]*graphql.FieldSet) for i, field := range fields { switch field.Name { case "__typename": - out.Values[i] = graphql.MarshalString("SystemNetworkStats") - case "id": - field := field - - innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._SystemNetworkStats_id(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&fs.Invalids, 1) - } - return res - } - - if field.Deferrable != nil { - dfs, ok := deferred[field.Deferrable.Label] - di := 0 - if ok { - dfs.AddField(field) - di = len(dfs.Values) - 1 - } else { - dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) - deferred[field.Deferrable.Label] = dfs - } - dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { - return innerFunc(ctx, dfs) - }) - - // don't run the out.Concurrently() call below - out.Values[i] = graphql.Null - continue - } - - out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) - case "bytesSent": - out.Values[i] = ec._SystemNetworkStats_bytesSent(ctx, field, obj) - case "bytesRecv": - out.Values[i] = ec._SystemNetworkStats_bytesRecv(ctx, field, obj) - case "packetsSent": - out.Values[i] = ec._SystemNetworkStats_packetsSent(ctx, field, obj) - case "packetsRecv": - out.Values[i] = ec._SystemNetworkStats_packetsRecv(ctx, field, obj) - case "peakBytesSentPerSec": - out.Values[i] = ec._SystemNetworkStats_peakBytesSentPerSec(ctx, field, obj) - case "peakBytesRecvPerSec": - out.Values[i] = ec._SystemNetworkStats_peakBytesRecvPerSec(ctx, field, obj) - case "peakPacketsSentPerSec": - out.Values[i] = ec._SystemNetworkStats_peakPacketsSentPerSec(ctx, field, obj) - case "peakPacketsRecvPerSec": - out.Values[i] = ec._SystemNetworkStats_peakPacketsRecvPerSec(ctx, field, obj) - case "networkMetrics": - field := field - - innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._SystemNetworkStats_networkMetrics(ctx, field, obj) - return res - } - - if field.Deferrable != nil { - dfs, ok := deferred[field.Deferrable.Label] - di := 0 - if ok { - dfs.AddField(field) - di = len(dfs.Values) - 1 - } else { - dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) - deferred[field.Deferrable.Label] = dfs - } - dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { - return innerFunc(ctx, dfs) - }) - - // don't run the out.Concurrently() call below - out.Values[i] = graphql.Null - continue - } - - out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + out.Values[i] = graphql.MarshalString("TargetAggregate") + case "label": + out.Values[i] = ec._TargetAggregate_label(ctx, field, obj) + case "count": + out.Values[i] = 
ec._TargetAggregate_count(ctx, field, obj) + case "sum": + out.Values[i] = ec._TargetAggregate_sum(ctx, field, obj) + case "min": + out.Values[i] = ec._TargetAggregate_min(ctx, field, obj) + case "max": + out.Values[i] = ec._TargetAggregate_max(ctx, field, obj) + case "avg": + out.Values[i] = ec._TargetAggregate_avg(ctx, field, obj) + case "pass": + out.Values[i] = ec._TargetAggregate_pass(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -48366,6 +50958,8 @@ func (ec *executionContext) _TestCollection(ctx context.Context, sel ast.Selecti out.Values[i] = ec._TestCollection_cachedLocally(ctx, field, obj) case "cachedRemotely": out.Values[i] = ec._TestCollection_cachedRemotely(ctx, field, obj) + case "firstSeen": + out.Values[i] = ec._TestCollection_firstSeen(ctx, field, obj) case "durationMs": out.Values[i] = ec._TestCollection_durationMs(ctx, field, obj) case "bazelInvocation": @@ -48434,7 +51028,207 @@ func (ec *executionContext) _TestCollection(ctx context.Context, sel ast.Selecti } out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) - case "testResults": + case "testResults": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TestCollection_testResults(ctx, field, obj) + return res + } + + if field.Deferrable != nil { + dfs, ok := deferred[field.Deferrable.Label] + di := 0 + if ok { + dfs.AddField(field) + di = len(dfs.Values) - 1 + } else { + dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) + deferred[field.Deferrable.Label] = dfs + } + dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { + return innerFunc(ctx, dfs) + }) + + // don't run the out.Concurrently() call below + out.Values[i] = graphql.Null + continue + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var testCollectionConnectionImplementors = []string{"TestCollectionConnection"} + +func (ec *executionContext) _TestCollectionConnection(ctx context.Context, sel ast.SelectionSet, obj *ent.TestCollectionConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testCollectionConnectionImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TestCollectionConnection") + case "edges": + out.Values[i] = ec._TestCollectionConnection_edges(ctx, field, obj) + case "pageInfo": + out.Values[i] = ec._TestCollectionConnection_pageInfo(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "totalCount": + out.Values[i] = ec._TestCollectionConnection_totalCount(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, 
int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var testCollectionEdgeImplementors = []string{"TestCollectionEdge"} + +func (ec *executionContext) _TestCollectionEdge(ctx context.Context, sel ast.SelectionSet, obj *ent.TestCollectionEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testCollectionEdgeImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TestCollectionEdge") + case "node": + out.Values[i] = ec._TestCollectionEdge_node(ctx, field, obj) + case "cursor": + out.Values[i] = ec._TestCollectionEdge_cursor(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var testFileImplementors = []string{"TestFile", "Node"} + +func (ec *executionContext) _TestFile(ctx context.Context, sel ast.SelectionSet, obj *ent.TestFile) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testFileImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TestFile") + case "id": + field := field + + innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TestFile_id(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&fs.Invalids, 1) + } + return res + } + + if field.Deferrable != nil { + dfs, ok := deferred[field.Deferrable.Label] + di := 0 + if ok { + dfs.AddField(field) + di = len(dfs.Values) - 1 + } else { + dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) + deferred[field.Deferrable.Label] = dfs + } + dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { + return innerFunc(ctx, dfs) + }) + + // don't run the out.Concurrently() call below + out.Values[i] = graphql.Null + continue + } + + out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + case "digest": + out.Values[i] = ec._TestFile_digest(ctx, field, obj) + case "file": + out.Values[i] = ec._TestFile_file(ctx, field, obj) + case "length": + out.Values[i] = ec._TestFile_length(ctx, field, obj) + case "name": + out.Values[i] = ec._TestFile_name(ctx, field, obj) + case "prefix": + out.Values[i] = ec._TestFile_prefix(ctx, field, obj) + case "testResult": field := field innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { @@ -48443,7 +51237,7 @@ func (ec *executionContext) _TestCollection(ctx context.Context, sel ast.Selecti ec.Error(ctx, ec.Recover(ctx, r)) } }() - res = ec._TestCollection_testResults(ctx, field, obj) + res = ec._TestFile_testResult(ctx, field, obj) return res } @@ -48490,96 +51284,109 @@ func (ec *executionContext) 
_TestCollection(ctx context.Context, sel ast.Selecti return out } -var testFileImplementors = []string{"TestFile", "Node"} +var testGridCellImplementors = []string{"TestGridCell"} -func (ec *executionContext) _TestFile(ctx context.Context, sel ast.SelectionSet, obj *ent.TestFile) graphql.Marshaler { - fields := graphql.CollectFields(ec.OperationContext, sel, testFileImplementors) +func (ec *executionContext) _TestGridCell(ctx context.Context, sel ast.SelectionSet, obj *model.TestGridCell) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testGridCellImplementors) out := graphql.NewFieldSet(fields) deferred := make(map[string]*graphql.FieldSet) for i, field := range fields { switch field.Name { case "__typename": - out.Values[i] = graphql.MarshalString("TestFile") - case "id": - field := field + out.Values[i] = graphql.MarshalString("TestGridCell") + case "invocationId": + out.Values[i] = ec._TestGridCell_invocationId(ctx, field, obj) + case "status": + out.Values[i] = ec._TestGridCell_status(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } - innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._TestFile_id(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&fs.Invalids, 1) - } - return res - } + atomic.AddInt32(&ec.deferred, int32(len(deferred))) - if field.Deferrable != nil { - dfs, ok := deferred[field.Deferrable.Label] - di := 0 - if ok { - dfs.AddField(field) - di = len(dfs.Values) - 1 - } else { - dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) - deferred[field.Deferrable.Label] = dfs - } - dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler { - return innerFunc(ctx, dfs) - }) + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } - // don't run the out.Concurrently() call below - out.Values[i] = graphql.Null - continue - } + return out +} - out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) - case "digest": - out.Values[i] = ec._TestFile_digest(ctx, field, obj) - case "file": - out.Values[i] = ec._TestFile_file(ctx, field, obj) - case "length": - out.Values[i] = ec._TestFile_length(ctx, field, obj) - case "name": - out.Values[i] = ec._TestFile_name(ctx, field, obj) - case "prefix": - out.Values[i] = ec._TestFile_prefix(ctx, field, obj) - case "testResult": - field := field +var testGridResultImplementors = []string{"TestGridResult"} - innerFunc := func(ctx context.Context, fs *graphql.FieldSet) (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._TestFile_testResult(ctx, field, obj) - return res - } +func (ec *executionContext) _TestGridResult(ctx context.Context, sel ast.SelectionSet, obj *model.TestGridResult) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testGridResultImplementors) - if field.Deferrable != nil { - dfs, ok := deferred[field.Deferrable.Label] - di := 0 - if ok { - dfs.AddField(field) - di = len(dfs.Values) - 1 - } else { - dfs = graphql.NewFieldSet([]graphql.CollectedField{field}) - deferred[field.Deferrable.Label] = dfs - } - dfs.Concurrently(di, func(ctx context.Context) 
graphql.Marshaler { - return innerFunc(ctx, dfs) - }) + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TestGridResult") + case "total": + out.Values[i] = ec._TestGridResult_total(ctx, field, obj) + case "result": + out.Values[i] = ec._TestGridResult_result(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } - // don't run the out.Concurrently() call below - out.Values[i] = graphql.Null - continue - } + atomic.AddInt32(&ec.deferred, int32(len(deferred))) - out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var testGridRowImplementors = []string{"TestGridRow"} + +func (ec *executionContext) _TestGridRow(ctx context.Context, sel ast.SelectionSet, obj *model.TestGridRow) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, testGridRowImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TestGridRow") + case "label": + out.Values[i] = ec._TestGridRow_label(ctx, field, obj) + case "count": + out.Values[i] = ec._TestGridRow_count(ctx, field, obj) + case "sum": + out.Values[i] = ec._TestGridRow_sum(ctx, field, obj) + case "min": + out.Values[i] = ec._TestGridRow_min(ctx, field, obj) + case "max": + out.Values[i] = ec._TestGridRow_max(ctx, field, obj) + case "avg": + out.Values[i] = ec._TestGridRow_avg(ctx, field, obj) + case "passRate": + out.Values[i] = ec._TestGridRow_passRate(ctx, field, obj) + case "cells": + out.Values[i] = ec._TestGridRow_cells(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -49896,16 +52703,6 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o // region ***************************** type.gotpl ***************************** -func (ec *executionContext) marshalNActionCacheStatistics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatistics(ctx context.Context, sel ast.SelectionSet, v *ent.ActionCacheStatistics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._ActionCacheStatistics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNActionCacheStatisticsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatisticsWhereInput(ctx context.Context, v interface{}) (*ent.ActionCacheStatisticsWhereInput, error) { res, err := ec.unmarshalInputActionCacheStatisticsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -49936,31 +52733,11 @@ func (ec *executionContext) marshalNActionOutputStatus2githubᚗcomᚋbuildbarn return v } -func (ec *executionContext) marshalNActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx context.Context, sel ast.SelectionSet, v *ent.ActionSummary) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested 
element is null which the schema does not allow") - } - return graphql.Null - } - return ec._ActionSummary(ctx, sel, v) -} - func (ec *executionContext) unmarshalNActionSummaryWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInput(ctx context.Context, v interface{}) (*ent.ActionSummaryWhereInput, error) { res, err := ec.unmarshalInputActionSummaryWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNArtifactMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.ArtifactMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._ArtifactMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNArtifactMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetricsWhereInput(ctx context.Context, v interface{}) (*ent.ArtifactMetricsWhereInput, error) { res, err := ec.unmarshalInputArtifactMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50008,6 +52785,22 @@ func (ec *executionContext) marshalNBazelInvocationConnection2ᚖgithubᚗcomᚋ return ec._BazelInvocationConnection(ctx, sel, v) } +func (ec *executionContext) unmarshalNBazelInvocationOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationOrderField(ctx context.Context, v interface{}) (*ent.BazelInvocationOrderField, error) { + var res = new(ent.BazelInvocationOrderField) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNBazelInvocationOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationOrderField(ctx context.Context, sel ast.SelectionSet, v *ent.BazelInvocationOrderField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return v +} + func (ec *executionContext) unmarshalNBazelInvocationProblemWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationProblemWhereInput(ctx context.Context, v interface{}) (*ent.BazelInvocationProblemWhereInput, error) { res, err := ec.unmarshalInputBazelInvocationProblemWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50090,16 +52883,6 @@ func (ec *executionContext) marshalNBuildConnection2ᚖgithubᚗcomᚋbuildbarn return ec._BuildConnection(ctx, sel, v) } -func (ec *executionContext) marshalNBuildGraphMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.BuildGraphMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._BuildGraphMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNBuildGraphMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetricsWhereInput(ctx context.Context, v interface{}) (*ent.BuildGraphMetricsWhereInput, error) { res, err := ec.unmarshalInputBuildGraphMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50110,16 +52893,6 @@ func (ec *executionContext) unmarshalNBuildWhereInput2ᚖgithubᚗcomᚋbuildbar return &res, graphql.ErrorOnPath(ctx, err) } -func (ec 
*executionContext) marshalNCumulativeMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.CumulativeMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._CumulativeMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNCumulativeMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetricsWhereInput(ctx context.Context, v interface{}) (*ent.CumulativeMetricsWhereInput, error) { res, err := ec.unmarshalInputCumulativeMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50135,16 +52908,6 @@ func (ec *executionContext) marshalNCursor2entgoᚗioᚋcontribᚋentgqlᚐCurso return v } -func (ec *executionContext) marshalNDynamicExecutionMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.DynamicExecutionMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._DynamicExecutionMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNDynamicExecutionMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetricsWhereInput(ctx context.Context, v interface{}) (*ent.DynamicExecutionMetricsWhereInput, error) { res, err := ec.unmarshalInputDynamicExecutionMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50204,16 +52967,6 @@ func (ec *executionContext) marshalNEnvVar2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑpo return ec._EnvVar(ctx, sel, v) } -func (ec *executionContext) marshalNEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx context.Context, sel ast.SelectionSet, v *ent.EvaluationStat) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._EvaluationStat(ctx, sel, v) -} - func (ec *executionContext) unmarshalNEvaluationStatWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatWhereInput(ctx context.Context, v interface{}) (*ent.EvaluationStatWhereInput, error) { res, err := ec.unmarshalInputEvaluationStatWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50234,31 +52987,11 @@ func (ec *executionContext) unmarshalNEventFileWhereInput2ᚖgithubᚗcomᚋbuil return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNExectionInfo2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfo(ctx context.Context, sel ast.SelectionSet, v *ent.ExectionInfo) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._ExectionInfo(ctx, sel, v) -} - func (ec *executionContext) unmarshalNExectionInfoWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfoWhereInput(ctx context.Context, v interface{}) (*ent.ExectionInfoWhereInput, error) { res, err := ec.unmarshalInputExectionInfoWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalNFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx context.Context, sel ast.SelectionSet, v *ent.FilesMetric) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._FilesMetric(ctx, sel, v) -} - func (ec *executionContext) unmarshalNFilesMetricWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricWhereInput(ctx context.Context, v interface{}) (*ent.FilesMetricWhereInput, error) { res, err := ec.unmarshalInputFilesMetricWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50386,31 +53119,11 @@ func (ec *executionContext) marshalNInt2uint64(ctx context.Context, sel ast.Sele return res } -func (ec *executionContext) marshalNMemoryMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.MemoryMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._MemoryMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNMemoryMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetricsWhereInput(ctx context.Context, v interface{}) (*ent.MemoryMetricsWhereInput, error) { res, err := ec.unmarshalInputMemoryMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.Metrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._Metrics(ctx, sel, v) -} - func (ec *executionContext) marshalNMetricsConnection2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsConnection(ctx context.Context, sel ast.SelectionSet, v ent.MetricsConnection) graphql.Marshaler { return ec._MetricsConnection(ctx, sel, &v) } @@ -50514,16 +53227,6 @@ func (ec *executionContext) unmarshalNNamedSetOfFilesWhereInput2ᚖgithubᚗcom return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNNetworkMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.NetworkMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._NetworkMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNNetworkMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInput(ctx context.Context, v interface{}) (*ent.NetworkMetricsWhereInput, error) { res, err := ec.unmarshalInputNetworkMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50567,14 +53270,14 @@ func (ec *executionContext) marshalNNode2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑport return ret } -func (ec *executionContext) marshalNOutputGroup2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroup(ctx context.Context, sel ast.SelectionSet, v *ent.OutputGroup) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the 
schema does not allow") - } - return graphql.Null - } - return ec._OutputGroup(ctx, sel, v) +func (ec *executionContext) unmarshalNOrderDirection2entgoᚗioᚋcontribᚋentgqlᚐOrderDirection(ctx context.Context, v interface{}) (entgql.OrderDirection, error) { + var res entgql.OrderDirection + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNOrderDirection2entgoᚗioᚋcontribᚋentgqlᚐOrderDirection(ctx context.Context, sel ast.SelectionSet, v entgql.OrderDirection) graphql.Marshaler { + return v } func (ec *executionContext) unmarshalNOutputGroupWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroupWhereInput(ctx context.Context, v interface{}) (*ent.OutputGroupWhereInput, error) { @@ -50597,16 +53300,6 @@ func (ec *executionContext) unmarshalNPackageLoadMetricsWhereInput2ᚖgithubᚗc return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNPackageMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.PackageMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._PackageMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNPackageMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetricsWhereInput(ctx context.Context, v interface{}) (*ent.PackageMetricsWhereInput, error) { res, err := ec.unmarshalInputPackageMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50744,31 +53437,11 @@ func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.S return res } -func (ec *executionContext) marshalNSystemNetworkStats2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStats(ctx context.Context, sel ast.SelectionSet, v *ent.SystemNetworkStats) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._SystemNetworkStats(ctx, sel, v) -} - func (ec *executionContext) unmarshalNSystemNetworkStatsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInput(ctx context.Context, v interface{}) (*ent.SystemNetworkStatsWhereInput, error) { res, err := ec.unmarshalInputSystemNetworkStatsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNTargetComplete2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetComplete(ctx context.Context, sel ast.SelectionSet, v *ent.TargetComplete) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._TargetComplete(ctx, sel, v) -} - func (ec *executionContext) unmarshalNTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, v interface{}) (targetcomplete.TestSize, error) { var res targetcomplete.TestSize err := res.UnmarshalGQL(v) @@ -50799,16 +53472,6 @@ func (ec *executionContext) unmarshalNTargetConfiguredWhereInput2ᚖgithubᚗcom return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNTargetMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetrics(ctx context.Context, sel ast.SelectionSet, v 
*ent.TargetMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._TargetMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNTargetMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInput(ctx context.Context, v interface{}) (*ent.TargetMetricsWhereInput, error) { res, err := ec.unmarshalInputTargetMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -50859,6 +53522,36 @@ func (ec *executionContext) marshalNTestCollection2ᚖgithubᚗcomᚋbuildbarn return ec._TestCollection(ctx, sel, v) } +func (ec *executionContext) marshalNTestCollectionConnection2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionConnection(ctx context.Context, sel ast.SelectionSet, v ent.TestCollectionConnection) graphql.Marshaler { + return ec._TestCollectionConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNTestCollectionConnection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionConnection(ctx context.Context, sel ast.SelectionSet, v *ent.TestCollectionConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec._TestCollectionConnection(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNTestCollectionOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionOrderField(ctx context.Context, v interface{}) (*ent.TestCollectionOrderField, error) { + var res = new(ent.TestCollectionOrderField) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNTestCollectionOrderField2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionOrderField(ctx context.Context, sel ast.SelectionSet, v *ent.TestCollectionOrderField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return v +} + func (ec *executionContext) unmarshalNTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, v interface{}) (testcollection.OverallStatus, error) { var res testcollection.OverallStatus err := res.UnmarshalGQL(v) @@ -50998,16 +53691,6 @@ func (ec *executionContext) marshalNTime2timeᚐTime(ctx context.Context, sel as return res } -func (ec *executionContext) marshalNTimingBreakdown2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdown(ctx context.Context, sel ast.SelectionSet, v *ent.TimingBreakdown) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._TimingBreakdown(ctx, sel, v) -} - func (ec *executionContext) unmarshalNTimingBreakdownWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdownWhereInput(ctx context.Context, v interface{}) (*ent.TimingBreakdownWhereInput, error) { res, err := ec.unmarshalInputTimingBreakdownWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -51028,16 +53711,6 @@ func (ec *executionContext) unmarshalNTimingChildWhereInput2ᚖgithubᚗcomᚋbu return &res, graphql.ErrorOnPath(ctx, err) } 
-func (ec *executionContext) marshalNTimingMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.TimingMetrics) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._TimingMetrics(ctx, sel, v) -} - func (ec *executionContext) unmarshalNTimingMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetricsWhereInput(ctx context.Context, v interface{}) (*ent.TimingMetricsWhereInput, error) { res, err := ec.unmarshalInputTimingMetricsWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) @@ -51311,51 +53984,11 @@ func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel a return res } -func (ec *executionContext) marshalOActionCacheStatistics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatisticsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.ActionCacheStatistics) graphql.Marshaler { +func (ec *executionContext) marshalOActionCacheStatistics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatistics(ctx context.Context, sel ast.SelectionSet, v *ent.ActionCacheStatistics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNActionCacheStatistics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatistics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._ActionCacheStatistics(ctx, sel, v) } func (ec *executionContext) unmarshalOActionCacheStatisticsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionCacheStatisticsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ActionCacheStatisticsWhereInput, error) { @@ -51413,157 +54046,7 @@ func (ec *executionContext) marshalOActionData2ᚕᚖgithubᚗcomᚋbuildbarnᚋ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNActionData2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionData(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) unmarshalOActionDataWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ActionDataWhereInput, error) { - if v == nil { - return nil, nil - } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) - } - var err error - res := make([]*ent.ActionDataWhereInput, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNActionDataWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInput(ctx, vSlice[i]) - if err != nil { - return nil, err - } - } - return res, nil -} - -func (ec *executionContext) unmarshalOActionDataWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInput(ctx 
context.Context, v interface{}) (*ent.ActionDataWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputActionDataWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOActionSummary2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.ActionSummary) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) unmarshalOActionSummaryWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ActionSummaryWhereInput, error) { - if v == nil { - return nil, nil - } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) - } - var err error - res := make([]*ent.ActionSummaryWhereInput, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNActionSummaryWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInput(ctx, vSlice[i]) - if err != nil { - return nil, err - } - } - return res, nil -} - -func (ec *executionContext) unmarshalOActionSummaryWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInput(ctx context.Context, v interface{}) (*ent.ActionSummaryWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputActionSummaryWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOArtifactMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.ArtifactMetrics) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNArtifactMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetrics(ctx, sel, v[i]) + ret[i] = ec.marshalNActionData2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionData(ctx, sel, v[i]) } if isLen1 { f(i) @@ -51583,6 +54066,76 @@ func (ec *executionContext) marshalOArtifactMetrics2ᚕᚖgithubᚗcomᚋbuildba return ret } +func (ec *executionContext) unmarshalOActionDataWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ActionDataWhereInput, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) 
+ } + var err error + res := make([]*ent.ActionDataWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNActionDataWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalOActionDataWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionDataWhereInput(ctx context.Context, v interface{}) (*ent.ActionDataWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputActionDataWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOActionSummary2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummary(ctx context.Context, sel ast.SelectionSet, v *ent.ActionSummary) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ActionSummary(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOActionSummaryWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ActionSummaryWhereInput, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*ent.ActionSummaryWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNActionSummaryWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalOActionSummaryWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐActionSummaryWhereInput(ctx context.Context, v interface{}) (*ent.ActionSummaryWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputActionSummaryWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOArtifactMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.ArtifactMetrics) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ArtifactMetrics(ctx, sel, v) +} + func (ec *executionContext) unmarshalOArtifactMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐArtifactMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.ArtifactMetricsWhereInput, error) { if v == nil { return nil, nil @@ -51713,6 +54266,14 @@ func (ec *executionContext) marshalOBazelInvocationEdge2ᚖgithubᚗcomᚋbuildb return ec._BazelInvocationEdge(ctx, sel, v) } +func (ec *executionContext) unmarshalOBazelInvocationOrder2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationOrder(ctx context.Context, v interface{}) (*ent.BazelInvocationOrder, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputBazelInvocationOrder(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalOBazelInvocationProblemWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBazelInvocationProblemWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.BazelInvocationProblemWhereInput, error) { if v == nil { return nil, nil @@ -51968,51 +54529,11 @@ func (ec *executionContext) marshalOBuildEdge2ᚖgithubᚗcomᚋbuildbarnᚋbb return ec._BuildEdge(ctx, sel, v) } -func (ec *executionContext) 
marshalOBuildGraphMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.BuildGraphMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOBuildGraphMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.BuildGraphMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNBuildGraphMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._BuildGraphMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalOBuildGraphMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐBuildGraphMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.BuildGraphMetricsWhereInput, error) { @@ -52071,51 +54592,11 @@ func (ec *executionContext) unmarshalOBuildWhereInput2ᚖgithubᚗcomᚋbuildbar return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOCumulativeMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.CumulativeMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOCumulativeMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.CumulativeMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNCumulativeMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._CumulativeMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalOCumulativeMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐCumulativeMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.CumulativeMetricsWhereInput, error) { @@ -52162,51 +54643,11 @@ func (ec *executionContext) marshalOCursor2ᚖentgoᚗioᚋcontribᚋentgqlᚐCu return v } -func (ec *executionContext) marshalODynamicExecutionMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.DynamicExecutionMetrics) graphql.Marshaler { +func (ec *executionContext) marshalODynamicExecutionMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.DynamicExecutionMetrics) graphql.Marshaler { if v == nil { return 
graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNDynamicExecutionMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._DynamicExecutionMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalODynamicExecutionMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐDynamicExecutionMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.DynamicExecutionMetricsWhereInput, error) { @@ -52237,51 +54678,11 @@ func (ec *executionContext) unmarshalODynamicExecutionMetricsWhereInput2ᚖgithu return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOEvaluationStat2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.EvaluationStat) graphql.Marshaler { +func (ec *executionContext) marshalOEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx context.Context, sel ast.SelectionSet, v *ent.EvaluationStat) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNEvaluationStat2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStat(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._EvaluationStat(ctx, sel, v) } func (ec *executionContext) unmarshalOEvaluationStatWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐEvaluationStatWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.EvaluationStatWhereInput, error) { @@ -52340,53 +54741,6 @@ func (ec *executionContext) unmarshalOEventFileWhereInput2ᚖgithubᚗcomᚋbuil return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOExectionInfo2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfoᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.ExectionInfo) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNExectionInfo2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfo(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - 
for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - func (ec *executionContext) marshalOExectionInfo2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐExectionInfo(ctx context.Context, sel ast.SelectionSet, v *ent.ExectionInfo) graphql.Marshaler { if v == nil { return graphql.Null @@ -52429,51 +54783,11 @@ func (ec *executionContext) marshalOExitCode2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑ return ec._ExitCode(ctx, sel, v) } -func (ec *executionContext) marshalOFilesMetric2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.FilesMetric) graphql.Marshaler { +func (ec *executionContext) marshalOFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx context.Context, sel ast.SelectionSet, v *ent.FilesMetric) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNFilesMetric2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetric(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._FilesMetric(ctx, sel, v) } func (ec *executionContext) unmarshalOFilesMetricWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐFilesMetricWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.FilesMetricWhereInput, error) { @@ -52504,6 +54818,22 @@ func (ec *executionContext) unmarshalOFilesMetricWhereInput2ᚖgithubᚗcomᚋbu return &res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalOFloat2ᚖfloat64(ctx context.Context, v interface{}) (*float64, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalFloatContext(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOFloat2ᚖfloat64(ctx context.Context, sel ast.SelectionSet, v *float64) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalFloatContext(*v) + return graphql.WrapContextMarshaler(ctx, res) +} + func (ec *executionContext) marshalOGarbageMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐGarbageMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.GarbageMetrics) graphql.Marshaler { if v == nil { return graphql.Null @@ -52889,51 +55219,11 @@ func (ec *executionContext) marshalOInt2ᚖuint64(ctx context.Context, sel ast.S return res } -func (ec *executionContext) marshalOMemoryMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.MemoryMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOMemoryMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.MemoryMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r 
:= recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNMemoryMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._MemoryMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalOMemoryMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMemoryMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.MemoryMetricsWhereInput, error) { @@ -52964,53 +55254,6 @@ func (ec *executionContext) unmarshalOMemoryMetricsWhereInput2ᚖgithubᚗcomᚋ return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.Metrics) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - func (ec *executionContext) marshalOMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.Metrics) graphql.Marshaler { if v == nil { return graphql.Null @@ -53198,133 +55441,7 @@ func (ec *executionContext) marshalOMissDetailReason2ᚕgithubᚗcomᚋbuildbarn if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNMissDetailReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) unmarshalOMissDetailReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx context.Context, v interface{}) (*missdetail.Reason, error) { - if v == nil { - return nil, nil - } - var res = new(missdetail.Reason) - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOMissDetailReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx context.Context, sel ast.SelectionSet, v *missdetail.Reason) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return v -} - -func (ec *executionContext) unmarshalOMissDetailWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.MissDetailWhereInput, error) { - if v == nil { - return nil, nil - } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) - } - var err error - res := make([]*ent.MissDetailWhereInput, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNMissDetailWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInput(ctx, vSlice[i]) - if err 
!= nil { - return nil, err - } - } - return res, nil -} - -func (ec *executionContext) unmarshalOMissDetailWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInput(ctx context.Context, v interface{}) (*ent.MissDetailWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputMissDetailWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalONamedSetOfFiles2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFiles(ctx context.Context, sel ast.SelectionSet, v *ent.NamedSetOfFiles) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return ec._NamedSetOfFiles(ctx, sel, v) -} - -func (ec *executionContext) unmarshalONamedSetOfFilesWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.NamedSetOfFilesWhereInput, error) { - if v == nil { - return nil, nil - } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) - } - var err error - res := make([]*ent.NamedSetOfFilesWhereInput, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNNamedSetOfFilesWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInput(ctx, vSlice[i]) - if err != nil { - return nil, err - } - } - return res, nil -} - -func (ec *executionContext) unmarshalONamedSetOfFilesWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInput(ctx context.Context, v interface{}) (*ent.NamedSetOfFilesWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputNamedSetOfFilesWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalONetworkMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.NetworkMetrics) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNNetworkMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetrics(ctx, sel, v[i]) + ret[i] = ec.marshalNMissDetailReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx, sel, v[i]) } if isLen1 { f(i) @@ -53344,14 +55461,23 @@ func (ec *executionContext) marshalONetworkMetrics2ᚕᚖgithubᚗcomᚋbuildbar return ret } -func (ec *executionContext) marshalONetworkMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.NetworkMetrics) graphql.Marshaler { +func (ec *executionContext) unmarshalOMissDetailReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx context.Context, v interface{}) (*missdetail.Reason, error) { + if v == nil { + return nil, nil + } + var res = new(missdetail.Reason) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOMissDetailReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋmissdetailᚐReason(ctx context.Context, sel ast.SelectionSet, v *missdetail.Reason) graphql.Marshaler { if v == nil { return 
graphql.Null } - return ec._NetworkMetrics(ctx, sel, v) + return v } -func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.NetworkMetricsWhereInput, error) { +func (ec *executionContext) unmarshalOMissDetailWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.MissDetailWhereInput, error) { if v == nil { return nil, nil } @@ -53360,10 +55486,10 @@ func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚕᚖgithubᚗco vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.NetworkMetricsWhereInput, len(vSlice)) + res := make([]*ent.MissDetailWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNNetworkMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNMissDetailWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -53371,66 +55497,89 @@ func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚕᚖgithubᚗco return res, nil } -func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInput(ctx context.Context, v interface{}) (*ent.NetworkMetricsWhereInput, error) { +func (ec *executionContext) unmarshalOMissDetailWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐMissDetailWhereInput(ctx context.Context, v interface{}) (*ent.MissDetailWhereInput, error) { if v == nil { return nil, nil } - res, err := ec.unmarshalInputNetworkMetricsWhereInput(ctx, v) + res, err := ec.unmarshalInputMissDetailWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalONode2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNoder(ctx context.Context, sel ast.SelectionSet, v ent.Noder) graphql.Marshaler { +func (ec *executionContext) marshalONamedSetOfFiles2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFiles(ctx context.Context, sel ast.SelectionSet, v *ent.NamedSetOfFiles) graphql.Marshaler { if v == nil { return graphql.Null } - return ec._Node(ctx, sel, v) + return ec._NamedSetOfFiles(ctx, sel, v) } -func (ec *executionContext) marshalOOutputGroup2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroupᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.OutputGroup) graphql.Marshaler { +func (ec *executionContext) unmarshalONamedSetOfFilesWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.NamedSetOfFilesWhereInput, error) { if v == nil { - return graphql.Null + return nil, nil } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNOutputGroup2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroup(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) + var err error + res := 
make([]*ent.NamedSetOfFilesWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNNamedSetOfFilesWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err } + } + return res, nil +} +func (ec *executionContext) unmarshalONamedSetOfFilesWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNamedSetOfFilesWhereInput(ctx context.Context, v interface{}) (*ent.NamedSetOfFilesWhereInput, error) { + if v == nil { + return nil, nil } - wg.Wait() + res, err := ec.unmarshalInputNamedSetOfFilesWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} - for _, e := range ret { - if e == graphql.Null { - return graphql.Null +func (ec *executionContext) marshalONetworkMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.NetworkMetrics) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._NetworkMetrics(ctx, sel, v) +} + +func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.NetworkMetricsWhereInput, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*ent.NetworkMetricsWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNNetworkMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err } } + return res, nil +} - return ret +func (ec *executionContext) unmarshalONetworkMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNetworkMetricsWhereInput(ctx context.Context, v interface{}) (*ent.NetworkMetricsWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputNetworkMetricsWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalONode2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐNoder(ctx context.Context, sel ast.SelectionSet, v ent.Noder) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Node(ctx, sel, v) } func (ec *executionContext) marshalOOutputGroup2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐOutputGroup(ctx context.Context, sel ast.SelectionSet, v *ent.OutputGroup) graphql.Marshaler { @@ -53543,51 +55692,11 @@ func (ec *executionContext) unmarshalOPackageLoadMetricsWhereInput2ᚖgithubᚗc return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOPackageMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.PackageMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOPackageMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.PackageMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() 
- if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNPackageMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._PackageMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalOPackageMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐPackageMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.PackageMetricsWhereInput, error) { @@ -53953,6 +56062,38 @@ func (ec *executionContext) marshalOString2ᚕstringᚄ(ctx context.Context, sel return ret } +func (ec *executionContext) unmarshalOString2ᚕᚖstring(ctx context.Context, v interface{}) ([]*string, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalOString2ᚖstring(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalOString2ᚕᚖstring(ctx context.Context, sel ast.SelectionSet, v []*string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalOString2ᚖstring(ctx, sel, v[i]) + } + + return ret +} + func (ec *executionContext) unmarshalOString2ᚖstring(ctx context.Context, v interface{}) (*string, error) { if v == nil { return nil, nil @@ -53969,7 +56110,42 @@ func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel as return res } -func (ec *executionContext) marshalOSystemNetworkStats2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.SystemNetworkStats) graphql.Marshaler { +func (ec *executionContext) marshalOSystemNetworkStats2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStats(ctx context.Context, sel ast.SelectionSet, v *ent.SystemNetworkStats) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SystemNetworkStats(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.SystemNetworkStatsWhereInput, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*ent.SystemNetworkStatsWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNSystemNetworkStatsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInput(ctx context.Context, v interface{}) (*ent.SystemNetworkStatsWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputSystemNetworkStatsWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetAggregate2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx context.Context, sel ast.SelectionSet, v 
[]*model.TargetAggregate) graphql.Marshaler { if v == nil { return graphql.Null } @@ -53996,7 +56172,7 @@ func (ec *executionContext) marshalOSystemNetworkStats2ᚕᚖgithubᚗcomᚋbuil if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNSystemNetworkStats2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStats(ctx, sel, v[i]) + ret[i] = ec.marshalOTargetAggregate2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54007,16 +56183,34 @@ func (ec *executionContext) marshalOSystemNetworkStats2ᚕᚖgithubᚗcomᚋbuil } wg.Wait() - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } + return ret +} + +func (ec *executionContext) marshalOTargetAggregate2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTargetAggregate(ctx context.Context, sel ast.SelectionSet, v *model.TargetAggregate) graphql.Marshaler { + if v == nil { + return graphql.Null } + return ec._TargetAggregate(ctx, sel, v) +} - return ret +func (ec *executionContext) marshalOTargetComplete2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetComplete(ctx context.Context, sel ast.SelectionSet, v *ent.TargetComplete) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TargetComplete(ctx, sel, v) } -func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.SystemNetworkStatsWhereInput, error) { +func (ec *executionContext) unmarshalOTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, v interface{}) (targetcomplete.TestSize, error) { + var res targetcomplete.TestSize + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetcomplete.TestSize) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetcomplete.TestSize, error) { if v == nil { return nil, nil } @@ -54025,10 +56219,10 @@ func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚕᚖgithub vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.SystemNetworkStatsWhereInput, len(vSlice)) + res := make([]targetcomplete.TestSize, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNSystemNetworkStatsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54036,15 +56230,7 @@ func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚕᚖgithub return res, nil } -func (ec *executionContext) unmarshalOSystemNetworkStatsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐSystemNetworkStatsWhereInput(ctx context.Context, v interface{}) (*ent.SystemNetworkStatsWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputSystemNetworkStatsWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) 
marshalOTargetComplete2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TargetComplete) graphql.Marshaler { +func (ec *executionContext) marshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetcomplete.TestSize) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54071,7 +56257,7 @@ func (ec *executionContext) marshalOTargetComplete2ᚕᚖgithubᚗcomᚋbuildbar if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetComplete2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetComplete(ctx, sel, v[i]) + ret[i] = ec.marshalNTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54091,24 +56277,68 @@ func (ec *executionContext) marshalOTargetComplete2ᚕᚖgithubᚗcomᚋbuildbar return ret } -func (ec *executionContext) marshalOTargetComplete2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetComplete(ctx context.Context, sel ast.SelectionSet, v *ent.TargetComplete) graphql.Marshaler { +func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, v interface{}) (*targetcomplete.TestSize, error) { + if v == nil { + return nil, nil + } + var res = new(targetcomplete.TestSize) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetCompleteTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetcomplete.TestSize) graphql.Marshaler { if v == nil { return graphql.Null } - return ec._TargetComplete(ctx, sel, v) + return v } -func (ec *executionContext) unmarshalOTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, v interface{}) (targetcomplete.TestSize, error) { - var res targetcomplete.TestSize +func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetCompleteWhereInput, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]*ent.TargetCompleteWhereInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNTargetCompleteWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInput(ctx context.Context, v interface{}) (*ent.TargetCompleteWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputTargetCompleteWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetConfigured2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfigured(ctx context.Context, sel ast.SelectionSet, v *ent.TargetConfigured) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TargetConfigured(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, v 
interface{}) (targetconfigured.TestSize, error) { + var res targetconfigured.TestSize err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetcomplete.TestSize) graphql.Marshaler { +func (ec *executionContext) marshalOTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetconfigured.TestSize) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetcomplete.TestSize, error) { +func (ec *executionContext) unmarshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetconfigured.TestSize, error) { if v == nil { return nil, nil } @@ -54117,10 +56347,10 @@ func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋb vSlice = graphql.CoerceList(v) } var err error - res := make([]targetcomplete.TestSize, len(vSlice)) + res := make([]targetconfigured.TestSize, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54128,7 +56358,7 @@ func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋb return res, nil } -func (ec *executionContext) marshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetcomplete.TestSize) graphql.Marshaler { +func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetconfigured.TestSize) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54155,7 +56385,7 @@ func (ec *executionContext) marshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbui if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetCompleteTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx, sel, v[i]) + ret[i] = ec.marshalNTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54175,23 +56405,23 @@ func (ec *executionContext) marshalOTargetCompleteTestSize2ᚕgithubᚗcomᚋbui return ret } -func (ec *executionContext) unmarshalOTargetCompleteTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, v interface{}) (*targetcomplete.TestSize, error) { +func (ec *executionContext) unmarshalOTargetConfiguredTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, v interface{}) (*targetconfigured.TestSize, error) { if v == nil { return nil, nil } - var res = new(targetcomplete.TestSize) + var res = new(targetconfigured.TestSize) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalOTargetCompleteTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetcompleteᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetcomplete.TestSize) graphql.Marshaler { +func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetconfigured.TestSize) graphql.Marshaler { if v == nil { return graphql.Null } return v } -func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetCompleteWhereInput, error) { +func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetConfiguredWhereInput, error) { if v == nil { return nil, nil } @@ -54200,10 +56430,10 @@ func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚕᚖgithubᚗco vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.TargetCompleteWhereInput, len(vSlice)) + res := make([]*ent.TargetConfiguredWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetCompleteWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetConfiguredWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54211,32 +56441,22 @@ func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚕᚖgithubᚗco return res, nil } -func (ec *executionContext) unmarshalOTargetCompleteWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetCompleteWhereInput(ctx context.Context, v interface{}) (*ent.TargetCompleteWhereInput, error) { +func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInput(ctx context.Context, v interface{}) (*ent.TargetConfiguredWhereInput, error) { if v == nil { return nil, nil } - res, err := ec.unmarshalInputTargetCompleteWhereInput(ctx, v) + res, err := ec.unmarshalInputTargetConfiguredWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTargetConfigured2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfigured(ctx context.Context, sel ast.SelectionSet, v *ent.TargetConfigured) graphql.Marshaler { +func (ec *executionContext) marshalOTargetMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.TargetMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - return ec._TargetConfigured(ctx, sel, v) -} - -func (ec *executionContext) unmarshalOTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, v interface{}) (targetconfigured.TestSize, error) { - var res targetconfigured.TestSize - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetconfigured.TestSize) graphql.Marshaler { - return v + return ec._TargetMetrics(ctx, sel, v) } -func (ec *executionContext) 
unmarshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetconfigured.TestSize, error) { +func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetMetricsWhereInput, error) { if v == nil { return nil, nil } @@ -54245,10 +56465,10 @@ func (ec *executionContext) unmarshalOTargetConfiguredTestSize2ᚕgithubᚗcom vSlice = graphql.CoerceList(v) } var err error - res := make([]targetconfigured.TestSize, len(vSlice)) + res := make([]*ent.TargetMetricsWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54256,7 +56476,15 @@ func (ec *executionContext) unmarshalOTargetConfiguredTestSize2ᚕgithubᚗcom return res, nil } -func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetconfigured.TestSize) graphql.Marshaler { +func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInput(ctx context.Context, v interface{}) (*ent.TargetMetricsWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputTargetMetricsWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TargetPair) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54283,7 +56511,7 @@ func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋb if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetConfiguredTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx, sel, v[i]) + ret[i] = ec.marshalNTargetPair2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPair(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54303,23 +56531,24 @@ func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚕgithubᚗcomᚋb return ret } -func (ec *executionContext) unmarshalOTargetConfiguredTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, v interface{}) (*targetconfigured.TestSize, error) { +func (ec *executionContext) marshalOTargetPair2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPair(ctx context.Context, sel ast.SelectionSet, v *ent.TargetPair) graphql.Marshaler { if v == nil { - return nil, nil + return graphql.Null } - var res = new(targetconfigured.TestSize) + return ec._TargetPair(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, v interface{}) (targetpair.AbortReason, error) { + var res targetpair.AbortReason err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTargetConfiguredTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetconfiguredᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetconfigured.TestSize) 
graphql.Marshaler { - if v == nil { - return graphql.Null - } +func (ec *executionContext) marshalOTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, sel ast.SelectionSet, v targetpair.AbortReason) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetConfiguredWhereInput, error) { +func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReasonᚄ(ctx context.Context, v interface{}) ([]targetpair.AbortReason, error) { if v == nil { return nil, nil } @@ -54328,10 +56557,10 @@ func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚕᚖgithubᚗ vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.TargetConfiguredWhereInput, len(vSlice)) + res := make([]targetpair.AbortReason, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetConfiguredWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54339,15 +56568,7 @@ func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚕᚖgithubᚗ return res, nil } -func (ec *executionContext) unmarshalOTargetConfiguredWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetConfiguredWhereInput(ctx context.Context, v interface{}) (*ent.TargetConfiguredWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputTargetConfiguredWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTargetMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TargetMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReasonᚄ(ctx context.Context, sel ast.SelectionSet, v []targetpair.AbortReason) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54374,7 +56595,7 @@ func (ec *executionContext) marshalOTargetMetrics2ᚕᚖgithubᚗcomᚋbuildbarn if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetrics(ctx, sel, v[i]) + ret[i] = ec.marshalNTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54394,7 +56615,33 @@ func (ec *executionContext) marshalOTargetMetrics2ᚕᚖgithubᚗcomᚋbuildbarn return ret } -func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetMetricsWhereInput, error) { +func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, v interface{}) (*targetpair.AbortReason, error) { + if v == nil { + return nil, nil + } + var res = new(targetpair.AbortReason) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetPairAbortReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, sel 
ast.SelectionSet, v *targetpair.AbortReason) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) unmarshalOTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, v interface{}) (targetpair.TestSize, error) { + var res targetpair.TestSize + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetpair.TestSize) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetpair.TestSize, error) { if v == nil { return nil, nil } @@ -54403,10 +56650,10 @@ func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚕᚖgithubᚗcom vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.TargetMetricsWhereInput, len(vSlice)) + res := make([]targetpair.TestSize, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54414,15 +56661,7 @@ func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚕᚖgithubᚗcom return res, nil } -func (ec *executionContext) unmarshalOTargetMetricsWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetMetricsWhereInput(ctx context.Context, v interface{}) (*ent.TargetMetricsWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputTargetMetricsWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TargetPair) graphql.Marshaler { +func (ec *executionContext) marshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetpair.TestSize) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54449,7 +56688,7 @@ func (ec *executionContext) marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetPair2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPair(ctx, sel, v[i]) + ret[i] = ec.marshalNTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54469,17 +56708,23 @@ func (ec *executionContext) marshalOTargetPair2ᚕᚖgithubᚗcomᚋbuildbarnᚋ return ret } -func (ec *executionContext) unmarshalOTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, v interface{}) (targetpair.AbortReason, error) { - var res targetpair.AbortReason +func (ec *executionContext) unmarshalOTargetPairTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, v interface{}) (*targetpair.TestSize, error) { + if v == nil { + return nil, nil + } + var res = new(targetpair.TestSize) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalOTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, sel ast.SelectionSet, v targetpair.AbortReason) graphql.Marshaler { +func (ec *executionContext) marshalOTargetPairTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetpair.TestSize) graphql.Marshaler { + if v == nil { + return graphql.Null + } return v } -func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReasonᚄ(ctx context.Context, v interface{}) ([]targetpair.AbortReason, error) { +func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetPairWhereInput, error) { if v == nil { return nil, nil } @@ -54488,10 +56733,10 @@ func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbu vSlice = graphql.CoerceList(v) } var err error - res := make([]targetpair.AbortReason, len(vSlice)) + res := make([]*ent.TargetPairWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTargetPairWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54499,7 +56744,15 @@ func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbu return res, nil } -func (ec *executionContext) marshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReasonᚄ(ctx context.Context, sel ast.SelectionSet, v []targetpair.AbortReason) graphql.Marshaler { +func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInput(ctx context.Context, v interface{}) (*ent.TargetPairWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputTargetPairWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTestCollection2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TestCollection) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54526,7 +56779,7 @@ func (ec *executionContext) marshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuil if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetPairAbortReason2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx, sel, v[i]) + ret[i] = ec.marshalNTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54546,53 +56799,14 @@ func (ec *executionContext) marshalOTargetPairAbortReason2ᚕgithubᚗcomᚋbuil return ret } -func (ec *executionContext) unmarshalOTargetPairAbortReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, v interface{}) (*targetpair.AbortReason, error) { - if v == nil { - return nil, nil - } - var res = new(targetpair.AbortReason) - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTargetPairAbortReason2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐAbortReason(ctx context.Context, sel ast.SelectionSet, v *targetpair.AbortReason) graphql.Marshaler { +func (ec *executionContext) 
marshalOTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx context.Context, sel ast.SelectionSet, v *ent.TestCollection) graphql.Marshaler { if v == nil { return graphql.Null } - return v -} - -func (ec *executionContext) unmarshalOTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, v interface{}) (targetpair.TestSize, error) { - var res targetpair.TestSize - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, sel ast.SelectionSet, v targetpair.TestSize) graphql.Marshaler { - return v -} - -func (ec *executionContext) unmarshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSizeᚄ(ctx context.Context, v interface{}) ([]targetpair.TestSize, error) { - if v == nil { - return nil, nil - } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) - } - var err error - res := make([]targetpair.TestSize, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx, vSlice[i]) - if err != nil { - return nil, err - } - } - return res, nil + return ec._TestCollection(ctx, sel, v) } -func (ec *executionContext) marshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSizeᚄ(ctx context.Context, sel ast.SelectionSet, v []targetpair.TestSize) graphql.Marshaler { +func (ec *executionContext) marshalOTestCollectionEdge2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionEdge(ctx context.Context, sel ast.SelectionSet, v []*ent.TestCollectionEdge) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54619,7 +56833,7 @@ func (ec *executionContext) marshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildba if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTargetPairTestSize2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx, sel, v[i]) + ret[i] = ec.marshalOTestCollectionEdge2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionEdge(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54630,32 +56844,35 @@ func (ec *executionContext) marshalOTargetPairTestSize2ᚕgithubᚗcomᚋbuildba } wg.Wait() - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - return ret } -func (ec *executionContext) unmarshalOTargetPairTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, v interface{}) (*targetpair.TestSize, error) { +func (ec *executionContext) marshalOTestCollectionEdge2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionEdge(ctx context.Context, sel ast.SelectionSet, v *ent.TestCollectionEdge) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TestCollectionEdge(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOTestCollectionOrder2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionOrder(ctx context.Context, v interface{}) (*ent.TestCollectionOrder, error) { if v == nil { return nil, nil } - var res = new(targetpair.TestSize) + res, err := ec.unmarshalInputTestCollectionOrder(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalOTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, v interface{}) 
(testcollection.OverallStatus, error) { + var res testcollection.OverallStatus err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTargetPairTestSize2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtargetpairᚐTestSize(ctx context.Context, sel ast.SelectionSet, v *targetpair.TestSize) graphql.Marshaler { - if v == nil { - return graphql.Null - } +func (ec *executionContext) marshalOTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, sel ast.SelectionSet, v testcollection.OverallStatus) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TargetPairWhereInput, error) { +func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatusᚄ(ctx context.Context, v interface{}) ([]testcollection.OverallStatus, error) { if v == nil { return nil, nil } @@ -54664,10 +56881,10 @@ func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚕᚖgithubᚗcomᚋ vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.TargetPairWhereInput, len(vSlice)) + res := make([]testcollection.OverallStatus, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTargetPairWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54675,15 +56892,7 @@ func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚕᚖgithubᚗcomᚋ return res, nil } -func (ec *executionContext) unmarshalOTargetPairWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTargetPairWhereInput(ctx context.Context, v interface{}) (*ent.TargetPairWhereInput, error) { - if v == nil { - return nil, nil - } - res, err := ec.unmarshalInputTargetPairWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTestCollection2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TestCollection) graphql.Marshaler { +func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatusᚄ(ctx context.Context, sel ast.SelectionSet, v []testcollection.OverallStatus) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54710,7 +56919,7 @@ func (ec *executionContext) marshalOTestCollection2ᚕᚖgithubᚗcomᚋbuildbar if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx, sel, v[i]) + ret[i] = ec.marshalNTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54730,24 +56939,23 @@ func (ec *executionContext) marshalOTestCollection2ᚕᚖgithubᚗcomᚋbuildbar return ret } -func (ec *executionContext) marshalOTestCollection2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollection(ctx context.Context, sel ast.SelectionSet, v *ent.TestCollection) graphql.Marshaler { +func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx 
context.Context, v interface{}) (*testcollection.OverallStatus, error) { if v == nil { - return graphql.Null + return nil, nil } - return ec._TestCollection(ctx, sel, v) -} - -func (ec *executionContext) unmarshalOTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, v interface{}) (testcollection.OverallStatus, error) { - var res testcollection.OverallStatus + var res = new(testcollection.OverallStatus) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, sel ast.SelectionSet, v testcollection.OverallStatus) graphql.Marshaler { +func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, sel ast.SelectionSet, v *testcollection.OverallStatus) graphql.Marshaler { + if v == nil { + return graphql.Null + } return v } -func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatusᚄ(ctx context.Context, v interface{}) ([]testcollection.OverallStatus, error) { +func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TestCollectionWhereInput, error) { if v == nil { return nil, nil } @@ -54756,10 +56964,10 @@ func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚕgithubᚗco vSlice = graphql.CoerceList(v) } var err error - res := make([]testcollection.OverallStatus, len(vSlice)) + res := make([]*ent.TestCollectionWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTestCollectionWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54767,7 +56975,15 @@ func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚕgithubᚗco return res, nil } -func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚕgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatusᚄ(ctx context.Context, sel ast.SelectionSet, v []testcollection.OverallStatus) graphql.Marshaler { +func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInput(ctx context.Context, v interface{}) (*ent.TestCollectionWhereInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputTestCollectionWhereInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTestFile2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TestFile) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54794,7 +57010,7 @@ func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚕgithubᚗcom if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTestCollectionOverallStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx, sel, v[i]) + ret[i] = ec.marshalNTestFile2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFile(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54814,23 +57030,7 
@@ func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚕgithubᚗcom return ret } -func (ec *executionContext) unmarshalOTestCollectionOverallStatus2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, v interface{}) (*testcollection.OverallStatus, error) { - if v == nil { - return nil, nil - } - var res = new(testcollection.OverallStatus) - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOTestCollectionOverallStatus2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestcollectionᚐOverallStatus(ctx context.Context, sel ast.SelectionSet, v *testcollection.OverallStatus) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return v -} - -func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TestCollectionWhereInput, error) { +func (ec *executionContext) unmarshalOTestFileWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TestFileWhereInput, error) { if v == nil { return nil, nil } @@ -54839,10 +57039,10 @@ func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚕᚖgithubᚗco vSlice = graphql.CoerceList(v) } var err error - res := make([]*ent.TestCollectionWhereInput, len(vSlice)) + res := make([]*ent.TestFileWhereInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTestCollectionWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNTestFileWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -54850,15 +57050,15 @@ func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚕᚖgithubᚗco return res, nil } -func (ec *executionContext) unmarshalOTestCollectionWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestCollectionWhereInput(ctx context.Context, v interface{}) (*ent.TestCollectionWhereInput, error) { +func (ec *executionContext) unmarshalOTestFileWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInput(ctx context.Context, v interface{}) (*ent.TestFileWhereInput, error) { if v == nil { return nil, nil } - res, err := ec.unmarshalInputTestCollectionWhereInput(ctx, v) + res, err := ec.unmarshalInputTestFileWhereInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTestFile2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TestFile) graphql.Marshaler { +func (ec *executionContext) marshalOTestGridCell2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridCell(ctx context.Context, sel ast.SelectionSet, v []*model.TestGridCell) graphql.Marshaler { if v == nil { return graphql.Null } @@ -54885,7 +57085,7 @@ func (ec *executionContext) marshalOTestFile2ᚕᚖgithubᚗcomᚋbuildbarnᚋbb if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTestFile2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFile(ctx, sel, v[i]) + ret[i] = ec.marshalOTestGridCell2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridCell(ctx, sel, v[i]) } if isLen1 { f(i) @@ -54896,41 +57096,69 @@ func (ec *executionContext) marshalOTestFile2ᚕᚖgithubᚗcomᚋbuildbarnᚋbb } wg.Wait() - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } + 
return ret +} + +func (ec *executionContext) marshalOTestGridCell2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridCell(ctx context.Context, sel ast.SelectionSet, v *model.TestGridCell) graphql.Marshaler { + if v == nil { + return graphql.Null } + return ec._TestGridCell(ctx, sel, v) +} - return ret +func (ec *executionContext) marshalOTestGridResult2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridResult(ctx context.Context, sel ast.SelectionSet, v *model.TestGridResult) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TestGridResult(ctx, sel, v) } -func (ec *executionContext) unmarshalOTestFileWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TestFileWhereInput, error) { +func (ec *executionContext) marshalOTestGridRow2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridRow(ctx context.Context, sel ast.SelectionSet, v []*model.TestGridRow) graphql.Marshaler { if v == nil { - return nil, nil + return graphql.Null } - var vSlice []interface{} - if v != nil { - vSlice = graphql.CoerceList(v) + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) } - var err error - res := make([]*ent.TestFileWhereInput, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNTestFileWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInput(ctx, vSlice[i]) - if err != nil { - return nil, err + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOTestGridRow2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridRow(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + } - return res, nil + wg.Wait() + + return ret } -func (ec *executionContext) unmarshalOTestFileWhereInput2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestFileWhereInput(ctx context.Context, v interface{}) (*ent.TestFileWhereInput, error) { +func (ec *executionContext) marshalOTestGridRow2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋinternalᚋgraphqlᚋmodelᚐTestGridRow(ctx context.Context, sel ast.SelectionSet, v *model.TestGridRow) graphql.Marshaler { if v == nil { - return nil, nil + return graphql.Null } - res, err := ec.unmarshalInputTestFileWhereInput(ctx, v) - return &res, graphql.ErrorOnPath(ctx, err) + return ec._TestGridRow(ctx, sel, v) } func (ec *executionContext) marshalOTestResultBES2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBESᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TestResultBES) graphql.Marshaler { @@ -54980,6 +57208,13 @@ func (ec *executionContext) marshalOTestResultBES2ᚕᚖgithubᚗcomᚋbuildbarn return ret } +func (ec *executionContext) marshalOTestResultBES2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTestResultBES(ctx context.Context, sel ast.SelectionSet, v *ent.TestResultBES) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TestResultBES(ctx, sel, v) +} + func (ec *executionContext) unmarshalOTestResultBESTestStatus2githubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚋtestresultbesᚐTestStatus(ctx context.Context, v interface{}) (testresultbes.TestStatus, error) { var res testresultbes.TestStatus err := res.UnmarshalGQL(v) @@ 
-55293,53 +57528,6 @@ func (ec *executionContext) marshalOTime2ᚖtimeᚐTime(ctx context.Context, sel return res } -func (ec *executionContext) marshalOTimingBreakdown2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdownᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TimingBreakdown) graphql.Marshaler { - if v == nil { - return graphql.Null - } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNTimingBreakdown2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdown(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - func (ec *executionContext) marshalOTimingBreakdown2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingBreakdown(ctx context.Context, sel ast.SelectionSet, v *ent.TimingBreakdown) graphql.Marshaler { if v == nil { return graphql.Null @@ -55450,51 +57638,11 @@ func (ec *executionContext) unmarshalOTimingChildWhereInput2ᚖgithubᚗcomᚋbu return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTimingMetrics2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetricsᚄ(ctx context.Context, sel ast.SelectionSet, v []*ent.TimingMetrics) graphql.Marshaler { +func (ec *executionContext) marshalOTimingMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetrics(ctx context.Context, sel ast.SelectionSet, v *ent.TimingMetrics) graphql.Marshaler { if v == nil { return graphql.Null } - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNTimingMetrics2ᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetrics(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret + return ec._TimingMetrics(ctx, sel, v) } func (ec *executionContext) unmarshalOTimingMetricsWhereInput2ᚕᚖgithubᚗcomᚋbuildbarnᚋbbᚑportalᚋentᚋgenᚋentᚐTimingMetricsWhereInputᚄ(ctx context.Context, v interface{}) ([]*ent.TimingMetricsWhereInput, error) { diff --git a/internal/graphql/testdata/snapshot.db b/internal/graphql/testdata/snapshot.db index 2c4e38b22ca1fb723d72c561d8f4612f228aa541..d89c58b35f0cddf01279e9dbe51c1d9ad12df29f 100644 GIT binary patch literal 393216 zcmeF42Y?&ZwXkPYW;Bv!#$7G~kJmQ4!rtE3fW6+8&EmDz-r@oVNh{6n%GOGYMzXyS z2(qbZl;kClmqIF#3TZ$Jfg~i5LQ5c!LLiV(lDv?-mxLtm{qH~bj%HV7B(MDvLOD)2 z=kDBd+jq~L8BM!GJNsj4RT)dBCc|l^nVHA1EVD^b7{>NRhG9bRpYd-iBy7eXpxL~1 znIzOvckn7Z*iA_NMyZmSQ5Atv2=j31No5~c$%}C(YLVy!E+Li1k2k#lLWwX<< zL{vQ-PNidG;Yd0-sisr0h&H23G7hNI#oL6QL4WtKUl|GX?HutdeSu#89wk(4Rk6yD zG7u;&5>jeHMu&>43;C{;dEQf7%U-=R9UhIV#RkQHJ951o?%v$*H*YHz4JoliS{+wY zN?>4E35@jjD?0}Jws#NiRkr!}D%~T)1APIgXuCf!yuP%Opq9*}B5KfRTMMStaDJsw 
z|6ly~_z&?1`F@_`p5kuf4#3Ori|sGk@3z0k9b*R(sMhYGH~)j`J5+hhQX@V zXc%4=DXGx7z%^Vu1g5Qq=^=HLys8u&%_O4v9W|~?4Xqyp^X8m+GzM=erIYYl%Tzc$ zG22_ida!yYSo?C;Cc`o=!tJct0fq)CFpS4WQw0DF%N;cX$FMZ+hz^_@0Lyw~WQnOs z=w;*py!t=eQNuP+vpug?cx`MdmD~?Qo3%9VhHkI!2g^AOslR$#o*|j`@l-N1HCv-` z*>zj>S$T#gkc>{ARnwQ($x4v=YR)`{rBUaZzO7(c(Nhf5tuM^9g0BPlT!tIoI!naO?O@&&0rei?(W^hGiP*y zAC-M9CkW-qP8L-UYHI2rd{|>9Vi>h-s@_;}47VHCg*UE00}PL`huv=MzGqaQo>wh6 zuKuF;O#3s2r`McTdAu_h#82~W080ZR9am8_y4`@dp{6UZQj>|k-57q?shwb1ac8{U zh;g009VZTark0M{c4+Zffo;aTeN7vfm-EEi&(S@xt+)06&^5zNTWebKu$e@gnYn23 zHG^fvxiIdKG}km8qf(>!rs_tpG=DS=T|B-ew9*Q@i*W>4;#-_&YIYo-iI9tZ zi}FlOk5)#li>eob=^Wf=VfBJM!wLa%LCt*Hxc);x)#C8UpTaZp+jr?sKEGxjSXMZ} zojIn?TO)&I*}Zj-UZ3ofXv;cGQ$mK9Nj2gznl^T9vBo_|OLvXy7?#F<<2uy|mNOn= zb{0M}m(CjD7_}M>Dy(;a@V0avKqUIwKHEf>3rEueLVsbW(D60hN-WIwc;b`+T3gIUF3~j zSlpAmvKyRv^_{+YZ!3Hq%lQ8CW`Wr#2(`0S)oZCp7yK7Yr@{%1ylY(ieo3xqW4FLG z3c|XgqG3&gWlD?Q63Z3YvfU0`Y*1iw*pS6b6w zQ!aepE@$6*n!t2A2q>^OMxA?eAI6+an8T{yATU0bOif-fQruPi{!6Z%om~QRKAn)+ z0`*hyc>t{|IP*=1puP@+mIzP5N)PE^N}YoB9_S*Z`5$x0Re46Iz-$$SdfJDB8MR*j zi~+2yS+B*&YQ$Z-3KqWxl`G%fAu!#n&^e20##3h<)n0MpyV@zeR+B39Z#vBS`Rpei za?P)AgPCH5waW-X=zdpvdP=R0OsJ6q#A#+eHKD(P_W#>^6F9e$;$U32^mN)@dwof3 z*=z4ewr6xpGx9MW+hcpYV~;&EzP&4JMsIA{Gt#W2@v(~+dKUsL=W>N(Nq{e~1VX+L zAPy|y2p9gww1s(#&7 z*&Ju-QW0Aunay2>^kuW0r9inZj%qVeukl6rS86piH!DTTHf1==on+pLlugW4rz<)G zyCuz82B`oMQ6`fVlaJ#{l%QAD+F6~Jwxu}B5S=mX%Dli4^}RsK!lZ(Q>guj`7UmMb z469nyMG&;n;SztpwK7}#TlY2x%_=|d~H%sVhi1zWebynM0vJagnte;4Ldh; zmNA%a%cWV}#$scZElHX{f6c-Yrnm%7iQd3jw$WKF^i-HV&6)ZIm~t>J zoKR6SyYuToFw|X@Ryw2klev?#}bx(d?JajxW7g9K@b!$e|DFZ5mdLT0JhslHW2@P*}! zc8b)4)r>P-Ch{v4;wM&3G+S4|ynPE#p=T3cAeI+*g=0MT*EIi$2t-E7g!`n9!GE2f=YE9HYZ)UHU3+=h&o=n}yS zZ8NcSOwTN*u*>LNC2OzIDmw1rEOi~PfvGL$^TaWbqVy8LB+m zEPK*9*@5xudoVF4XIV=p4^}EzV=|{sw*%BZg%qaQY=gg86y17OQmYq1gEl}>A71vs#x1aJB{+70hx$N}%8y8_N3%UZ{; zt)X{>MuPtxd=JF^TN3zm;Gsa#|L^|Sb6@Zu_IrFE_FeQvyr1x%ckSUG=F$+^?@gW| z_s`vryYF@Vm+PI5UpxP=^Od@ex>;ayz~q3*f!{g~+zm;Uaw`LzXUq<NJBr<3VycPa`0M^nAUbgr1`?a3#TnRGUtEo@3Ai^-(*{vSDAZ?Wv}Um5oRGI?81 zbl)XschA7k@Zj!BxpApl>u#27m5KW0HvIQOo+uHMOxA)h36%Er<})dg(q*^LIuApp zsR5mC%YK=0=L^00zCNMz-k*QUc@mN%jY%kF6H&Q2qu4jl!=3Hv%_P&kg+ewXGPb$G z;azLt2)TzL%hkf^2Ha`D-kZ+m1>DZ5FF5Xn)L0!^El}HG&@!0zObS??O9}WFANVWB zUP!Lh&EN|*osF1~PGEHKaz|?q3i?#DLy(U$Qru&WMs_9rif(LPjvT zd=F@{TP;4Uu8UC3`bDI$KV?-7x~`wiZZTy051%n=d4@>4Hq14Yc+? 
zk#C^a)V{aj0BHEua}rXd-MWqth|FgD;6H(d*5(S^5lFChOTs6hP38O2d2#xJ@Y#+* zX0}_LECg|HHlIrOiK7IQ#kLm`w#mY%)|Wa=C5Q^WsZ=U0b{wX&Z7<|@V<9q7Fan}n zZ?-o{IrzTk*!Dt-HyP2q=9z#flfxn-33mNnpHJnJg`}tnfcbkMshmh4#Vh%tdc4PYas@m!WL3Hc&Q7!N7_pw< z$>#FVQ&D)|_nbYDy-UV3Kh6_8>3lAkmo?lTNNK0W)0y!YPqr_Y%g`AQ3hodjzY~{P z;E&oW>bN!#j`t;#y?rTBCSaL8015N@@kox?WsF*b#)$QOnLK4Ytg8DV!=9)wWps)i zK_g7Ppy9HTHb3?DgOHC;RF_m3=Bvy}SF^pqKdLfpKUr}ClJ$v7j^dMqi$105YzPUc zK@X>Cw?WB`L#97b$Ei@ad-_HU$n^E4(s{aI40{6Jf3-MlueOBl4wO9~v%T8-Im@W+ zCAVU#KXhfcdwC*Zecswud19JxRcGL_TxkN7+=WX0Qmxtc!|<-r!GZC?==i|S;lb#c zw(>L4*qLhOOcZP;{wxIHJ~%QSJ$PhzIC^Mw=)l0}iRj&fC!zyK#z%$@0;U6l2gf%@ z&(z8@{F!JAcC!gA`~rA2zwnnK7JRi{D^+V#^^&$-MT5}T=IE}GgJa{P149SL88ntu zxZtsirq^epdqzeFhxQ&M#Kz93z}unY(b2&@gQJ57cMXn3)xb@W9r5VM!RYS6;X&Zs z*r33XINH6-z5GJl`jE9+t3coJv(e_<%uKlf537ms1&>Fjr^Rk7im#L|@K+Rn1NhzI zupBu!boj_%bm-vj!Q;^Ln?rd9Q>m$hJ3!Frka8ye;!Qi;%h#^8zE%_=+$Mf5+a%S< zw|yF+kw6wCEtM*q4``|71vjJqWty*@ZJpD~1w+(eb8xD9`AifaYoOwX1+oH%Qd*_@ zl+YRBy$xM`t~EQ?Dv`7TAepe7@H&vKa^o!E!@=-jp7A9(ivXt>$`Gtpz|n?UTS49% z)vJKn+TfdW)2-43l^{52gC~tq6+z~3{s=e+qnx^ACMo3sQfHKb(QPoQ*(MEcNuP`A z`K1e|o>}c+2D3|}0%W3;Bi7UmnVKvmq#967mlDos!t0Wx8EK%ZO*Yh&X)3hCs|nPA zKRqG~&>cyfC zXA#N}dKn)1wB}$$B~t_z?L7lWhR36^u7e|`vGIZNBV%20Fi@Is)(|Y5D+*Znb+Qak zW-F!X`Xpe3vWa?qnyx8gS%aUcw|H&gRJDOuf1amG}%$CfQYj zau$xpRcjZaMFOK)tF;x{G_``6;yF7RS(_1}HH+=^2vL@JUvt>Lx72gLn za;7L+kd!faNBInf2W#Fp=O!n4y!J>MM%GkQ<>@Amb(Bc*1#mCaYD$&QGXlHXtXs%61sYX`8DVDk+#ehLQrn)Vu$5RW22Ne6FZ09Sz z&Aq%7x1y@xd<94ZF43LQqmkJycP1{p9b|#MF z|5+B{H}hq3z~q3*0h0qJ2TTr_956Xxa=_$($pMoCCI?Io{JwGk`~Pd5CoG}2hxP~m zpWx%c(ZH_*?+);R)&9@>oBlPvFZ*8L+wT2$?`yo{2nH(@VU~<6Z0Or8hTEAswpns{yGGeh=g@ganaXRhcH=LxE+yhAF zx>K2GI#bMMis@`mE|rVILBV7)dSrZ;b878a6eAsgJEI2xsc_CqLfUq4O^G$x7fq*% zsX{T8?&(X*NNt~u4zIyTa|EeX2Pxj(*F~3?(UsSX-$6;l{k|y@Gf}ur0aWG+J()fQ zQtQMWV-Y1cHCTZnEbg)!1@O~>WaxcDTes?!Pra|%?m;r**A zCS|8(Ol|B_VoLVPRJq=>`s6B%>i|&IhD-PSt1gwjDH*rr9jlJ5#JGnDZh5F>I@`|@ z>S4|n3Nq#+EBCKpF+hEoRS#7zqZn()V=IQ1%eZ7B)hEw&ape`v+`WACc8qEpi`%WqEjhUuBYsBaz~cRjFm62S+vb;aR9BC?aN9zb1=o8P z?F&&{2Nbd(58k{r2pHcKlai_hO`zaVK*l9Y=t3q?VAPLMJ*^YSf6|9>J|lzc+wYaT zF0VfFfEZ0o&ER_XaTM1r4lcLfLowa*;KIFY8P}~1u6x{tQDJLBNtRlZ$tV8x=7eUc zc8xhP)_ykD1uoUj0}hPz=~B9mQ9DNWw2HO;v<>5Yh7`H&s1;)uRV_yiAX%^%x^ff$ z-}4bm={DWJv+DfwJeMfIZ)k(qngK96r|KLWfR8`x_NFuCgw3|{Y zHVT0y#3+=+@x{(+II-HG0^B%PR>0j_-M>mv;ee(ExNfbDb17U(mOHl!uHOnHh{s__ zg8**fD_d;0a#to+D5`6F2p7_|A#YbCmK#)n`_I}S2g?h$D=KW$5D0gEMFQKza0SM#;-x!pyJZ}=ZCj$ianr-Hq<^uZ z`U1paabl4Ifp%OOg19IVGN`~1^-v;cP=O&i!9>8I0z+~F1;3)gO-PQv->0a!0Pgt` zUIl`AaL=3I3@R|V$0a-l6&T#}B-{oS7~FIBxfB&{N@2Qsor=RrLHA2&&snD6w$e)UU~<6ZfXM-q111Md4wxJ;Ibd?&_mTrRuio0V_&plM?`ziJ_dD*u?~w?84~Oyl z>ecvt)hhhHawUFWu>!v@Uyk2zza76XTZZ44F2(P+-G<+nEWz)K7vuLui|~6Wgx`Zf z{2mD4cfTLM`+WG_>&5RJhu=LO{O)$+cb5ylJDvF5;lS^9JASv>>~;=zeZc<@zRwc+ z|3d#K^yAQXLthVlG4v0ikA~hKdUxp0LvIefF7#;Vg&`EW61p!`4V6PDLWe^`p@C3e zC>4r_)&~DG__N?s!EXZ!^JQ|ts;?8e!LJ^10fzihQnett6i-T`S1v3fFS-x!iR-`Mlk=jC?M0EhV2zUAK|X z+gwY?=MvXq^10ZxhNDclpSt&*debUKd9`IhTigdR%Vu z=>~Tn_ZU9Nr_<#ipANXBkAJtjY~<7CwBk?5VPQW1e|m+}#Ab59z}m!xN+ zDRWfCYRV32(&nf@#bhmVx}^Ce4xOBxBMFOivgBOCIlX`e$hOGLqEoJwr>``t&C=|- za+5cL#3?}MNP?}7phCL2CP^|XRKdA}S~?`qVN`Rji2qcaK zGi}a8<9po8ySl8;J*&(YJ9(};U7@*Rg+E4^V=a^|XsB`aM|A=sJg{xQsM6P#NPF{O zra%`K09E}aMWD_{qy-b9)XdZZ8D}FpI_O?Lm#{vxOvpymMu%|hl1sRX$u9!6uj&cx09!ga`3rWqRT{Y>Wn z(@humJo^C2lm5b-oP<(JwG2sMNpJ_{4?~*u4+noxmM>@xB(2;`j#8Y>v{d0+khK}s zA1SLTh!Z2_;H7e-2Fss16Sh%CQ$h~SS}7PgGu7#7u&4Oh`eb|V?RI!IO{#J!0iu9X zyNC;EscGXBm<^>(2PupLFhrdMQe)UU~<6Z zfXM-q111Md4wxJ;Iq=)UfyHp3ea&L)Y`M|o$6+5n*<2esZt>l@*iz$Lmu`b&@^>z_ 
z;;;3_1+jb;oS9ESl}3G%Z#GBfTC;PlrdSq%bM%RoR_Q6rF?eu;i&8ysJD|CCF&wXl z)|z~4ZdR5(79nvEaIKs~+9j+xbBa7i=znb*O5;)|qAw9Af4*aW0k1U3>|7y}9 zu&l-6487eF`dR3oL!S*j0bk9R$pMoCCI?Iom>e)UU~<6ZfXM-q111Md4wxJ;Iq)0e zz+$JzVuP2(>i?Smfa8E~t#gqB|73-q0*`y0y`FD6f_D6i$7->1pRs15_C+=d;r)^Q zBL&-sY$2=o3+LTp|3Jp}c6|Te8Tx@D{{L^NgJ!cP2TTr_956Xxa=_$($pMoCCI?Io zm>e)UU~<6Zz%6kA$0;z1|G&%fHx`Ka|0v}0+YtO!@SVX6!DWGO2i_656c`Bj{Gai^ z)PK~!-uDyVhkVcR4f@>PuX*3*t$8!tueeWgFX!&%dOS~gUgbIM>2m+f{eE}NU35EK zpLf03H3mTqzv_IEbKH5C<0;2G99JEC9gFSXw?A&L*w@>>XnT{bYU{Ip+xk9h!@3*p+M zNcXu!qR2OK=3X1KcgMGLmP;Nkv)TfX4IVBMcZ*CZ>NBdu*IJD$vvt5)sW$k@R=shh zG+S<+Q_|A2jk9!n@DNbU#a0u-a86avw&0R0`J#l+ZRIR|9?U9$r`^V*Q$EW>r|UH~ zyAoSCOWcY_M^Oz|_YpP;BdcuZaFMgzM}|bMAf`nwoyQkr$d#ap^8NgDsaY$}HqX`Z zHLBTqqXiN(S)aX9n$isO_CC(Cm*WzIhKqB2LKxO6-%K>CGqcltQTQm;W@ms;q)SSY z26{QmfR*c$StSjEEUZq$kuMeq_i_vr6LYo7G%w3K`BT~a<~)p)mD{iqvk=HHwyw&2p9k<+{j%W};r>i}0`1YJy0> zDXsG$OG?T%WjM>7Wa^2O3CW?c{I?J6mNaJ>qyj`lnaoo%oG>6I=oMTw(`jj2in9#S z8N;s33mn0Nf3{Jdn^h9JIjLZwy1J{Kg}DSU!>Sf_Q3RW_{AAO|=EhJDC^gDMafawh zE)Pf2YLj{rTj=I2TbL9i%CprX{By8r*twarjKOqUF3svTCJTHbV&g3FGqbHLvIvbG z7#JQdYyyP}ye9Id?uRr11JXBz=nZV-EQjd`Gw0dGYK!j?bPHQuWa6A9;}O*b8K~l= z22?7625(I2sElk3#*4NqjPC5*#B_DCD15M+N_SyBt{!Dk7dIw3cAcTS=(+_0Pg;^R zfxbG%64pK)vKu(dHae??o@!I8JE5XxcIVfFV5qwa3tdsntFAwPGIw&89F+^Iy@mF0 z3tpxmzWFLSt7W?EKAR}~5Ebs_N^j2S@dj7(d$_btC&ZzWK$_8($T8p{we#k^>(tj2xbOT_25NQFwNS@=0~r?rJetb;Y54G(DlB`_IoW~n>U%ITCudnpCl6LCSYtA$PPYTp zK7|yf*=&QqSmiG~BF632hP4x9J!!U;|*wR%@||E4sp1 zb1)TU!15K#faOt(tr+@?&|GM1$PxT(@KwR_;QGK%0`Co64eSf7_J73xuz$CI8SDW3 ziSHg?)cbGVk9eQs-Qx9fPjZiOQ(U*_-#kxvu6p|1KXSj@J?n;%cHMA2*R>mxAbi>R zD(A4{7mg38V0l2LN1I0=*f* za0|8R?t(#WdoP0F7OJbjRU^RkD(qVj40gBG)4dXRR;^iWY|X0Z52D_JG% zBuHY(-W5o_Dq*O}f6(ni7Lb3t!)Cdm7Zqm?v7c zlVRI_AK7nG>_}4bgfZx_PTKls=|W5CP>o&3h1S+xCxZd%vkd{m?6PgI(IrR)t{MoU zblLj_w$T~z_lQ#p^>r8awk{|yBsqLn-Idh#0w1DFR}Gk>0Y8pmoSUwy_aaqoum76fIzShh*e>{e`p&y zON}R}rpsfE@xCwJu1l4Fu&z7nX8(LTs=!h9Q*S2kw zx_a4YXO8OS!h}M*Q6|pAeYIuF;GV0VG0bCI%9cArw|FRf+E!MqhqdNRt^}Q=)2Yo- z(A$f(CGMq$r@A#g!_&6w9=t@b9%K-KoZq92?Wgft<=L=Ou~N46lCeibhh+09*g_^w zg&>AvUG^5TEqM|Qd9r6m`$+WyHk4Qk+LjZflikbP>BD^YF^y_UT>o+Ga(m*+&WUD# zwEyXozA1J>DT4jSVAj&1qypFG@`#$zcuUw8KT6#lkyEAgt(2I@CW$S3knRLPwYCYavlC$JJpjs!%DzgLIyhFgo?+_#QZkE1?yqh8 z-NYVH80&3n-DtbXR@hHBCZRp5*zsqFjY(T>NNcnfl{+}tc45u&{66|bK14g?tw$Z@_*-1u^Hf{L+{|U?YEukBscZJS| zwuhDlp9+33_|jk{*c%K6eh~OT;AMeopg*w8|F8a!`Csdw@$dBC?)$0lW4^1tao)oyku721X_=)o)&OdfmojV-AbbP__R>!j(M;)6THv8A?@3KE^KWR_dJ+^Pz{@V5e z+r74&&27D5eV6t5)_bj4s~ky1bM2vTL?N|obZT8yu%K%7R(0wMgkCS#;7QxMd)WK5q&-ZkyS;0h0xbxx;yev*Ujp0~+iKk3YjlQfjc)-+%0%q~MUYa_ z-4ZJ*kTM4P8tr~@V;?{oPR46g$Mx;*T{y!fEWO^8l_ViPugEl9=C} zy9LsynC4gs~P_o95 zvU^R}9SW2T{Ye2K5x4>+i#=2#BeAdolGQj>-r=s*ipCiuNQZj$?p3#-b|{urv6USN zqmV1RRsf{o2%jV)ykg^W0F>0acFUA37IeBSkKay^8cY|u0^A;7rT}HF(Z{<0CXPWb6eLE1)za~ToPMM8W$V^?b8R+`=f%8FODq&NCN}+8s+_>^&x=N zovp_Oe4%)dv@9@_$ssCb!C0U}({y47V*YtT`eVKhkaXVoHhKY4k`R^^E{&x(#&y6- znZ#}O0OWj)Bv0H8&~z1LGK;dnz1cMn#B{2=x|{%QAW_t)b9On50crb=-S($prrYB- zMTeP_f;MQo%Su3(b9zp%xPD^)|Cr_Tme5y2?+iUJ)E`gL^jZa;Yc?{&Svb=sA5+2C%#JDd+X$DA?8uZjPE z#c|MahyDNBKV^TTywj23WqpHn%-Uu76_Bgz=X8n zelxW6jkBU*W%;W(5-pt0u>U)bu#MC0TuESAD&fYIFoi9DIf%f@!JdU}0iMMTdN0B9 zu8l~rfrP%WhFb>yi#Wn|kWt4a#P5tF(H7F}xCHx-IKtKt#miVAWg7lGjzn`v7w>t> z{I_pH*dPK0LSWlY&SNEeLxO&MBf>@zOmzX8VUyjcQq*sYBWxH^l36k^MPc7Mh`>Br z@K{NB{w#(><4GWpvB`AfELmDD|0{;D`2?%5KIoJH`j!~NHWVl#0@`j#+_*qH^5z&4 zjVL`(i9c_OA#6rbtnKTdJVpMr8(}*N6f(i8vo9pG{!d~En^P1ugJ6k@H^z}@Pf>!E zJF{f?CD_MeNVKRZtY#lgruYqU1U8l7;7hXo`Zy9TCxVt?qRO)Ux){Q?lR?iUVSQ~3 zVe83gfS=qT4DcVv5jLKvs4`~Plk4dSzb1~b^+aJAVlBhII*zdUG%wg!Z9>?7GMtGr 
z=2ylMHlpT*`4w@5?I=t^CTcFODP8uIdc?qBz2q6(yL}IY}fQ=|M%X! z4!$ssuzf{w8#3bsF@&uvD)2yuW(!L0-t*%KTUQh*^(LNMI*oLAUXjf)3S`j0|f#?rpmyJ`fX!6P?eE2$Qn0YK0O9UrC5Y@7y2 z$>C&eNl4-|9lM~w${Khoy%)O9cOvuznGokUR+Y`O{9UQ}>h5#5AiZ?1p4)sDTI?hi zv?FI@QyscY=hjq=S3t64ol?MeRREGoFhi|Sf4LHy?0}MzIT@Q!KpKtAM7#`;)VXIl zKa%*C*Pj7UU6~ZOYR<$;il$lprrjyU@11Yc_wK&u7KWNOc+dLNMjaRv7NpbhQ=Q17 zTVkhTCl!5Vv6(h~68rxb@bmwVgx(UWhYp7_a01{zA?M!{!8Zn<9V`cT1vdn~AGjWP zIKT(?2I3G8;HUo2LqvdC|2F>`zt#7B-*w;Pz8Cr~_||*B@4fDQ*votOdgE{w;HTU> zx%1pG*TXIH{DbG6o|ky)p2MCy++Twj0e|E^=icw$><+qq?)nn!2)xF1*>%#j)%i>3 zH=Q4HzRCHJv*O(2j5)oIe|3D$@mG#lI9hNvpx3d={-phV_CK~?v7fSUv#+sRZQr+D zhnobvZLckE^I3mt{k-+v)>m53z>NZ{ct#+6!E51`2Am#`pk}b~aQ(Dsd0yy052Z%~ zPOB9^32e?lnuv9q*TJpuMhiY`^{MIdg)8LSbbYcsz3v`T{nXa2B7*bbDg!+oiE8Q` zlx`Oq*WC^fP>Kl8h0ih&QhLvZrx>Nu+Q zT{^EP!evG&U5Lv&&OqsL%HtWnQk|Qb2a``qOgqN)Lrc z7^PH%N5TggrBqA~h7T}GX}br)!v>|p;ky~7w7++U_cKaqyZggKj8ZzGhr;_9rL^w8 z@LqCI2d)6%5e9p1&jPxr=mg?BPaDYtiq2N*dXh*oez3w&2lm75_Mk$rs?cr?(rQ5<=8Ksowt>G<<(lg>%YzY?`rL;%I za37>7wkYjy&M2ki$HOs3DV-Xza2KPL%4%2mE`!p$ zc5Hyso9~y*nGNCf4E$7X*7x5Dr8mv^lEk(<2iF0D1&P)f#bjC+Udy183eVbblu=4& zRW!VYQA&HbCVU5@l#b0E;RvIYj!h&SW|Y!V4~JJXN-52&Q>&o#rqtG|@Ja)OEBjYK z=}ixnED0;Z%NgjYgf9=@&M2iCf;ba`CqC5=JSlyJW{= zD7|_4zef_##oJjirJB}L+QL`Oj%rW z>|q-NAr&6mb}N($GlrYD`+7}wU$Or`3779f-wS;_^f282zawM`eh*FnybW#u?14ij zzX*IS@czIX0w)7o19t>0{_j9EfH(P{~8; z@V>$OfcF$c2Urd90YAXKp1Yqr$rZU(o?m*t;rW2)^`84Z1-K9JVfUZ9pY1NW``v3? z|Lyv=>%*=`Tzg!1xm?a4IzQoj*x3(f0>0_^pyM&P6>!?I)e*M;+Wxn2E1+m!W&5S= z8@5Mn_t_5HQntm`Z-T$ygp&ao=*AZ<{Rn#q!gR7I+@B5}{f(L=?hzz=5#-4OcF*NU zfuD;Y*^iLRrRRU2jUd^Rkjtf~nV*Ru*_V*ZrE>}YxCdcxLRfF$2*FJ_a5ttE>y72p zX@ngQLE7}sB9PwIMQOAALj=kGh)kXI(DYLg1RjaFog$ad()yo_AlWC8%cb6ZB7$VE zL@t-=U*ChUU$UUy*Ok5hcm&DriQJGhOdpFN*+G%ZrAPXIA3?H+~@yF-lf4@Qve5Xt3|s5}ut zvP&eF&rnhMKm^H7kz6iKl=nvvc8i1^DY;yt?{6bW_JQPbX#}rD5cYz!^`-K^*@3Wo zBPdVc;XL>L6q>^Fz5#^YotqzqSxMVi-n#=~r|721sLLqc6G5`)By&QN^}miF*>{r5 zB~HIPg0T0b<&-3Ce-%NpTO`+)@c-`tgdL+>V#8mi5q6*EH4UU8d{+cvuS!e!IVxg* z5kc5#(w0ji`pyWFT_U+-Qu#X~NOp?ka!CaLJc4AmNG_Kc^Y#dm9V15h;}ImgMsm3{ zFmH<>>>O$7m-_$KG{UaW%`xrIb|Bd!TA1AbuN?^cL^mz7)3VIIC4yvsNbb6{5WP8q zWRFNLmt^uy5hVLWa=FC%KaC*SE0W74*8WKZ$$pVsF5!P;1j(L}TrSmrEP`a;NG_K| z;|&pny(8_=Nuu@o2$EePxxQ5Xx(LF~khZ>b;_J2B5%zUp2;jNK{73Q04Rmh&aRkZk zkVJr7{+bAq9U?~gt0PEuiR5zWbjqtD2%IAEfr4Cq7j6HQ5d?0Le)%gR2pl8*@<$^G z+#db%mq!pdKKkV^iy+ypk?EtSFDx&OAlb2z%O(E3B!XnuMlP57|KbP&=SIK%7ex@b zH~Qs|L=ZSQ`sFW-Alb!{+n4BnK?H%5qhJ5|5hS}ga(#*ZKZ+pP(UHp~{LkBou&V=> znjnBTVW-MM_S^`<9+5`Miu3xU4~L&(E!qPZaxsW9T=Id&ol?k_}(r!0`dp~%tO-0Zu_;@axih9K_05JA(sd{SDU2Ffvxv+gJ&i1WX|bPA491kwH%5DCmE!UYTfWCXaAq!566u z86`7?AgsR-TNn0ub?Ri?k|M#R@9MWmT}tX8074mqZdJRe<7l~Ty0))j2uY> zA^u@PLwHiCDly+68XU;35a?eBe~0I^aI#tvBF!kAkYhW7SpPy8U}%y=NTU--7@1B3 zl0y*fAGjohf)hflDWk#Y#C8YO0&)M<^;Dvu6AIXoLJ;^L^GFkJ563?AYOflJif;>o z2>&?_=ai6#M2P)Kf^QkCha`e9|8Y7csO;cR-C2=C5Y=CbDn_exj9KF-APDL&M4Xoi zolle_kUEIvZyd15ra8t(=f+M15&aL-xLHgQ>KL33W;O))2RRbvB@>Bp+7Fm+5bj^d zMnENT9%0`cX#}DDVP-4jkI0cxaA-##T2Cr51Ol%buy*20Q}DGttBhk8y7Oj&rMlT# zVmn$l&oGcKHi9@>OVio_yTov??4&)8Jt%sUu{Lq|ZpF1BDk|F%g!CUGnE~h+sV1!) 
zDAuk%!%Y#KKoH|!NU=i^P<3;wDRvzD5o80n={!d|J!M|X_LwbO=2|te@iLm-Zc7Xe++>0tkTxJvjzHcfHC?JI>-wQx$WK!7U2=WoE zrGbm_s-_!D50*1XMSu+ivI4e+#8m_#X3h$G0YPelg_vUY1cI~#LbeJzbVRfv5~}7% zuqey+9D-~Fl9Cr_oku`Adq09a1d!7zK_XFWO&D9-?M|^hMHuQG2(lB9h{=ku-$ljp zq7$D{fnpniGzCC}Dss4#kVR+lRn*^sAZdY?1X27*aL~?i$9EveU!a46L~ZPhVn2c; z22WGSM|%-LN`nO&N4uaBp>O7D;5M(-#xTuvTNG7GT>?_9d z1cJN=WveHn42|BdN{D>PxdQBr3$E>=I*K4W!U8mkQMh9JRs_irKzP-$+^IE>JFK{V z*G^LbL2d*|Nl?`}52b6z{=dyRVex(t-prTD0h0qJ2TTq;jU0I7m~FY`$lkpk&&zgl zfrnQF0)Y)1`aCI%HQ8I{lf8YF?(Af5t~;B|O?LNYrY5^96MWy)WU4aRmrbXW>1=m0 z*PYBnlgVN_TTJD8GKK6W_#>HYFUvj&D92_1RySp@Y z_t2rzvC)A;hXzO271y;IbNE8YzL5ii@K@ppc!$kItvtg+Dy}9J92ywk*Itkq7V?@V zc9kJ*R5L+L@@n%!qEf$9o358DkQnn^Ii1TVrY3THVPZ0$o|?=|_D-d9sT7}?$Y(2s zzFfL5pUq}d>As0{ekzrz^j7#uK3mC7@_ec{o0}-)dZ!ZLvK2LCC30EVp_w>z zrFE`e>zSgzB#r_Xs`Xkkk?BdL1QO<&jRd&>CjUv$kvCk}B;SO06-nfG{0rG$I3T_88N1@2a>AdZHb<<_d~<@TQLE9$jY&wnN5@k z)g?Nu6NTPfd7_-3Di<;^$&%TLWNusah+Q8#AB}biQ{_m76WXhjyB5j|^C;X59>$)gftCZmN>Zq_fGX?tEW?@6Lk4 z>F&!-fO_ri<WYEg1-uWHu$FCTyP&mcKBJ~V{rF>EU*dU|3B$}tN*J1 zfPby;7rsyX9)jrqJ>LKFe$o3@?^W+G+zI#(?o)6pV3r%?R(gI4x&L0}8TLfsrvInh zZ*sTX`yi^q&t2Eye!#42hij?x$Iz(xGC5#!z~q3*0eub(A_!5u&PBLiVsb;2aez(@|5O*kfr=qt8-XVW z@UZhha$ghBm>7QjhcSc$71QwR>JxATvP(P%XZe>H5+fB0NL2^D)$RWvj>K5S6m(iR z${-UU_3is{Bt|PHu!em2UL4_g#Wb$H=6tQ1hVRA^4p=-d*zasWIBcLh*t{RpR?UZA3VHG0nrE zy{DyS<~QQ#LifLQQzcb zCSQr6aRpxW{R7N~8&V*MFUQfCf?UQp$*@nx(dc|(zZ6G@6|f9hkpm-ru?HOjSjf#u z=1WN@>9CwG<_jCp2vpD$?i*)CRq**ZI;bFmF>W#upWBEI0IVT{r5FyM?MA}@+#!L6 zOz>yo=xzleEN)2gAO3Lz+7Gb0o#VyMeR>cLEqvCv=7>&LWoalRBLdmL{e-r zk3JPgdllVa2@6kk>nG!Aj{=s}WeN5Z-DnVCJF!!OzrF$O26$Z-(TrLj--LDnoRp%5 z75raL7WJ_>+S!3(Y1;gK91SR7S)7nyKe_?!FrxOnIG`UHME$ogpptNYcmvvQ)LvC= z??Z94P01=s~J`h8F0Bj(t z5)q1$ARnNe=K_~uuV<^)J>3IR^dp4pp0X4{fCI!Cv>y0R-0A{jG;_tfyt{+ zn+tt1^l0c-2rnk{81zSmaLR1g1yW}o zP2g*zV%P+AABo)PTpM-fP#G#}?wu>|9&6kJN^1t4!FM**4R*S4>@1)XiSM&PITWH; zG-v7;coG{0Ussm)#l)c)L-)ePQ%!sAk|7-SZ`R7Q5MZlSYVg8UcAL)BD4=^7_w@5|9fcS) zG@^kJ^Tu$JL+Gs|5PFNokP&JS<)$|b5TayU2qQ#PiB2R0ohTa#GAl_Ti-9Q)Zq%1m^8A6n;E3?(w*{06W6wv;8=v)?eb7+W2xuB|q zQ6UzC$52bbg1Zk!6Bb<6vZ-DIVKCcQKt~4cHCR=AY?TU9yGK`QjTS!#KK~!~{kU zPag49OwQl&;|Mw@1r{dxeiT7xjmn=IKvPhD(>Y_5H2g4vct#siH~%Gq zDn{i$h@eTM^6y8`gi-nTBB;zLmku0#H-gSE$|W9sXAdetxe!7ghT$esC_EKYCcFA} z1l`MMNMgjdBIq7QxkTtcN6=|TxkTtU)94hG-<**BQv{u4AeD&vMg*NOD*t)}9XBe! 
z5kbd{%D)ytN88Kg(>z~|pd&`*Ux}b`qw+6D&={lqES&*QM$jmuTsn;Lr3gCAD3>zO zd@+I!8I^xwK#}TBDLow$D<~6KeIEP&R_?nN_}_e)956Xxa=_$($pMoCCI?Io{628t z#bKv-k^U7$?#_po(u?$*HCISglD(O}?x{*X(+#0G)7=vj*-CdlHC33HDrYN|GEXnk zr!vtrgc`{d)0v)RR*Zw!UYPGmXBQBDR0z*_WONvU7@ljjW+BRCg1=mznT5!P5cf}r zoS8_aGg-JW{Cgj(lmyIFg%Ir=)l?H-v!`}(CUKEQF|FZfn-|l`o?MR@&6I}zG=rd0 zPW;{mLCr{^`o;&x#>dEI{=wbPe7rt-082L(>I~m1<7lVbgxNnfK6t1!Jg{?ccpI)N zo^*iuzOdM-bY$-+g!Mi3z`AM$2UMpqYw-8_Y#EN*@2bz$TI(Pk z!Bq&SK6wrzl{fer#5y;E%>yi6b!$`AvvUo;GF+bEA!7Oi>!yV-5IUWj4dl>Tx+r{r z6+kPIF!i_!rfVHUP0VM?*?bwICQfGfzEou@U75=DWhN$jC-_R=R5F{&B=hBLI$KGW z%X}tVNcLqb{6uddlYz)#>#knCde3H6?;r#_#M=SfgcHSRtsZU8O`eO2=bEFZ=OFJw zK~1LcTQN#3nJB)e4b@uFDPU10TCGLPwJXv3RJ3)Dhu@~B>k#(*Y?K~%guIqL(Sb@O zT2|I?)!}anawQ6XLWuYa(P}GNZbFGjP0#6Cv^$zOTA!YqfjkVu^~v(|ZX7|L1lT7h z!O3||>Pnm;#f0B|33M z6cb6Fh18soeo+vW(>0JhmQ#r9qu3_qD_f#OE~4cL z$o9ZjictxXP=2~Lba2l|F*-O6*9f3Tt?CS4j3#?B*;I3LbXTL=f{YQ<(L?3dIs9`n z*`zg6QP35W5Z#rsk%nubi&o zL=A(xi&4U+d+d*#u!43fyy~2FVAuH2$U&^0b`B37khm)zcP7e0Qd#9moPJ@K*n+6V zsyNZY$Xsi74s@50&7r$K*Gee!56C$OaDEE)L`js&v$H+tacUGDoh7KOXH;WtUMKVo zt3h7MdI8xkKzBiB@Xt8c19VXQVe`7VMiu^i+7d+YkP`#ath5rP8AuW$ST zcQ-FoXJOA_*ExRj0{H=Ei%1n+Q|P@55Hm>kfh)nF?yffAD+M8vPKxj-=#@b}o#}Fe zte7kiSorW{hlO}IxZ+i^5W_0cET6^v5@;6HMKMZZ&_yvj=3035lGusoFiu4hjlt3~ z6BR=5MVV7G9wkdd`-FSCIUQB=8U7Tx9Hk6Y@fgYl4ok&^zwoojUCr;Xr4fxacr5yu z73nA_URZx?t!5Mt?<`-5pRS#*!JZr@1b74ab0GEZ#N1iDjyHKe3R`m%bto3u-HZyn z1N%S(7grP*mKre|=+A=I#$zd2(=cq%?SZ zXq=YINpjL)`dq!oMxOtB-fMyX&6mjmlLICPOb(bFFgajy;J20oFCDVGEgSkD+ik^< zwDi0|DV%}i5#mQEV?wSyY$xftBvM5%n%LV;nOyHgC7*{Za})VVK9fo&vwV4SVv6T; z*>a&bQ!eoNe6G-!&lhrqWN$9TXOokubP7Py`Fy1hZ##iwq9&(RZ+p739n9PE^z>M(+?s>* zhwB7)9=JJGZNLbCi_jW}{e%P6>FFviPZd(Rd^Xby2Mba;0Go!Ke`BrsEE_1-+gAV} z%C6mWf`=nx%k-w99BwGfoYVs2MtcAb`|)a`phP~_FfN{X$o2qXCpDBYA#*)I9fmBo zTKvo`B%~AuQ?c8Fc?sDf=cb9HJ|7{3uE0hsSor744G;(TCSoVwpJ@@5X~mQekX@A# zaR&t?Q`~95Wkdk}=k^+IT~=WSPD%ddnQ0ig--)S)NySr*42d9WdJ_Szt81pAq7s>m z!qgd@txuj~%tq1Yuh)iZQ+2F2n=P1e{Mjo2D+~ekae;ZTPYB-VPPiz40p|iH?kIjX zK$Mus>siQ2%;UK&z++!E*@QhJX)vbv*5o-O0C@Z`XNX~L7H1(gLeb<~b40=D5g~Q^ z7^pHeCQ)@LC>FOog=x`LL#z8%<6AiCa61SLCY43J5U2rYi{h#=AdEN}Jf<3w6qaJB PseIre7IYu@|IYs(zXTD| literal 507904 zcmeF431AypdH6MwM$$-{dCsXgR+6!^iK9(ySw3TDvsuScvfg#QnTNYZ50xkdV&AgFj zMkB>ec7ZIPcQbD^@A}^FecyX?ykq8$iCkHc7mKBpbXo3mwYWIWb&D*!T;AVvxm?eK zf5zW9oOq2N;Buz(2B(ll_xTs`;2R(xr(u@JcgZ)&*T`4M=gGg4Pm+(jZ|AEr9+}W(%Q;h?klGk@`@f?KT0h9+0o+@NptjEc3#dE z%F2>bk|(ET<;l5;3Hi+Q`03H$`^4h9m6o=^<6f>V*Q1bPc7nIeM za$YHv0hUE+sFpPr&kBBQYI0_FdUSkpwn?OvJvp_?YhFG%HJuzkHL2MFSX#r3WAb$J zWO6z=IhLHM&r8!y;1;8ndm=fJ1UsKe>ZTUEG$!~XOL6Yfqq-T;JWO?$Sxy(0lvJUz zvY?bwtLbuPSyjsHw_PAL$STrkJ~omVvn5*@HQ6mf%tIB}nraqhBED}_k~ zRlPD(i@7`)i7gAw;BD7b!P)rj!LF2UlR|0gdl>1?P@~lzIGk7`>U0em2_#%q5XiiB~x5k%_|_*yu45> z=CxiAWKqlD*Pux%DS41St7(iwt5i|JK9!YpuG*UEVrtj(+4QQ>$my50UOAO7E}oextdDYPYY)lI zMW~er#IdzBc42B^>w{Xws)aPn%PiaElsq~%J3ciTdp_@vbaio;;+idk&Eyu-nR04H zDVK5?)&AROwPd^Z#(V16A(O9U>CIJIT~#vlhM_?$<)V_QI3-&G!znANOd6~twVZQE zf>lDw0s)#9YffhEvV;VDMm^RgVZFs@kdalRSfD1F9=m+RuX+Xo9lgIXWYKX!Gqjc%@EXw^6+SyQk91)N+F}zetIknZJq~c%X$c6#xC#T{E;I^ zxX14>8<(ze^r0Z9mZ_2JMr6P2w&cOAgsE#S@>SKD_SBLqGrmnQ(<>m$GB65L_0@$2;;3GyM2{!57b zpF6!_x8e0GPX#<-&iM6fC%1S+@r$lq;zh5tn|w-q4|ywjE%{6GljI?CKe?6kkf`(> z@y*gtOBbXMNPi%`K?;ihDLo*ak`mHq#Fvmw;zjY3KRQ5A5+r~GkN^@u0!RP}AOR$R z1dzawB7sd&PbUXo!0Gr27^Dh=lUsJ}^c>^t!(XanB0XCrPZwt&Zo;>DJV&?&pRajg zm#0)f3}j6fJ!6*9LIG*ok(+dG>Oq zaWs6cxOE4Jp#{qn^LZHe0fST2_H7ZGCXHND)JndbT7VDtkmhzslMX=8hOp!ci$%3v zdaaA+A}yYT8%7o^!H~)f8%?ETRo9n7#M8#HaTdG_K2Q6(a(dGi&pspcY8r+mxW2S) zv!~0rWQ-|;%bPcOI^D%ed9_kbEtQIuRT{JvWV3Ts`As6!03C)^9U`)o{F%!uw0R<& 
z#>uleOo`}qWQepqWd@&~uPmO=r%fVbG{?akD@Z&`Vjp7JzFj zFz}_cRt*>Nd-^zi*w<1iy}DfWAUyDAwM&PGh?~41bg5D(z?ip8u>!+%TgcNcpOD__ zquOB`3&gj$J^Q$ooT{d>N*PATse8c|S_D&#((nJG_&pc-06hEu6uFn&1ke0mmfk15 zQmRPPQWtpP9}++UNB{{S0VIF~kN^@u0!RP}T;~MhqTm`8Ts$8L9H+Bx?kbkfsqhS| z%xJS^N42L(?&ak>Kkw6yfxThD)lMV0-LvqlKdn87Qm-$k>GA9PxR-ivYYz#oJv0#K zo>kA3iVwomKlkz@AN?ChW5w0AlkgBu{r>tZdN@y9SeIZB z@q)!wd-0fo}2?Wd~dFp``@PBIl^SgLf z48dHCryfXA?+5a6@A~q4>VY0DLT|grk=#>1GeG@+?)5{SukCg_k$dWA_R$bOGvJPM z>&){N|3hQE;`{pe|G#pgkhvdPaKtM-l_weM9{R z;YUwTz2{(e-{6q$`AA2yaQ@D8N$nVUq~rMLJ;{mjncGt{w~wDm-8DUW=1g+BW2B>8 zswf?YI&PghorGWUIanQ9jnm~oVWN);9%n{pZ>{%;PvjO#uxNFBjIKqi#021=&khX_^bQa9_xC4yhZlMW z7ZZKi!&xPx^k@4sijp|oKd>+~aCkAkkSmPX#EN&XuE919kVS4q9f=oL%kkyniUO0f zE`UJlxl*yX5{Ic`IarT+Nde(hB@VNuvS}?XL-dm+DCGq@hAVz%t-M?;bT1lL;&%cC zI#Da$*PTe{B2?5;JP$h+5(zUPKKQ(0Xtmdf$ll=AUXItMC03+v1;6fefr zOewco2K%zM3Gr@|71+JjsOXxcHlIu}tHhUzj()WSp}k3^#!jZ@W|GsX+2q}`DcGrE z>csfuDQJRvE+$S6jPzWDcJE?O$D@Zjk{5I3F-Vs-Z@SxasACe08UoMEjlmWdGc=es zR+AYt_zZO?x_cla?ebU=*4{5)s%&W@3LJG#2`nTh(OLAoc1wi5r*?)d7tr8rycKTeaPGZf?YD($kI3+_dojRBONePlZw~P7QpXbdKmJgb1+BZ@wlbJS61U}eRrenjt?Ck zNH3%Z7t=$1(31A_FZ4k3l1TIo^(K^_p5Z}g66;bw)X_+uk&f=}^-KRKK!Z7@Fk4rK zodybWNm(tz+3G4>8Igk%&_meHdSo^mRF!G}p=5)jU@&VBYrzKlWPMtwDj%o|cu4N4 z`qt*g#;!U3y*+YVPTViwdj_OW-!E$l<2dG5hQ+NxXS(Pb*A=z9e6buHpFBA=BI`v^ zvhvjwpqnLf66URD<$>O6AweVLL;`kWP|L2n_h1LDdK=_b0hbYUua zrhJtYWT$j{x_ha-TDn~qlm%)~8&2OrUURx}wUilweSg+jTeT{6Ot;2COBd4Cc3*;9 z%FMZSLS9e08`T9WwL`jz?x8AQOzM`<&dWPgRrAzm;>e8^* zYGi$94)P|bo(1*35?Y!b85R(bi;G5^M4J)4byA^CSJm#|?6Oi)AsKmW!$) z(`_(vi#bSwuI?}ARA@1Z@A zLCTO9n_n&RJybRk04s}R)5~L2pctM ztrH{`qSA3_#~Rmc4c~pdj=Q#dl$0VK`#xMT^vdb!Dq9s^x*`2I~^S8?@IUg z40mM{hZXpEn1#=b10EW1Js(-!ou0XMeVx;v|36E||9=xk|9^>m_Q$+(vG_;;2_OL^ zfCP{L5gP!7q!9KurcvxwsXnp>8a_|ndzxh(=grq-bXreSvnmZrqnLM z&c52p8^zW1LltF=?mpWAi+4NeA1!kQ; z(vjCsVEQ?ot__oE^~tHdBibC*UYL+>M1oy+VKO;Hp>lP=AiBZ6bpK!)2GM2ul;K2n zu{XOoFxHgmSY)?9^^z{$*4EJZ1g~LOAeK5kV~tC;$=5&8Js_{0gB&S!^cWqDDX=nNBj z(}guUpO8)fT!c-Ni!cd!NzUdoHoaYNq^~TkTp67a3x(jC+Es? 
zS_K~_P4|5Txl4}USUm?5sRFHCvr)Z<%U;U*Y8F_(bx= zh^%3{-}B^uF_>w*|o-^+GOeT^y#_{c?F87w_-W4 zqO~b)e{e8yO;fvI0&d(qb+$HLmagt$HI#S?)_j2W30kBEZI<3-J*zF&p$@%lv`9*I z&5*>Q4rsfeF!VBAGy@9AE*$EplydO%8Cz+CZs`t-5|!gAx*CSQT9U>wRfdH-*a``8 zJ@>9Yx@gG{i`JnII#nFXUfsb>s~XcPDfc!~1DJl@MGM+BGus7=Il-Xqbapr~d|1;8 zFlT)QoHU*F4cp#V=tlBo_4&^SpXV1=SF}0w`jRv8vYP2umm%Y>F7;e)6_%|TqbmYv z7ocaQJLJK+(WgLYt%n3PgRYK7f=WG+E1805nyz`#T4>}4t54aYYs+a=o5=2nc~n_L8;=bPFX(yUCWBm2sxT&Om1Ls&opV6 zW^d*UKN)4&kumt|gsa{fU%1TZ+OEUmB=UihLRFu3W1lsU|V$aUXHh_5VH6W*50Ay%!w( zLjp(u2_OL^fCP{L5q1a?A>e+*d+8D3*$3+Q z7Jz>L=g5m(@IU?`0VIF~kN^@u0!RP}AOR$R1dsp{KmyM;feOdF#^;uo%jH$rqS%Dw;H6TL9`YOr?*HVuF+1QM`Oz`#QP;jVPgaJp+K1OF`?UKkwiU4*UC=`_UD z-1G#@0%)FI$D{Q7KSw_0g8%Uk2_OL^fCP{L5lmsj%}G@% zxoqm3vZh~b8JkXy&L-u#$#GbsKpvkwk-S@;cZ@JEPfZ$L^YQ^bke1wh?40C}3_yO{ z9P%sWYuS-?WT%I3kXtD~AG>r&^ha7-xyya!^gQx(WhF2k}I)krlj z;5aW+nr1(xaDHBm!o6=A%Yb;(>k+sCfrY2`*r$@&pXPZPy*^^VNyyoSTQ`5=uQ+9nun?GGRx`0l9DP^ zRu+^}YBgQXEUQYH{k99F23bWKt<^>nW42@~qb9pWho%xh9BBIy0&{NU7nY->9SJJ zttiz#0CWj<0jTrU+Xk!J)r8W0?K)`pS1Yi(c4^I_{eZRw#xv^cvCqp3#bVxQjnp#u zHE5DbN*<)oY8vCvDpgdlPr4X$wO`c5)UM~V=~bhV(=Thia*8f4WxbwRtynLtLawmJ z@=A)XrEc}IY(|kEMm9!xd28FkGsy`8ZHur<5d?GEdsoZ8w$a&r-C)HPaS*wHm@eY z3r^_*WVCw+-HKpy5PdcMRP{G+6TlXH;{rfaz*g)}W#HxqOs!7gZ2l?O-30U@ldojy z&6U0ce!ZF+vGi5r9g>AL)Qe>$l}Uq@q?U8^p2Mc;tP)Zd2+*`xgPFC<5)$wk^;nmL z^%kQ+MplhtftqM~?D7$hKeBf(_k?~E%%p+lH*GrgI#wYKbXB$I+yLqM!3}`6X1@b{ zBS51{U(A)%GQD%wdXbg-%E_m}U++a6eAP8*)L|P0=ymM%HLtbJ^**sPZeQf!LGG+( z2N1D>e-N^p^6>5SX}_Kfw)!KTo!m=jw3j#Zoq6?rp5;f> z`l80-_bj}g$t1TvUj?VsVx^Q(QpR;P1#ed!-mThRIHzDQ%GFBQ6irnW_#SP2 zt7X{d zR#}2C1NsOL_8pUcSjd9e(a#Kd@HBsNq&p|W=g4gR+v(+^S~h>0(-X6lg1&sEpFS4A zq|arG7wYWOY+II?ut~5jYW)>^MOzZ6ix0ya%!456l5b*Bu}lO$J?-T#d9<(X+7~lw zS8TkD=?mIUR;6r!gFU4F>D&~aW=da?!=Rhk%65OG4>H+Q%S8W}OUKjDK?tUJtt=nw z=r2IH(=hhZ01-SMayZIs zM|@>@Z83hp@>=Wn%F zBl|es&||x`F=-fXtLh~m$q69;1J@fO2EE^=ZnR2l~ujr zgPSDB#~r;5uMfGQ(IL$f*K%HGdbt99YSQjrjPY5}4#Vh$B^b8pTm*Kvzp*&wWf*U^ z!5pf0>dtbk!VRZsd{v6}ktGWV%)kF;LVx~;_5Z#NZvegk?*MQez#m2pW1dI=2_OL^fCP{L5F$B?0_+R^Agm`0zf+5_BWgEn=iFV$(%rv%dwTo35Oc(Av> z_h3)&NKeo4!92-aJYYHcs=<(#06ZL zb>-W%sEO|3o}q!hbz!Fa|MA4-A|E2ZL^7lwPVo;3AOR$R1dsp{Kmter2_OL^fCP}h zbB;it=Nz}eHz|C#Cw-+jrss3-VNa!5@P_X&JpG=PW)U4!9b5)E5LIOwt2_OL^fCP{L5@-ey3a!RzYh(wUI%K#*h!|la!gk|yyRgkT-6m``PPYnMjMFW`X5)0Tu*o>xB(xZ( zEdnu4i69xLk{}wVq7XJt!$Qb74GBTxG$;thDV!On0l{ya`URhH>JwmQh>ndHV77*S z>J?xbg?{Q0U{-^E3i}AMQ$duz>)I-P$^+m3KTrOF`~`VA`6KdMU;atL zi(CCJuNy=<@vYGdJxB!C2v01`j~ zNB{{S0VIF~kN^^RZW9m#Z*>vZn#=z`zVGp$;BWH2$kXFK%l(sU4aWAji|yg(hyFz9 z6>faSDERjJw6reIclaZno!k?TmD3A(rC2GiR?4ZRQn9kCT7K*wn@*0-Cgs`D;}c1F z-r_eeADGW&=jB|XtSl)dd2(u2o}8PQkk3qypB|k)E8m_xE0502PK{4ODyNf^vxnsQ zLV87+m&>qJrWO{ihQq=apK_<&~7GlvPu9kkui1Y-)05c6xMt za@HnYisqE1aZ<%aExu|_<>b_Ka{Sb!CgTB1xlEyA@^tcKaymH)t8nU~TN7jiV)E3a zd?GoK1W+?c9YXApc7NpHLGDtQM!2AqFBD7XQY%Wil*_2LpZlzoZN3}l9J@3t`y**Z@oYhJdhsYCQ?rOjW9 zvR>-h?T_r+$6em5fnCX|YAUOgVG&!^eBNP&Y4*A*j3p%vi{qM_1F)i#qjM9p@`28| z$=fHV?wah30q#t(QYc$3KXxg)%O9C*<1Wo%^!)yxW|VWpp{~2%@ANkG5^|Q$7S}sDr&1r`dn(Y2n#hY zWm3yZdNrly9)|v>+LdXRJ6}Oxu)EGvadiS|9K>5Ni~Z;ceY6yK@! 
z5SO=U0G86Fh4hkQ5U~B+Wkq1~y($LfwN+bBY^0hg=JQIXteE?D01|^+hsWt%!%Js0 ziD}bQRT;~Vs8w2v-&M(~`eOQx%TM|b_#*=Y+=~*L2rfboVV6Hc!64+gIgKrc!sBQOscoFOjD~$2~x?_=THkr z(`g>g$dMLfeIc8sX;g;M$~eQeTAKMT&MADghMOm9>n*MKTq&o7?Y*Wo3eySa_Ul-u zb_;#^sHTafN}&Liq4iX%<;Ov*?pgeq zQg6IJ<73f%gapuwuB&p~x zyS%Qp09%2GMD1d=H5O~9gIGITAv{;h+a#7}^9~2OA`Ru*Zjwvd2672CT&-Ihi?zi; ztk%tqX>D?ls->Y+#4MF$lS*tXRoFqQouP)X+?#_AVhIh!3Yf+6+r;ugHiENi@eX3O zdTDsaI^67WkjmXqD$XPouK$0XG)*Wb5Wt|3C6ogaRS~B!C2v01`j~NB{{S z0VIF~kN^_6W&-s2|4TyLwErJ@ihO|lCHYhGN91?OTgV&8Ysss~Pm>ps%j6MKCM#r- zq{!Xm4ssi;5pWY3Al;;s$fT8QCL!XL{zv+z^i}B#(!WUm2rCEtjr3mWUD7+G-;v%V z{i^hf($7gRm0ln{CS8;sk{*;Y(tXmLG%1~uZjpwiUg@B8gR~1)7a&qV;>7QW|0#Y& z{5SDa;>W}fihm{knfS-z+r{4!e_ebXtUB-t@x|if;`79cSP+-QdGV|`E#5Ak5RZz3 zVq82Rwu?K(En-;Y!~YxpR`}`g7sLM={zUjA;lB;PFZ}NCAB5i)eslQO!oL)LW%y;` zC&Q10FNBNXrEn^IS9mIXDts(F6pn{G!+XM!FbVrZ-w%B&^wrSkL!SR-yM8M@U6i&245F^Rq*A(Cxed#F9eIhrC=&}S8ytLDtIh76pRNu zgL{IJAPM?~?+f1&zAAiP__Xjb;RC{72=5l&A-q+1qwqT6Rl>`KCxyp^3qny?5>mol z!jy1II3^51rQ#nFKmter2_OL^@JtZkFFnY+#DI&xlw|S#yTIHu zbLW|xV(tUXy`Q=FG522P-oxCp%)OhrcQN-)=FTy9mbo*`oo4PG%ss>0DdtWx_cU`S zn0q^OZ)5H_b8lttDdwJJZj!ktm^;SYXz+{4Tr zV(uVw2bkN>+&<>^GB?589_GfG+s)iA<{o11LFV4b+!%8YFt?Mr`I(FLOJX z+s<5>xqFzqo4LD~8)a@Ab9XYgmAN~Z8)5Et=5Ax|R_1PD?q=q0Vr~m_33Dapip&i& zH^kf^a|PxGnCoY*kGZh*m}V!i_n79w=3|-*yN_uuY(J*CF5P`hcb>01kLu3zbmtM> zd02PWbmyY(T+p5Kx>M1evhJw5Q_`J>bZ1p}in>$KofX~5>&`jdVV2Geo!L1vb7tkt z$eE2Z6K59A44m0FGjC?y%($6tGt*|4%?z8_H8X2w)y$}wO*4~b7R?Nr*)ua|X3fl) znJqI@W|qthnb|QjV`jz7h?xyD6J{3744Bz3Ghb%C%y^mYGSg+2%M6#iNSTc?6J-|443ya?Gf!rn%s82C zGSg(1$qbX(B{NHAmCPuaO)`^Y7Rd~f*&{PYW{u1knJqF?WR}Pbk=Y?LLuQ4{2$>Bs z6J!>s8z6rFeH*fCP{L59lFyR&lRtzN|9*+Qggi!8NrudlVRC@9kuU_nKO}$zkN^@u0!RP} zAOR$R1dsp{KmyklfgP~IUMwPH)1?c!LYJB@WEYATCyJSLUW7&Wc8Nl+P*zHXbUp-& z??pteTr8*aZURg2MIu6;hSLJK!eV@}2;mhjhM2fbej&xenPZwUKf7rdVe6+&l166|RArr=A0E3kK6Lik_d zpM^gY{*UkiVOh9MI3!4crvo1jye@DlkO?FModLoBW&hv$-|BybU-i%VhkW1kz1{co zzKgzld^hpm;s1hv6aNx^g`eUR{1)#wydU%aq4yWPk9t$yQSV+a@AGrg^zwZ8s`}f_ic0cUC&wVrZJ?>N7tGFz8lJmMgXT}lD1zmxyA|D8}afM1g&;88u zQsGN;lebS!-8GrJWjW{)?iP8DYvbk+#!~X63sR#clMUdHI_RQU2rJ(7g+8K(R35vbadXq z*I+guO*!})DW{?jIQSZIABf)H#P|N_eGa}x%J)U@ZQ^@x^qwZZ_e9S+_!?53J$5(v z-f3u=x{}W4H>6~DNAGe--Vpq*=$#I}M&@@$=b9+OTy(Zc+}Y@igRhbAOmw=5?{xHz zCi&hGJ<}xandp>*uTi3@=wuV$$>?bZUqj~8(TOHfO+;^Zh->6~d-S#@zPClk9ej;^ z$D_A8_!=qS8a>s-_f+(xgRhbD$wU%-H`K%=Z#e-`uBFvG5gltH(pdDkgRfE8uYd2a9_o zI?}{10iaCFGQcberp6di2hI~W~s@HJ!}i1s(}?T_|3_!=_zMSC55 z4XJt)3GiK)UK>#o(H@5cjqG}&aR=WS1|c5pcJMXgc1OFK_;y7PHSs+ZJ=nzeVDv@@ zUjy%r(O47TSoA;>-viOkCcd4C{ouPUyauBE(R~gH8u0f;Z)oCsLv*i$uTjjs(GCY+ z1Aa%e-NDz$w>>F??^WF|O`GnQtP5K(|J>IQSYhwB?q~;Jb1CsC68hqnnzfzbV?%#JA-b0pAU?tYuE3l0))_a!SX< zYfD~Ch9QA#R>p8NcJMWb!4qWFSI{4R;nDb^OH*)M&51$P<>Nl*3Y7dW^8KvmU%AiG zed+aQXYNzxXJ|aX{s{QGeVWjZuXXt&U0vLh`^xEsyi!)waw=2I=ao!3S1hRZ-@Rkg z$2gIi`N=1eCr9TdX5|B&lT)dg z+0og#na&u*P|Nh~$I_Z9EL_c`Gs{XgrM;WUgWp22m^boZekEn4SXS(w*$N~F08%TE zd$lxB5=him=#=(bt*or1OOS;%@YvMk%TV{fS4!+ByASyz`}T1!-lNxjy0oN} zQ>*D*3AoqJ6Sg`xc|BA0UB#VqB(7F68AXM%RO_#%2aD;vs!$DJBA?3@KmpClq}NaG zVa0U07Qd6-;cHryUVs;Ssgjabiv?5YTIv)I*hgV8x5PBWUgL%wEab{67!@dzJ(xu? 
zOhi+x7)BmSR<-A6(~MeVTR@vy=;2K1Hc^d3@|ES&N(O41Db~u^OtfmK26D0Vnn)G9 zq#pD~VlnRVcC*5+DCJTvW2h|q)#IqN%=4<$R<9Tp8k2NZF}HE7RnYj>Dr;Rq=)2xj zu8_%Bvbn-iDy^bfzmZ>LshFy#vXruOz(^0)`Bw4s1}p`?rKm-t0L7JaF#Zb?@jdLy%#%Y#!r>$53L)#IwHA)AA*Gi7TpK{K>o$t#7W^0K`S z<_qZ+Tfo(lvY5MQwJ#bf3yM-s6&Llw=*PO`%!I+>^0|W2Af(NRQn{5?=^(?G4RLA9u9wWd;yky!hlhT*KLSs$!P z|6}VFYeJdvt2X{tvCJW@0j+YG!!r52+3O?l%AwWN?TjHZboEXY*~5;J}OKd*iNA0VRi zIq6-}tE5%wl(bj;p7;^*x5O8S_lSd{82-2LJHuG1y0e}_I8dP`_IbSxAJel7U7 z!8ZgS4bBC-gFg87{~Dnr+$yvO{yXrYz?%b?17`yX|G)d+=YO65f`8KA;rov7!@f8B zF8l8G_4oq(zwqzmf1ZDkALU!&d;iwbwl z<38fv%zcr2H}`7pA?{XAc75CRLF-fb{`-Bdtq1uw`l91OxQ)v#l;D9)E>-f1{?VR2 z2<*SFc{t*$}XEZk+-5 z&qBZ^sEq{o&(y+|!MIAr2bE0OR1%{~rq>tl_#Hmi&BJ_K!=V%@sCZh!ta4seO7N1w zA+IyFyqwgeD*IIPaK`t{1CtQ0X+0Pr_n&T_qLFjQ1cckiw=zYh_cx|uoKBt?pF6$p zb_jE2UFy6If;H34s%+!ylWqU4>kD`A6oeaK+Ic})pvswAs1&kh!!R=6cM<}#!q7V` z?Y7obWTqv@lUHB+MphjsAge2|-9J_I0nHQmE#VkBR6aEHEu}m&>E8cMj{sYW69?}NsRUBwzX$vq4w{t<=9L+I(I?1b=6Yz>TAi+oQ}3D7q4??O(4hnP@~EZ zwnD%w_02}kH|~JIS7-x`u=^sK1dThKGg^mpeEZb{G0L!i8)Vhw4%857|JL<|>)fJ= zu01?B_T@%`9h)IwBSm8!?~R)v%zBx%K-k7Naz?iMh$i2a(C(KY7{k&`dH0D|NKU^s z=mPgN{Dk#+8h;@HB!C2v01`j~NB{{S0VIF~kN^@u0zc9O{IFbL zfa6+Pcvvbh5D4(FKp=HsH9+dXDuC31B6!Htv>+GY`~Nq|)6z?&d!$bB%i^2GW%01c zhu<2$5Ke}^5c-|a#n3GwA^6w97sD+7uM2M%9v5y7{7>MGfpdW#|9|`6-r^V^>1dk(t426OhG=N@!>xc76f;BMza zuJ>O%$zw2QbX*W(Z7zol*mDgb0<+74FXZ^a!zAN5x%gr#Y z^t2%CYpo^F#Xee&EL3uN7^AgPJf~T7RBZ@zg5W|j^GgoAo9*ynsTUMwgh@Gn(XZQrVx_(zWp!KBk#!~%@7?|x7nd62&1 z);}BB-o&$rH{Fzg`KYieLtM}Cd_~cu26XznL(_LV7Ujs16EI2jj;fF}iayW+M!m#U z7=x5oM$YHTN_Vr2M@A0AWY*zoDu%?Y$nX)rSa243cz75lx*oB|jH~4}7zsF{JI+xL zA3hATV23z^FUvH(3S%bJ5$))920S!$2h5O7b3&ia$r>}Ian@8#J5vhf(%Nbf-XOXi zzV$MWgM-~Lt#)rf=&GV;ie?+1!NLy=+zbo^QCZlELm{e3VMHxV#w#2{W) z^?I_t8D*LF_1y?FboW#ZYeauB3V2Ol8Clrg-a(kJ+bIacdVw?np+4v^VTe#aCWFOG zB>G{}E(pf7zE&_gu&}H;*JV#nFHGi*aRxr4IMnsnUwX2f;_+da;#*UlT255op@nEn zue*B~=Kmh8qeru&rPFa#PqdG;EZ45CVVE3T%autQU#%?UbD0s{aW3ScLlZD_I9)|k zjjAcEN$vDiMO5XXic-iZbozpM$O04O;K741)407_2C!VsP#vYd@y1b@cznvzGSr#5 zZtZnLk63=OSQpGr-dD{}R|wiPIOcfZKo3k;hPH?qgdSXPd!0?9v-4({!aTqk8prUA zXqBQVo~Nni?caYG<}){H59mOBwLyEZZ{O`O*EwCa3r*H)e?i9{!H`}$_$XB!_@WK< zaxC}_Hw?nm=mDYL0F2(tSv&Ub9fC>IwN_tO=XI!6N5=rnr%u@0L1x^gjN3WC_V!_z zXg$gqouq-=-aXRjw*FC;M__XGF;0l-{i{`xtIemUdZrnW-nZ`AGYiwIA6mB&6w<4( zcn_W8Lx(*#GKSr|kHK_oXijU*Z+$n`O!;>08iN_zqt@oGssbhr02&S3zGyTKbGW;z zNcAc-Zd@Edwzb^^v$c=GjfSa_t#;WkL9U>#!tg2kS2$*oJ9i$1+1$|U4wzzBW!A@L z)iky?REtG#ZM_Sodlv&nFKE+xdwfmfQ?<^pabLJYn<)wOM5wb)_2{Z8+SHcgKXx!VLDSX$RC| zo;JmfouVWqVCH*QRrP46Sd+w25K-)gx$Y2!JuJ{D^zougd%mb|{KDZ|U{X7DrRMrJ ztWJMOxPsk=LdRg@`!zh@1cM_mHy)aex(6hSfjK@K2|@y9&L4E>o**ui);!9ZH536D z@(qM+O40OA)9>$tIrh*JcA1{QG&uB2yxaakUhf)wzEd!*{;aiF2H6od*tFK5HSU6W zeiUZlk6WuquNwLYr9D7ip^m)XL71iwx3TO|s_x;)Pp{@i z;GgqTiQ9b?X7}rTm>|UCj*ntgGxZ*5AqUUd>ZK!aTzL!*`CI7s|Mp;yi~K!#Jy|A4 zh)?>Y^xM*lq^xvI+9`fRe2@52aZWrSdcvOy|6%x2cp}^xc8A^17o?kDN%VFP_tuhi|E(-f?xD#JXvuPolBKWr8zwkdOEA4>&%|z;UKgaN zmEI83`tIJ6R-fF{GXnAr40R9mnG*B<((c{2@2aI{l8&ZUd&%E0wZX$R(N65zH5;Xg z9kq+*{IRZKqW#0QL|yNWMyJ{gfKxDgt3kcg0W=l5f50T>*|xT`J86RBnwX{pYad%I zqb{|te~pCg9~!C&JGXP^ovk!!sza8_u@0iGeXwklY~Qe{nz)75*0~)t-KxqPmEPbB zfBYnf&iG)cvy@ee+Zi~=lsV<#1qpgvkAB!AP({;&|*lHv)vE4|^QuJC{&8ldi z$5chW-`&3bzHKzM({_+rY7M_!Hk9;mqE<_;H*eeaz*d_2Ok)k8vg+eXEuT0WWzaut z(%ID3t+#D4SX?XDEn6lw8);d%UX{IX-hBI}T52ZUzZ!erv}v}5CicwQdrQj$gr@pz zRSqF%C7S-TVE&SHpGcE_cC=O$C&D!Kx|aW{EG!(J2t9MDg+jLljnuAHjPGt$TTeE0D*!DmMkd4ASQQ-5X+-|KzA zLsNaW4Bz89>!#^H3x@A@&u}#9V|sTb2+vIaEc*Y~73B80!oLxGI(Vz_mOznzRlw(Y znP-^W=l)k$8veVszjH@?k;l$*F4(Y3xu`yrr&FBLl`>3?hTkceq)+!>Gd2=oKebKU 
zw4GpbY8f_tN+s48ZuE&why9VUG49Ejwmpn37R;$CXp?{{g>na(FhhOeVE)YD-TB15~L$oBgqb93Aa51JLBu3cQosVYo0FT>tSDwD5im=Q;h(XsLcw~-qEe-f0)NR^HHy76EqML>~a(Sdr+uQL8 zw)tOfh3<%G?CogzHEh*Y*elNzlyR=pzNTByW>zcB_U*es_o8jGn_;?;&abIC*uiEw zt-?w?jrToNE-IOdc4KqJpt{wnsah0$xfF*D6=RoLd;O8lPVUk}`sQ~!k<2nIO$4_| z8Xo<}VMlbj-&K*L8>m@#yrZQzU5Q=Vm+(j8aqiNZS+)guZakMQUVtrzb@^%+h8$(A z`9Di?K0eT+Dc}oCJ0VxM(be9_RDTWHB&4rvs-9OR-o`gPE*Hz`ytWuXrU-Yfrsh`P zj}-1!U{br2Pr3}dDz4JCpPYQv<#Z|Az)#P!P+3W7wrpBiPZ2W>JJ=+^8dgi9CFr2@2Z*4-iXI&j|HS`){pXuUpGa``+oQLyky zrhY%l`tVknEdEB_K%YdDtg9l&z|Ns+TaD}Mcj)!dAjxVanH$AiVXjzF zb?Sa~3k%+G7|JbMP0?qp(K*$qv}RN3ySA88SV!?THxBNkgPR%$chbQYQ}Ffc;4Q=y z?W!mokxVIUR3(p!rpOzw!>+I?x~&ErG^Q)GvEaeR!JX7sXdK+BeglnzJJj!x-xPc! z#+&e&BCfBMIm(-&Y`j(myr$?IP(_a^+SRF|+mymaRdJ9rMLt$-53bnAY0v-CH(lgA zE|cex^JJ3TN{*A8 z$ROzjgj`4$it0$;;a)mvv!}-^dtIXF-J>0MYsX#MF{&NgwBt_g*h-K4cQj2m(bvz? zjc9SUYsYQcajSORq8&GD$4%O?MLQDhC}~GgJBGDmNIM3#qo5rF+R?8aecF-Nj$ZBP z(T;BI$k8Kx{%?`q<04-rpCW%t{+PU(yp*V9o=lOONf&7&LFwONOu&17#3~TQK>|ns z2_OL^fCP{L5wK~=6E|~B|JrdpUz^@W`uHLf=wwtc3>Y-cU5xad;Wi?++XZ0%t9=G>w zf*qDC3cCPL-g`DJD&@>Fy8sX2dp516ODYSo37*I8^1;J-1Nq>A-BCVxai9`-j<%5TQi+uF?pC@l~k*CQ&lMj_Wq@C=9Wbh9OAOR$R1dsp{Kmter z2_OL^fCP}h^+JH(>)|+l7vDjhy?i@$I(V5n?ff3<$oy{V?BRD&XEz@O$Gd}XqfUh1 zNuBL{D|Mp$4(hb=5$f#Zw^OH;-v*9n6Tg)@E&LYh5Pmatw(*;&vz2e5&K90fXEQH> z;|}s7bp$?4od6%Aj>HG4Bk}@u!hC=_A+O&faDJ~3jy^9BM;=zG5jZ%&(c^W)(e2?p zf)IpS*(-gGKL7t0`FHX;@@evM@*(mj@+;&u@DAWl$sdxpk%vf*q~Q&~i^wIiMutg( z#Nhe=4p=X6RQeh``L~iyL?E2>-`7hOqnt~cWJJ7M^twsZMwUCr|w2ub+>JY?(U4}ZtHf< z^=#UvyDeLFmu%79ZJTv>>n7dZ(xSVYiRQY4lI{wk?gqlTD}{7d4C-!J(A`kL=M^~4 z@7ElkPjh%)bG%;7@pv@H?e=Y6=c(i4z0`rD!}ouY{Ic==|D)vJVEq5zl6R8dgOUI5C2s&P{6hjr z00|%gB!C2v01`j~NB{{S0VIF~o=pPUH+^2zzy0%J{oB7csDJzS3i`KyZ$SU{?+pdC z@E*Tk9}D31>8_90UB1!x{}%E}z&|8_ z1dsp{Kmter2_OL^fCP{L5oj^feX*qay(%fCP{L5PM z;NJx%{r|#u`d8hTyzdjgCH{Kon0rU~Pr`rVkAMtM_-^q>jvnQn+*3|3Clfm)k4;U^%ubJvPtGE!sS$tD@bICMrX!GPbAkVmK};! 
zBAc&O7?UTJV`!Q`X{fXY1ZHy z(l~3>Ytp*2clmOAE(sHYohYh7z) z%Q{g9)0X$iCr;kvkIc<+PZl*TO_$2K#dM}zRsM2uHI-M+L!)STsm4jOFk@D2T~8FM zv322S-8;0Ai@7}5p>Ck-Qdg&hp^>zwW7E~TSPnF*T&g(g^|daYI+d`8SZ{-AkC$?i z&|XSgdv5o(HlJqSW}3Qmv{}2lE;$qH33Vd0&lR9I59f~fB9EPIWE!BuwEI>oD=X>J zT8jP5q%+G(N-aa*u9j)ro>*VF(I*}q@kh>{Hn}rB|n9_13O~u?K8y^s%{NUnDoSPGv7u z3I(ME_ZF2xxlY5)p2xt9c1Fg=xF=(FwSFRdTE4S;u|RdhUwGt;)WhD3pp* zIA;Z(XLoCw@q?YOt9NLSJSe+EYBnWzid%Qb=oE;`Z@qD75X!3T4uKl$jaA@hYq>WgHKHqz_*yBGaw4DP_|?tFBgTk(0$^4$8|f1YdbTp6SL z|MBEgF7h4t{{IzN{r^++rUCzu01`j~NB{{S0VIF~kN^@u0!RP}Ac5T03ILxZA0rF&<0n(N-QMR!{^>n_=(yOO>Qpj*_p z0d$A;Z2;X{^lbp$o5S#^&vAi}=J7S*4ls+Q;o%EN| zpG)tQ-XXnB`c3H#((9zxNIxsRRC-eS3299#N#~@jbiZ_`G$q|CjY=a@pL9stC+(IZ zQi~)=Zt=U~e~JGteop+f_;K+=;`_xv7ym^3J@L22H;S(pUnBmE_!996@lo--SQMAV zw0Ms=BTk4(@n&&I>=9#Phu9`=6-Ch({(kt|;je|i6#h*3pTZvv|6TZf;dh4L9{$bn zuZCX}{^{_O;m5+~!zm*B-9&&MhlkoS#`-MLh{y=!E@c-NU68N@?bKj9H9qm$B9f$;q6WeiOTbAWbb~Z0c zFmY_hcJ@V)rDI!(Ejf~$#DqW(VW*U`Lkp#pmbH}9QVOM%LQ7d%TA&mPEl{A)QeNL{ z`|f-9_V)GO_syI|I?_4EvI8xllOJe4pZR9{&o}$b_uX!}*>a8LGRt|EY0IQ#)UwO6 z(b8k-w6t3MmL-}37bCPKYNNu8k01X1G>D+{$o4LVS_oS_tuZhHECo4;ZeA z5ce@$BO&f(xCTPp$#4Ne+`@46g!lr(`3dn0hN~mQw;9eyh;K4nEg`rxTS=+nc=DkaTCKWA;b*~S4oJkFkA&8o@cmnLj0KF78BwLhFe642N|x6 z5ce<~95_Pd-pFuph+k@c?$tHtxsH0pk>QBE}4M))>>+ zImej7&VJ)Q?5s9UV&^Pl5<3Tt3GA#hPGG0QIF6m9aW8gQBXlY{{Z=D%DLVZYBXlS_ z{g4s56PDL=Ourp?aEY=jO#r|&kbg2U)lhL!N!XjlQi^9;-3*Jo&h--U)& z__Y~=@M|@+!0$>!GyI-vXo6pTNh3brZU8c12*7WZp&ouuHu&N9L_;0?E;sn#x7tt( zzo!{$;P(_mHT*Uhmceg@VJZBsFjT>BnPCb1mKrMI_as9F{4O$-!|&;a#qfK)VG;bg z3}x`^Ff4>$#^8nDpy71*U1~TDeis-#@LO+i!>`}40Dk8iPK95S;S~5?W|$AZ#~DiD z*J(Hze$9rH;MZt45q`@JC%~`Qa6J6B8jgeCX2U%AZ8Esvx7Of<-^B(8{8|il_-!G_ z|4Yd6zndKYpFocPJ>>YmjU4~ifz>~Yu7;fdSCaF8J30S1kn{f%eE$Cj^8)0&!g(># z0hn-|>RRU-b=~evJICP`zzMG1F1vHHYpcuXdct|P^H0v(obS83oIi4ME}!cW*E7z? 
zod4xI+sQiLcK*BT7T1*XCD0=Hfb%BU3m9-d;(8wT0e}a=dbsXoo(!Rm5)$wQhjgGqiUT-*;FX&p7>#OB`=G_BnzM zzvET=Th3D)x55s<3y#N~$Jy_QQ#yUTWqy}|ab?N|1t zwym}oY@F>WSm!@#J7AlzebaWW-D^MDcA5PQ`%1gruD9N2{Xf>jR+sfF*3VnlS=+5f z%inAs%QKcgTOT(6(Q>`D*|OcT!TNz^g|)_Vxpk4nW;?-}w#K32s$d5oYT(_L6Cp^@HB0v$K z2v7tl0_q4@3}7S}R>k;yfKczD_XX-*^m~E&E&826y^Y=zs9&LX1?rdRw*vJ(dPkt% zL~je!8|W>8`ZfBEK)r_E6sT9x8v^wTdR?GiN52-RKcHU;)bG%10`(4hRiNHNzZ9t7 zpjQY5D`oVuK;4dhAy7X=FA3BG=;s3Ub@ZY@y?|a2sF%_60`)EQGl6;p{Zyd7ik=gw z@1maw)ZOUE0(BR9R-j%&&j{4B=xKra5qe6XzK?z+P+vn&3e+9whXVC8^aFwV33@`H z9z~A})I;d|0(BdDOrU;>z9&$Rp+^PkN%UQT`Z9V%pl(Iq5vVVshXv~M=pljn0s6K; z-G?3&sC&`31nN%ofI!`Xz9~>&K;ICkU!eO1>f7k+0`*OFpFn*BeNCV~hwc@qpQEn| z)c4Rm0(CRGTcB=2cL~%D=uUz93c5p}o=3L})Q{0u1nLR&Wr2DS-6l}?pf3s3jp$Z^ zdKi6CpdLrJ2-N-P3j*~ebhALch;9<7=g{W`>KXJofqELoqpJk!v*=2JIuCtTpg44eKwW`8BT%14mkZRT=+gpqF}h5k zE2xl-NceO;x=^65Ll+3tRp@+yx)L1`s0-0~0(As&0(C9= z6hx=jm7r_R<u%DX#8c(Whv;-6ZiU37`B0v$K2v7tl z0u%v?07ZZzKoQVFfS&(rp+o&B0u%v?07ZZzKoOt_Py{Ff6ak6=MSvpk$v}Xf|9>*- zk(PoYKoOt_Py{Ff6ak6=MSvne5ugZA1SkSp2slhXKvr}uay(>SW%{vky=jmBal@a` zwfc;6q3r|P`;HfFH`#8|ih=iAx1K2t2X!c(h_MIL`^MuLHXNDCB+1|52{x08N7La< zBsI!rq(A-2-!~rFai*y>7}Q-jn28LJvm)07JCRIHi=maj4s;HL+J{2kq4tiRkaxE- z({Aq)$tND$?Tsfg>?oV^_Vx{VdpGvIne~ zo$Z62?OmZF6D}xme){0KN?r{}vLgRJcz*R6rqb?i-9@6T3zi=sulP8>A59hK z3!G-I@^e|3Ed>s_=;bkJ$lmsDrZm~D-cM=f6l74#f5Z8wt}&H{Lb{8ZNx#J&WTTmQ zG7*j^Mv~zan@&!pqHK674HBv4*DIGpEmmAoxxb1K%2O{&1`FZ^5iE=*PXgtE^4R5` zh4ozEFRx{pcUrxY<sbuLQs#&y(C=BU4qI*D|GhLu!(znM=QxZ=dyL zWX6MVDr73gbfE-e?voktg(K$ zE)ry$>YG}cni`sefndxZiv%0$@;h<$_zI*~m9^N$r5ZsvzuftMV{GMvj{-+tTa!bn zp2sSL^lu;9sMZpOr{d$W@MtPBIVMgF!tDOY_*5jr#=@9jp%E0rbtXcF$4fVE)Lp!c z$bT_mis9)dQon9PG4rHMd?KD0l?o!wljPsG9vL|@(rYShYtvmAmCdRrBhkIGDH1vE^Tflx0TFRD{UL1I$5U3L{Vv zm^?q1SkR&0g3=cfFeKC<4Wu z|2uXdtHGi-{>549aM)_CZ<#N!ewt}FHyA7>x9hHT?9eUNEj~sO_>i(ax|1nAv{8rR zi5Pn@y>C38VZ)KBOp^Q^o{U8IMn>821e;04qiN~y@nj?>ee@T@HDdgW{%oA*^PSsF zrJbF+ixy=f!{e+N^YB!BJQf~JMJC6@h&8;Todcovp^$f|y`v}O-L1j7+q*>aj>mR; zfjXgcp-rWjGhm~3T2fEj{4{Y_W3vHE0s~$^LwYRgccW`K+y}Nfv zGpAy)N!n}r214Cydka&l@(zU7ga$&rouNS~I$TJ|ah12P*V`592>~*l?Sq}|U7_Mr zS3^Ob6BRCbt`wNmb66GG%9Kv^sY#$%wXx|$WFj67vj^GeR3@HGNO1XQWEnm(vdL81 z)~35KDwDYcn>mn7?G&!; z)N4*zw(?WX|GO{;3LNsB%94}MR)px}2bqKHC`F+nJO!*`0SoIc$46dgbCjNf zqzgV2=0S-?o|$|O4GHSo$dpD`%p^hi5oTp5-)GnPh5=J)d%NzU6J#`$y7Rfb5R zXdW5qRhkXvl~9a1@AIa@_;MFUIZ`1;fa_FY)sYwP9E4pV1{G<~=giI8M>`wLPdWeZ zLV;J{kV||XgNDdc`+pzOCIO9xB0v$K2v7tl0u%v?07ZZzKoOt_Py{Ff|2zcf{r`U+ z_B3UR07ZZzKoOt_Py{Ff6ak6=MSvne5ugbCBM>O={y*fz=z0^m{^t5O*B@Q)0ZIQ+ z1SkR&0g3=cfFeK~n4 zEM5N}OI1emN)ez4Py{Ff6ak6=MSvne5ugZA1SkR&fmtJf_5aiL|Eyu70Vo0#0g3=c zfFeKzh-!2*3r=b6oHQk0_W>?n@WR0-9?^EWO$s-#3$m3(eQAJ zjqHsj41kL~ux6B%}tO?i9!hP=HS zdwQy+7)X*~WuE?l?)B{hTfOT-TU8=fh^MOB+u7GUI5g1S-8&>DlUK%YLH=s z(JxQ46QZk~eea{4+~W9C}(a?rleRGNp$$swFlViSCVzvf&9flZr>v zVe#+rWF#hi^ow7$xJJ$&+hHo*xKVd;fHZ+(!W6^9%9}wkL2yIJ=Tm72BAcwd%DnPU zSPUpa0%4#{h+p%g%WEDYn;2O9TNX_GC^F$($@@VC4!JbuF%;V?=LJ(13Xw^cXZ1Yh zvngjZmrFgjRU5Z6rSVQRnHB*Xo`OL)^BD?BW*LuYl6)jNT&7{_KA5OyQ%%ZkI8bCW`iO?5ugZA z1SkR&0g3=cfFeKM+h#Bnj%0Epa@U|C;}7#iU37`B0v$K2v7tl z0v{6u@cI8d*OkciSJz)$AGm(&dfoN1>v`8Rt|wfNxE^rb>$=@_i|Ynpp#LZW6ak6= zMSvne5ugZA1SkR&0g3=cfFeK<`0x-g&MVRB7TS$2?AVM>>{yKs>{yI;?94OTu;VgX zvEwvau;VbA!I`f&m`Ze(`9=o+VJ6o9=9`QL{1;;^!H&_W$Bx0M!%hi4|DWeN54oO& z75`sde|Ej^ddu~y>jl^6Tvxd+b)Dz>j_Vt)yIo)U@Cry{q6km~C;}7#iU37`B0v$K z2v7tl0u%v?z`r5_#7y5p`#j#Yxp>#=kaYcldKW8ht53GW*8zz!P)CHVY*HF_4g9(P^n>UG(iPdG1kZgW;TK5*Ra7<05Z zjQ0EN2kar+?`>bV9ki{sS*$;{UTXDQ{$hE+a>TOI{CD#m=Dp@R(_c;BH(hLMGwGPe znJbvxjK}yVW-qh4M 
zyRmm&Z{OzL)vH&ovZD1COZh3;2x<_{wCBJ%QfSa2)G+9wZX9@5)$wP(-&P3D*#U?VT=}FKtm(j=KDX6PtY8rGbWyW%G zY-*})w4z03ON|7*AZV$n1O`2?su~+tHds+eDz~zXgO)=T8EI$;1gyw!7Tbu3cqYx) z6BL{$ax)nSEUmYq)1`*h%BIJmSO9Nf2J7pa{2)sfOOP)R$sp86B$Ww609>jVyFbmQ z_JexFoVMrpH`I}g2~wXeSR|chGil|ztgEZ{Sy7cvgikCEc1uWNg*o*3g0;D#lT8WB zJM<9h=+)K+YpiHR0eUz&Tsl56iO4NqO--;mH&r30zR9WK@p!bAca#fRUEQ+`#7?Sn zDJT)x9I+dYoW+iZ)9k(}HW6i0eA~r6U2aB}EnBwKiWW*`fR;-ds$#B}E^V)}qP4P~ zp`e-btv#HWoDfxr<)&9vRkOs3%BA#pfxumZVv0+a)K!wcNN5l|JKy&zH;Ky1l@(Uh zs1qelfTxvI3K6^#P06dMXen3k57IDTq*eNZ^73_yp<<+VLGmh%7g)}rGO(bBg)u}+ zKo&1sTr<}5Rz7-|S;wYFW_4(n+5Ejl3hU#PC{Q0X&$^5Ng`XqGB{M1c-=2Tj` z{A420!q_87hiS99alHc zik9Vd@{=icf1Eu4qF*=)&zsleBE7ay*&aEFvC&zJDOeVDX zgQC;vcR&l2X6|@U%;rQfBMwKcodgjU3pfZ~Ueu8FMPcV>x39L5p)|L?#a72J5@u+( zHrp!eQLS&R)>aGYHVPI;R(V*X%&_gX0zV}2;~Wd=1kLM3NWN6 ztjSq{5peK?Q_s&k^M@6NS!=YSt!l%tVuz$ClSX5^0RWaJ$;)`XZC@R`_3eRif9g(1~N1#p3fRF2%59@*_}TQ^f24 zRp|T3b(?G0Wp}>hyuvx?JkjxM$Cn(t9Rd5F?ccK>vNziP)AmhR{kPbV^-1g1*6r4l zEKgf5u=H3?Hos~9s(F`riRpit9yJ{{tu#5Amzm3%t;~GmZ;Tfi*BDPWykof2z#0~p zyjgO4$yiCH{%`v4>#x*D^cA|7bhqfnbtgmebN)AU?uXwYoO|xjIdh9uYr1jHIsU`5 zY!QbK2hJt}CA3+wpVVwdXP@1Fh)hgpxlmOfn-3ido;Amn&sk^HPLo0x*-=hU*Bpce zgS3&$(N*$Eex_1BmRZ~YbIhmSTo`KF+p>rXf?+kr^CexhG zo#hwwD{7lU>2zlb&xy>z1q<+|m?%Hk#VZu;Igzk2mP)PKhnGm^WwJ<$*fJ3!*tf5J z63_h2Rj1_ULWn?`@9QNhXXY(LIr8){^vmL-|a-2&!kXD}r#oYk@HiWr{8nrljP1v_W* zs634oFp9-iMq$Ulo?YsWbnts9=*ynA@KIg;Cv@N=9ZwhJPWhTYJSXF;O^Z$;SX=`DjfFjn&n3Iu3e2g=dh~VxwByh8Fpv29l2vi^Y&bD#m<&=Yg!xO z+qbuGJIa~owr%US5~On{_u~4Zb(Wh^Teo&?Ir@`~EnAv46A))P8Q#3PZWC#|xl1L@ zZShT;f*bKZu(?JyK#@H_y~;vbXo4FzHVw^X{XR6ba`0$&;s*zV1G(c_cCjHW#?-f= z2L?hLNT;JTrjs4dqDJ)%8-o40IS{0^m@Vo4{;s~GKmO_KYwtbE-R0h1|9a8^7PM2< zW$F6$t9$SUrny`;7b)06QP_7AM5d=_*}9^)(ATY7edf`v`_DYHrF(9tWZm6OXB5b` zG@9WBt#Yq@#u+Qt9>osf+O@4~$W%ACD%XbLOdr*^>5tdFuX{{)gKnSh44oJK2V`CLuVN>IPF!YOkeC`D&y#Q6Rb!)x_l0I$ zu|u2h_afi+;`l1J0bdQDZvNIgNmz`qd@C?kNg2%9k&)%Ed<%)M6qh4^3zCnI+!t7~ z8Q6S$Yz2pKvUDpqWiyyRD9RZ(7RgvK1dJL`izTiY%<|>!V#yM&7$}afV*`U$28|20 zm&N%*&qOj4ADIS&evt&6;+O4I-szX8)t^mENlc`?_i6Kq;}u~j=pi7%+lC#GD{QY2q+1<@H%MsFnOdMU1V(KlipH#!*q1&AFBURQiBV3koss6}8z2 zwd$d~Mzf4kbk*7DwDMQ6EX$|VzQy>;mue;}!m^|a*hG2HKj)Fjd-;;0BCM;-d0iu_zmzW*w9j$+xgLzA`WHsr85be1Ew3bl|BH##MYd zQa-W@TotEfQ`C}_?*k&kfrHw>IkW&;p zsR&kJRG-<1HCt{eimk#7Y#IU}MQ6%Jr!b6)nX6!ct%mO#^3N#v@m82q8MD!847ZgA z5}i6IV)mAl0HfyMA#zsefwPE?x>A=-T2qqo`hN%d9RjU?UxOC_yIjq#6P*9U`Ly$V z=T2v%)8Y87;~~ctjuFQyM=9I}c*cH#eW$&_?y$XQ`=0H3TiVuR^IHFEebIWi^*rlV zYn|0(dBgH8%cm_-OPl2c^B>HQn{O~D&1=ja)4!R1V!Fe0mZ{HF!Tg>1Idc!}0BnOD z0Gsh`>)6otN=8tn+UP_Uab9i#8+Mp-J>a67Olb(|}* zA`Z(qJg=N#+>**r$>GVRfUi=&Bym=7czUVP_C<^DtkU5Na2C30IBtPJL;3XWMLTZU$@()7dOF2A= z)NTM0a~X$clPaBwn0PstGmR|VLdz))o=2)9p2L_klT=v9N!hF5@KjPGMzPQp9G*-H zj0(b-6C<8c7R88Xlmeqdhm#W{o>LYPV>yRsm8ya(p;OM~%qj~9(wv0xv{Fb|8$~>? 
zEQ%4&EQ?~qGs~hFb0(JBDB_u=5TjBBX4@9hW|TzY+$^7F%I9eECB%*M^25NVT6qyOce3gn=v9Gwa7Ec17`u@-FdHgGq+1M1Ctdweb z0{FBLd!I*ag0QJ?EQmEwIbI~=(!IbqgIr3H?kO_Tia0P9Ax;%zBtmjFrMONOP%Iw> zKA|ZpGzc+&9V16y{bej9rPip#?8Hd6RcBy=c$Y;rq7vKZux81m1`q)@_3l>^ql(={ z@`bZ}3Vo#%-;!OxC(c3?PhUjr@%lfG-b1c;Tu-|0a9!fs>#71R0DpA;%z3}_YG;qr z=J=!ItB%h&CLOhodG^29U$Wn7-)--(FSawb$84XrC2ajRr}Zi8UDnI26V^V^4Der; z-@qM!FIz6Oj9JdKR9l?pKbc=N-(u#>QFF))FU?IKn4UA;XS&jqHf=ODGyj))fVqx2 z2zvp`jsIzU%6OOYGUJ4?&*(Rv0QUi2F+6Peyy37RY-l$uG8jvKSMp5BJtccedP-_b z=IQ^Ue+hI6+@L>8zeB%L_y2Tn=&skrbZc}5C{oTpH{515p9ne<%?ot+K$UFfhyzf3 zHC$D-Y;hQMx&^xJWfN>HJ~dHRT?S7GQyFj*$&vBM-f2ROC!>+^vYjOS`^%RLQ@JrW zTwJx}$1g%W>WAB?mX&G{ zxSy(8?$~O$etJ~pj=JFjYC*Y0;U9LxWm8q(h#PK{7Wj&o?RLY>(E?vF=CB)Xf)@CS zVRyOV(x|fjc&LEsTy|34Oblte8qGJ-Efgn)px)R zcNJCB-Qb2Rh^k@x-EhmWz*j6$pBruus`~c2;bLHcubAicZny)enyVf+-0v$0E2g{7 z4Hx=Ueb02mWxWDlG2L!AT*xc%6=Ocb4cG2eeb>6-R$YOw81ouG+?$(S7ZX|yx8#nh z^>?}9zMN{7I^A&Zt-x0-Y=;{zwW<2HyFr(Ws_*L6tH5{m{q=yXT-sAzUKY4|p| z8x?%TJU6-+%&jEM6g0GmXdcPlhiyAt`Abxk9LWE*+b#9-6@1OvJ&t0qF zD~7Fg*J$|GxT`gMtKG{qe3!YGD)@@X!tI1S8DiH`YXV$X%x3D}rCz`|g3M{2m|N}z?o&1Lb*lRm4c}AT^EG^l zR)^X5v>PNqOZ_K9^s@rxWcNuLIXcOGqJpmo=!xzV6nw=RI$`zk;5&Qy$POIGyN}a| z|2X$N4c~dIT;O{Yqn69hv4zB=iwjYL@04wYs z+uv=k+a9&uY&&e*Xsp zu*rO~>HnHuHa%#%&a~gO$<$;z8Fm5g1-${?Oci4>zGQsBc#Sb*tTX)1@RZ>WL&`8< zs5cy6@|ThqOTJNZRY|JEr~gm=Yx?i#Kd0ZUZ`POU{$2Nq?%TSwZcrD1!W`W{50_nd z=taP!b2s!e50_nfDER->!(|sA3jWWvbFlmnL`h%#e#2z8Q$#=UaM?vlHVsi+(2qS_ zcA28!|E!10E>sl!pYd?nrHX?8(;hCnSjqZ}hp_0W)f`@~@O>4OvB=f#WWUt!qaS%V zyr$tdRTPqV(!=2eOuoMa!4ExLc6p)@{s;9OUZC(J3uIB{Edkz^$=3Z7t2w-GIht}k z?%}eFn{2+s_K3dk;qdY%zg&|4V;&AKUh@5=a(~amW!EOz_@%0O)Wc<0Cs}_f{C7QE zc73AY|A>dnu22;GzvJPuYZL|lhwC}KN|_z*4|%xk;w2lY1j@HPTz2`A^_L3u;2I7u zVEC~qx6RGGp*Fc8XM%{n)z0Cy4nGd$Ml{1_VXkC9;Nh|>B8AL;)5B%gL|K2S2EXCq z@Tw?Z0wgf*ujlX*s3?S`KwtN8*_BZ?QOW;450_mVW&I_Dzt+y-)zR#M$?4VZ^>EoW zR5lH%;9p(C;Z@X}YOz1J7Vq(J*#(zELU(((?2;?%FM)BFhs!Ry6#Vb>aM@*7)?X^f z9UcxZyi{xOb`O_bWhsRJiigXtv$FnDWqjGgWmj5Re<}UjJY06IrQrW150_nSDfr)7 z&*Amf>?G`qt2w;7I-0)o77v$QUS;zomHP`GF1x@|@W0u^;U!i+?h-CHt>Loksod5* z+iv3X9xl70Qpm{XJY057rQm;~hs&<2vi?#=Z&=Bd@>s%&24u9z@m0FIfUfs&cwLo; zMArX050_m;Ng2rcU+dvu871tYD)?XH;b0-9?tit1gQb+Z|5Y6vUMN9R=L;wu%FJ97 z*-T#P;j-%~g-m|d!{HT`DhOA2xa@*SA^c}N94wjC3vs!JgB6jw|EE11EOpfVFY|EO z#ZEST@!Ses>fvCyqaOYe4+je#b^nV!94vX%{V(!xu;@|uztF>Fmoy6byTHT2qDDRZ z`5rF2tWgMm#KUD5HVXdddARJ-ChITWKtY^`Lt5#7%6+a>{nD|ZbZzHc_c?{W(h2K1 z?!y|shuvpu_@3=Pq~I%VjURHKrQj>>^q&=)248*%%ss4}xm_gQ&lv)z-3JvC5wmd6 zeL%zafP24&FHw~>`(29cVd|7SqY%AV{){`V;hT1+qzad6G$A&alzX3s?>_gWhVP_1 zsgZ7SRRVlxy)~09bizHM5WQF*6Yg;h-*NX|4d1=)JsQ4y@KU+emxdREt-j=B#y6In zV24xe0iX1RC?Xn;q!OTnkxeBc;|gBb zPNw4f;e6Y-Ka%ogB8jQJ@mAkP(4(C8B_b0nXthoI#8lcRCqa33Boj|2yn|DdSoNOo zOg7Vzio_FX-%v6+KAb%0OGi`j$xON+mRfR>&*vvEf!b-DbyEI3pC}x^`$m(Be%T1& zR5u*%Toc|n7#auf2!~d5Zt}Y85jAuF_-m=z1 zWxVUJF6)KD0&~a4?w+pjn)dFVjRPUf+<}#RZvr@=(BR)(>#wbY-}t9U9icg5Ifx709|*gnQASot(|2#4g-dhQyEauahX5#doRKKMhHy5cDb9b!8JKBtmNh40X{c$cJD++r{k^_)Nn^_gULLtI)+?}kAr!R53~rK{ zoX)}S9?1_Z97av#=ZSu-vxheYD^2gWB_+EQIJ_ME(1a@ z&ULE2ei#ZMh}02w;4#x1i^sgZeM4U8AE3+2fN)Pmap)1avN&zU^MFtnj4`5QktXMh zCt~cu+D!W3l0aQ`0QW#)QpdiOP0(@RPiSky)(;*npD{HNSnWO^k zXWk(rCo_v4Bc>Pj- z3$>K-$^r7DDfqjXc!`>Uf{@xD6yo*Qf-Ycf^HwuwS|Q%LT7rXIJFa%_3VpTX%?`wc zA+^6K#OvqfOlS&omSCK2Z=ZK?sC{VTAmA#GZ!UGF7+=1wW9{PR`DXKHU=%bv6^vHcI*gSG>!vYNHS=e9)8A+OFXNq{&94UR{ykzi#}EQdeovQNSh7z4-}>A2 z8NFZkcij`ZtKp>nH1s|sJoCR!d?3l2q0;MBv}8Ub?8+Ozlj17E z_#OH>`uZw-_!_iWz$lXxRLVwvF0%UbglntlK6GCPKAwd}k}Cyb41AT&U}OM&4SlT< zpW%W$fYNp;8`5M;#Q+Ps7v0;0&uu~CK@YDyoR;}M^i}lLR(zlf2DCuoIu0l{mHhdl 
zf*9R{?g`*STxih+sj6!KMR%jSgZO9{494Y#l38rRau3RJ^X+7!yU<;&_}G^>9+{I} zK@7FD;-fp!oo)C`m^WG}3p}$8Ky(MXV-TMh^VTP2q4UiE5^dvIgIgNbC(-Tb_FjC> z%o`WfjFoIc%w${;eFc4GD?XU!&4b|xo3A^TJV!;hpj+DUQ95rgcczHssszMB zeF1%;1s}KbRtuHmP-#jxqnq3CNqn|7%t8S=x(VG>htKK3x`fbg;8ja@b)nE(d>(zi z9iPa9?Np(IRJ74ScE$3QT8QQS9Qs@UpW5>_t)%j{ira#66>t8_|tz_$)tLRB`Ys z!gdL|0o~A!Px`Ucm*(}YGIiP-b$va$ekDHa=dI63`6fa`;+Sb*xDH*{gl_}Px`V$K zUAq!r6R0tpYff1xPZ^u^%6~e{&k(xduw;FIPiQEFu0hvy;Clpqi`ln;L{MyV&57h{ zboCB=*&t#u2lz#-0P!<>>NGd?TV+il(4C)hrq1#PVtM=}vraA{&dmJ~cYxoLDYH zmu1gEl2!QnhTm+il4Gvf;pD_{F}gT} zFLgq39T^H_<PJ9uh0PvZOi|530A-b>)Ul*BMM|c6cU^Tu> z0zE;V1oEATni;~J44jY7Z^QRZ3Nuhl6F7p7^y1qnAKGN;Jak?gzP^&pPPX3=IsrJQ zP_g8VIK*}0%Pj?Uk~ekOk8`rKio3H4w zIF8JBIPN{EygX?3BVAofgsp1{#%da(EsZq|b&b)QmcU4~CN|6lN22~%G}usIUsvBy zQ`cBi7x32AwbnPZ`WtHl%?-=o53uN7;Mr6gXlmv?50!-y`!_{Wa5v#lSx5V}P*3;Z zy71t-?*8!Rf%g9X&;Y*Jkeb5k5zYVwd-zg={00Yj^tTV4QRpGvD(S>08gNeqU$ls) z_xfVV0||U;pZ2B4BK3_;zLDWZws|<(R6i08L|aDc8~uJZFx=D-YYsNn2b&ri8vOOa z;rgZ#IF)UQu~D`m)(~Y`e@jCn+*)WEQNBDfIn5^aLlW@`xT+$%htnBJ?}g*b3Eu=9 zox&03C<~cOvv5wHh((|SZeS-`h& zaxTG0S9M67_%D1Z2&5r({WhIg-_=mx28ZKwEBN}hjKt~!^$m3+HBG^0wx$7Ks0lU> zL#4O0u=ULiP~?$dL6z3G)Hc=oRqDGwSld+BNbCEbRNwR}nC!0EKl>_Jopcp!aARj@ zXmIe@x&-!dyZ@D&pP8?I<#N+oUmwP;Fa8$Ank4LCdmHPet8Mt`P&mgFzrlSW;R;2; zor{(R#XA=bigzyjvO5?2QKNXnDCZUipRo>d2}8Vm0q3DPw=b%_a8WOrfmDbK_*)MAF|Weec3mwLCVw}aoTKEO3~T+V7%w^Fm#s(nHQX#BSBTfo-yI<9mpPxr zE5u9hl8AR|^6se2qPipzQNWkpCE@WUcS$twR*1JO$mNPp)J+P!{?{=_5d25~Q3NOg z6ak6=Mc^Naz*T3Ji2F;uu)nm#;rMK$u%8$kY4$h6R#Hu*#Si<5O^ppTk%qcpP0ZiI z!iHK5w%!`^_LqXK4b8O;4FSddCF0rKB5XF&{ltIxej?pp`dIBRWuF@Mw+{@$3&oG- zu_3w7^%1=`$zIGut<_ueA-rX*|41ICwE_sA zSj2>9encTH$*a{gJaNEcK%U(yvJ0>E+PtLW@eyZ$H1{(ZIJCt>G;+~;jd==IT@4&=2VRL&5ruKLu_q*0ZiW0p3_nTfr zu3x(DbsceSbJe@7&bOQoJ2RmBf05&Fj-Na3c5tBiui9Y%y?+nbFSAGN%k3xF{$P9D zc8zV1t;06o`g5@Uci1{)U1oXD@~GuH%Y>!NvcUYm&Ci-|h4%sdpz~i3Nzi{30g3=c zfFeKa|J(&^dOVv|?P$VM9-5sAj&Uis4H&hp znB^qDH5;oIcpco9qc3bFw;58?u*@vwtDM_JI91!(ByaG(lG~^qqht|vDK`Y1+OSB> z72F^&tBbv)ytslJ$TBM6m&3Rr%cvmE5@Q+H4~!~3hiEUklIsJeVodi|aJ|`d^Bxn( z)G$|Y>x*Mt%=G}HYBLhESkA2@Y{WdD{5CvESJW@MggX=Xw9KkWwNlA-1FMGkNSqbi z8NjLDhI1I#W*N1rrJP$s(pDEp(b8}T_*DBrshTUeu57|e^&;X^&UI!ZEbRNG2s^lr zIX6%#l5(!Ss8UvNtF?14Dpf7#R%LUqAqC~!$}FG!g-)K7t|%_yN^UtYYRQV2#U)%D zaEbz$|58yVtL0p4HkTTu!RvpYp$@s8bzSc2b^e$0ht5l#XE^6MUUuBz7;+qEf7*V& zz1M!4?M>TlwozM!^#kj@u+pDzdB}2srO$GT`Bn3$%*#yw%XGgfX{u-bjk%3s8N2bP z#;c5*j2^?gpuK+&L{I-w1SkTZ00f#jP$$|bJx3Mx-i2F#_+FawgX2G-KeTdjcmXZF zl9Y$c-kOBIid=8YQ5o?*dcTbu%Y7;(ZuuAUhW7X9_l?{rdA=&VwiJ`rd`O3Whkn<@ zjo>F|qOJfjoc5D5usr%+kYn-Nth|6Tzs*ALqIZK_48lrk4aB1Ik5x4uH=^I7-?nj4 zc<(1YD-|=0-5K8dp?Afv44tWENB~T*Bvq(XE$(!iSR&E!(gw3xXnF@)#Ns{;${0;QRDsCrfK1#Y_ zK*wf^RFk@PnAg$kZQKq;EQ?Sh@oV(!Ah%sXxMqIHiGGECHNb7duc$2nnO++Kwz<5b zMz5jQHgQ|$CUcsaU0y}6uHd$iHm`&<*0%t4q>l41(JuqsW-F?a^aaS8l8Wx1SI{c~ zZWB>VAiUO>t5Z|%8ec{)*Kr%IXpxjwHk3+2zd*m($qiZ20kc>cm>V z@&bCHh3m(Up=HnX3$Cjx3_s7K=T~BrP;*wG@H6zYAhsEm?bP|FzQw#d{wext5ZjZ= zh7~1|^iRB2V3q#nIrQ9eY-?(j&Qhh>)#(-cx}Tt*)L=7H3#IV{3+DcKhK2O_r{lRV z#?g<_j|1536j=E7@m&N~vH@?ITk(75d@JN)r}!*-wjCRznn5)OuM$Y5Jv@V+S%VE$ zZIF6poHDP(2hR0^;%8Do<>iv#+I!hGr9VK3KXnpyv%+IJ=KnlUx8|sIjijW z5&BUZwth9Yk^D*YWHYvn1vKi ziz>_iW9YHv*f!THd3;g}z}jWtadC97eA7T|2;W2BYsIF!<}|f<6g?WicD<@3U3DS~ z*i}<$^Ih~^y#8Ncdkne0>$(Xv0Pb)t2P=`U!ukLC&X}{yx!7rN{LFEmW3OYKW0}Kd z|G@rZ`#o?6V6T0hT@S(NKZ*cFfFeKynrfQZ`r(>}NNgB%G{#~z(fVLr(?~=8a4go8_oh0~8fdOg>aZEV&Pr|H&_?^RuJ*ZaSa&U;ef9hhnC6yFY*} z#Ww8p`iaR{!Pr=vcL+oUzn+%f+UDP$Y4g^ly>fkf!gJZ9QU6z8+Zb#qd|X>!TMt$T z`4TsXk882X#q9IiYB0Yj&(18>|22G4)c;X6eL3n13lC@c+lP|cJT*c=ot|Jqk-yu4 
zuRY*CNiDqEcZGPVn!X%0eX1Rf8lY<5vnET4`pT^OKYyWV#ZRvKKLfx1e;1X!`^i}N z&{9wYC<1v1+%VrL>;_!bYFTvTM2F*s60#d$(KR;vV|6WoV9iLZDNqB}3+iizhZ|xw zP5zPQ;gLuK*p+9+-2j*j*28K)&{`j;t!ogJ^cQ+I)z&x6!D0i^+7sS5&;!;P#xj{n zFoXbx>?7C`HeA}9@R5sPK7W0n;a|bL1F^y|OG6NHTNl3C#Qq5%TmeplIo`A{k&MB* zyuPlsu~sku(Zd@#^nDa|BdDIYkCmRcfMiEtC^R@UNVL3#x;}bi0>ULkAEmM8n^@n@fJ3>7xa8ThgHUt&^@m#<3AYqC#Mz87^;EBPZ@dWrfzIp##E^L;vHOOvU#{GQx(| z#H26GTf$6$rIm3|>*i~2X^ad1@Oziki5kw*UTg%1ja7r?ab9s2w)&kIiH}aD*jSa7 zXvR7Vb_80za|*wmY%lEeZtqXAlaZa?4s3sCGqz;|FAvzkOe8avZYPTOy>Od&c$}>P z2fo#24rY!%alFY9*af$Z~Utai}TUyZT1ureI zMOFzdf0Ghiwbt!HysuKnrwNt?_%w&WnPFGOU$CpProFqTRedAIyCluB-lG>}_8=Pt zJ46{@cp?%{@CK4?or2%261&%7&zL39J0t}ml z;Gq#esab^x)WR&Ja9ao#&XQG#dbMpKf8Mr`Xm_GRZVUqN7U5d*Vz@(HcsuDIXAYwI z<7*B=T3++GfI2&`ADgf6ew;xh!&aQ{Hx+5s>(xBXCAJcg}p zih8Uuot%v9n_@dbt8%6cjCUSBTwN9&i;u@Z1urb^w0N6<7caQPbwf{Nm>tJaj`KuW z-tK-qysC%UbUiGY#6Xb@>}KE~IA3LOXQe3+X=sYTot0>S4fLzfd3r}eD6Vf)a(rq6-puyk*-jT;Oh@s~5n1>0TX4drT-rC1 zN`evhL`<6ElSOB24p8jN_r=W$JFQ_EXtyp02vwZ&hjs!>^G=-TO7j7_` z(kW@A&MLIlPPs_c);kjXX?ni(J$2pIVhCmB){=bj%c@vB=I!kpf-MBRy_;cWruKFu z__ai`NQarpB?+tpIt31ZlGnel<5q1yBqBwSOI-W#8UjrUIc4xD^| zkyXf*IHrhdi9pO?sKbGvQoG_QAVn4;i^L0W$*B2(_380Qij0^_EHLn4J@&AH*9E(< zaWW7?&!2`N7x3cqEOZyGUSgNRi{Y3eTT~L9xD8L>W%3dj4km=1nu6^plHn?^N`w2b zk@m`QhCch0g-IdEa3h%DW`R;(ai@TUYM|O9C)vCNgO+uHH%5 z5Z#g3k$~eEoDslgIC&8I{^6-nY(N5U$HA%0a1y))xYJ%9J1|TkunQv!R7--`s}Mhv z5!j2f1tu&~UJ~gjWCCOXc1jPBnGqhZp#poMqudr6=nIFobPtLCA*s8lhpEruoqBTq zZ+-y5fAk+kfFeKMr=>F{j^wbh_>(kQJ6@%h~#e^o$Z z(-{_q?v7MYHSWRH8nRj z*R?eI*@n8Pzupfl^-WE&Al`O@g^4^n@;9Me3}p0J$7UjcHnZee{f>_Sjj%lDsF0|N_bIIYY6;B z8IV?~8Ri5cjgfCUp~+-)3^$Ct)+ns`lZo!cND}v(=?t_uc61te4#{VM;TGsY!fYNh^vp zo0%dVj9Mn7VsD17OdgZab>!q2wilwXbHX=^w47UBwgj6w0N0udS)ejSSxjakDk%!1 cMmt~Qr1AhgjSefPszv$0Rm}H&u>Lpxf3S{{(*OVf diff --git a/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-URL).golden.json b/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-URL).golden.json index f4739ca..a981560 100644 --- a/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-URL).golden.json +++ b/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-URL).golden.json @@ -26,194 +26,168 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjU=", "invocationID": "571d0839-fd63-4442-bb4d-61f7bfa4ddae", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 9, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 9, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjI5", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMw", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMx", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMy", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMz", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjM0", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 2, - "id": "TWlzc0RldGFpOjM1", - "reason": "NOT_CACHED" - } - ], - "misses": 2, - "saveTimeInMs": 0, - "sizeInBytes": 1549056 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1715643804513, - "id": "QWN0aW9uRGF0YToxMg==", - "lastEndedMs": 1715643806245, - "mnemonic": "TestRunner", - "systemTime": 397, - "userTime": 842 + "count": 0, + "id": "TWlzc0RldGFpOjI5", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - 
"actionsExecuted": 1, - "firstStartedMs": 1715643803398, - "id": "QWN0aW9uRGF0YToxMw==", - "lastEndedMs": 1715643803400, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 - } - ], - "actionsCreated": 22, - "actionsCreatedNotIncludingAspects": 22, - "actionsExecuted": 2, - "id": "QWN0aW9uU3VtbWFyeTo1", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 2, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTQ=", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjMw", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTU=", - "name": "internal" + "count": 0, + "id": "TWlzc0RldGFpOjMx", + "reason": "DIFFERENT_DEPS" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6MTY=", - "name": "darwin-sandbox" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", - "outputArtifactsFromActionCache": [ - { - "count": 11, - "id": "RmlsZXNNZXRyaWM6MTk=", - "sizeInBytes": 690376 - } - ], - "outputArtifactsSeen": [ + "count": 0, + "id": "TWlzc0RldGFpOjMy", + "reason": "DIFFERENT_ENVIRONMENT" + }, { - "count": 15, - "id": "RmlsZXNNZXRyaWM6MTg=", - "sizeInBytes": 692189 - } - ], - "sourceArtifactsRead": [ + "count": 0, + "id": "TWlzc0RldGFpOjMz", + "reason": "DIFFERENT_FILES" + }, { "count": 0, - "id": "RmlsZXNNZXRyaWM6MTc=", - "sizeInBytes": 0 - } - ], - "topLevelArtifacts": [ + "id": "TWlzc0RldGFpOjM0", + "reason": "CORRUPTED_CACHE_ENTRY" + }, { - "count": 12392, - "id": "RmlsZXNNZXRyaWM6MjA=", - "sizeInBytes": 1157054839 + "count": 2, + "id": "TWlzc0RldGFpOjM1", + "reason": "NOT_CACHED" } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 2893, - "actionLookupValueCount": 5586, - "actionLookupValueCountNotIncludingAspects": 5585, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", - "inputFileConfiguredTargetCount": 3091, - "otherConfiguredTargetCount": 25, - "outputArtifactCount": 2405, - "outputFileConfiguredTargetCount": 0, - "postInvocationSkyframeNodeCount": 117718 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", - "numAnalyses": 6, - "numBuilds": 4 + ], + "misses": 2, + "saveTimeInMs": 0, + "sizeInBytes": 1549056 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643804513, + "id": "QWN0aW9uRGF0YToxMg==", + "lastEndedMs": 1715643806245, + "mnemonic": "TestRunner", + "systemTime": 397, + "userTime": 842 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643803398, + "id": "QWN0aW9uRGF0YToxMw==", + "lastEndedMs": 1715643803400, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + } + ], + "actionsCreated": 22, + "actionsCreatedNotIncludingAspects": 22, + "actionsExecuted": 2, + "id": "QWN0aW9uU3VtbWFyeTo1", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 2, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTQ=", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTU=", + "name": "internal" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6MTY=", + "name": "darwin-sandbox" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", + "outputArtifactsFromActionCache": { + "count": 11, + "id": "RmlsZXNNZXRyaWM6MTk=", + "sizeInBytes": 690376 + }, + "outputArtifactsSeen": { + "count": 15, + "id": "RmlsZXNNZXRyaWM6MTg=", + "sizeInBytes": 692189 + }, + "sourceArtifactsRead": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6MTc=", + "sizeInBytes": 
0 + }, + "topLevelArtifacts": { + "count": 12392, + "id": "RmlsZXNNZXRyaWM6MjA=", + "sizeInBytes": 1157054839 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 2893, + "actionLookupValueCount": 5586, + "actionLookupValueCountNotIncludingAspects": 5585, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", + "inputFileConfiguredTargetCount": 3091, + "otherConfiguredTargetCount": 25, + "outputArtifactCount": 2405, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117718 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", + "numAnalyses": 6, + "numBuilds": 4 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczo1", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczo1", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6NQ==", - "packageLoadMetrics": [], - "packagesLoaded": 1 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczo1", - "targetsConfigured": 16, - "targetsConfiguredNotIncludingAspects": 16, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 1101, - "cpuTimeInMs": 3287, - "executionPhaseTimeInMs": 2849, - "id": "VGltaW5nTWV0cmljczo1", - "wallTimeInMs": 3149 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczo1", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6NQ==", + "packageLoadMetrics": [], + "packagesLoaded": 1 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczo1", + "targetsConfigured": 16, + "targetsConfiguredNotIncludingAspects": 16, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 1101, + "cpuTimeInMs": 3287, + "executionPhaseTimeInMs": 2849, + "id": "VGltaW5nTWV0cmljczo1", + "wallTimeInMs": 3149 + } }, "problems": [ { @@ -267,54 +241,54 @@ "targets": [ { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMTU=", - "label": "//next.js:next", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTY=", - "label": "//next.js:next_dev", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTc=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//next.js/public:public", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTg=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//next.js:next", "success": true, - "targetKind": "directory_path rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTk=", - "label": "//next.js:jest_config", + "label": "//next.js:next_dev", "success": true, - "targetKind": "js_library rule", + 
"targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMjA=", - "label": "//next.js/public:public", + "label": "//next.js:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -323,127 +297,127 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjE=", - "label": "//next.js:next_start", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjI=", - "label": "//next.js:package_json", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjM=", - "label": "//next.js/pages:jest_test", + "label": "//next.js:build_smoke_test", "success": true, - "targetKind": "jest_test rule", + "targetKind": "js_test rule", "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjQ=", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjU=", - "label": "//next.js/pages/api:api", + "label": "//next.js:jest_config", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjY=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//next.js/pages:pages", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjc=", - "label": "//next.js:next_js_binary", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjg=", - "label": "//next.js/pages:pages", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjk=", - "label": "//next.js/pages:specs", + "label": "//next.js:tsconfig", "success": true, - "targetKind": "ts_project rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMzA=", - "label": "//next.js:eslintrc", + "label": "//next.js/pages:jest_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "jest_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzE=", - "label": "//next.js:tsconfig", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "ts_config rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": 
"VGFyZ2V0UGFpcjoyMzI=", - "label": "//next.js/styles:styles", + "label": "//next.js/pages:specs", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzM=", - "label": "//next.js:build_smoke_test", + "label": "//next.js:build_test", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMzQ=", - "label": "//next.js:build_test", + "label": "//next.js:next_start", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "_js_run_devserver rule", + "testSize": "UNKNOWN" } ], "testCollection": [ diff --git a/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-UUID).golden.json b/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-UUID).golden.json index f4739ca..a981560 100644 --- a/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-UUID).golden.json +++ b/internal/graphql/testdata/snapshots/FindBuildByUUID/found-(by-UUID).golden.json @@ -26,194 +26,168 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjU=", "invocationID": "571d0839-fd63-4442-bb4d-61f7bfa4ddae", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 9, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 9, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjI5", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMw", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMx", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMy", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMz", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjM0", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 2, - "id": "TWlzc0RldGFpOjM1", - "reason": "NOT_CACHED" - } - ], - "misses": 2, - "saveTimeInMs": 0, - "sizeInBytes": 1549056 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1715643804513, - "id": "QWN0aW9uRGF0YToxMg==", - "lastEndedMs": 1715643806245, - "mnemonic": "TestRunner", - "systemTime": 397, - "userTime": 842 + "count": 0, + "id": "TWlzc0RldGFpOjI5", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1715643803398, - "id": "QWN0aW9uRGF0YToxMw==", - "lastEndedMs": 1715643803400, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 - } - ], - "actionsCreated": 22, - "actionsCreatedNotIncludingAspects": 22, - "actionsExecuted": 2, - "id": "QWN0aW9uU3VtbWFyeTo1", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 2, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTQ=", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjMw", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTU=", - "name": "internal" + "count": 0, + "id": "TWlzc0RldGFpOjMx", + "reason": "DIFFERENT_DEPS" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6MTY=", - "name": "darwin-sandbox" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", - 
"outputArtifactsFromActionCache": [ - { - "count": 11, - "id": "RmlsZXNNZXRyaWM6MTk=", - "sizeInBytes": 690376 - } - ], - "outputArtifactsSeen": [ + "count": 0, + "id": "TWlzc0RldGFpOjMy", + "reason": "DIFFERENT_ENVIRONMENT" + }, { - "count": 15, - "id": "RmlsZXNNZXRyaWM6MTg=", - "sizeInBytes": 692189 - } - ], - "sourceArtifactsRead": [ + "count": 0, + "id": "TWlzc0RldGFpOjMz", + "reason": "DIFFERENT_FILES" + }, { "count": 0, - "id": "RmlsZXNNZXRyaWM6MTc=", - "sizeInBytes": 0 - } - ], - "topLevelArtifacts": [ + "id": "TWlzc0RldGFpOjM0", + "reason": "CORRUPTED_CACHE_ENTRY" + }, { - "count": 12392, - "id": "RmlsZXNNZXRyaWM6MjA=", - "sizeInBytes": 1157054839 + "count": 2, + "id": "TWlzc0RldGFpOjM1", + "reason": "NOT_CACHED" } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 2893, - "actionLookupValueCount": 5586, - "actionLookupValueCountNotIncludingAspects": 5585, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", - "inputFileConfiguredTargetCount": 3091, - "otherConfiguredTargetCount": 25, - "outputArtifactCount": 2405, - "outputFileConfiguredTargetCount": 0, - "postInvocationSkyframeNodeCount": 117718 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", - "numAnalyses": 6, - "numBuilds": 4 + ], + "misses": 2, + "saveTimeInMs": 0, + "sizeInBytes": 1549056 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643804513, + "id": "QWN0aW9uRGF0YToxMg==", + "lastEndedMs": 1715643806245, + "mnemonic": "TestRunner", + "systemTime": 397, + "userTime": 842 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643803398, + "id": "QWN0aW9uRGF0YToxMw==", + "lastEndedMs": 1715643803400, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + } + ], + "actionsCreated": 22, + "actionsCreatedNotIncludingAspects": 22, + "actionsExecuted": 2, + "id": "QWN0aW9uU3VtbWFyeTo1", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 2, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTQ=", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTU=", + "name": "internal" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6MTY=", + "name": "darwin-sandbox" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", + "outputArtifactsFromActionCache": { + "count": 11, + "id": "RmlsZXNNZXRyaWM6MTk=", + "sizeInBytes": 690376 + }, + "outputArtifactsSeen": { + "count": 15, + "id": "RmlsZXNNZXRyaWM6MTg=", + "sizeInBytes": 692189 + }, + "sourceArtifactsRead": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6MTc=", + "sizeInBytes": 0 + }, + "topLevelArtifacts": { + "count": 12392, + "id": "RmlsZXNNZXRyaWM6MjA=", + "sizeInBytes": 1157054839 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 2893, + "actionLookupValueCount": 5586, + "actionLookupValueCountNotIncludingAspects": 5585, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", + "inputFileConfiguredTargetCount": 3091, + "otherConfiguredTargetCount": 25, + "outputArtifactCount": 2405, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117718 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", + "numAnalyses": 6, + "numBuilds": 4 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczo1", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczo1", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - 
"packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6NQ==", - "packageLoadMetrics": [], - "packagesLoaded": 1 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczo1", - "targetsConfigured": 16, - "targetsConfiguredNotIncludingAspects": 16, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 1101, - "cpuTimeInMs": 3287, - "executionPhaseTimeInMs": 2849, - "id": "VGltaW5nTWV0cmljczo1", - "wallTimeInMs": 3149 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczo1", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6NQ==", + "packageLoadMetrics": [], + "packagesLoaded": 1 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczo1", + "targetsConfigured": 16, + "targetsConfiguredNotIncludingAspects": 16, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 1101, + "cpuTimeInMs": 3287, + "executionPhaseTimeInMs": 2849, + "id": "VGltaW5nTWV0cmljczo1", + "wallTimeInMs": 3149 + } }, "problems": [ { @@ -267,54 +241,54 @@ "targets": [ { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMTU=", - "label": "//next.js:next", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTY=", - "label": "//next.js:next_dev", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTc=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//next.js/public:public", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTg=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//next.js:next", "success": true, - "targetKind": "directory_path rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTk=", - "label": "//next.js:jest_config", + "label": "//next.js:next_dev", "success": true, - "targetKind": "js_library rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMjA=", - "label": "//next.js/public:public", + "label": "//next.js:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -323,127 +297,127 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjE=", - "label": "//next.js:next_start", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjI=", - "label": "//next.js:package_json", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": 
"", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjM=", - "label": "//next.js/pages:jest_test", + "label": "//next.js:build_smoke_test", "success": true, - "targetKind": "jest_test rule", + "targetKind": "js_test rule", "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjQ=", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjU=", - "label": "//next.js/pages/api:api", + "label": "//next.js:jest_config", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjY=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//next.js/pages:pages", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjc=", - "label": "//next.js:next_js_binary", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjg=", - "label": "//next.js/pages:pages", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjk=", - "label": "//next.js/pages:specs", + "label": "//next.js:tsconfig", "success": true, - "targetKind": "ts_project rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMzA=", - "label": "//next.js:eslintrc", + "label": "//next.js/pages:jest_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "jest_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzE=", - "label": "//next.js:tsconfig", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "ts_config rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzI=", - "label": "//next.js/styles:styles", + "label": "//next.js/pages:specs", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzM=", - "label": "//next.js:build_smoke_test", + "label": "//next.js:build_test", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMzQ=", - "label": "//next.js:build_test", + "label": "//next.js:next_start", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "_js_run_devserver rule", + "testSize": "UNKNOWN" } ], "testCollection": [ diff --git 
a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-analysis-failed-target.golden.json b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-analysis-failed-target.golden.json index 42d2be1..1c2a21b 100644 --- a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-analysis-failed-target.golden.json +++ b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-analysis-failed-target.golden.json @@ -15,194 +15,168 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjU=", "invocationID": "571d0839-fd63-4442-bb4d-61f7bfa4ddae", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 9, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 9, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjU=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjI5", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMw", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMx", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMy", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjMz", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjM0", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 2, - "id": "TWlzc0RldGFpOjM1", - "reason": "NOT_CACHED" - } - ], - "misses": 2, - "saveTimeInMs": 0, - "sizeInBytes": 1549056 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1715643804513, - "id": "QWN0aW9uRGF0YToxMg==", - "lastEndedMs": 1715643806245, - "mnemonic": "TestRunner", - "systemTime": 397, - "userTime": 842 + "count": 0, + "id": "TWlzc0RldGFpOjI5", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1715643803398, - "id": "QWN0aW9uRGF0YToxMw==", - "lastEndedMs": 1715643803400, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 - } - ], - "actionsCreated": 22, - "actionsCreatedNotIncludingAspects": 22, - "actionsExecuted": 2, - "id": "QWN0aW9uU3VtbWFyeTo1", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 2, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTQ=", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjMw", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTU=", - "name": "internal" + "count": 0, + "id": "TWlzc0RldGFpOjMx", + "reason": "DIFFERENT_DEPS" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6MTY=", - "name": "darwin-sandbox" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", - "outputArtifactsFromActionCache": [ - { - "count": 11, - "id": "RmlsZXNNZXRyaWM6MTk=", - "sizeInBytes": 690376 - } - ], - "outputArtifactsSeen": [ + "count": 0, + "id": "TWlzc0RldGFpOjMy", + "reason": "DIFFERENT_ENVIRONMENT" + }, { - "count": 15, - "id": "RmlsZXNNZXRyaWM6MTg=", - "sizeInBytes": 692189 - } - ], - "sourceArtifactsRead": [ + "count": 0, + "id": "TWlzc0RldGFpOjMz", + "reason": "DIFFERENT_FILES" + }, { "count": 0, - "id": "RmlsZXNNZXRyaWM6MTc=", - "sizeInBytes": 0 - } - ], - "topLevelArtifacts": [ + "id": "TWlzc0RldGFpOjM0", + "reason": "CORRUPTED_CACHE_ENTRY" + }, { - "count": 12392, - "id": "RmlsZXNNZXRyaWM6MjA=", - "sizeInBytes": 1157054839 + "count": 2, 
+ "id": "TWlzc0RldGFpOjM1", + "reason": "NOT_CACHED" } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 2893, - "actionLookupValueCount": 5586, - "actionLookupValueCountNotIncludingAspects": 5585, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", - "inputFileConfiguredTargetCount": 3091, - "otherConfiguredTargetCount": 25, - "outputArtifactCount": 2405, - "outputFileConfiguredTargetCount": 0, - "postInvocationSkyframeNodeCount": 117718 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", - "numAnalyses": 6, - "numBuilds": 4 + ], + "misses": 2, + "saveTimeInMs": 0, + "sizeInBytes": 1549056 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643804513, + "id": "QWN0aW9uRGF0YToxMg==", + "lastEndedMs": 1715643806245, + "mnemonic": "TestRunner", + "systemTime": 397, + "userTime": 842 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1715643803398, + "id": "QWN0aW9uRGF0YToxMw==", + "lastEndedMs": 1715643803400, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + } + ], + "actionsCreated": 22, + "actionsCreatedNotIncludingAspects": 22, + "actionsExecuted": 2, + "id": "QWN0aW9uU3VtbWFyeTo1", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 2, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTQ=", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTU=", + "name": "internal" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6MTY=", + "name": "darwin-sandbox" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjU=", + "outputArtifactsFromActionCache": { + "count": 11, + "id": "RmlsZXNNZXRyaWM6MTk=", + "sizeInBytes": 690376 + }, + "outputArtifactsSeen": { + "count": 15, + "id": "RmlsZXNNZXRyaWM6MTg=", + "sizeInBytes": 692189 + }, + "sourceArtifactsRead": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6MTc=", + "sizeInBytes": 0 + }, + "topLevelArtifacts": { + "count": 12392, + "id": "RmlsZXNNZXRyaWM6MjA=", + "sizeInBytes": 1157054839 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 2893, + "actionLookupValueCount": 5586, + "actionLookupValueCountNotIncludingAspects": 5585, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6NQ==", + "inputFileConfiguredTargetCount": 3091, + "otherConfiguredTargetCount": 25, + "outputArtifactCount": 2405, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117718 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NQ==", + "numAnalyses": 6, + "numBuilds": 4 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczo1", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczo1", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6NQ==", - "packageLoadMetrics": [], - "packagesLoaded": 1 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczo1", - "targetsConfigured": 16, - "targetsConfiguredNotIncludingAspects": 16, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 1101, - "cpuTimeInMs": 3287, - "executionPhaseTimeInMs": 2849, - "id": "VGltaW5nTWV0cmljczo1", - "wallTimeInMs": 3149 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczo1", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": 
null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6NQ==", + "packageLoadMetrics": [], + "packagesLoaded": 1 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczo1", + "targetsConfigured": 16, + "targetsConfiguredNotIncludingAspects": 16, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 1101, + "cpuTimeInMs": 3287, + "executionPhaseTimeInMs": 2849, + "id": "VGltaW5nTWV0cmljczo1", + "wallTimeInMs": 3149 + } }, "problems": [ { @@ -256,54 +230,54 @@ "targets": [ { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMTU=", - "label": "//next.js:next", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTY=", - "label": "//next.js:next_dev", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTc=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//next.js/public:public", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTg=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//next.js:next", "success": true, - "targetKind": "directory_path rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTk=", - "label": "//next.js:jest_config", + "label": "//next.js:next_dev", "success": true, - "targetKind": "js_library rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMjA=", - "label": "//next.js/public:public", + "label": "//next.js:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -312,127 +286,127 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjE=", - "label": "//next.js:next_start", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjI=", - "label": "//next.js:package_json", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjM=", - "label": "//next.js/pages:jest_test", + "label": "//next.js:build_smoke_test", "success": true, - "targetKind": "jest_test rule", + "targetKind": "js_test rule", "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMjQ=", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjU=", - "label": "//next.js/pages/api:api", + "label": 
"//next.js:jest_config", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjY=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//next.js/pages:pages", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjc=", - "label": "//next.js:next_js_binary", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMjg=", - "label": "//next.js/pages:pages", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMjk=", - "label": "//next.js/pages:specs", + "label": "//next.js:tsconfig", "success": true, - "targetKind": "ts_project rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMzA=", - "label": "//next.js:eslintrc", + "label": "//next.js/pages:jest_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "jest_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzE=", - "label": "//next.js:tsconfig", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "ts_config rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzI=", - "label": "//next.js/styles:styles", + "label": "//next.js/pages:specs", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMzM=", - "label": "//next.js:build_smoke_test", + "label": "//next.js:build_test", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMzQ=", - "label": "//next.js:build_test", + "label": "//next.js:next_start", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "_js_run_devserver rule", + "testSize": "UNKNOWN" } ], "testCollection": [ diff --git a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-ignoring-target-and-error-progress-if-action-has-output.golden.json b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-ignoring-target-and-error-progress-if-action-has-output.golden.json index adf69f9..54df876 100644 --- a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-ignoring-target-and-error-progress-if-action-has-output.golden.json +++ b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-bazel-invocation-ignoring-target-and-error-progress-if-action-has-output.golden.json @@ -12,178 +12,152 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjM=", 
"invocationID": "df7178e2-a815-4654-a409-d18e845d1e35", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 4, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjM=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 4, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjM=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjE1", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjE2", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjE3", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjE4", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjE5", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjIw", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 1, - "id": "TWlzc0RldGFpOjIx", - "reason": "NOT_CACHED" - } - ], - "misses": 1, - "saveTimeInMs": 0, - "sizeInBytes": 1549398 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714696187752, - "id": "QWN0aW9uRGF0YTo3", - "lastEndedMs": 1714696187754, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 - } - ], - "actionsCreated": 0, - "actionsCreatedNotIncludingAspects": 0, - "actionsExecuted": 1, - "id": "QWN0aW9uU3VtbWFyeToz", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6OA==", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjE1", + "reason": "UNKNOWN" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6OQ==", - "name": "internal" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjM=", - "outputArtifactsFromActionCache": [ + "count": 0, + "id": "TWlzc0RldGFpOjE2", + "reason": "DIFFERENT_ACTION_KEY" + }, { - "count": 3, - "id": "RmlsZXNNZXRyaWM6MTE=", - "sizeInBytes": 26106 - } - ], - "outputArtifactsSeen": [ + "count": 0, + "id": "TWlzc0RldGFpOjE3", + "reason": "DIFFERENT_DEPS" + }, { - "count": 5, - "id": "RmlsZXNNZXRyaWM6MTA=", - "sizeInBytes": 26241 - } - ], - "sourceArtifactsRead": [ + "count": 0, + "id": "TWlzc0RldGFpOjE4", + "reason": "DIFFERENT_ENVIRONMENT" + }, { "count": 0, - "id": "RmlsZXNNZXRyaWM6OQ==", - "sizeInBytes": 0 - } - ], - "topLevelArtifacts": [ + "id": "TWlzc0RldGFpOjE5", + "reason": "DIFFERENT_FILES" + }, { "count": 0, - "id": "RmlsZXNNZXRyaWM6MTI=", - "sizeInBytes": 0 + "id": "TWlzc0RldGFpOjIw", + "reason": "CORRUPTED_CACHE_ENTRY" + }, + { + "count": 1, + "id": "TWlzc0RldGFpOjIx", + "reason": "NOT_CACHED" } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 0, - "actionLookupValueCount": 0, - "actionLookupValueCountNotIncludingAspects": 0, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6Mw==", - "inputFileConfiguredTargetCount": 0, - "otherConfiguredTargetCount": 0, - "outputArtifactCount": 0, - "outputFileConfiguredTargetCount": 0, - "postInvocationSkyframeNodeCount": 117503 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6Mw==", - "numAnalyses": 13, - "numBuilds": 13 + ], + "misses": 1, + "saveTimeInMs": 0, + "sizeInBytes": 1549398 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714696187752, + "id": "QWN0aW9uRGF0YTo3", + "lastEndedMs": 1714696187754, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + } + ], + "actionsCreated": 0, + "actionsCreatedNotIncludingAspects": 0, + "actionsExecuted": 1, + "id": 
"QWN0aW9uU3VtbWFyeToz", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6OA==", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6OQ==", + "name": "internal" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjM=", + "outputArtifactsFromActionCache": { + "count": 3, + "id": "RmlsZXNNZXRyaWM6MTE=", + "sizeInBytes": 26106 + }, + "outputArtifactsSeen": { + "count": 5, + "id": "RmlsZXNNZXRyaWM6MTA=", + "sizeInBytes": 26241 + }, + "sourceArtifactsRead": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6OQ==", + "sizeInBytes": 0 + }, + "topLevelArtifacts": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6MTI=", + "sizeInBytes": 0 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 0, + "actionLookupValueCount": 0, + "actionLookupValueCountNotIncludingAspects": 0, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6Mw==", + "inputFileConfiguredTargetCount": 0, + "otherConfiguredTargetCount": 0, + "outputArtifactCount": 0, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117503 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6Mw==", + "numAnalyses": 13, + "numBuilds": 13 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczoz", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczoz", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6Mw==", - "packageLoadMetrics": [], - "packagesLoaded": 0 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczoz", - "targetsConfigured": 0, - "targetsConfiguredNotIncludingAspects": 0, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 0, - "cpuTimeInMs": 2508, - "executionPhaseTimeInMs": 32, - "id": "VGltaW5nTWV0cmljczoz", - "wallTimeInMs": 307 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczoz", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6Mw==", + "packageLoadMetrics": [], + "packagesLoaded": 0 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczoz", + "targetsConfigured": 0, + "targetsConfiguredNotIncludingAspects": 0, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 0, + "cpuTimeInMs": 2508, + "executionPhaseTimeInMs": 32, + "id": "VGltaW5nTWV0cmljczoz", + "wallTimeInMs": 307 + } }, "problems": [ { @@ -218,20 +192,11 @@ }, "stepLabel": "", "targets": [ - { - "abortReason": "", - "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxMjc=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", - "success": true, - "targetKind": "_copy_file rule", - "testSize": "UNKNOWN" - }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxMjg=", - "label": "//next.js/pages:specs", + "durationInMs": 3, + "id": "VGFyZ2V0UGFpcjoxMjc=", + "label": "//next.js/pages/api:api", "success": false, "targetKind": "ts_project rule", "testSize": "UNKNOWN" @@ -239,106 +204,97 @@ { "abortReason": "", "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxMjk=", - "label": "//react/src:src_typings", + "id": "VGFyZ2V0UGFpcjoxMjg=", + "label": "//next.js/public:public", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": 
"UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxMzA=", - "label": "//next.js/pages:_jest_test_bazel_sequencer", - "success": true, - "targetKind": "_copy_file rule", + "abortReason": "INCOMPLETE", + "durationInMs": 2, + "id": "VGFyZ2V0UGFpcjoxMjk=", + "label": "//next.js:next_js_binary", + "success": false, + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, + { + "abortReason": "INCOMPLETE", + "durationInMs": 2, + "id": "VGFyZ2V0UGFpcjoxMzA=", + "label": "//react/src:lint", + "success": false, + "targetKind": "eslint_test rule", + "testSize": "MEDIUM" + }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMzE=", - "label": "//next.js/public:public", + "label": "//react/src:src_typecheck", "success": true, - "targetKind": "js_library rule", + "targetKind": "filegroup rule", "testSize": "UNKNOWN" }, { - "abortReason": "INCOMPLETE", - "durationInMs": 1, + "abortReason": "", + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMzI=", - "label": "//next.js:eslintrc", - "success": false, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "label": "//react/src:test", + "success": true, + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMzM=", - "label": "//next.js/styles:styles", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { - "abortReason": "INCOMPLETE", + "abortReason": "", "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMzQ=", - "label": "//next.js:jest_config", - "success": false, + "label": "//next.js/styles:styles", + "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxMzU=", - "label": "//next.js:next_js_binary", - "success": false, - "targetKind": "js_binary rule", - "testSize": "UNKNOWN" - }, - { - "abortReason": "INCOMPLETE", - "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxMzY=", "label": "//next.js:tsconfig", "success": false, "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, - { - "abortReason": "INCOMPLETE", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxMzc=", - "label": "//next.js/pages/api:api", - "success": false, - "targetKind": "ts_project rule", - "testSize": "UNKNOWN" - }, { "abortReason": "", "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxMzg=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "id": "VGFyZ2V0UGFpcjoxMzY=", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxMzk=", - "label": "//next.js/pages:pages", + "durationInMs": 3, + "id": "VGFyZ2V0UGFpcjoxMzc=", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": false, - "targetKind": "ts_project rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxNDA=", + "durationInMs": 2, + "id": "VGFyZ2V0UGFpcjoxMzg=", "label": "//packages/one:one_ts", "success": false, "targetKind": "ts_project rule", @@ -346,8 +302,8 @@ }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxNDE=", + "durationInMs": 2, + "id": "VGFyZ2V0UGFpcjoxMzk=", "label": "//next.js:package_json", "success": false, "targetKind": "js_library rule", @@ 
-355,52 +311,70 @@ }, { "abortReason": "", - "durationInMs": 1, - "id": "VGFyZ2V0UGFpcjoxNDI=", - "label": "//react/public:public", + "durationInMs": 0, + "id": "VGFyZ2V0UGFpcjoxNDA=", + "label": "//react/src:assets", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxNDM=", - "label": "//react/src:src", + "durationInMs": 1, + "id": "VGFyZ2V0UGFpcjoxNDE=", + "label": "//react/src:src_transpile", "success": true, - "targetKind": "js_library rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, - "id": "VGFyZ2V0UGFpcjoxNDQ=", - "label": "//react/src:src_typecheck", + "id": "VGFyZ2V0UGFpcjoxNDI=", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "filegroup rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "INCOMPLETE", + "durationInMs": 3, + "id": "VGFyZ2V0UGFpcjoxNDM=", + "label": "//next.js/pages:pages", + "success": false, + "targetKind": "ts_project rule", + "testSize": "UNKNOWN" + }, + { + "abortReason": "INCOMPLETE", + "durationInMs": 3, + "id": "VGFyZ2V0UGFpcjoxNDQ=", + "label": "//next.js/pages:specs", + "success": false, + "targetKind": "ts_project rule", + "testSize": "UNKNOWN" + }, + { + "abortReason": "INCOMPLETE", + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxNDU=", - "label": "//react/src:test", - "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "label": "//next.js:eslintrc", + "success": false, + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxNDY=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//next.js:jest_config", "success": false, - "targetKind": "directory_path rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "INCOMPLETE", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxNDc=", "label": "//packages/one:one", "success": false, @@ -408,30 +382,30 @@ "testSize": "UNKNOWN" }, { - "abortReason": "INCOMPLETE", + "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNDg=", - "label": "//react/src:lint", - "success": false, - "targetKind": "eslint_test rule", - "testSize": "MEDIUM" + "label": "//react/public:public", + "success": true, + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxNDk=", - "label": "//react/src:assets", + "label": "//react/src:src", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxNTA=", - "label": "//react/src:src_transpile", + "label": "//react/src:src_typings", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { diff --git a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-failed-bazel-invocation.golden.json b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-failed-bazel-invocation.golden.json index c377785..85b21c5 100644 --- a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-failed-bazel-invocation.golden.json +++ b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-single-failed-bazel-invocation.golden.json @@ 
-12,204 +12,178 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjI=", "invocationID": "08ae089d-4c85-405c-83fc-dbe9fc1dc942", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 8, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjI=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 8, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjI=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjg=", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjk=", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjEw", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 2, - "id": "TWlzc0RldGFpOjEx", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjEy", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjEz", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 1, - "id": "TWlzc0RldGFpOjE0", - "reason": "NOT_CACHED" - } - ], - "misses": 3, - "saveTimeInMs": 0, - "sizeInBytes": 1548758 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695855664, - "id": "QWN0aW9uRGF0YTo0", - "lastEndedMs": 1714695855666, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 + "count": 0, + "id": "TWlzc0RldGFpOjg=", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695855711, - "id": "QWN0aW9uRGF0YTo1", - "lastEndedMs": 1714695855724, - "mnemonic": "CopyFile", - "systemTime": 1, - "userTime": 1 + "count": 0, + "id": "TWlzc0RldGFpOjk=", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695855725, - "id": "QWN0aW9uRGF0YTo2", - "lastEndedMs": 1714695856359, - "mnemonic": "TsProject", - "systemTime": 0, - "userTime": 0 - } - ], - "actionsCreated": 0, - "actionsCreatedNotIncludingAspects": 0, - "actionsExecuted": 3, - "id": "QWN0aW9uU3VtbWFyeToy", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 3, - "execKind": "", - "id": "UnVubmVyQ291bnQ6NQ==", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjEw", + "reason": "DIFFERENT_DEPS" }, { - "actionsExecuted": 2, - "execKind": "", - "id": "UnVubmVyQ291bnQ6Ng==", - "name": "internal" + "count": 2, + "id": "TWlzc0RldGFpOjEx", + "reason": "DIFFERENT_ENVIRONMENT" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6Nw==", - "name": "local" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjI=", - "outputArtifactsFromActionCache": [ - { - "count": 16, - "id": "RmlsZXNNZXRyaWM6Nw==", - "sizeInBytes": 3733 - } - ], - "outputArtifactsSeen": [ + "count": 0, + "id": "TWlzc0RldGFpOjEy", + "reason": "DIFFERENT_FILES" + }, { - "count": 19, - "id": "RmlsZXNNZXRyaWM6Ng==", - "sizeInBytes": 6359 - } - ], - "sourceArtifactsRead": [ + "count": 0, + "id": "TWlzc0RldGFpOjEz", + "reason": "CORRUPTED_CACHE_ENTRY" + }, { "count": 1, - "id": "RmlsZXNNZXRyaWM6NQ==", - "sizeInBytes": 2491 + "id": "TWlzc0RldGFpOjE0", + "reason": "NOT_CACHED" } ], - "topLevelArtifacts": [ - { - "count": 0, - "id": "RmlsZXNNZXRyaWM6OA==", - "sizeInBytes": 0 - } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 0, - "actionLookupValueCount": 0, - "actionLookupValueCountNotIncludingAspects": 0, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6Mg==", - "inputFileConfiguredTargetCount": 0, - "otherConfiguredTargetCount": 0, - "outputArtifactCount": 0, - "outputFileConfiguredTargetCount": 0, - 
"postInvocationSkyframeNodeCount": 117746 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6Mg==", - "numAnalyses": 10, - "numBuilds": 10 + "misses": 3, + "saveTimeInMs": 0, + "sizeInBytes": 1548758 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695855664, + "id": "QWN0aW9uRGF0YTo0", + "lastEndedMs": 1714695855666, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695855711, + "id": "QWN0aW9uRGF0YTo1", + "lastEndedMs": 1714695855724, + "mnemonic": "CopyFile", + "systemTime": 1, + "userTime": 1 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695855725, + "id": "QWN0aW9uRGF0YTo2", + "lastEndedMs": 1714695856359, + "mnemonic": "TsProject", + "systemTime": 0, + "userTime": 0 + } + ], + "actionsCreated": 0, + "actionsCreatedNotIncludingAspects": 0, + "actionsExecuted": 3, + "id": "QWN0aW9uU3VtbWFyeToy", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 3, + "execKind": "", + "id": "UnVubmVyQ291bnQ6NQ==", + "name": "total" + }, + { + "actionsExecuted": 2, + "execKind": "", + "id": "UnVubmVyQ291bnQ6Ng==", + "name": "internal" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6Nw==", + "name": "local" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjI=", + "outputArtifactsFromActionCache": { + "count": 16, + "id": "RmlsZXNNZXRyaWM6Nw==", + "sizeInBytes": 3733 + }, + "outputArtifactsSeen": { + "count": 19, + "id": "RmlsZXNNZXRyaWM6Ng==", + "sizeInBytes": 6359 + }, + "sourceArtifactsRead": { + "count": 1, + "id": "RmlsZXNNZXRyaWM6NQ==", + "sizeInBytes": 2491 + }, + "topLevelArtifacts": { + "count": 0, + "id": "RmlsZXNNZXRyaWM6OA==", + "sizeInBytes": 0 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 0, + "actionLookupValueCount": 0, + "actionLookupValueCountNotIncludingAspects": 0, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6Mg==", + "inputFileConfiguredTargetCount": 0, + "otherConfiguredTargetCount": 0, + "outputArtifactCount": 0, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117746 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6Mg==", + "numAnalyses": 10, + "numBuilds": 10 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczoy", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczoy", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6Mg==", - "packageLoadMetrics": [], - "packagesLoaded": 0 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczoy", - "targetsConfigured": 0, - "targetsConfiguredNotIncludingAspects": 0, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 55, - "cpuTimeInMs": 2792, - "executionPhaseTimeInMs": 702, - "id": "VGltaW5nTWV0cmljczoy", - "wallTimeInMs": 960 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczoy", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6Mg==", + "packageLoadMetrics": [], + "packagesLoaded": 0 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczoy", + "targetsConfigured": 0, + "targetsConfiguredNotIncludingAspects": 0, + 
"targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 55, + "cpuTimeInMs": 2792, + "executionPhaseTimeInMs": 702, + "id": "VGltaW5nTWV0cmljczoy", + "wallTimeInMs": 960 + } }, "problems": [ { @@ -252,46 +226,46 @@ "stepLabel": "", "targets": [ { - "abortReason": "", - "durationInMs": 1, + "abortReason": "UNKNOWN", + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjo2NA==", - "label": "//react/src:test_lib_typecheck", - "success": true, - "targetKind": "filegroup rule", + "label": "//next.js:next_dev", + "success": false, + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo2NQ==", - "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", + "label": "//react/src:assets", "success": true, - "targetKind": "directory_path rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo2Ng==", - "label": "//vue/libraries/simple:types", + "label": "//react/src:src_transpile", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjo2Nw==", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//react-webpack:_dev_server_webpack_binary", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjo2OA==", - "label": "//react:build_smoke_test", + "label": "//react-webpack:build_smoke_test", "success": true, "targetKind": "js_test rule", "testSize": "MEDIUM" @@ -300,43 +274,43 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo2OQ==", - "label": "//react-webpack:dev_server", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3MA==", - "label": "//react/src:src_typecheck", + "label": "//next.js:jest_config", "success": true, - "targetKind": "filegroup rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjo3MQ==", - "label": "//react-webpack:bundle", - "success": true, - "targetKind": "_webpack_bundle rule", + "label": "//next.js:next", + "success": false, + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3Mg==", - "label": "//react/src:test_lib", + "label": "//next.js:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjo3Mw==", - "label": "//next.js:package_json", + "label": "//react:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -345,52 +319,52 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3NA==", - "label": "//react-webpack/src:transpile", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": 
"VGFyZ2V0UGFpcjo3NQ==", - "label": "//vue:vite", + "label": "//react/src:test_lib_typecheck", "success": true, - "targetKind": "js_binary rule", + "targetKind": "filegroup rule", "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3Ng==", - "label": "//next.js:build_smoke_test", - "success": false, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "label": "//react:preview", + "success": true, + "targetKind": "js_binary rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjo3Nw==", - "label": "//react/src:src", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3OA==", - "label": "//vue/libraries/simple:simple", + "label": "//packages/one:one", "success": true, "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo3OQ==", - "label": "//vue/libraries/simple:vite.config", + "label": "//react/public:public", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -399,406 +373,406 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4MA==", - "label": "//next.js/public:public", + "label": "//react:vite.config", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4MQ==", - "label": "//next.js:next_start", - "success": false, - "targetKind": "_js_run_devserver rule", + "label": "//react:vite", + "success": true, + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4Mg==", - "label": "//react/src:src_typings", + "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", "success": true, - "targetKind": "ts_project rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4Mw==", - "label": "//react/src:test_lib_typings", + "label": "//react-webpack:bundle", "success": true, - "targetKind": "ts_project rule", + "targetKind": "_webpack_bundle rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4NA==", - "label": "//react:package_json", + "label": "//vue/libraries/simple:types", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", + "abortReason": "", "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo4NQ==", - "label": "//next.js:next", - "success": false, - "targetKind": "_run_binary rule", - "testSize": "UNKNOWN" + "label": "//vue:type-check", + "success": true, + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4Ng==", - "label": "//react:build", + "label": "//vue:vite", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4Nw==", - "label": "//packages/one:one", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "_npm_package rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" 
}, { - "abortReason": "", - "durationInMs": 1, + "abortReason": "UNKNOWN", + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo4OA==", - "label": "//react/src:test_lib_typecheck_test", - "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "label": "//next.js/pages:jest_test", + "success": false, + "targetKind": "jest_test rule", + "testSize": "MEDIUM" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo4OQ==", - "label": "//react-webpack:_dev_server_webpack_binary", - "success": true, - "targetKind": "js_binary rule", - "testSize": "UNKNOWN" + "label": "//next.js:build_test", + "success": false, + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo5MA==", - "label": "//react/src:test_lib_transpile", + "label": "//react/src:src", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo5MQ==", - "label": "//next.js:jest_config", + "label": "//vue:build", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo5Mg==", - "label": "//react/src:assets", - "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "label": "//next.js:build_smoke_test", + "success": false, + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjo5Mw==", - "label": "//react:vite.config", - "success": true, - "targetKind": "js_library rule", + "label": "//next.js:next_start", + "success": false, + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo5NA==", - "label": "//vue/src:src", + "label": "//react:build", "success": true, - "targetKind": "_copy_to_bin rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo5NQ==", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//packages/one:one_ts", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjo5Ng==", - "label": "//next.js:eslintrc", + "label": "//react:build_smoke_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo5Nw==", - "label": "//react/public:public", + "label": "//react:write_swcrc", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo5OA==", - "label": "//react:preview", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo5OQ==", - "label": "//react-webpack:build_smoke_test", - "success": true, - "targetKind": "js_test rule", - 
"testSize": "MEDIUM" + "label": "//next.js/pages:specs", + "success": false, + "targetKind": "ts_project rule", + "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMDA=", - "label": "//next.js:build_test", - "success": false, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "label": "//next.js:tsconfig", + "success": true, + "targetKind": "ts_config rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMDE=", - "label": "//react/src:test", + "label": "//react/src:test_lib_typings", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "ts_project rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMDI=", - "label": "//react:start", + "label": "//vue/libraries/simple:vite.config", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoxMDM=", - "label": "//react-webpack:_bundle_webpack_binary", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "js_binary rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxMDQ=", - "label": "//react/src:lint", + "label": "//react:start", "success": true, - "targetKind": "eslint_test rule", - "testSize": "MEDIUM" + "targetKind": "_js_run_devserver rule", + "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMDU=", - "label": "//next.js/pages:specs", - "success": false, - "targetKind": "ts_project rule", + "label": "//vue/libraries/simple:simple", + "success": true, + "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxMDY=", - "label": "//react:write_swcrc", + "label": "//vue:build_test", "success": true, - "targetKind": "_run_binary rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoxMDc=", - "label": "//react-webpack:_bundle_webpack_binary_entrypoint", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "directory_path rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMDg=", - "label": "//vue/libraries/simple:build", + "label": "//vue/src:src", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_copy_to_bin rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMDk=", - "label": "//vue:build", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxMTA=", - "label": "//vue:build_test", + "label": "//react/src:src_typecheck_test", "success": true, "targetKind": "_empty_test rule", "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjoxMTE=", - "label": 
"//next.js/styles:styles", + "label": "//react/src:test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMTI=", - "label": "//next.js/pages:jest_test", - "success": false, - "targetKind": "jest_test rule", - "testSize": "MEDIUM" + "label": "//react/src:test_lib_transpile", + "success": true, + "targetKind": "swc_compile rule", + "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMTM=", - "label": "//next.js/pages:pages", - "success": false, - "targetKind": "ts_project rule", + "label": "//react-webpack/src:transpile", + "success": true, + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { - "abortReason": "UNKNOWN", - "durationInMs": 2, + "abortReason": "", + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxMTQ=", - "label": "//next.js:next_dev", - "success": false, - "targetKind": "_js_run_devserver rule", + "label": "//react-webpack:_bundle_webpack_binary", + "success": true, + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMTU=", - "label": "//packages/one:one_ts", + "label": "//react-webpack:_bundle_webpack_binary_entrypoint", "success": true, - "targetKind": "ts_project rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMTY=", - "label": "//react/src:src_transpile", + "label": "//react/src:src_typecheck", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "filegroup rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjoxMTc=", - "label": "//react/src:src_typecheck_test", + "label": "//react/src:lint", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "eslint_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMTg=", - "label": "//:eslint", + "label": "//react/src:src_typings", "success": true, - "targetKind": "js_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjoxMTk=", - "label": "//next.js/pages/api:api", + "label": "//react/src:test_lib", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxMjA=", - "label": "//react:vite", + "label": "//react-webpack:dev_server", "success": true, - "targetKind": "js_binary rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMjE=", - "label": "//vue:type-check", + "label": "//vue/libraries/simple:build", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_run_binary rule", + "testSize": "UNKNOWN" }, { - "abortReason": "", - "durationInMs": 0, + "abortReason": "UNKNOWN", + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjoxMjI=", - "label": "//next.js:next_js_binary", - "success": true, - "targetKind": "js_binary rule", + "label": "//next.js/pages:pages", + "success": false, + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + 
"durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMjM=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//next.js/public:public", "success": true, - "targetKind": "directory_path rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjoxMjQ=", - "label": "//next.js:tsconfig", + "label": "//react/src:test_lib_typecheck_test", "success": true, - "targetKind": "ts_config rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", @@ -811,11 +785,11 @@ }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMjY=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//:eslint", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" } ], @@ -823,72 +797,72 @@ { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1715000, + "durationMs": 191000, "id": "VGVzdENvbGxlY3Rpb246MTI=", - "label": "//react/src:test", + "label": "//react:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 170000, + "durationMs": 68000, "id": "VGVzdENvbGxlY3Rpb246MTM=", - "label": "//react-webpack:build_smoke_test", + "label": "//vue:build_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 85000, + "durationMs": 1180000, "id": "VGVzdENvbGxlY3Rpb246MTQ=", - "label": "//react/src:src_typecheck_test", + "label": "//vue:type-check", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 95000, + "durationMs": 112000, "id": "VGVzdENvbGxlY3Rpb246MTU=", - "label": "//react/src:test_lib_typecheck_test", + "label": "//react/src:lint", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 191000, + "durationMs": 1715000, "id": "VGVzdENvbGxlY3Rpb246MTY=", - "label": "//react:build_smoke_test", + "label": "//react/src:test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 68000, + "durationMs": 170000, "id": "VGVzdENvbGxlY3Rpb246MTc=", - "label": "//vue:build_test", + "label": "//react-webpack:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1180000, + "durationMs": 85000, "id": "VGVzdENvbGxlY3Rpb246MTg=", - "label": "//vue:type-check", + "label": "//react/src:src_typecheck_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 112000, + "durationMs": 95000, "id": "VGVzdENvbGxlY3Rpb246MTk=", - "label": "//react/src:lint", + "label": "//react/src:test_lib_typecheck_test", "overallStatus": "PASSED", "strategy": "" } diff --git a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-build.golden.json b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-build.golden.json index 57e8f52..a221e11 100644 --- a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-build.golden.json +++ b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-build.golden.json @@ -12,210 +12,184 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjE=", "invocationID": "fd03240f-697e-4b64-95bc-888e27445bf9", 
"metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 19, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjE=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 19, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjE=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjE=", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjI=", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjM=", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 1, - "id": "TWlzc0RldGFpOjQ=", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjU=", - "reason": "DIFFERENT_FILES" - }, - { - "count": 1, - "id": "TWlzc0RldGFpOjY=", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 1, - "id": "TWlzc0RldGFpOjc=", - "reason": "NOT_CACHED" - } - ], - "misses": 3, - "saveTimeInMs": 0, - "sizeInBytes": 1549126 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695868925, - "id": "QWN0aW9uRGF0YTox", - "lastEndedMs": 1714695868927, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 + "count": 0, + "id": "TWlzc0RldGFpOjE=", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695868965, - "id": "QWN0aW9uRGF0YToy", - "lastEndedMs": 1714695868981, - "mnemonic": "CopyFile", - "systemTime": 2, - "userTime": 1 + "count": 0, + "id": "TWlzc0RldGFpOjI=", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695868981, - "id": "QWN0aW9uRGF0YToz", - "lastEndedMs": 1714695869600, - "mnemonic": "TsProject", - "systemTime": 55, - "userTime": 1009 - } - ], - "actionsCreated": 0, - "actionsCreatedNotIncludingAspects": 0, - "actionsExecuted": 3, - "id": "QWN0aW9uU3VtbWFyeTox", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 3, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MQ==", - "name": "total" + "count": 0, + "id": "TWlzc0RldGFpOjM=", + "reason": "DIFFERENT_DEPS" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6Mg==", - "name": "internal" + "count": 1, + "id": "TWlzc0RldGFpOjQ=", + "reason": "DIFFERENT_ENVIRONMENT" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6Mw==", - "name": "darwin-sandbox" + "count": 0, + "id": "TWlzc0RldGFpOjU=", + "reason": "DIFFERENT_FILES" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6NA==", - "name": "local" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjE=", - "outputArtifactsFromActionCache": [ - { - "count": 78, - "id": "RmlsZXNNZXRyaWM6Mw==", - "sizeInBytes": 8553573 - } - ], - "outputArtifactsSeen": [ - { - "count": 85, - "id": "RmlsZXNNZXRyaWM6Mg==", - "sizeInBytes": 8560829 - } - ], - "sourceArtifactsRead": [ + "count": 1, + "id": "TWlzc0RldGFpOjY=", + "reason": "CORRUPTED_CACHE_ENTRY" + }, { "count": 1, - "id": "RmlsZXNNZXRyaWM6MQ==", - "sizeInBytes": 2521 + "id": "TWlzc0RldGFpOjc=", + "reason": "NOT_CACHED" } ], - "topLevelArtifacts": [ - { - "count": 40231, - "id": "RmlsZXNNZXRyaWM6NA==", - "sizeInBytes": 5731462997 - } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 0, - "actionLookupValueCount": 0, - "actionLookupValueCountNotIncludingAspects": 0, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6MQ==", - "inputFileConfiguredTargetCount": 0, - "otherConfiguredTargetCount": 0, - "outputArtifactCount": 0, - "outputFileConfiguredTargetCount": 0, 
- "postInvocationSkyframeNodeCount": 117836 - } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6MQ==", - "numAnalyses": 11, - "numBuilds": 11 + "misses": 3, + "saveTimeInMs": 0, + "sizeInBytes": 1549126 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695868925, + "id": "QWN0aW9uRGF0YTox", + "lastEndedMs": 1714695868927, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695868965, + "id": "QWN0aW9uRGF0YToy", + "lastEndedMs": 1714695868981, + "mnemonic": "CopyFile", + "systemTime": 2, + "userTime": 1 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695868981, + "id": "QWN0aW9uRGF0YToz", + "lastEndedMs": 1714695869600, + "mnemonic": "TsProject", + "systemTime": 55, + "userTime": 1009 + } + ], + "actionsCreated": 0, + "actionsCreatedNotIncludingAspects": 0, + "actionsExecuted": 3, + "id": "QWN0aW9uU3VtbWFyeTox", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 3, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MQ==", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6Mg==", + "name": "internal" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6Mw==", + "name": "darwin-sandbox" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6NA==", + "name": "local" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjE=", + "outputArtifactsFromActionCache": { + "count": 78, + "id": "RmlsZXNNZXRyaWM6Mw==", + "sizeInBytes": 8553573 + }, + "outputArtifactsSeen": { + "count": 85, + "id": "RmlsZXNNZXRyaWM6Mg==", + "sizeInBytes": 8560829 + }, + "sourceArtifactsRead": { + "count": 1, + "id": "RmlsZXNNZXRyaWM6MQ==", + "sizeInBytes": 2521 + }, + "topLevelArtifacts": { + "count": 40231, + "id": "RmlsZXNNZXRyaWM6NA==", + "sizeInBytes": 5731462997 } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 0, + "actionLookupValueCount": 0, + "actionLookupValueCountNotIncludingAspects": 0, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6MQ==", + "inputFileConfiguredTargetCount": 0, + "otherConfiguredTargetCount": 0, + "outputArtifactCount": 0, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117836 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6MQ==", + "numAnalyses": 11, + "numBuilds": 11 + }, + "dynamicExecutionMetrics": null, "id": "TWV0cmljczox", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczox", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6MQ==", - "packageLoadMetrics": [], - "packagesLoaded": 0 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczox", - "targetsConfigured": 0, - "targetsConfiguredNotIncludingAspects": 0, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 51, - "cpuTimeInMs": 2619, - "executionPhaseTimeInMs": 680, - "id": "VGltaW5nTWV0cmljczox", - "wallTimeInMs": 954 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczox", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6MQ==", + "packageLoadMetrics": [], + 
"packagesLoaded": 0 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczox", + "targetsConfigured": 0, + "targetsConfiguredNotIncludingAspects": 0, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 51, + "cpuTimeInMs": 2619, + "executionPhaseTimeInMs": 680, + "id": "VGltaW5nTWV0cmljczox", + "wallTimeInMs": 954 + } }, "problems": [], "profile": null, @@ -245,540 +219,540 @@ "targets": [ { "abortReason": "", - "durationInMs": 0, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjox", - "label": "//vue/libraries/simple:types", + "label": "//react/src:src_typecheck_test", "success": true, - "targetKind": "_run_binary rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoy", - "label": "//next.js/pages/api:api", + "label": "//react:build", "success": true, - "targetKind": "ts_project rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 12, "id": "VGFyZ2V0UGFpcjoz", - "label": "//packages/one:one", + "label": "//next.js:next_start", "success": true, - "targetKind": "_npm_package rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo0", - "label": "//react/src:test_lib_transpile", + "label": "//react:preview", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo1", - "label": "//react-webpack:_bundle_webpack_binary", + "label": "//vue:vite", "success": true, "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjo2", - "label": "//react/src:lint", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "eslint_test rule", - "testSize": "MEDIUM" + "targetKind": "directory_path rule", + "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo3", - "label": "//react:tsconfig", + "label": "//react/src:test_lib_transpile", "success": true, - "targetKind": "ts_config rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo4", - "label": "//react/src:src", + "label": "//react-webpack:_bundle_webpack_binary_entrypoint", "success": true, - "targetKind": "js_library rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo5", - "label": "//react/src:src_typecheck", + "label": "//next.js:jest_config", "success": true, - "targetKind": "filegroup rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxMA==", - "label": "//react/src:test_lib_typecheck", + "label": "//packages/one:one", "success": true, - "targetKind": "filegroup rule", + "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxMQ==", - "label": "//react-webpack/src:transpile", + "label": "//react/src:assets", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + 
"durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxMg==", - "label": "//vue:type-check", + "label": "//react/src:test_lib", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxMw==", - "label": "//vue:build_test", + "label": "//react:package_json", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNA==", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//react-webpack/src:transpile", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjoxNQ==", - "label": "//next.js:build_test", + "label": "//react-webpack:build_smoke_test", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNg==", - "label": "//react:preview", + "label": "//next.js/public:public", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxNw==", - "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", + "label": "//react/src:lint", "success": true, - "targetKind": "directory_path rule", - "testSize": "UNKNOWN" + "targetKind": "eslint_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxOA==", - "label": "//react:build_smoke_test", + "label": "//react/src:src_typecheck", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "filegroup rule", + "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxOQ==", - "label": "//react:write_swcrc", + "label": "//react:vite", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMA==", - "label": "//react:package_json", + "label": "//vue/libraries/simple:build", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMQ==", - "label": "//react:vite.config", + "label": "//react/src:src_transpile", "success": true, - "targetKind": "js_library rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMg==", - "label": "//next.js:next_dev", + "label": "//vue/libraries/simple:types", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMw==", - "label": "//react/src:src_typecheck_test", + "label": "//vue/src:src", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "_copy_to_bin rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyNA==", - "label": 
"//react-webpack:_bundle_webpack_binary_entrypoint", + "label": "//vue:build_test", "success": true, - "targetKind": "directory_path rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyNQ==", - "label": "//react-webpack:bundle", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "_webpack_bundle rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoyNg==", - "label": "//react/src:src_transpile", + "label": "//next.js:next_dev", "success": true, - "targetKind": "swc_compile rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyNw==", - "label": "//react/src:test", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "js_binary rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyOA==", - "label": "//react-webpack:dev_server", + "label": "//react:vite.config", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjoyOQ==", - "label": "//vue/libraries/simple:build", + "label": "//next.js/pages:pages", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjozMA==", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//next.js:tsconfig", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 3, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjozMQ==", - "label": "//next.js/pages:jest_test", + "label": "//react/src:src", "success": true, - "targetKind": "jest_test rule", - "testSize": "MEDIUM" + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjozMg==", - "label": "//next.js:build_smoke_test", + "label": "//react-webpack:_bundle_webpack_binary", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "js_binary rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjozMw==", - "label": "//next.js:eslintrc", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjozNA==", - "label": "//react/public:public", + "label": "//next.js:build_smoke_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjozNQ==", - "label": "//react/src:test_lib", + "label": "//next.js:build_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjozNg==", - "label": 
"//react-webpack:_dev_server_webpack_binary", + "label": "//react/src:src_typings", "success": true, - "targetKind": "js_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjozNw==", - "label": "//next.js/pages:specs", + "label": "//react:build_smoke_test", "success": true, - "targetKind": "ts_project rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 3, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjozOA==", - "label": "//next.js:next_start", + "label": "//react-webpack:_dev_server_webpack_binary", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjozOQ==", - "label": "//react:build", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo0MA==", - "label": "//react:vite", + "label": "//react/public:public", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo0MQ==", - "label": "//vue/libraries/simple:simple", + "label": "//react:write_swcrc", "success": true, - "targetKind": "_npm_package rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo0Mg==", - "label": "//vue:vite", + "label": "//vue/libraries/simple:vite.config", "success": true, - "targetKind": "js_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 13, "id": "VGFyZ2V0UGFpcjo0Mw==", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//next.js/pages:jest_test", "success": true, - "targetKind": "_copy_file rule", - "testSize": "UNKNOWN" + "targetKind": "jest_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjo0NA==", - "label": "//next.js/public:public", + "label": "//react/src:test_lib_typecheck_test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjo0NQ==", - "label": "//next.js:next_js_binary", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "js_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo0Ng==", - "label": "//react/src:src_typings", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "ts_project rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 3, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo0Nw==", - "label": "//next.js:next", + "label": "//next.js/pages:specs", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjo0OA==", - "label": "//react/src:assets", + "label": "//react/src:test", "success": true, - "targetKind": "js_library rule", - "testSize": 
"UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo0OQ==", - "label": "//react:start", + "label": "//react/src:test_lib_typecheck", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "filegroup rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo1MA==", - "label": "//:eslint", + "label": "//react/src:test_lib_typings", "success": true, - "targetKind": "js_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo1MQ==", - "label": "//next.js:tsconfig", + "label": "//vue:build", "success": true, - "targetKind": "ts_config rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo1Mg==", - "label": "//react/src:test_lib_typings", + "label": "//packages/one:one_ts", "success": true, "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjo1Mw==", - "label": "//vue:build", + "label": "//react:start", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo1NA==", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//vue:type-check", "success": true, - "targetKind": "directory_path rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo1NQ==", - "label": "//next.js/pages:pages", + "label": "//react-webpack:bundle", "success": true, - "targetKind": "ts_project rule", + "targetKind": "_webpack_bundle rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjo1Ng==", - "label": "//next.js/styles:styles", + "label": "//react-webpack:dev_server", "success": true, - "targetKind": "js_library rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo1Nw==", - "label": "//next.js:package_json", + "label": "//vue/libraries/simple:simple", "success": true, - "targetKind": "js_library rule", + "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjo1OA==", - "label": "//vue/libraries/simple:vite.config", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjo1OQ==", - "label": "//vue/src:src", + "label": "//next.js:next", "success": true, - "targetKind": "_copy_to_bin rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo2MA==", - "label": "//next.js:jest_config", + "label": "//next.js:package_json", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" @@ -787,127 +761,127 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo2MQ==", - "label": "//packages/one:one_ts", + "label": "//react:tsconfig", "success": true, - "targetKind": 
"ts_project rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjo2Mg==", - "label": "//react/src:test_lib_typecheck_test", + "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "directory_path rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjo2Mw==", - "label": "//react-webpack:build_smoke_test", + "label": "//:eslint", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "js_binary rule", + "testSize": "UNKNOWN" } ], "testCollection": [ { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1134000, + "durationMs": 85000, "id": "VGVzdENvbGxlY3Rpb246MQ==", - "label": "//next.js/pages:jest_test", + "label": "//react/src:src_typecheck_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 85000, + "durationMs": 95000, "id": "VGVzdENvbGxlY3Rpb246Mg==", - "label": "//react/src:src_typecheck_test", + "label": "//react/src:test_lib_typecheck_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 95000, + "durationMs": 191000, "id": "VGVzdENvbGxlY3Rpb246Mw==", - "label": "//react/src:test_lib_typecheck_test", + "label": "//react:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 191000, + "durationMs": 89000, "id": "VGVzdENvbGxlY3Rpb246NA==", - "label": "//react:build_smoke_test", + "label": "//next.js:build_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 170000, + "durationMs": 1134000, "id": "VGVzdENvbGxlY3Rpb246NQ==", - "label": "//react-webpack:build_smoke_test", + "label": "//next.js/pages:jest_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 89000, + "durationMs": 354000, "id": "VGVzdENvbGxlY3Rpb246Ng==", - "label": "//next.js:build_test", + "label": "//next.js:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 354000, + "durationMs": 68000, "id": "VGVzdENvbGxlY3Rpb246Nw==", - "label": "//next.js:build_smoke_test", + "label": "//vue:build_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 68000, + "durationMs": 1180000, "id": "VGVzdENvbGxlY3Rpb246OA==", - "label": "//vue:build_test", + "label": "//vue:type-check", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1180000, + "durationMs": 112000, "id": "VGVzdENvbGxlY3Rpb246OQ==", - "label": "//vue:type-check", + "label": "//react/src:lint", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 112000, + "durationMs": 1715000, "id": "VGVzdENvbGxlY3Rpb246MTA=", - "label": "//react/src:lint", + "label": "//react/src:test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1715000, + "durationMs": 170000, "id": "VGVzdENvbGxlY3Rpb246MTE=", - "label": "//react/src:test", + "label": "//react-webpack:build_smoke_test", "overallStatus": "PASSED", "strategy": "" } diff --git 
a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-test.golden.json b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-test.golden.json index b3d38d9..b0d63df 100644 --- a/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-test.golden.json +++ b/internal/graphql/testdata/snapshots/LoadFullBazelInvocationDetails/get-successful-bazel-test.golden.json @@ -12,220 +12,194 @@ "id": "QmF6ZWxJbnZvY2F0aW9uOjQ=", "invocationID": "10a37e86-6e2b-4adb-83dd-c2906f42bdd6", "metrics": { - "actionSummary": [ - { - "actionCacheStatistics": [ + "actionSummary": { + "actionCacheStatistics": { + "hits": 10, + "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjQ=", + "loadTimeInMs": 0, + "missDetails": [ { - "hits": 10, - "id": "QWN0aW9uQ2FjaGVTdGF0aXN0aWNzOjQ=", - "loadTimeInMs": 0, - "missDetails": [ - { - "count": 0, - "id": "TWlzc0RldGFpOjIy", - "reason": "UNKNOWN" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjIz", - "reason": "DIFFERENT_ACTION_KEY" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjI0", - "reason": "DIFFERENT_DEPS" - }, - { - "count": 3, - "id": "TWlzc0RldGFpOjI1", - "reason": "DIFFERENT_ENVIRONMENT" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjI2", - "reason": "DIFFERENT_FILES" - }, - { - "count": 0, - "id": "TWlzc0RldGFpOjI3", - "reason": "CORRUPTED_CACHE_ENTRY" - }, - { - "count": 2, - "id": "TWlzc0RldGFpOjI4", - "reason": "NOT_CACHED" - } - ], - "misses": 5, - "saveTimeInMs": 0, - "sizeInBytes": 1548500 - } - ], - "actionData": [ - { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695819112, - "id": "QWN0aW9uRGF0YTo4", - "lastEndedMs": 1714695820440, - "mnemonic": "TestRunner", - "systemTime": 356, - "userTime": 830 + "count": 0, + "id": "TWlzc0RldGFpOjIy", + "reason": "UNKNOWN" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695818173, - "id": "QWN0aW9uRGF0YTo5", - "lastEndedMs": 1714695818174, - "mnemonic": "BazelWorkspaceStatusAction", - "systemTime": 0, - "userTime": 0 + "count": 0, + "id": "TWlzc0RldGFpOjIz", + "reason": "DIFFERENT_ACTION_KEY" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695818220, - "id": "QWN0aW9uRGF0YToxMA==", - "lastEndedMs": 1714695818246, - "mnemonic": "CopyFile", - "systemTime": 3, - "userTime": 1 + "count": 0, + "id": "TWlzc0RldGFpOjI0", + "reason": "DIFFERENT_DEPS" }, { - "actionsCreated": 0, - "actionsExecuted": 1, - "firstStartedMs": 1714695818246, - "id": "QWN0aW9uRGF0YToxMQ==", - "lastEndedMs": 1714695819108, - "mnemonic": "TsProject", - "systemTime": 67, - "userTime": 978 - } - ], - "actionsCreated": 0, - "actionsCreatedNotIncludingAspects": 0, - "actionsExecuted": 4, - "id": "QWN0aW9uU3VtbWFyeTo0", - "remoteCacheHits": 0, - "runnerCount": [ - { - "actionsExecuted": 4, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTA=", - "name": "total" + "count": 3, + "id": "TWlzc0RldGFpOjI1", + "reason": "DIFFERENT_ENVIRONMENT" }, { - "actionsExecuted": 1, - "execKind": "", - "id": "UnVubmVyQ291bnQ6MTE=", - "name": "internal" + "count": 0, + "id": "TWlzc0RldGFpOjI2", + "reason": "DIFFERENT_FILES" }, { - "actionsExecuted": 2, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6MTI=", - "name": "darwin-sandbox" + "count": 0, + "id": "TWlzc0RldGFpOjI3", + "reason": "CORRUPTED_CACHE_ENTRY" }, { - "actionsExecuted": 1, - "execKind": "Local", - "id": "UnVubmVyQ291bnQ6MTM=", - "name": "local" - } - ] - } - ], - "artifactMetrics": [ - { - "id": "QXJ0aWZhY3RNZXRyaWNzOjQ=", - 
"outputArtifactsFromActionCache": [ - { - "count": 21, - "id": "RmlsZXNNZXRyaWM6MTU=", - "sizeInBytes": 4565 + "count": 2, + "id": "TWlzc0RldGFpOjI4", + "reason": "NOT_CACHED" } ], - "outputArtifactsSeen": [ - { - "count": 28, - "id": "RmlsZXNNZXRyaWM6MTQ=", - "sizeInBytes": 6768 - } - ], - "sourceArtifactsRead": [ - { - "count": 1, - "id": "RmlsZXNNZXRyaWM6MTM=", - "sizeInBytes": 466 - } - ], - "topLevelArtifacts": [ - { - "count": 21393, - "id": "RmlsZXNNZXRyaWM6MTY=", - "sizeInBytes": 2216329047 - } - ] - } - ], - "buildGraphMetrics": [ - { - "actionCount": 0, - "actionLookupValueCount": 0, - "actionLookupValueCountNotIncludingAspects": 0, - "id": "QnVpbGRHcmFwaE1ldHJpY3M6NA==", - "inputFileConfiguredTargetCount": 0, - "otherConfiguredTargetCount": 0, - "outputArtifactCount": 0, - "outputFileConfiguredTargetCount": 0, - "postInvocationSkyframeNodeCount": 117836 + "misses": 5, + "saveTimeInMs": 0, + "sizeInBytes": 1548500 + }, + "actionData": [ + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695819112, + "id": "QWN0aW9uRGF0YTo4", + "lastEndedMs": 1714695820440, + "mnemonic": "TestRunner", + "systemTime": 356, + "userTime": 830 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695818173, + "id": "QWN0aW9uRGF0YTo5", + "lastEndedMs": 1714695818174, + "mnemonic": "BazelWorkspaceStatusAction", + "systemTime": 0, + "userTime": 0 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695818220, + "id": "QWN0aW9uRGF0YToxMA==", + "lastEndedMs": 1714695818246, + "mnemonic": "CopyFile", + "systemTime": 3, + "userTime": 1 + }, + { + "actionsCreated": 0, + "actionsExecuted": 1, + "firstStartedMs": 1714695818246, + "id": "QWN0aW9uRGF0YToxMQ==", + "lastEndedMs": 1714695819108, + "mnemonic": "TsProject", + "systemTime": 67, + "userTime": 978 + } + ], + "actionsCreated": 0, + "actionsCreatedNotIncludingAspects": 0, + "actionsExecuted": 4, + "id": "QWN0aW9uU3VtbWFyeTo0", + "remoteCacheHits": 0, + "runnerCount": [ + { + "actionsExecuted": 4, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTA=", + "name": "total" + }, + { + "actionsExecuted": 1, + "execKind": "", + "id": "UnVubmVyQ291bnQ6MTE=", + "name": "internal" + }, + { + "actionsExecuted": 2, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6MTI=", + "name": "darwin-sandbox" + }, + { + "actionsExecuted": 1, + "execKind": "Local", + "id": "UnVubmVyQ291bnQ6MTM=", + "name": "local" + } + ] + }, + "artifactMetrics": { + "id": "QXJ0aWZhY3RNZXRyaWNzOjQ=", + "outputArtifactsFromActionCache": { + "count": 21, + "id": "RmlsZXNNZXRyaWM6MTU=", + "sizeInBytes": 4565 + }, + "outputArtifactsSeen": { + "count": 28, + "id": "RmlsZXNNZXRyaWM6MTQ=", + "sizeInBytes": 6768 + }, + "sourceArtifactsRead": { + "count": 1, + "id": "RmlsZXNNZXRyaWM6MTM=", + "sizeInBytes": 466 + }, + "topLevelArtifacts": { + "count": 21393, + "id": "RmlsZXNNZXRyaWM6MTY=", + "sizeInBytes": 2216329047 } - ], - "cumulativeMetrics": [ - { - "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NA==", - "numAnalyses": 9, - "numBuilds": 9 - } - ], - "dynamicExecutionMetrics": [], + }, + "buildGraphMetrics": { + "actionCount": 0, + "actionLookupValueCount": 0, + "actionLookupValueCountNotIncludingAspects": 0, + "id": "QnVpbGRHcmFwaE1ldHJpY3M6NA==", + "inputFileConfiguredTargetCount": 0, + "otherConfiguredTargetCount": 0, + "outputArtifactCount": 0, + "outputFileConfiguredTargetCount": 0, + "postInvocationSkyframeNodeCount": 117836 + }, + "cumulativeMetrics": { + "id": "Q3VtdWxhdGl2ZU1ldHJpY3M6NA==", + "numAnalyses": 9, + "numBuilds": 9 + }, + 
"dynamicExecutionMetrics": null, "id": "TWV0cmljczo0", - "memoryMetrics": [ - { - "garbageMetrics": [], - "id": "TWVtb3J5TWV0cmljczo0", - "peakPostGcHeapSize": 0, - "peakPostGcTenuredSpaceHeapSize": 0, - "usedHeapSizePostBuild": 0 - } - ], - "networkMetrics": [], - "packageMetrics": [ - { - "id": "UGFja2FnZU1ldHJpY3M6NA==", - "packageLoadMetrics": [], - "packagesLoaded": 0 - } - ], - "targetMetrics": [ - { - "id": "VGFyZ2V0TWV0cmljczo0", - "targetsConfigured": 0, - "targetsConfiguredNotIncludingAspects": 0, - "targetsLoaded": 0 - } - ], - "timingMetrics": [ - { - "actionsExecutionStartInMs": 0, - "analysisPhaseTimeInMs": 56, - "cpuTimeInMs": 3495, - "executionPhaseTimeInMs": 2268, - "id": "VGltaW5nTWV0cmljczo0", - "wallTimeInMs": 2565 - } - ] + "memoryMetrics": { + "garbageMetrics": [], + "id": "TWVtb3J5TWV0cmljczo0", + "peakPostGcHeapSize": 0, + "peakPostGcTenuredSpaceHeapSize": 0, + "usedHeapSizePostBuild": 0 + }, + "networkMetrics": null, + "packageMetrics": { + "id": "UGFja2FnZU1ldHJpY3M6NA==", + "packageLoadMetrics": [], + "packagesLoaded": 0 + }, + "targetMetrics": { + "id": "VGFyZ2V0TWV0cmljczo0", + "targetsConfigured": 0, + "targetsConfiguredNotIncludingAspects": 0, + "targetsLoaded": 0 + }, + "timingMetrics": { + "actionsExecutionStartInMs": 0, + "analysisPhaseTimeInMs": 56, + "cpuTimeInMs": 3495, + "executionPhaseTimeInMs": 2268, + "id": "VGltaW5nTWV0cmljczo0", + "wallTimeInMs": 2565 + } }, "problems": [], "profile": null, @@ -255,522 +229,522 @@ "targets": [ { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxNTI=", - "label": "//next.js/styles:styles", + "label": "//vue:vite", "success": true, - "targetKind": "js_library rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNTM=", - "label": "//react:tsconfig", + "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", "success": true, - "targetKind": "ts_config rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNTQ=", - "label": "//vue/libraries/simple:types", + "label": "//next.js/styles:styles", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNTU=", - "label": "//vue:type-check", + "label": "//react:preview", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "js_binary rule", + "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNTY=", - "label": "//react/src:src_typings", + "label": "//react:vite.config", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxNTc=", - "label": "//react/src:test_lib", + "label": "//react-webpack:_dev_server_webpack_binary", "success": true, - "targetKind": "js_library rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNTg=", - "label": "//react/src:test_lib_typecheck", + "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", "success": true, - "targetKind": "filegroup rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxNTk=", - 
"label": "//react/src:test_lib_typecheck_test", + "label": "//react/src:test_lib_typecheck", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "filegroup rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxNjA=", - "label": "//react-webpack:dev_server", + "label": "//react-webpack:_bundle_webpack_binary", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 4, "id": "VGFyZ2V0UGFpcjoxNjE=", - "label": "//next.js/pages:pages", + "label": "//:eslint", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 6, "id": "VGFyZ2V0UGFpcjoxNjI=", - "label": "//packages/one:one", + "label": "//react/src:src_typecheck_test", "success": true, - "targetKind": "_npm_package rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxNjM=", - "label": "//react/src:test_lib_transpile", + "label": "//react/src:test_lib_typecheck_test", "success": true, - "targetKind": "swc_compile rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNjQ=", - "label": "//react-webpack:_bundle_webpack_binary_entrypoint", + "label": "//next.js:next_dev", "success": true, - "targetKind": "directory_path rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNjU=", - "label": "//next.js/pages:_jest_test_bazel_sequencer", + "label": "//react/src:assets", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNjY=", - "label": "//next.js/pages:_jest_test_jest_entrypoint", + "label": "//react:write_swcrc", "success": true, - "targetKind": "directory_path rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNjc=", - "label": "//next.js/public:public", + "label": "//vue/libraries/simple:simple", "success": true, - "targetKind": "js_library rule", + "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNjg=", - "label": "//next.js/pages:specs", + "label": "//vue/libraries/simple:vite.config", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxNjk=", - "label": "//react:package_json", + "label": "//next.js/pages/api:api", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNzA=", - "label": "//react/src:src", + "label": "//next.js/public:public", "success": true, "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNzE=", - "label": "//next.js:build_smoke_test", + "label": "//next.js:jest_config", "success": true, - "targetKind": "js_test rule", - "testSize": 
"MEDIUM" + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxNzI=", - "label": "//react/src:assets", + "label": "//next.js:next", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxNzM=", - "label": "//react/src:lint", + "label": "//next.js:next_start", "success": true, - "targetKind": "eslint_test rule", - "testSize": "MEDIUM" + "targetKind": "_js_run_devserver rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 10, "id": "VGFyZ2V0UGFpcjoxNzQ=", - "label": "//react-webpack/src:transpile", + "label": "//react/src:lint", "success": true, - "targetKind": "swc_compile rule", - "testSize": "UNKNOWN" + "targetKind": "eslint_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxNzU=", - "label": "//react-webpack:bundle", + "label": "//react/src:test_lib", "success": true, - "targetKind": "_webpack_bundle rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoxNzY=", - "label": "//react:start", + "label": "//react:package_json", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNzc=", - "label": "//react-webpack:_bundle_webpack_binary", + "label": "//react-webpack/src:transpile", "success": true, - "targetKind": "js_binary rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxNzg=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_resolver", + "label": "//vue/libraries/simple:types", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxNzk=", - "label": "//react:vite", + "label": "//vue:build_test", "success": true, - "targetKind": "js_binary rule", - "testSize": "UNKNOWN" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjoxODA=", - "label": "//react-webpack:build_smoke_test", + "label": "//next.js/pages:jest_test", "success": true, - "targetKind": "js_test rule", + "targetKind": "jest_test rule", "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoxODE=", - "label": "//react/src:test", + "label": "//next.js:build_test", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_empty_test rule", + "testSize": "SMALL" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxODI=", - "label": "//react:write_swcrc", + "label": "//next.js:tsconfig", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "ts_config rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxODM=", - "label": "//:eslint", + "label": "//packages/one:one", "success": true, - "targetKind": "js_binary rule", + "targetKind": "_npm_package rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxODQ=", - 
"label": "//next.js:next_dev", + "label": "//react/src:test_lib_transpile", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxODU=", - "label": "//react:build", + "label": "//react:vite", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoxODY=", - "label": "//react:preview", + "label": "//vue:build", "success": true, - "targetKind": "js_binary rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxODc=", - "label": "//vue/libraries/simple:simple", + "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", "success": true, - "targetKind": "_npm_package rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxODg=", - "label": "//next.js:next", + "label": "//react/src:src_transpile", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "swc_compile rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoxODk=", - "label": "//next.js:package_json", + "label": "//react/src:src_typings", "success": true, - "targetKind": "js_library rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxOTA=", - "label": "//react/src:src_typecheck", + "label": "//react-webpack:build_smoke_test", "success": true, - "targetKind": "filegroup rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoxOTE=", - "label": "//react-webpack:_dev_server_webpack_binary", + "label": "//react-webpack:dev_server", "success": true, - "targetKind": "js_binary rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 5, "id": "VGFyZ2V0UGFpcjoxOTI=", - "label": "//next.js:eslintrc", + "label": "//vue:type-check", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxOTM=", - "label": "//next.js:build_test", + "label": "//next.js:package_json", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "js_library rule", + "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxOTQ=", - "label": "//next.js:next_start", + "label": "//packages/one:one_ts", "success": true, - "targetKind": "_js_run_devserver rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoxOTU=", - "label": "//next.js:tsconfig", + "label": "//vue/src:src", "success": true, - "targetKind": "ts_config rule", + "targetKind": "_copy_to_bin rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 9, "id": "VGFyZ2V0UGFpcjoxOTY=", - "label": "//react:vite.config", + "label": "//react/src:test", "success": true, - "targetKind": "js_library rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, 
{ "abortReason": "", - "durationInMs": 0, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjoxOTc=", - "label": "//next.js:next_js_binary", + "label": "//next.js/pages:_jest_test_jest_entrypoint", "success": true, - "targetKind": "js_binary rule", + "targetKind": "directory_path rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoxOTg=", - "label": "//react/src:src_typecheck_test", + "label": "//next.js/pages:pages", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "ts_project rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 11, "id": "VGFyZ2V0UGFpcjoxOTk=", - "label": "//react/src:test_lib_typings", + "label": "//next.js:eslintrc", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMDA=", - "label": "//vue/libraries/simple:vite.config", + "label": "//next.js:next_js_binary", "success": true, - "targetKind": "js_library rule", + "targetKind": "js_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 8, "id": "VGFyZ2V0UGFpcjoyMDE=", - "label": "//vue/libraries/simple:build", + "label": "//react:start", "success": true, - "targetKind": "_run_binary rule", + "targetKind": "_js_run_devserver rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoyMDI=", - "label": "//vue/src:src", + "label": "//next.js:build_smoke_test", "success": true, - "targetKind": "_copy_to_bin rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMDM=", - "label": "//vue:vite", + "label": "//react/src:test_lib_typings", "success": true, - "targetKind": "js_binary rule", + "targetKind": "ts_project rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 2, + "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMDQ=", - "label": "//next.js/pages:jest_test", + "label": "//react-webpack:_bundle_webpack_binary_entrypoint", "success": true, - "targetKind": "jest_test rule", - "testSize": "MEDIUM" + "targetKind": "directory_path rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMDU=", - "label": "//next.js:jest_config", + "label": "//vue/libraries/simple:build", "success": true, - "targetKind": "js_library rule", + "targetKind": "_run_binary rule", "testSize": "UNKNOWN" }, { "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMDY=", - "label": "//react/public:public", + "label": "//next.js/pages:_jest_test_bazel_sequencer", "success": true, - "targetKind": "js_library rule", + "targetKind": "_copy_file rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoyMDc=", - "label": "//react/src:src_transpile", + "label": "//react:build_smoke_test", "success": true, - "targetKind": "swc_compile rule", - "testSize": "UNKNOWN" + "targetKind": "js_test rule", + "testSize": "MEDIUM" }, { "abortReason": "", - "durationInMs": 1, + "durationInMs": 7, "id": "VGFyZ2V0UGFpcjoyMDg=", - "label": "//react:build_smoke_test", + "label": "//react-webpack:bundle", "success": true, - "targetKind": "js_test rule", - "testSize": "MEDIUM" + "targetKind": "_webpack_bundle rule", + "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 1, + 
"durationInMs": 11, "id": "VGFyZ2V0UGFpcjoyMDk=", - "label": "//next.js/pages/api:api", + "label": "//next.js/pages:specs", "success": true, "targetKind": "ts_project rule", "testSize": "UNKNOWN" @@ -779,34 +753,34 @@ "abortReason": "", "durationInMs": 0, "id": "VGFyZ2V0UGFpcjoyMTA=", - "label": "//next.js/pages:_jest_test_bazel_snapshot_reporter", + "label": "//react/public:public", "success": true, - "targetKind": "_copy_file rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMTE=", - "label": "//packages/one:one_ts", + "label": "//react/src:src", "success": true, - "targetKind": "ts_project rule", + "targetKind": "js_library rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 3, "id": "VGFyZ2V0UGFpcjoyMTI=", - "label": "//react-webpack:_dev_server_webpack_binary_entrypoint", + "label": "//react/src:src_typecheck", "success": true, - "targetKind": "directory_path rule", + "targetKind": "filegroup rule", "testSize": "UNKNOWN" }, { "abortReason": "", - "durationInMs": 0, + "durationInMs": 2, "id": "VGFyZ2V0UGFpcjoyMTM=", - "label": "//vue:build", + "label": "//react:build", "success": true, "targetKind": "_run_binary rule", "testSize": "UNKNOWN" @@ -815,10 +789,10 @@ "abortReason": "", "durationInMs": 1, "id": "VGFyZ2V0UGFpcjoyMTQ=", - "label": "//vue:build_test", + "label": "//react:tsconfig", "success": true, - "targetKind": "_empty_test rule", - "testSize": "SMALL" + "targetKind": "ts_config rule", + "testSize": "UNKNOWN" } ], "testCollection": [ @@ -834,90 +808,90 @@ { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 170000, + "durationMs": 89000, "id": "VGVzdENvbGxlY3Rpb246MjE=", - "label": "//react-webpack:build_smoke_test", + "label": "//next.js:build_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1715000, + "durationMs": 191000, "id": "VGVzdENvbGxlY3Rpb246MjI=", - "label": "//react/src:test", + "label": "//react:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 112000, + "durationMs": 68000, "id": "VGVzdENvbGxlY3Rpb246MjM=", - "label": "//react/src:lint", + "label": "//vue:build_test", "overallStatus": "PASSED", "strategy": "" }, { - "cachedLocally": false, + "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1134000, + "durationMs": 1180000, "id": "VGVzdENvbGxlY3Rpb246MjQ=", - "label": "//next.js/pages:jest_test", + "label": "//vue:type-check", "overallStatus": "PASSED", - "strategy": "darwin-sandbox" + "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 354000, + "durationMs": 85000, "id": "VGVzdENvbGxlY3Rpb246MjU=", - "label": "//next.js:build_smoke_test", + "label": "//react/src:src_typecheck_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 85000, + "durationMs": 170000, "id": "VGVzdENvbGxlY3Rpb246MjY=", - "label": "//react/src:src_typecheck_test", + "label": "//react-webpack:build_smoke_test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 89000, + "durationMs": 1715000, "id": "VGVzdENvbGxlY3Rpb246Mjc=", - "label": "//next.js:build_test", + "label": "//react/src:test", "overallStatus": "PASSED", "strategy": "" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 191000, + 
"durationMs": 112000, "id": "VGVzdENvbGxlY3Rpb246Mjg=", - "label": "//react:build_smoke_test", + "label": "//react/src:lint", "overallStatus": "PASSED", "strategy": "" }, { - "cachedLocally": true, + "cachedLocally": false, "cachedRemotely": false, - "durationMs": 68000, + "durationMs": 1134000, "id": "VGVzdENvbGxlY3Rpb246Mjk=", - "label": "//vue:build_test", + "label": "//next.js/pages:jest_test", "overallStatus": "PASSED", - "strategy": "" + "strategy": "darwin-sandbox" }, { "cachedLocally": true, "cachedRemotely": false, - "durationMs": 1180000, + "durationMs": 354000, "id": "VGVzdENvbGxlY3Rpb246MzA=", - "label": "//vue:type-check", + "label": "//next.js:build_smoke_test", "overallStatus": "PASSED", "strategy": "" } diff --git a/pkg/processing/save.go b/pkg/processing/save.go index d980ff5..30e1350 100644 --- a/pkg/processing/save.go +++ b/pkg/processing/save.go @@ -32,26 +32,32 @@ type SaveActor struct { func (act SaveActor) SaveSummary(ctx context.Context, summary *summary.Summary) (*ent.BazelInvocation, error) { eventFile, err := act.saveEventFile(ctx, summary) if err != nil { + slog.ErrorContext(ctx, "failed to save event file", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save EventFile: %w", err) } buildRecord, err := act.findOrCreateBuild(ctx, summary) if err != nil { + slog.ErrorContext(ctx, "failed to find or create build", "id", summary.InvocationID, "err", err) return nil, err } metrics, err := act.saveMetrics(ctx, summary.Metrics) if err != nil { + slog.ErrorContext(ctx, "failed to save metrics", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save Metrics: %w", err) } targets, err := act.saveTargets(ctx, summary) if err != nil { + slog.ErrorContext(ctx, "failed to save targets", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save Targets: %w", err) } tests, err := act.saveTests(ctx, summary) if err != nil { + slog.ErrorContext(ctx, "failed to save tests", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save test results: %w", err) } bazelInvocation, err := act.saveBazelInvocation(ctx, summary, eventFile, buildRecord, metrics, tests, targets) if err != nil { + slog.ErrorContext(ctx, "failed to save bazel invocation", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save BazelInvocation: %w", err) } var detectedBlobs []detectors.BlobURI @@ -65,10 +71,12 @@ func (act SaveActor) SaveSummary(ctx context.Context, summary *summary.Summary) SetBazelInvocation(bazelInvocation) }).Exec(ctx) if err != nil { + slog.ErrorContext(ctx, "failed to save bazel invocation problems ", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save BazelInvocationProblems: %w", err) } missingBlobs, err := act.determineMissingBlobs(ctx, detectedBlobs) if err != nil { + slog.ErrorContext(ctx, "failed to determine missing blobs", "id", summary.InvocationID, "err", err) return nil, err } err = act.db.Blob.MapCreateBulk(missingBlobs, func(create *ent.BlobCreate, i int) { @@ -76,11 +84,13 @@ func (act SaveActor) SaveSummary(ctx context.Context, summary *summary.Summary) create.SetURI(string(b)) }).Exec(ctx) if err != nil { + slog.ErrorContext(ctx, "failed to save blobs", "id", summary.InvocationID, "err", err) return nil, fmt.Errorf("could not save Blobs: %w", err) } var archivedBlobs []ent.Blob archivedBlobs, err = act.blobArchiver.ArchiveBlobs(ctx, missingBlobs) if err != nil { + slog.ErrorContext(ctx, "failed to archive", "id", summary.InvocationID, 
"err", err) return nil, fmt.Errorf("failed to archive blobs: %w", err) } for _, archivedBlob := range archivedBlobs { @@ -97,6 +107,7 @@ func (act SaveActor) determineMissingBlobs(ctx context.Context, detectedBlobs [] } foundInDB, err := act.db.Blob.Query().Where(blob.URIIn(detectedBlobURIs...)).All(ctx) if err != nil { + slog.ErrorContext(ctx, "failed to query blobs", "err", err) return nil, fmt.Errorf("could not query Blobs: %w", err) } @@ -188,6 +199,7 @@ func (act SaveActor) saveTestFiles(ctx context.Context, files []summary.TestFile func (act SaveActor) saveOutputGroup(ctx context.Context, ouputGroup summary.OutputGroup) (*ent.OutputGroup, error) { inlineFiles, err := act.saveTestFiles(ctx, ouputGroup.InlineFiles) if err != nil { + slog.ErrorContext(ctx, "failed to save output group", "id", "err", err) return nil, err } @@ -303,6 +315,7 @@ func (act SaveActor) saveTimingChildren(ctx context.Context, children []summary. func (act SaveActor) saveTimingBreakdown(ctx context.Context, timingBreakdown summary.TimingBreakdown) (*ent.TimingBreakdown, error) { timingChildren, err := act.saveTimingChildren(ctx, timingBreakdown.Child) if err != nil { + slog.ErrorContext(ctx, "failed to save timing breakdown", "err", err) return nil, err } return act.db.TimingBreakdown.Create(). @@ -345,6 +358,7 @@ func (act SaveActor) saveTestResults(ctx context.Context, testResults []summary. testResult := testResults[i] executionInfo, err := act.saveExecutionInfo(ctx, testResult.ExecutionInfo) if err != nil { + slog.ErrorContext(ctx, "failed to save executioin info", "err", err) slog.Error("problem saving execution info object to database", "err", err) return } @@ -379,6 +393,7 @@ func (act SaveActor) saveTestCollection(ctx context.Context, testCollection summ SetCachedLocally(testCollection.CachedLocally). SetCachedRemotely(testCollection.CachedRemotely). SetDurationMs(testCollection.DurationMs). + SetFirstSeen((testCollection.FirstSeen)). Save(ctx) } @@ -461,7 +476,7 @@ func (act SaveActor) saveActionSummary(ctx context.Context, actionSummary summar SetActionsCreatedNotIncludingAspects(actionSummary.ActionsCreatedNotIncludingAspects). SetActionsExecuted(actionSummary.ActionsExecuted). SetRemoteCacheHits(actionSummary.RemoteCacheHits). - AddActionCacheStatistics(actionCacheStatistics). + SetActionCacheStatistics(actionCacheStatistics). AddRunnerCount(runnerCounts...). AddActionData(actionDatas...). Save(ctx) @@ -577,10 +592,10 @@ func (act SaveActor) saveArtifactMetrics(ctx context.Context, artifactMetrics su } return act.db.ArtifactMetrics.Create(). - AddSourceArtifactsRead(soureArtifactsRead). - AddOutputArtifactsSeen(outputArtifactsSeen). - AddOutputArtifactsFromActionCache(outputArtifactsFromActionCache). - AddTopLevelArtifacts(topLevelArtifacts). + SetSourceArtifactsRead(soureArtifactsRead). + SetOutputArtifactsSeen(outputArtifactsSeen). + SetOutputArtifactsFromActionCache(outputArtifactsFromActionCache). + SetTopLevelArtifacts(topLevelArtifacts). Save(ctx) } @@ -590,7 +605,7 @@ func (act SaveActor) saveNetworkMetrics(ctx context.Context, networkMetrics summ return nil, err } return act.db.NetworkMetrics.Create(). - AddSystemNetworkStats(systemNetworkStats). + SetSystemNetworkStats(systemNetworkStats). Save(ctx) } @@ -641,21 +656,21 @@ func (act SaveActor) saveMetrics(ctx context.Context, metrics summary.Metrics) ( return nil, err } create := act.db.Metrics.Create(). - AddActionSummary(actionSummary). - AddBuildGraphMetrics(buildGraphMetrics). - AddMemoryMetrics(memoryMetrics). 
- AddTargetMetrics(targetMetrics). - AddPackageMetrics(packageMetrics). - AddCumulativeMetrics(cumulativeMetrics). - AddTimingMetrics(timingMetrics). - AddArtifactMetrics(artifactMetrics) + SetActionSummary(actionSummary). + SetBuildGraphMetrics(buildGraphMetrics). + SetMemoryMetrics(memoryMetrics). + SetTargetMetrics(targetMetrics). + SetPackageMetrics(packageMetrics). + SetCumulativeMetrics(cumulativeMetrics). + SetTimingMetrics(timingMetrics). + SetArtifactMetrics(artifactMetrics) if metrics.NetworkMetrics.SystemNetworkStats != nil { networkMetrics, err := act.saveNetworkMetrics(ctx, metrics.NetworkMetrics) if err != nil { return nil, err } - create = create.AddNetworkMetrics(networkMetrics) + create = create.SetNetworkMetrics(networkMetrics) } return create.Save(ctx) diff --git a/pkg/summary/summarizer.go b/pkg/summary/summarizer.go index 2f2f9ba..eea85b2 100644 --- a/pkg/summary/summarizer.go +++ b/pkg/summary/summarizer.go @@ -83,7 +83,9 @@ func (s Summarizer) summarize(it *events.BuildEventIterator) (*Summary, error) { // FinishProcessing function func (s Summarizer) FinishProcessing() (*Summary, error) { // If problems are ignored for the exit code, return immediately. + slog.Debug("processing", "err", "none") if !shouldIgnoreProblems(s.summary.ExitCode) { + slog.Debug("problems found", "err", "none") // Add any detected test problems. problems, problemsErr := s.problemDetector.Problems() if problemsErr != nil { @@ -91,6 +93,7 @@ func (s Summarizer) FinishProcessing() (*Summary, error) { } s.summary.Problems = append(s.summary.Problems, problems...) } + slog.Debug("returning from FinishProcessing") return s.summary, nil } @@ -144,8 +147,6 @@ func (s Summarizer) ProcessEvent(buildEvent *events.BuildEvent) error { if err != nil { return err } - case *bes.BuildEventId_Progress: - s.handleProgress(buildEvent.GetProgress()) } s.summary.BEPCompleted = buildEvent.GetLastMessage() @@ -259,10 +260,12 @@ func (s Summarizer) handleTargetCompleted(target *bes.TargetComplete, label stri // handleTestResult func (s Summarizer) handleTestResult(testResult *bes.TestResult, label string) { if len(label) == 0 { - panic("missing label on TestResult event") + slog.Warn("missing label on TestResult event") + return } if testResult == nil { - panic(fmt.Sprintf("missing TestResult for label %s", label)) + slog.Warn("missing TestResult for label", "label", label) + return } var testResults []TestResult if s.summary.Tests == nil { @@ -278,6 +281,7 @@ func (s Summarizer) handleTestResult(testResult *bes.TestResult, label string) { CachedLocally: true, CachedRemotely: true, Strategy: "INITIALIZED", + FirstSeen: time.Now(), // this is primarily used for sorting } testResults = make([]TestResult, 0) } @@ -603,6 +607,9 @@ func readActionSummary(actionSummaryData *bes.BuildMetrics_ActionSummary) Action // readActionCacheStatistics func readActionCacheStatistics(actionCacheStatisticsData *bescore.ActionCacheStatistics) ActionCacheStatistics { + if actionCacheStatisticsData == nil { + return ActionCacheStatistics{} + } missDetails := readMissDetails(actionCacheStatisticsData.MissDetails) actionCacheStatistics := ActionCacheStatistics{ SizeInBytes: actionCacheStatisticsData.SizeInBytes, @@ -756,12 +763,6 @@ func (s Summarizer) handleOptionsParsed(optionsParsed *bes.OptionsParsed) { s.summary.InvocationSummary.BazelCommandLine.Options = optionsParsed.GetExplicitCmdLine() } -// handleProgress -func (s Summarizer) handleProgress(progressMsg *bes.Progress) { -
s.summary.BuildLogs.WriteString(progressMsg.GetStderr()) - s.summary.BuildLogs.WriteString(progressMsg.GetStdout()) -} - // handleBuildToolLogs func (s Summarizer) handleBuildToolLogs(buildToolLogs *bes.BuildToolLogs) error { for _, logs := range buildToolLogs.GetLog() { diff --git a/pkg/summary/summary.go b/pkg/summary/summary.go index 24aeb98..b327eea 100644 --- a/pkg/summary/summary.go +++ b/pkg/summary/summary.go @@ -523,6 +523,7 @@ type TestsCollection struct { CachedLocally bool CachedRemotely bool DurationMs int64 + FirstSeen time.Time } // TargetPair struct summary object for a target. diff --git a/pkg/summary/testdata/snapshots/nextjs_build.bep.ndjson.golden.json b/pkg/summary/testdata/snapshots/nextjs_build.bep.ndjson.golden.json index 86e7f79..b0433e7 100644 --- a/pkg/summary/testdata/snapshots/nextjs_build.bep.ndjson.golden.json +++ b/pkg/summary/testdata/snapshots/nextjs_build.bep.ndjson.golden.json @@ -253,7 +253,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.579117733Z" }, "//next.js:build_smoke_test": { "TestSummary": { @@ -313,7 +314,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.578731697Z" }, "//next.js:build_test": { "TestSummary": { @@ -373,7 +375,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.578686265Z" }, "//react-webpack:build_smoke_test": { "TestSummary": { @@ -433,7 +436,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.575992687Z" }, "//react/src:lint": { "TestSummary": { @@ -493,7 +497,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.575237584Z" }, "//react/src:src_typecheck_test": { "TestSummary": { @@ -553,7 +558,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.573621233Z" }, "//react/src:test": { "TestSummary": { @@ -613,7 +619,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.575515986Z" }, "//react/src:test_lib_typecheck_test": { "TestSummary": { @@ -673,7 +680,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.574122685Z" }, "//react:build_smoke_test": { "TestSummary": { @@ -733,7 +741,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.57446967Z" }, "//vue:build_test": { "TestSummary": { @@ -793,7 +802,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.574615247Z" }, "//vue:type-check": { "TestSummary": { @@ -853,7 +863,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.575000133Z" } }, "Targets": { diff --git a/pkg/summary/testdata/snapshots/nextjs_build_fail.bep.ndjson.golden.json b/pkg/summary/testdata/snapshots/nextjs_build_fail.bep.ndjson.golden.json index 7df9713..ef92ded 100644 --- 
a/pkg/summary/testdata/snapshots/nextjs_build_fail.bep.ndjson.golden.json +++ b/pkg/summary/testdata/snapshots/nextjs_build_fail.bep.ndjson.golden.json @@ -300,7 +300,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.655487441Z" }, "//react/src:lint": { "TestSummary": { @@ -360,7 +361,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.654707647Z" }, "//react/src:src_typecheck_test": { "TestSummary": { @@ -420,7 +422,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.653280255Z" }, "//react/src:test": { "TestSummary": { @@ -480,7 +483,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.654892565Z" }, "//react/src:test_lib_typecheck_test": { "TestSummary": { @@ -540,7 +544,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.653517645Z" }, "//react:build_smoke_test": { "TestSummary": { @@ -600,7 +605,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.653763947Z" }, "//vue:build_test": { "TestSummary": { @@ -660,7 +666,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.654131702Z" }, "//vue:type-check": { "TestSummary": { @@ -720,7 +727,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.654384523Z" } }, "Targets": { diff --git a/pkg/summary/testdata/snapshots/nextjs_test.bep.ndjson.golden.json b/pkg/summary/testdata/snapshots/nextjs_test.bep.ndjson.golden.json index d96a7e0..94aff8e 100644 --- a/pkg/summary/testdata/snapshots/nextjs_test.bep.ndjson.golden.json +++ b/pkg/summary/testdata/snapshots/nextjs_test.bep.ndjson.golden.json @@ -294,7 +294,8 @@ "Strategy": "darwin-sandbox", "CachedLocally": false, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.79980903Z" }, "//next.js:build_smoke_test": { "TestSummary": { @@ -354,7 +355,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.796408671Z" }, "//next.js:build_test": { "TestSummary": { @@ -414,7 +416,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.797348962Z" }, "//react-webpack:build_smoke_test": { "TestSummary": { @@ -474,7 +477,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.79754056Z" }, "//react/src:lint": { "TestSummary": { @@ -534,7 +538,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.798837177Z" }, "//react/src:src_typecheck_test": { "TestSummary": { @@ -594,7 +599,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.796676013Z" }, "//react/src:test": { "TestSummary": { @@ -654,7 +660,8 @@ "Strategy": "", "CachedLocally": 
true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.798601257Z" }, "//react/src:test_lib_typecheck_test": { "TestSummary": { @@ -714,7 +721,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.797164244Z" }, "//react:build_smoke_test": { "TestSummary": { @@ -774,7 +782,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.797812403Z" }, "//vue:build_test": { "TestSummary": { @@ -834,7 +843,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.797956609Z" }, "//vue:type-check": { "TestSummary": { @@ -894,7 +904,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.798160038Z" } }, "Targets": { diff --git a/pkg/summary/testdata/snapshots/nextjs_test_fail.bep.ndjson.golden.json b/pkg/summary/testdata/snapshots/nextjs_test_fail.bep.ndjson.golden.json index 4e8451a..e73918d 100644 --- a/pkg/summary/testdata/snapshots/nextjs_test_fail.bep.ndjson.golden.json +++ b/pkg/summary/testdata/snapshots/nextjs_test_fail.bep.ndjson.golden.json @@ -384,7 +384,8 @@ "Strategy": "darwin-sandbox", "CachedLocally": false, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.891310919Z" }, "//next.js:build_smoke_test": { "TestSummary": { @@ -444,7 +445,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.890823078Z" }, "//next.js:build_test": { "TestSummary": { @@ -504,7 +506,8 @@ "Strategy": "", "CachedLocally": true, "CachedRemotely": false, - "DurationMs": 0 + "DurationMs": 0, + "FirstSeen": "2024-10-10T18:40:35.890768125Z" } }, "Targets": { From 22acd83527a9d0e91e62a4cc562052358e4295e2 Mon Sep 17 00:00:00 2001 From: Trey Ivy Date: Fri, 11 Oct 2024 12:47:52 +0000 Subject: [PATCH 2/2] pnpm lockfile --- frontend/pnpm-lock.yaml | 45 +++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 0a4bac0..8af46da 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -53,9 +53,6 @@ importers: react: specifier: ^18 version: 18.3.1 - react-collapsible: - specifier: ^2.10.0 - version: 2.10.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react-countup: specifier: ^6.5.3 version: 6.5.3(react@18.3.1) @@ -96,6 +93,9 @@ importers: '@types/uuid': specifier: ^9.0.8 version: 9.0.8 + env-cmd: + specifier: ^10.1.0 + version: 10.1.0 eslint: specifier: ^8 version: 8.57.0 @@ -1324,6 +1324,10 @@ packages: colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + common-tags@1.8.2: resolution: {integrity: sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==} engines: {node: '>=4.0.0'} @@ -1533,6 +1537,11 @@ packages: resolution: {integrity: sha512-O+QWCviPNSSLAD9Ucn8Awv+poAkqn3T1XY5/N7kR7rQO9yfSGWkYZDwpJ+iKF7B8rxaQKWngSqACpgzeapSyoA==} engines: {node: '>=10.13.0'} + env-cmd@10.1.0: + resolution: 
{integrity: sha512-mMdWTT9XKN7yNth/6N6g2GuKuJTsKMDHlQFUDacb/heQRRWOTIZ42t1rMHnQu4jYxU1ajdTeJM+9eEETlqToMA==} + engines: {node: '>=8.0.0'} + hasBin: true + error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -2805,12 +2814,6 @@ packages: react: '>=16.9.0' react-dom: '>=16.9.0' - react-collapsible@2.10.0: - resolution: {integrity: sha512-kEVsmlFfXBMTCnU5gwIv19MdmPAhbIPzz5Er37TiJSzRKS0IHrqAKQyQeHEmtoGIQMTcVI46FzE4z3NlVTx77A==} - peerDependencies: - react: ~15 || ~16 || ~17 || ~18 - react-dom: ~15 || ~16 || ~17 || ~18 - react-countup@6.5.3: resolution: {integrity: sha512-udnqVQitxC7QWADSPDOxVWULkLvKUWrDapn5i53HE4DPRVgs+Y5rr4bo25qEl8jSh+0l2cToJgGMx+clxPM3+w==} peerDependencies: @@ -5081,6 +5084,8 @@ snapshots: colorette@2.0.20: {} + commander@4.1.1: {} + common-tags@1.8.2: {} compute-scroll-into-view@3.1.0: {} @@ -5271,6 +5276,11 @@ snapshots: graceful-fs: 4.2.11 tapable: 2.2.1 + env-cmd@10.1.0: + dependencies: + commander: 4.1.1 + cross-spawn: 7.0.3 + error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -5381,7 +5391,7 @@ snapshots: eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-jsx-a11y: 6.8.0(eslint@8.57.0) eslint-plugin-react: 7.34.1(eslint@8.57.0) eslint-plugin-react-hooks: 4.6.2(eslint@8.57.0) @@ -5404,8 +5414,8 @@ snapshots: debug: 4.3.4 enhanced-resolve: 5.16.0 eslint: 8.57.0 - eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) - eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) + eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) + eslint-plugin-import: 2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) fast-glob: 3.3.2 get-tsconfig: 4.7.3 is-core-module: 2.13.1 @@ -5416,7 +5426,7 @@ snapshots: - eslint-import-resolver-webpack - supports-color - 
eslint-module-utils@2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0): + eslint-module-utils@2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): dependencies: debug: 3.2.7 optionalDependencies: @@ -5427,7 +5437,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0): + eslint-plugin-import@2.29.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): dependencies: array-includes: 3.1.8 array.prototype.findlastindex: 1.2.5 @@ -5437,7 +5447,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.57.0))(eslint@8.57.0))(eslint@8.57.0) + eslint-module-utils: 2.8.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.4.5))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) hasown: 2.0.2 is-core-module: 2.13.1 is-glob: 4.0.3 @@ -6810,11 +6820,6 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-collapsible@2.10.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): - dependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-countup@6.5.3(react@18.3.1): dependencies: countup.js: 2.8.0